Merge branch 'main' into codex/add-process-id-to-logging

This commit is contained in:
marina-oai
2026-01-20 17:02:55 +09:00
committed by GitHub
121 changed files with 7165 additions and 1060 deletions

View File

@@ -365,6 +365,26 @@ pub(crate) struct App {
}
impl App {
/// Build the `ChatWidgetInit` used when reattaching to an existing thread
/// (fork or resume) rather than starting a brand-new session.
///
/// Callers pass `cfg` explicitly (the call sites clone `self.config`), while
/// every other field is sourced from `self` and `tui`.
pub fn chatwidget_init_for_forked_or_resumed_thread(
    &self,
    tui: &mut tui::Tui,
    cfg: codex_core::config::Config,
) -> crate::chatwidget::ChatWidgetInit {
    crate::chatwidget::ChatWidgetInit {
        config: cfg,
        frame_requester: tui.frame_requester(),
        app_event_tx: self.app_event_tx.clone(),
        // Fork/resume bootstraps here don't carry any prefilled message content.
        initial_user_message: None,
        enhanced_keys_supported: self.enhanced_keys_supported,
        auth_manager: self.auth_manager.clone(),
        models_manager: self.server.get_models_manager(),
        feedback: self.feedback.clone(),
        // A forked/resumed thread is never the first run of the app.
        is_first_run: false,
        // Keep whatever model is currently selected in the session.
        model: Some(self.current_model.clone()),
    }
}
async fn shutdown_current_thread(&mut self) {
if let Some(thread_id) = self.chat_widget.thread_id() {
// Clear any in-flight rollback guard when switching threads.
@@ -428,8 +448,12 @@ impl App {
config: config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: app_event_tx.clone(),
initial_prompt: initial_prompt.clone(),
initial_images: initial_images.clone(),
initial_user_message: crate::chatwidget::create_initial_user_message(
initial_prompt.clone(),
initial_images.clone(),
// CLI prompt args are plain strings, so they don't provide element ranges.
Vec::new(),
),
enhanced_keys_supported,
auth_manager: auth_manager.clone(),
models_manager: thread_manager.get_models_manager(),
@@ -451,8 +475,12 @@ impl App {
config: config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: app_event_tx.clone(),
initial_prompt: initial_prompt.clone(),
initial_images: initial_images.clone(),
initial_user_message: crate::chatwidget::create_initial_user_message(
initial_prompt.clone(),
initial_images.clone(),
// CLI prompt args are plain strings, so they don't provide element ranges.
Vec::new(),
),
enhanced_keys_supported,
auth_manager: auth_manager.clone(),
models_manager: thread_manager.get_models_manager(),
@@ -474,8 +502,12 @@ impl App {
config: config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: app_event_tx.clone(),
initial_prompt: initial_prompt.clone(),
initial_images: initial_images.clone(),
initial_user_message: crate::chatwidget::create_initial_user_message(
initial_prompt.clone(),
initial_images.clone(),
// CLI prompt args are plain strings, so they don't provide element ranges.
Vec::new(),
),
enhanced_keys_supported,
auth_manager: auth_manager.clone(),
models_manager: thread_manager.get_models_manager(),
@@ -672,12 +704,15 @@ impl App {
let summary =
session_summary(self.chat_widget.token_usage(), self.chat_widget.thread_id());
self.shutdown_current_thread().await;
if let Err(err) = self.server.remove_and_close_all_threads().await {
tracing::warn!(error = %err, "failed to close all threads");
}
let init = crate::chatwidget::ChatWidgetInit {
config: self.config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: self.app_event_tx.clone(),
initial_prompt: None,
initial_images: Vec::new(),
// New sessions start without prefilled message content.
initial_user_message: None,
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
@@ -722,19 +757,10 @@ impl App {
{
Ok(resumed) => {
self.shutdown_current_thread().await;
let init = crate::chatwidget::ChatWidgetInit {
config: self.config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: self.app_event_tx.clone(),
initial_prompt: None,
initial_images: Vec::new(),
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: Some(self.current_model.clone()),
};
let init = self.chatwidget_init_for_forked_or_resumed_thread(
tui,
self.config.clone(),
);
self.chat_widget = ChatWidget::new_from_existing(
init,
resumed.thread,
@@ -781,19 +807,10 @@ impl App {
{
Ok(forked) => {
self.shutdown_current_thread().await;
let init = crate::chatwidget::ChatWidgetInit {
config: self.config.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: self.app_event_tx.clone(),
initial_prompt: None,
initial_images: Vec::new(),
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: Some(self.current_model.clone()),
};
let init = self.chatwidget_init_for_forked_or_resumed_thread(
tui,
self.config.clone(),
);
self.chat_widget = ChatWidget::new_from_existing(
init,
forked.thread,
@@ -1999,6 +2016,8 @@ mod tests {
let user_cell = |text: &str| -> Arc<dyn HistoryCell> {
Arc::new(UserHistoryCell {
message: text.to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
}) as Arc<dyn HistoryCell>
};
let agent_cell = |text: &str| -> Arc<dyn HistoryCell> {

View File

@@ -204,7 +204,10 @@ impl App {
});
self.chat_widget.submit_op(Op::ThreadRollback { num_turns });
if !prefill.is_empty() {
self.chat_widget.set_composer_text(prefill);
// TODO: Rehydrate text_elements/local_image_paths from the selected user cell so
// backtrack preserves image placeholders and attachments.
self.chat_widget
.set_composer_text(prefill, Vec::new(), Vec::new());
}
}
@@ -554,6 +557,8 @@ mod tests {
let mut cells: Vec<Arc<dyn HistoryCell>> = vec![
Arc::new(UserHistoryCell {
message: "first user".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("assistant")], true))
as Arc<dyn HistoryCell>,
@@ -570,6 +575,8 @@ mod tests {
as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "first".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("after")], false))
as Arc<dyn HistoryCell>,
@@ -598,11 +605,15 @@ mod tests {
as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "first".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("between")], false))
as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "second".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("tail")], false))
as Arc<dyn HistoryCell>,

File diff suppressed because it is too large Load Diff

View File

@@ -37,13 +37,20 @@ pub(crate) struct CommandPopup {
state: ScrollState,
}
/// Feature flags controlling which builtin slash commands the popup offers.
#[derive(Clone, Copy, Debug, Default)]
pub(crate) struct CommandPopupFlags {
    /// When `false`, the `/skills` builtin is filtered out of the popup.
    pub(crate) skills_enabled: bool,
    /// When `false`, the `/collab` builtin is filtered out of the popup.
    pub(crate) collaboration_modes_enabled: bool,
}
impl CommandPopup {
pub(crate) fn new(mut prompts: Vec<CustomPrompt>, skills_enabled: bool) -> Self {
pub(crate) fn new(mut prompts: Vec<CustomPrompt>, flags: CommandPopupFlags) -> Self {
let allow_elevate_sandbox = windows_degraded_sandbox_active();
let builtins: Vec<(&'static str, SlashCommand)> = built_in_slash_commands()
.into_iter()
.filter(|(_, cmd)| skills_enabled || *cmd != SlashCommand::Skills)
.filter(|(_, cmd)| flags.skills_enabled || *cmd != SlashCommand::Skills)
.filter(|(_, cmd)| allow_elevate_sandbox || *cmd != SlashCommand::ElevateSandbox)
.filter(|(_, cmd)| flags.collaboration_modes_enabled || *cmd != SlashCommand::Collab)
.collect();
// Exclude prompts that collide with builtin command names and sort by name.
let exclude: HashSet<String> = builtins.iter().map(|(n, _)| (*n).to_string()).collect();
@@ -231,7 +238,7 @@ mod tests {
#[test]
fn filter_includes_init_when_typing_prefix() {
let mut popup = CommandPopup::new(Vec::new(), false);
let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
// Simulate the composer line starting with '/in' so the popup filters
// matching commands by prefix.
popup.on_composer_text_change("/in".to_string());
@@ -251,7 +258,7 @@ mod tests {
#[test]
fn selecting_init_by_exact_match() {
let mut popup = CommandPopup::new(Vec::new(), false);
let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
popup.on_composer_text_change("/init".to_string());
// When an exact match exists, the selected command should be that
@@ -266,7 +273,7 @@ mod tests {
#[test]
fn model_is_first_suggestion_for_mo() {
let mut popup = CommandPopup::new(Vec::new(), false);
let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
popup.on_composer_text_change("/mo".to_string());
let matches = popup.filtered_items();
match matches.first() {
@@ -280,7 +287,7 @@ mod tests {
#[test]
fn filtered_commands_keep_presentation_order() {
let mut popup = CommandPopup::new(Vec::new(), false);
let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
popup.on_composer_text_change("/m".to_string());
let cmds: Vec<&str> = popup
@@ -322,7 +329,7 @@ mod tests {
argument_hint: None,
},
];
let popup = CommandPopup::new(prompts, false);
let popup = CommandPopup::new(prompts, CommandPopupFlags::default());
let items = popup.filtered_items();
let mut prompt_names: Vec<String> = items
.into_iter()
@@ -346,7 +353,7 @@ mod tests {
description: None,
argument_hint: None,
}],
false,
CommandPopupFlags::default(),
);
let items = popup.filtered_items();
let has_collision_prompt = items.into_iter().any(|it| match it {
@@ -369,7 +376,7 @@ mod tests {
description: Some("Create feature branch, commit and open draft PR.".to_string()),
argument_hint: None,
}],
false,
CommandPopupFlags::default(),
);
let rows = popup.rows_from_matches(vec![(CommandItem::UserPrompt(0), None, 0)]);
let description = rows.first().and_then(|row| row.description.as_deref());
@@ -389,7 +396,7 @@ mod tests {
description: None,
argument_hint: None,
}],
false,
CommandPopupFlags::default(),
);
let rows = popup.rows_from_matches(vec![(CommandItem::UserPrompt(0), None, 0)]);
let description = rows.first().and_then(|row| row.description.as_deref());
@@ -398,7 +405,7 @@ mod tests {
#[test]
fn fuzzy_filter_matches_subsequence_for_ac() {
let mut popup = CommandPopup::new(Vec::new(), false);
let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
popup.on_composer_text_change("/ac".to_string());
let cmds: Vec<&str> = popup
@@ -414,4 +421,40 @@ mod tests {
"expected fuzzy search for '/ac' to include compact and feedback, got {cmds:?}"
);
}
#[test]
fn collab_command_hidden_when_collaboration_modes_disabled() {
    // Default flags leave collaboration modes disabled.
    let mut popup = CommandPopup::new(Vec::new(), CommandPopupFlags::default());
    popup.on_composer_text_change("/coll".to_string());
    // Collect only the builtin command names from the filtered matches.
    let cmds: Vec<&str> = popup
        .filtered_items()
        .into_iter()
        .filter_map(|item| match item {
            CommandItem::Builtin(cmd) => Some(cmd.command()),
            CommandItem::UserPrompt(_) => None,
        })
        .collect();
    assert!(
        !cmds.contains(&"collab"),
        "expected '/collab' to be hidden when collaboration modes are disabled, got {cmds:?}"
    );
}
#[test]
fn collab_command_visible_when_collaboration_modes_enabled() {
    // Enable only the collaboration-modes flag so `/collab` is offered.
    let mut popup = CommandPopup::new(
        Vec::new(),
        CommandPopupFlags {
            skills_enabled: false,
            collaboration_modes_enabled: true,
        },
    );
    popup.on_composer_text_change("/collab".to_string());
    // An exact match should make `/collab` the selected item.
    match popup.selected_item() {
        Some(CommandItem::Builtin(cmd)) => assert_eq!(cmd.command(), "collab"),
        other => panic!("expected collab to be selected for exact match, got {other:?}"),
    }
}
}

View File

@@ -37,6 +37,7 @@ pub(crate) struct FooterProps {
pub(crate) use_shift_enter_hint: bool,
pub(crate) is_task_running: bool,
pub(crate) steer_enabled: bool,
pub(crate) collaboration_modes_enabled: bool,
/// Which key the user must press again to quit.
///
/// This is rendered when `mode` is `FooterMode::QuitShortcutReminder`.
@@ -103,6 +104,31 @@ pub(crate) fn render_footer(area: Rect, buf: &mut Buffer, props: FooterProps) {
.render(area, buf);
}
pub(crate) fn inset_footer_hint_area(mut area: Rect) -> Rect {
if area.width > 2 {
area.x += 2;
area.width = area.width.saturating_sub(2);
}
area
}
/// Render `(key, label)` hint pairs as a single footer line.
///
/// Each entry is drawn as a leading space, the bold key, and the label;
/// entries are separated by an extra space. Nothing is drawn when `items`
/// is empty.
pub(crate) fn render_footer_hint_items(area: Rect, buf: &mut Buffer, items: &[(String, String)]) {
    // Guard: an empty list renders nothing (also gives us the last index).
    let Some(last_idx) = items.len().checked_sub(1) else {
        return;
    };
    let mut spans = Vec::with_capacity(items.len() * 4);
    for (idx, (key, label)) in items.iter().enumerate() {
        spans.push(" ".into());
        spans.push(key.clone().bold());
        spans.push(format!(" {label}").into());
        // Separator between entries, but not after the final one.
        if idx != last_idx {
            spans.push(" ".into());
        }
    }
    Line::from(spans).render(inset_footer_hint_area(area), buf);
}
fn footer_lines(props: FooterProps) -> Vec<Line<'static>> {
// Show the context indicator on the left, appended after the primary hint
// (e.g., "? for shortcuts"). Keep it visible even when typing (i.e., when
@@ -134,6 +160,7 @@ fn footer_lines(props: FooterProps) -> Vec<Line<'static>> {
use_shift_enter_hint: props.use_shift_enter_hint,
esc_backtrack_hint: props.esc_backtrack_hint,
is_wsl,
collaboration_modes_enabled: props.collaboration_modes_enabled,
};
shortcut_overlay_lines(state)
}
@@ -158,6 +185,7 @@ struct ShortcutsState {
use_shift_enter_hint: bool,
esc_backtrack_hint: bool,
is_wsl: bool,
collaboration_modes_enabled: bool,
}
fn quit_shortcut_reminder_line(key: KeyBinding) -> Line<'static> {
@@ -190,6 +218,7 @@ fn shortcut_overlay_lines(state: ShortcutsState) -> Vec<Line<'static>> {
let mut edit_previous = Line::from("");
let mut quit = Line::from("");
let mut show_transcript = Line::from("");
let mut change_mode = Line::from("");
for descriptor in SHORTCUTS {
if let Some(text) = descriptor.overlay_entry(state) {
@@ -204,11 +233,12 @@ fn shortcut_overlay_lines(state: ShortcutsState) -> Vec<Line<'static>> {
ShortcutId::EditPrevious => edit_previous = text,
ShortcutId::Quit => quit = text,
ShortcutId::ShowTranscript => show_transcript = text,
ShortcutId::ChangeMode => change_mode = text,
}
}
}
let ordered = vec![
let mut ordered = vec![
commands,
shell_commands,
newline,
@@ -218,9 +248,12 @@ fn shortcut_overlay_lines(state: ShortcutsState) -> Vec<Line<'static>> {
external_editor,
edit_previous,
quit,
Line::from(""),
show_transcript,
];
if change_mode.width() > 0 {
ordered.push(change_mode);
}
ordered.push(Line::from(""));
ordered.push(show_transcript);
build_columns(ordered)
}
@@ -298,6 +331,7 @@ enum ShortcutId {
EditPrevious,
Quit,
ShowTranscript,
ChangeMode,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -318,6 +352,7 @@ enum DisplayCondition {
WhenShiftEnterHint,
WhenNotShiftEnterHint,
WhenUnderWSL,
WhenCollaborationModesEnabled,
}
impl DisplayCondition {
@@ -327,6 +362,7 @@ impl DisplayCondition {
DisplayCondition::WhenShiftEnterHint => state.use_shift_enter_hint,
DisplayCondition::WhenNotShiftEnterHint => !state.use_shift_enter_hint,
DisplayCondition::WhenUnderWSL => state.is_wsl,
DisplayCondition::WhenCollaborationModesEnabled => state.collaboration_modes_enabled,
}
}
}
@@ -469,6 +505,15 @@ const SHORTCUTS: &[ShortcutDescriptor] = &[
prefix: "",
label: " to view transcript",
},
ShortcutDescriptor {
id: ShortcutId::ChangeMode,
bindings: &[ShortcutBinding {
key: key_hint::shift(KeyCode::Tab),
condition: DisplayCondition::WhenCollaborationModesEnabled,
}],
prefix: "",
label: " to change mode",
},
];
#[cfg(test)]
@@ -500,6 +545,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -514,6 +560,22 @@ mod tests {
use_shift_enter_hint: true,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
},
);
snapshot_footer(
"footer_shortcuts_collaboration_modes_enabled",
FooterProps {
mode: FooterMode::ShortcutOverlay,
esc_backtrack_hint: false,
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: true,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -528,6 +590,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -542,6 +605,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: true,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -556,6 +620,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -570,6 +635,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -584,6 +650,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: true,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: Some(72),
context_window_used_tokens: None,
@@ -598,6 +665,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: false,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: Some(123_456),
@@ -612,6 +680,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: true,
steer_enabled: false,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
@@ -626,6 +695,7 @@ mod tests {
use_shift_enter_hint: false,
is_task_running: true,
steer_enabled: true,
collaboration_modes_enabled: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,

View File

@@ -28,16 +28,24 @@ use bottom_pane_view::BottomPaneView;
use codex_core::features::Features;
use codex_core::skills::model::SkillMetadata;
use codex_file_search::FileMatch;
use codex_protocol::user_input::TextElement;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::text::Line;
use std::time::Duration;
mod approval_overlay;
pub(crate) use approval_overlay::ApprovalOverlay;
pub(crate) use approval_overlay::ApprovalRequest;
mod bottom_pane_view;
/// An attached local image paired with the placeholder text that represents
/// it inline in the composer text.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct LocalImageAttachment {
    /// Placeholder label that stands in for the image in the message text.
    pub(crate) placeholder: String,
    /// Filesystem path of the attached image.
    pub(crate) path: PathBuf,
}
mod chat_composer;
mod chat_composer_history;
mod command_popup;
@@ -188,6 +196,11 @@ impl BottomPane {
self.composer.set_steer_enabled(enabled);
}
/// Propagate the collaboration-modes feature flag to the composer and
/// schedule a redraw so dependent UI (footer hints, popups) refreshes.
pub fn set_collaboration_modes_enabled(&mut self, enabled: bool) {
    self.composer.set_collaboration_modes_enabled(enabled);
    self.request_redraw();
}
pub fn status_widget(&self) -> Option<&StatusIndicatorWidget> {
self.status.as_ref()
}
@@ -237,8 +250,10 @@ impl BottomPane {
} else {
// If a task is running and a status line is visible, allow Esc to
// send an interrupt even while the composer has focus.
if matches!(key_event.code, crossterm::event::KeyCode::Esc)
// When a popup is active, prefer dismissing it over interrupting the task.
if key_event.code == KeyCode::Esc
&& self.is_task_running
&& !self.composer.popup_active()
&& let Some(status) = &self.status
{
// Send Op::Interrupt
@@ -309,8 +324,14 @@ impl BottomPane {
}
/// Replace the composer text with `text`.
pub(crate) fn set_composer_text(&mut self, text: String) {
self.composer.set_text_content(text);
pub(crate) fn set_composer_text(
&mut self,
text: String,
text_elements: Vec<TextElement>,
local_image_paths: Vec<PathBuf>,
) {
self.composer
.set_text_content(text, text_elements, local_image_paths);
self.request_redraw();
}
@@ -334,6 +355,19 @@ impl BottomPane {
self.composer.current_text()
}
/// Snapshot of the composer's current text elements (placeholder ranges).
pub(crate) fn composer_text_elements(&self) -> Vec<TextElement> {
    self.composer.text_elements()
}
/// Local image attachments currently held by the composer.
pub(crate) fn composer_local_images(&self) -> Vec<LocalImageAttachment> {
    self.composer.local_images()
}
#[cfg(test)]
pub(crate) fn composer_local_image_paths(&self) -> Vec<PathBuf> {
self.composer.local_image_paths()
}
pub(crate) fn composer_text_with_pending(&self) -> String {
self.composer.current_text_with_pending()
}
@@ -508,6 +542,23 @@ impl BottomPane {
self.request_redraw();
}
/// Show `line` as a transient footer flash for `duration`, then schedule a
/// frame so the flash is cleared from the screen once it expires.
pub(crate) fn flash_footer_hint(&mut self, line: Line<'static>, duration: Duration) {
    self.composer.show_footer_flash(line, duration);
    let frame_requester = self.frame_requester.clone();
    // Schedule a redraw after the flash expires. Prefer the ambient tokio
    // runtime; fall back to a plain OS thread when no runtime is active
    // (e.g. when called from synchronous code or tests).
    if let Ok(handle) = tokio::runtime::Handle::try_current() {
        handle.spawn(async move {
            tokio::time::sleep(duration).await;
            frame_requester.schedule_frame();
        });
    } else {
        std::thread::spawn(move || {
            std::thread::sleep(duration);
            frame_requester.schedule_frame();
        });
    }
    // Redraw now so the flash becomes visible immediately.
    self.request_redraw();
}
pub(crate) fn composer_is_empty(&self) -> bool {
self.composer.is_empty()
}
@@ -627,10 +678,18 @@ impl BottomPane {
}
}
#[cfg(test)]
pub(crate) fn take_recent_submission_images(&mut self) -> Vec<PathBuf> {
self.composer.take_recent_submission_images()
}
pub(crate) fn take_recent_submission_images_with_placeholders(
&mut self,
) -> Vec<LocalImageAttachment> {
self.composer
.take_recent_submission_images_with_placeholders()
}
fn as_renderable(&'_ self) -> RenderableItem<'_> {
if let Some(view) = self.active_view() {
RenderableItem::Borrowed(view)
@@ -673,9 +732,13 @@ impl Renderable for BottomPane {
mod tests {
use super::*;
use crate::app_event::AppEvent;
use codex_core::protocol::Op;
use codex_protocol::protocol::SkillScope;
use crossterm::event::KeyModifiers;
use insta::assert_snapshot;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use std::path::PathBuf;
use tokio::sync::mpsc::unbounded_channel;
fn snapshot_buffer(buf: &Buffer) -> String {
@@ -942,4 +1005,109 @@ mod tests {
render_snapshot(&pane, area)
);
}
#[test]
fn esc_with_skill_popup_does_not_interrupt_task() {
    let (tx_raw, mut rx) = unbounded_channel::<AppEvent>();
    let tx = AppEventSender::new(tx_raw);
    let mut pane = BottomPane::new(BottomPaneParams {
        app_event_tx: tx,
        frame_requester: FrameRequester::test_dummy(),
        has_input_focus: true,
        enhanced_keys_supported: false,
        placeholder_text: "Ask Codex to do anything".to_string(),
        disable_paste_burst: false,
        animations_enabled: true,
        // One registered skill so that typing `$` opens the skill popup.
        skills: Some(vec![SkillMetadata {
            name: "test-skill".to_string(),
            description: "test skill".to_string(),
            short_description: None,
            interface: None,
            path: PathBuf::from("test-skill"),
            scope: SkillScope::User,
        }]),
    });
    pane.set_task_running(true);
    // Repro: a running task + skill popup + Esc should dismiss the popup, not interrupt.
    pane.insert_str("$");
    assert!(
        pane.composer.popup_active(),
        "expected skill popup after typing `$`"
    );
    pane.handle_key_event(KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE));
    // Drain the event channel: no Op::Interrupt may have been emitted.
    while let Ok(ev) = rx.try_recv() {
        assert!(
            !matches!(ev, AppEvent::CodexOp(Op::Interrupt)),
            "expected Esc to not send Op::Interrupt when dismissing skill popup"
        );
    }
    assert!(
        !pane.composer.popup_active(),
        "expected Esc to dismiss skill popup"
    );
}
#[test]
fn esc_with_slash_command_popup_does_not_interrupt_task() {
    let (tx_raw, mut rx) = unbounded_channel::<AppEvent>();
    let tx = AppEventSender::new(tx_raw);
    let mut pane = BottomPane::new(BottomPaneParams {
        app_event_tx: tx,
        frame_requester: FrameRequester::test_dummy(),
        has_input_focus: true,
        enhanced_keys_supported: false,
        placeholder_text: "Ask Codex to do anything".to_string(),
        disable_paste_burst: false,
        animations_enabled: true,
        skills: Some(Vec::new()),
    });
    pane.set_task_running(true);
    // Repro: a running task + slash-command popup + Esc should not interrupt the task.
    pane.insert_str("/");
    assert!(
        pane.composer.popup_active(),
        "expected command popup after typing `/`"
    );
    pane.handle_key_event(KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE));
    // Drain the event channel: no Op::Interrupt may have been emitted.
    while let Ok(ev) = rx.try_recv() {
        assert!(
            !matches!(ev, AppEvent::CodexOp(Op::Interrupt)),
            "expected Esc to not send Op::Interrupt while command popup is active"
        );
    }
    // The composer text is untouched after dismissing the popup.
    assert_eq!(pane.composer_text(), "/");
}
#[test]
fn esc_interrupts_running_task_when_no_popup() {
    let (tx_raw, mut rx) = unbounded_channel::<AppEvent>();
    let tx = AppEventSender::new(tx_raw);
    let mut pane = BottomPane::new(BottomPaneParams {
        app_event_tx: tx,
        frame_requester: FrameRequester::test_dummy(),
        has_input_focus: true,
        enhanced_keys_supported: false,
        placeholder_text: "Ask Codex to do anything".to_string(),
        disable_paste_burst: false,
        animations_enabled: true,
        skills: Some(Vec::new()),
    });
    pane.set_task_running(true);
    // Control case: with no popup active, Esc must interrupt the running task.
    pane.handle_key_event(KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE));
    assert!(
        matches!(rx.try_recv(), Ok(AppEvent::CodexOp(Op::Interrupt))),
        "expected Esc to send Op::Interrupt while a task is running"
    );
}
}

View File

@@ -0,0 +1,11 @@
---
source: tui/src/bottom_pane/footer.rs
assertion_line: 535
expression: terminal.backend()
---
" / for commands ! for shell commands "
" ctrl + j for newline tab to queue message "
" @ for file paths ctrl + v to paste images "
" ctrl + g to edit in external editor esc esc to edit previous message "
" ctrl + c to exit shift + tab to change mode "
" ctrl + t to view transcript "

View File

@@ -1,4 +1,6 @@
use crate::key_hint::is_altgr;
use codex_protocol::user_input::ByteRange;
use codex_protocol::user_input::TextElement as UserTextElement;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
@@ -60,10 +62,36 @@ impl TextArea {
}
}
pub fn set_text(&mut self, text: &str) {
/// Replace the textarea text and clear any existing text elements.
pub fn set_text_clearing_elements(&mut self, text: &str) {
self.set_text_inner(text, None);
}
/// Replace the textarea text and set the provided text elements.
pub fn set_text_with_elements(&mut self, text: &str, elements: &[UserTextElement]) {
self.set_text_inner(text, Some(elements));
}
fn set_text_inner(&mut self, text: &str, elements: Option<&[UserTextElement]>) {
// Stage 1: replace the raw text and keep the cursor in a safe byte range.
self.text = text.to_string();
self.cursor_pos = self.cursor_pos.clamp(0, self.text.len());
// Stage 2: rebuild element ranges from scratch against the new text.
self.elements.clear();
if let Some(elements) = elements {
for elem in elements {
let mut start = elem.byte_range.start.min(self.text.len());
let mut end = elem.byte_range.end.min(self.text.len());
start = self.clamp_pos_to_char_boundary(start);
end = self.clamp_pos_to_char_boundary(end);
if start >= end {
continue;
}
self.elements.push(TextElement { range: start..end });
}
self.elements.sort_by_key(|e| e.range.start);
}
// Stage 3: clamp the cursor and reset derived state tied to the prior content.
self.cursor_pos = self.clamp_pos_to_nearest_boundary(self.cursor_pos);
self.wrap_cache.replace(None);
self.preferred_col = None;
@@ -722,6 +750,22 @@ impl TextArea {
.collect()
}
/// Export the textarea's tracked elements as protocol-level text elements.
///
/// Each element's byte range is copied verbatim; its placeholder is the
/// current text slice covered by that range (or `None` when the range no
/// longer lies on valid char boundaries).
pub fn text_elements(&self) -> Vec<UserTextElement> {
    let mut out = Vec::with_capacity(self.elements.len());
    for elem in &self.elements {
        let byte_range = ByteRange {
            start: elem.range.start,
            end: elem.range.end,
        };
        // `str::get` returns None for out-of-bounds or non-boundary ranges.
        let placeholder = self.text.get(elem.range.clone()).map(str::to_string);
        out.push(UserTextElement {
            byte_range,
            placeholder,
        });
    }
    out
}
pub fn element_payload_starting_at(&self, pos: usize) -> Option<String> {
let pos = pos.min(self.text.len());
let elem = self.elements.iter().find(|e| e.range.start == pos)?;
@@ -1251,7 +1295,7 @@ mod tests {
let mut t = TextArea::new();
t.insert_str("abcd");
t.set_cursor(1);
t.set_text("");
t.set_text_clearing_elements("");
assert_eq!(t.cursor(), 0);
t.insert_str("a");
assert_eq!(t.text(), "a你");
@@ -1933,7 +1977,7 @@ mod tests {
for _ in 0..base_len {
base.push_str(&rand_grapheme(&mut rng));
}
ta.set_text(&base);
ta.set_text_clearing_elements(&base);
// Choose a valid char boundary for initial cursor
let mut boundaries: Vec<usize> = vec![0];
boundaries.extend(ta.text().char_indices().map(|(i, _)| i).skip(1));

View File

@@ -92,7 +92,9 @@ use codex_core::skills::model::SkillMetadata;
use codex_protocol::ThreadId;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ElicitationRequestEvent;
use codex_protocol::models::local_image_label_text;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::user_input::TextElement;
use codex_protocol::user_input::UserInput;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -128,6 +130,7 @@ use crate::bottom_pane::CancellationEvent;
use crate::bottom_pane::DOUBLE_PRESS_QUIT_SHORTCUT_ENABLED;
use crate::bottom_pane::ExperimentalFeaturesView;
use crate::bottom_pane::InputResult;
use crate::bottom_pane::LocalImageAttachment;
use crate::bottom_pane::QUIT_SHORTCUT_TIMEOUT;
use crate::bottom_pane::SelectionAction;
use crate::bottom_pane::SelectionItem;
@@ -136,6 +139,7 @@ use crate::bottom_pane::custom_prompt_view::CustomPromptView;
use crate::bottom_pane::popup_consts::standard_popup_hint_line;
use crate::clipboard_paste::paste_image_to_temp_png;
use crate::collab;
use crate::collaboration_modes;
use crate::diff_render::display_path_for;
use crate::exec_cell::CommandOutput;
use crate::exec_cell::ExecCell;
@@ -345,8 +349,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) config: Config,
pub(crate) frame_requester: FrameRequester,
pub(crate) app_event_tx: AppEventSender,
pub(crate) initial_prompt: Option<String>,
pub(crate) initial_images: Vec<PathBuf>,
pub(crate) initial_user_message: Option<UserMessage>,
pub(crate) enhanced_keys_supported: bool,
pub(crate) auth_manager: Arc<AuthManager>,
pub(crate) models_manager: Arc<ModelsManager>,
@@ -371,6 +374,8 @@ pub(crate) enum ExternalEditorState {
Active,
}
type CollaborationModeSelection = collaboration_modes::Selection;
/// Maintains the per-session UI state and interaction state machines for the chat screen.
///
/// `ChatWidget` owns the state derived from the protocol event stream (history cells, streaming
@@ -400,6 +405,11 @@ pub(crate) struct ChatWidget {
active_cell_revision: u64,
config: Config,
model: Option<String>,
/// Current UI selection for collaboration modes.
///
/// This selection is only meaningful when `Feature::CollaborationModes` is enabled; when the
/// feature is disabled, the value is effectively inert.
collaboration_mode: CollaborationModeSelection,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
@@ -507,16 +517,19 @@ pub(crate) struct ActiveCellTranscriptKey {
pub(crate) animation_tick: Option<u64>,
}
struct UserMessage {
pub(crate) struct UserMessage {
text: String,
image_paths: Vec<PathBuf>,
local_images: Vec<LocalImageAttachment>,
text_elements: Vec<TextElement>,
}
impl From<String> for UserMessage {
fn from(text: String) -> Self {
Self {
text,
image_paths: Vec::new(),
local_images: Vec::new(),
// Plain text conversion has no UI element ranges.
text_elements: Vec::new(),
}
}
}
@@ -525,16 +538,107 @@ impl From<&str> for UserMessage {
fn from(text: &str) -> Self {
Self {
text: text.to_string(),
image_paths: Vec::new(),
local_images: Vec::new(),
// Plain text conversion has no UI element ranges.
text_elements: Vec::new(),
}
}
}
fn create_initial_user_message(text: String, image_paths: Vec<PathBuf>) -> Option<UserMessage> {
if text.is_empty() && image_paths.is_empty() {
/// Build the optional initial user message for a new or resumed chat session.
///
/// `text` is the caller-provided prompt (`None` is treated as empty),
/// `local_image_paths` are attachments to submit alongside it, and
/// `text_elements` carry UI element ranges for the text (empty when the caller
/// has none, e.g. plain CLI prompt args).
///
/// Returns `None` when there is neither text nor any attachment so callers can
/// skip submitting an empty turn. Attachments are labeled in order, 1-based,
/// via `local_image_label_text` (i.e. `[Image #1]`, `[Image #2]`, ...).
pub(crate) fn create_initial_user_message(
    text: Option<String>,
    local_image_paths: Vec<PathBuf>,
    text_elements: Vec<TextElement>,
) -> Option<UserMessage> {
    let text = text.unwrap_or_default();
    if text.is_empty() && local_image_paths.is_empty() {
        None
    } else {
        // Number attachments 1-based so placeholder labels start at [Image #1].
        let local_images = local_image_paths
            .into_iter()
            .enumerate()
            .map(|(idx, path)| LocalImageAttachment {
                placeholder: local_image_label_text(idx + 1),
                path,
            })
            .collect();
        Some(UserMessage {
            text,
            local_images,
            text_elements,
        })
    }
}
// When merging multiple queued drafts (e.g., after interrupt), each draft starts numbering
// its attachments at [Image #1]. Reassign placeholder labels based on the attachment list so
// the combined local_image_paths order matches the labels, even if placeholders were moved
// in the text (e.g., [Image #2] appearing before [Image #1]).
/// Renumber the `[Image #N]` placeholders of one queued draft so its labels
/// continue from `*next_label`, advancing `next_label` past the labels used.
///
/// When merging multiple queued drafts (e.g., after interrupt), each draft starts
/// numbering its attachments at [Image #1]. Labels are reassigned in
/// attachment-list order so the combined attachment order matches the labels,
/// even if placeholders were moved in the text (e.g., [Image #2] appearing
/// before [Image #1]).
fn remap_placeholders_for_message(message: UserMessage, next_label: &mut usize) -> UserMessage {
    let UserMessage {
        text,
        text_elements,
        local_images,
    } = message;
    // No attachments means no placeholders to rewrite; return the draft untouched.
    if local_images.is_empty() {
        return UserMessage {
            text,
            text_elements,
            local_images,
        };
    }
    // Old placeholder label -> new label, assigned in attachment-list order so
    // the Nth attachment always receives the Nth fresh label.
    let mut mapping: HashMap<String, String> = HashMap::new();
    let mut remapped_images = Vec::new();
    for attachment in local_images {
        let new_placeholder = local_image_label_text(*next_label);
        *next_label += 1;
        mapping.insert(attachment.placeholder.clone(), new_placeholder.clone());
        remapped_images.push(LocalImageAttachment {
            placeholder: new_placeholder,
            path: attachment.path,
        });
    }
    // Rebuild the text left-to-right, substituting each element's placeholder and
    // recomputing byte ranges (replacement labels may differ in length).
    let mut elements = text_elements;
    elements.sort_by_key(|elem| elem.byte_range.start);
    let mut cursor = 0usize;
    let mut rebuilt = String::new();
    let mut rebuilt_elements = Vec::new();
    for mut elem in elements {
        // Clamp ranges defensively so stale/out-of-bounds element ranges can't panic.
        let start = elem.byte_range.start.min(text.len());
        let end = elem.byte_range.end.min(text.len());
        // Copy the plain text between the previous element and this one.
        if let Some(segment) = text.get(cursor..start) {
            rebuilt.push_str(segment);
        }
        let original = text.get(start..end).unwrap_or("");
        // Prefer the element's recorded placeholder; fall back to the span text itself.
        let placeholder_key = elem.placeholder.as_deref().unwrap_or(original);
        let replacement = mapping
            .get(placeholder_key)
            .map(String::as_str)
            .unwrap_or(original);
        let elem_start = rebuilt.len();
        rebuilt.push_str(replacement);
        let elem_end = rebuilt.len();
        if let Some(remapped) = mapping.get(placeholder_key) {
            elem.placeholder = Some(remapped.clone());
        }
        // Byte range now points into the rebuilt string.
        elem.byte_range = (elem_start..elem_end).into();
        rebuilt_elements.push(elem);
        cursor = end;
    }
    // Append any trailing text after the last element.
    if let Some(segment) = text.get(cursor..) {
        rebuilt.push_str(segment);
    }
    UserMessage {
        text: rebuilt,
        local_images: remapped_images,
        text_elements: rebuilt_elements,
    }
}
@@ -994,31 +1098,76 @@ impl ChatWidget {
));
}
// If any messages were queued during the task, restore them into the composer.
if !self.queued_user_messages.is_empty() {
let queued_text = self
.queued_user_messages
if let Some(combined) = self.drain_queued_messages_for_restore() {
let combined_local_image_paths = combined
.local_images
.iter()
.map(|m| m.text.clone())
.collect::<Vec<_>>()
.join("\n");
let existing_text = self.bottom_pane.composer_text();
let combined = if existing_text.is_empty() {
queued_text
} else if queued_text.is_empty() {
existing_text
} else {
format!("{queued_text}\n{existing_text}")
};
self.bottom_pane.set_composer_text(combined);
// Clear the queue and update the status indicator list.
self.queued_user_messages.clear();
.map(|img| img.path.clone())
.collect();
self.bottom_pane.set_composer_text(
combined.text,
combined.text_elements,
combined_local_image_paths,
);
self.refresh_queued_user_messages();
}
self.request_redraw();
}
/// Merge queued drafts (plus the current composer state) into a single message for restore.
///
/// Each queued draft numbers attachments from `[Image #1]`. When we concatenate drafts, we
/// must renumber placeholders in a stable order so the merged attachment list stays aligned
/// with the labels embedded in text. This helper drains the queue, remaps placeholders, and
/// fixes text element byte ranges as content is appended. Returns `None` when there is nothing
/// to restore.
fn drain_queued_messages_for_restore(&mut self) -> Option<UserMessage> {
    // Nothing queued: nothing to restore.
    if self.queued_user_messages.is_empty() {
        return None;
    }
    // Whatever the user already typed in the composer is merged in last.
    let existing_message = UserMessage {
        text: self.bottom_pane.composer_text(),
        text_elements: self.bottom_pane.composer_text_elements(),
        local_images: self.bottom_pane.composer_local_images(),
    };
    let mut to_merge: Vec<UserMessage> = self.queued_user_messages.drain(..).collect();
    // Skip the composer draft entirely when it is empty.
    if !existing_message.text.is_empty() || !existing_message.local_images.is_empty() {
        to_merge.push(existing_message);
    }
    let mut combined = UserMessage {
        text: String::new(),
        text_elements: Vec::new(),
        local_images: Vec::new(),
    };
    // Byte offset of the next appended draft within `combined.text`.
    let mut combined_offset = 0usize;
    // Image labels restart at 1 for the merged message.
    let mut next_image_label = 1usize;
    for (idx, message) in to_merge.into_iter().enumerate() {
        if idx > 0 {
            // Separate drafts with a newline and account for it in the offset.
            combined.text.push('\n');
            combined_offset += 1;
        }
        let message = remap_placeholders_for_message(message, &mut next_image_label);
        let base = combined_offset;
        combined.text.push_str(&message.text);
        combined_offset += message.text.len();
        combined
            .text_elements
            .extend(message.text_elements.into_iter().map(|mut elem| {
                // Shift element ranges from draft-local to combined-text coordinates.
                elem.byte_range.start += base;
                elem.byte_range.end += base;
                elem
            }));
        combined.local_images.extend(message.local_images);
    }
    Some(combined)
}
/// Render an updated plan from the agent as a new history cell.
fn on_plan_update(&mut self, update: UpdatePlanArgs) {
    let cell = history_cell::new_plan_update(update);
    self.add_to_history(cell);
}
@@ -1629,8 +1778,7 @@ impl ChatWidget {
config,
frame_requester,
app_event_tx,
initial_prompt,
initial_images,
initial_user_message,
enhanced_keys_supported,
auth_manager,
models_manager,
@@ -1673,13 +1821,11 @@ impl ChatWidget {
active_cell_revision: 0,
config,
model,
collaboration_mode: CollaborationModeSelection::default(),
auth_manager,
models_manager,
session_header: SessionHeader::new(model_for_header),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
),
initial_user_message,
token_info: None,
rate_limit_snapshot: None,
plan_type: None,
@@ -1722,6 +1868,9 @@ impl ChatWidget {
widget
.bottom_pane
.set_steer_enabled(widget.config.features.enabled(Feature::Steer));
widget.bottom_pane.set_collaboration_modes_enabled(
widget.config.features.enabled(Feature::CollaborationModes),
);
widget
}
@@ -1736,8 +1885,7 @@ impl ChatWidget {
config,
frame_requester,
app_event_tx,
initial_prompt,
initial_images,
initial_user_message,
enhanced_keys_supported,
auth_manager,
models_manager,
@@ -1772,13 +1920,11 @@ impl ChatWidget {
active_cell_revision: 0,
config,
model: Some(header_model.clone()),
collaboration_mode: CollaborationModeSelection::default(),
auth_manager,
models_manager,
session_header: SessionHeader::new(header_model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
),
initial_user_message,
token_info: None,
rate_limit_snapshot: None,
plan_type: None,
@@ -1821,6 +1967,9 @@ impl ChatWidget {
widget
.bottom_pane
.set_steer_enabled(widget.config.features.enabled(Feature::Steer));
widget.bottom_pane.set_collaboration_modes_enabled(
widget.config.features.enabled(Feature::CollaborationModes),
);
widget
}
@@ -1885,6 +2034,16 @@ impl ChatWidget {
}
match key_event {
KeyEvent {
code: KeyCode::BackTab,
kind: KeyEventKind::Press,
..
} if self.collaboration_modes_enabled()
&& !self.bottom_pane.is_task_running()
&& self.bottom_pane.no_modal_or_popup_active() =>
{
self.cycle_collaboration_mode();
}
KeyEvent {
code: KeyCode::Up,
modifiers: KeyModifiers::ALT,
@@ -1893,46 +2052,64 @@ impl ChatWidget {
} if !self.queued_user_messages.is_empty() => {
// Prefer the most recently queued item.
if let Some(user_message) = self.queued_user_messages.pop_back() {
self.bottom_pane.set_composer_text(user_message.text);
let local_image_paths = user_message
.local_images
.iter()
.map(|img| img.path.clone())
.collect();
self.bottom_pane.set_composer_text(
user_message.text,
user_message.text_elements,
local_image_paths,
);
self.refresh_queued_user_messages();
self.request_redraw();
}
}
_ => {
match self.bottom_pane.handle_key_event(key_event) {
InputResult::Submitted(text) => {
// Enter always sends messages immediately (bypasses queue check)
// Clear any reasoning status header when submitting a new message
_ => match self.bottom_pane.handle_key_event(key_event) {
InputResult::Submitted {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
if self.is_session_configured() {
// Submitted is only emitted when steer is enabled (Enter sends immediately).
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
self.set_status_header(String::from("Working"));
let user_message = UserMessage {
text,
image_paths: self.bottom_pane.take_recent_submission_images(),
};
if !self.is_session_configured() {
self.queue_user_message(user_message);
} else {
self.submit_user_message(user_message);
}
}
InputResult::Queued(text) => {
// Tab queues the message if a task is running, otherwise submits immediately
let user_message = UserMessage {
text,
image_paths: self.bottom_pane.take_recent_submission_images(),
};
self.submit_user_message(user_message);
} else {
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
}
}
InputResult::Queued {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
},
}
}
@@ -2022,6 +2199,11 @@ impl ChatWidget {
SlashCommand::Model => {
self.open_model_popup();
}
SlashCommand::Collab => {
if self.collaboration_modes_enabled() {
self.cycle_collaboration_mode();
}
}
SlashCommand::Approvals => {
self.open_approvals_popup();
}
@@ -2178,6 +2360,16 @@ impl ChatWidget {
let trimmed = args.trim();
match cmd {
SlashCommand::Collab if !trimmed.is_empty() => {
if let Some(selection) = collaboration_modes::parse_selection(trimmed) {
self.set_collaboration_mode(selection);
} else {
self.add_error_message(format!(
"Unknown collaboration mode '{trimmed}'. Try: plan, pair, execute."
));
self.request_redraw();
}
}
SlashCommand::Review if !trimmed.is_empty() => {
self.submit_op(Op::Review {
review_request: ReviewRequest {
@@ -2255,8 +2447,20 @@ impl ChatWidget {
}
fn submit_user_message(&mut self, user_message: UserMessage) {
let UserMessage { text, image_paths } = user_message;
if text.is_empty() && image_paths.is_empty() {
let Some(model) = self.current_model().or(self.config.model.as_deref()) else {
tracing::warn!("cannot submit user message before model is known; queueing");
self.queued_user_messages.push_front(user_message);
self.refresh_queued_user_messages();
return;
};
let model = model.to_string();
let UserMessage {
text,
local_images,
text_elements,
} = user_message;
if text.is_empty() && local_images.is_empty() {
return;
}
@@ -2280,15 +2484,16 @@ impl ChatWidget {
return;
}
for path in image_paths {
items.push(UserInput::LocalImage { path });
for image in &local_images {
items.push(UserInput::LocalImage {
path: image.path.clone(),
});
}
if !text.is_empty() {
// TODO: Thread text element ranges from the composer input. Empty keeps old behavior.
items.push(UserInput::Text {
text: text.clone(),
text_elements: Vec::new(),
text_elements: text_elements.clone(),
});
}
@@ -2302,14 +2507,29 @@ impl ChatWidget {
}
}
self.codex_op_tx
.send(Op::UserInput {
items,
final_output_json_schema: None,
})
.unwrap_or_else(|e| {
tracing::error!("failed to send message: {e}");
});
let collaboration_mode = self.collaboration_modes_enabled().then(|| {
collaboration_modes::resolve_mode_or_fallback(
self.models_manager.as_ref(),
self.collaboration_mode,
model.as_str(),
self.config.model_reasoning_effort,
)
});
let op = Op::UserTurn {
items,
cwd: self.config.cwd.clone(),
approval_policy: self.config.approval_policy.value(),
sandbox_policy: self.config.sandbox_policy.get().clone(),
model,
effort: self.config.model_reasoning_effort,
summary: self.config.model_reasoning_summary,
final_output_json_schema: None,
collaboration_mode,
};
self.codex_op_tx.send(op).unwrap_or_else(|e| {
tracing::error!("failed to send message: {e}");
});
// Persist the text to cross-session message history.
if !text.is_empty() {
@@ -2322,7 +2542,12 @@ impl ChatWidget {
// Only show the text portion in conversation history.
if !text.is_empty() {
self.add_to_history(history_cell::new_user_prompt(text));
let local_image_paths = local_images.into_iter().map(|img| img.path).collect();
self.add_to_history(history_cell::new_user_prompt(
text,
text_elements,
local_image_paths,
));
}
self.needs_final_message_separator = false;
@@ -2479,7 +2704,8 @@ impl ChatWidget {
| EventMsg::ItemCompleted(_)
| EventMsg::AgentMessageContentDelta(_)
| EventMsg::ReasoningContentDelta(_)
| EventMsg::ReasoningRawContentDelta(_) => {}
| EventMsg::ReasoningRawContentDelta(_)
| EventMsg::RequestUserInput(_) => {}
}
}
@@ -2537,10 +2763,16 @@ impl ChatWidget {
}
fn on_user_message_event(&mut self, event: UserMessageEvent) {
let message = event.message.trim();
if !message.is_empty() {
self.add_to_history(history_cell::new_user_prompt(message.to_string()));
if !event.message.trim().is_empty() {
self.add_to_history(history_cell::new_user_prompt(
event.message,
event.text_elements,
event.local_images,
));
}
// User messages reset separator state so the next agent response doesn't add a stray break.
self.needs_final_message_separator = false;
}
/// Exit the UI immediately without waiting for shutdown.
@@ -2633,6 +2865,11 @@ impl ChatWidget {
let total_usage = token_info
.map(|ti| &ti.total_token_usage)
.unwrap_or(&default_usage);
let collaboration_mode = if self.collaboration_modes_enabled() {
Some(self.collaboration_mode.label())
} else {
None
};
self.add_to_history(crate::status::new_status_output(
&self.config,
self.auth_manager.as_ref(),
@@ -2644,6 +2881,7 @@ impl ChatWidget {
self.plan_type,
Local::now(),
self.model_display_name(),
collaboration_mode,
));
}
@@ -3893,6 +4131,8 @@ impl ChatWidget {
}
if feature == Feature::Steer {
self.bottom_pane.set_steer_enabled(enabled);
} else if feature == Feature::CollaborationModes {
self.bottom_pane.set_collaboration_modes_enabled(enabled);
}
}
@@ -3930,10 +4170,39 @@ impl ChatWidget {
self.model = Some(model.to_string());
}
/// Advance the collaboration-mode selection to the next option in the cycle.
///
/// Does nothing while the `CollaborationModes` feature is disabled.
fn cycle_collaboration_mode(&mut self) {
    if self.collaboration_modes_enabled() {
        self.set_collaboration_mode(self.collaboration_mode.next());
    }
}
/// Record `selection` as the active collaboration mode and surface the change.
///
/// While collaboration modes are enabled, the current selection is attached to
/// *every* submission as `Op::UserTurn { collaboration_mode: Some(...) }`.
/// A one-second footer flash confirms the switch; when the feature is disabled
/// this is a no-op.
fn set_collaboration_mode(&mut self, selection: CollaborationModeSelection) {
    if !self.collaboration_modes_enabled() {
        return;
    }
    self.collaboration_mode = selection;
    // Briefly flash the new mode in the footer so the switch is visible.
    let flash = collaboration_modes::flash_line(selection);
    self.bottom_pane
        .flash_footer_hint(flash, Duration::from_secs(1));
    self.request_redraw();
}
/// The model currently backing this chat, if one has been resolved yet.
fn current_model(&self) -> Option<&str> {
    self.model.as_ref().map(String::as_str)
}
/// Whether the `CollaborationModes` feature flag is on for this session.
fn collaboration_modes_enabled(&self) -> bool {
    let features = &self.config.features;
    features.enabled(Feature::CollaborationModes)
}
/// Human-readable model name for the UI, falling back to the default label
/// when no model has been resolved yet.
fn model_display_name(&self) -> &str {
    match self.model.as_deref() {
        Some(name) => name,
        None => DEFAULT_MODEL_DISPLAY_NAME,
    }
}
@@ -4126,8 +4395,14 @@ impl ChatWidget {
}
/// Replace the composer content with the provided text and reset cursor.
pub(crate) fn set_composer_text(&mut self, text: String) {
self.bottom_pane.set_composer_text(text);
/// Replace the composer content — text, element ranges, and attached local
/// image paths — by forwarding to the bottom pane.
pub(crate) fn set_composer_text(
    &mut self,
    text: String,
    text_elements: Vec<TextElement>,
    local_image_paths: Vec<PathBuf>,
) {
    let pane = &mut self.bottom_pane;
    pane.set_composer_text(text, text_elements, local_image_paths);
}
pub(crate) fn show_esc_backtrack_hint(&mut self) {

View File

@@ -8,6 +8,8 @@ use super::*;
use crate::app_event::AppEvent;
use crate::app_event::ExitMode;
use crate::app_event_sender::AppEventSender;
use crate::bottom_pane::LocalImageAttachment;
use crate::history_cell::UserHistoryCell;
use crate::test_backend::VT100Backend;
use crate::tui::FrameRequester;
use assert_matches::assert_matches;
@@ -59,6 +61,7 @@ use codex_core::protocol::ViewImageToolCallEvent;
use codex_core::protocol::WarningEvent;
use codex_protocol::ThreadId;
use codex_protocol::account::PlanType;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_protocol::parse_command::ParsedCommand;
@@ -66,6 +69,8 @@ use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::plan_tool::UpdatePlanArgs;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::user_input::TextElement;
use codex_protocol::user_input::UserInput;
use codex_utils_absolute_path::AbsolutePathBuf;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -181,6 +186,364 @@ async fn resumed_initial_messages_render_history() {
);
}
#[tokio::test]
async fn replayed_user_message_preserves_text_elements_and_local_images() {
    // Replaying a session's initial messages must keep element ranges and local
    // image paths attached to the rendered user history cell.
    let (mut chat, mut rx, _ops) = make_chatwidget_manual(None).await;
    let placeholder = "[Image #1]";
    let message = format!("{placeholder} replayed");
    let text_elements = vec![TextElement {
        byte_range: (0..placeholder.len()).into(),
        placeholder: Some(placeholder.to_string()),
    }];
    let local_images = vec![PathBuf::from("/tmp/replay.png")];
    let conversation_id = ThreadId::new();
    let rollout_file = NamedTempFile::new().unwrap();
    // SessionConfigured event carrying one replayed user message.
    let configured = codex_core::protocol::SessionConfiguredEvent {
        session_id: conversation_id,
        forked_from_id: None,
        model: "test-model".to_string(),
        model_provider_id: "test-provider".to_string(),
        approval_policy: AskForApproval::Never,
        sandbox_policy: SandboxPolicy::ReadOnly,
        cwd: PathBuf::from("/home/user/project"),
        reasoning_effort: Some(ReasoningEffortConfig::default()),
        history_log_id: 0,
        history_entry_count: 0,
        initial_messages: Some(vec![EventMsg::UserMessage(UserMessageEvent {
            message: message.clone(),
            images: None,
            text_elements: text_elements.clone(),
            local_images: local_images.clone(),
        })]),
        rollout_path: rollout_file.path().to_path_buf(),
    };
    chat.handle_codex_event(Event {
        id: "initial".into(),
        msg: EventMsg::SessionConfigured(configured),
    });
    // Scan emitted app events for the first user history cell.
    let mut user_cell = None;
    while let Ok(ev) = rx.try_recv() {
        if let AppEvent::InsertHistoryCell(cell) = ev
            && let Some(cell) = cell.as_any().downcast_ref::<UserHistoryCell>()
        {
            user_cell = Some((
                cell.message.clone(),
                cell.text_elements.clone(),
                cell.local_image_paths.clone(),
            ));
            break;
        }
    }
    let (stored_message, stored_elements, stored_images) =
        user_cell.expect("expected a replayed user history cell");
    assert_eq!(stored_message, message);
    assert_eq!(stored_elements, text_elements);
    assert_eq!(stored_images, local_images);
}
#[tokio::test]
async fn submission_preserves_text_elements_and_local_images() {
    // Submitting from the composer must forward text elements and image paths
    // in the outgoing Op and mirror them into the user history cell.
    let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await;
    let conversation_id = ThreadId::new();
    let rollout_file = NamedTempFile::new().unwrap();
    let configured = codex_core::protocol::SessionConfiguredEvent {
        session_id: conversation_id,
        forked_from_id: None,
        model: "test-model".to_string(),
        model_provider_id: "test-provider".to_string(),
        approval_policy: AskForApproval::Never,
        sandbox_policy: SandboxPolicy::ReadOnly,
        cwd: PathBuf::from("/home/user/project"),
        reasoning_effort: Some(ReasoningEffortConfig::default()),
        history_log_id: 0,
        history_entry_count: 0,
        initial_messages: None,
        rollout_path: rollout_file.path().to_path_buf(),
    };
    chat.handle_codex_event(Event {
        id: "initial".into(),
        msg: EventMsg::SessionConfigured(configured),
    });
    // Discard the session-configured history cells before submitting.
    drain_insert_history(&mut rx);
    let placeholder = "[Image #1]";
    let text = format!("{placeholder} submit");
    let text_elements = vec![TextElement {
        byte_range: (0..placeholder.len()).into(),
        placeholder: Some(placeholder.to_string()),
    }];
    let local_images = vec![PathBuf::from("/tmp/submitted.png")];
    chat.bottom_pane
        .set_composer_text(text.clone(), text_elements.clone(), local_images.clone());
    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
    let items = match next_submit_op(&mut op_rx) {
        Op::UserTurn { items, .. } => items,
        other => panic!("expected Op::UserTurn, got {other:?}"),
    };
    // Image attachments are pushed before the text item.
    assert_eq!(items.len(), 2);
    assert_eq!(
        items[0],
        UserInput::LocalImage {
            path: local_images[0].clone()
        }
    );
    assert_eq!(
        items[1],
        UserInput::Text {
            text: text.clone(),
            text_elements: text_elements.clone(),
        }
    );
    // The same data must surface in the rendered user history cell.
    let mut user_cell = None;
    while let Ok(ev) = rx.try_recv() {
        if let AppEvent::InsertHistoryCell(cell) = ev
            && let Some(cell) = cell.as_any().downcast_ref::<UserHistoryCell>()
        {
            user_cell = Some((
                cell.message.clone(),
                cell.text_elements.clone(),
                cell.local_image_paths.clone(),
            ));
            break;
        }
    }
    let (stored_message, stored_elements, stored_images) =
        user_cell.expect("expected submitted user history cell");
    assert_eq!(stored_message, text);
    assert_eq!(stored_elements, text_elements);
    assert_eq!(stored_images, local_images);
}
#[tokio::test]
async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
    // All three drafts below deliberately reuse the local label [Image #1];
    // the merge on interrupt must renumber them 1..=3 in restore order.
    let first_placeholder = "[Image #1]";
    let first_text = format!("{first_placeholder} first");
    let first_elements = vec![TextElement {
        byte_range: (0..first_placeholder.len()).into(),
        placeholder: Some(first_placeholder.to_string()),
    }];
    let first_images = [PathBuf::from("/tmp/first.png")];
    let second_placeholder = "[Image #1]";
    let second_text = format!("{second_placeholder} second");
    let second_elements = vec![TextElement {
        byte_range: (0..second_placeholder.len()).into(),
        placeholder: Some(second_placeholder.to_string()),
    }];
    let second_images = [PathBuf::from("/tmp/second.png")];
    let existing_placeholder = "[Image #1]";
    let existing_text = format!("{existing_placeholder} existing");
    let existing_elements = vec![TextElement {
        byte_range: (0..existing_placeholder.len()).into(),
        placeholder: Some(existing_placeholder.to_string()),
    }];
    let existing_images = vec![PathBuf::from("/tmp/existing.png")];
    chat.queued_user_messages.push_back(UserMessage {
        text: first_text,
        local_images: vec![LocalImageAttachment {
            placeholder: first_placeholder.to_string(),
            path: first_images[0].clone(),
        }],
        text_elements: first_elements,
    });
    chat.queued_user_messages.push_back(UserMessage {
        text: second_text,
        local_images: vec![LocalImageAttachment {
            placeholder: second_placeholder.to_string(),
            path: second_images[0].clone(),
        }],
        text_elements: second_elements,
    });
    chat.refresh_queued_user_messages();
    // The composer also holds an unsent draft; it is merged in last.
    chat.bottom_pane
        .set_composer_text(existing_text, existing_elements, existing_images.clone());
    // When interrupted, queued messages are merged into the composer; image placeholders
    // must be renumbered to match the combined local image list.
    chat.handle_codex_event(Event {
        id: "interrupt".into(),
        msg: EventMsg::TurnAborted(codex_core::protocol::TurnAbortedEvent {
            reason: TurnAbortReason::Interrupted,
        }),
    });
    let first = "[Image #1] first".to_string();
    let second = "[Image #2] second".to_string();
    let third = "[Image #3] existing".to_string();
    let expected_text = format!("{first}\n{second}\n{third}");
    assert_eq!(chat.bottom_pane.composer_text(), expected_text);
    // Element ranges are in combined-text coordinates (+1 per joining newline).
    let first_start = 0;
    let second_start = first.len() + 1;
    let third_start = second_start + second.len() + 1;
    let expected_elements = vec![
        TextElement {
            byte_range: (first_start..first_start + "[Image #1]".len()).into(),
            placeholder: Some("[Image #1]".to_string()),
        },
        TextElement {
            byte_range: (second_start..second_start + "[Image #2]".len()).into(),
            placeholder: Some("[Image #2]".to_string()),
        },
        TextElement {
            byte_range: (third_start..third_start + "[Image #3]".len()).into(),
            placeholder: Some("[Image #3]".to_string()),
        },
    ];
    assert_eq!(chat.bottom_pane.composer_text_elements(), expected_elements);
    assert_eq!(
        chat.bottom_pane.composer_local_image_paths(),
        vec![
            first_images[0].clone(),
            second_images[0].clone(),
            existing_images[0].clone(),
        ]
    );
}
#[tokio::test]
async fn remap_placeholders_uses_attachment_labels() {
    // Attachment-list order (not text order) drives the new labels:
    // attachment #1 -> [Image #3], attachment #2 -> [Image #4], even though
    // [Image #2] appears first in the text.
    let placeholder_one = "[Image #1]";
    let placeholder_two = "[Image #2]";
    let text = format!("{placeholder_two} before {placeholder_one}");
    let elements = vec![
        TextElement {
            byte_range: (0..placeholder_two.len()).into(),
            placeholder: Some(placeholder_two.to_string()),
        },
        TextElement {
            byte_range: ("[Image #2] before ".len().."[Image #2] before [Image #1]".len()).into(),
            placeholder: Some(placeholder_one.to_string()),
        },
    ];
    let attachments = vec![
        LocalImageAttachment {
            placeholder: placeholder_one.to_string(),
            path: PathBuf::from("/tmp/one.png"),
        },
        LocalImageAttachment {
            placeholder: placeholder_two.to_string(),
            path: PathBuf::from("/tmp/two.png"),
        },
    ];
    let message = UserMessage {
        text,
        text_elements: elements,
        local_images: attachments,
    };
    // Start labeling at 3 to mimic a draft merged after two earlier images.
    let mut next_label = 3usize;
    let remapped = remap_placeholders_for_message(message, &mut next_label);
    assert_eq!(remapped.text, "[Image #4] before [Image #3]");
    assert_eq!(
        remapped.text_elements,
        vec![
            TextElement {
                byte_range: (0.."[Image #4]".len()).into(),
                placeholder: Some("[Image #4]".to_string()),
            },
            TextElement {
                byte_range: ("[Image #4] before ".len().."[Image #4] before [Image #3]".len())
                    .into(),
                placeholder: Some("[Image #3]".to_string()),
            },
        ]
    );
    assert_eq!(
        remapped.local_images,
        vec![
            LocalImageAttachment {
                placeholder: "[Image #3]".to_string(),
                path: PathBuf::from("/tmp/one.png"),
            },
            LocalImageAttachment {
                placeholder: "[Image #4]".to_string(),
                path: PathBuf::from("/tmp/two.png"),
            },
        ]
    );
}
#[tokio::test]
async fn remap_placeholders_uses_byte_ranges_when_placeholder_missing() {
    // When elements carry no recorded placeholder, the remapper falls back to
    // the text span addressed by each element's byte range as the mapping key.
    let placeholder_one = "[Image #1]";
    let placeholder_two = "[Image #2]";
    let text = format!("{placeholder_two} before {placeholder_one}");
    let elements = vec![
        TextElement {
            byte_range: (0..placeholder_two.len()).into(),
            placeholder: None,
        },
        TextElement {
            byte_range: ("[Image #2] before ".len().."[Image #2] before [Image #1]".len()).into(),
            placeholder: None,
        },
    ];
    let attachments = vec![
        LocalImageAttachment {
            placeholder: placeholder_one.to_string(),
            path: PathBuf::from("/tmp/one.png"),
        },
        LocalImageAttachment {
            placeholder: placeholder_two.to_string(),
            path: PathBuf::from("/tmp/two.png"),
        },
    ];
    let message = UserMessage {
        text,
        text_elements: elements,
        local_images: attachments,
    };
    let mut next_label = 3usize;
    let remapped = remap_placeholders_for_message(message, &mut next_label);
    assert_eq!(remapped.text, "[Image #4] before [Image #3]");
    // Remapping also backfills the placeholder field on each element.
    assert_eq!(
        remapped.text_elements,
        vec![
            TextElement {
                byte_range: (0.."[Image #4]".len()).into(),
                placeholder: Some("[Image #4]".to_string()),
            },
            TextElement {
                byte_range: ("[Image #4] before ".len().."[Image #4] before [Image #3]".len())
                    .into(),
                placeholder: Some("[Image #3]".to_string()),
            },
        ]
    );
    assert_eq!(
        remapped.local_images,
        vec![
            LocalImageAttachment {
                placeholder: "[Image #3]".to_string(),
                path: PathBuf::from("/tmp/one.png"),
            },
            LocalImageAttachment {
                placeholder: "[Image #4]".to_string(),
                path: PathBuf::from("/tmp/two.png"),
            },
        ]
    );
}
/// Entering review mode uses the hint provided by the review request.
#[tokio::test]
async fn entered_review_mode_uses_request_hint() {
@@ -351,8 +714,7 @@ async fn helpers_are_available_and_do_not_panic() {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
app_event_tx: tx,
initial_prompt: None,
initial_images: Vec::new(),
initial_user_message: None,
enhanced_keys_supported: false,
auth_manager,
models_manager: thread_manager.get_models_manager(),
@@ -404,6 +766,7 @@ async fn make_chatwidget_manual(
active_cell_revision: 0,
config: cfg,
model: Some(resolved_model.clone()),
collaboration_mode: CollaborationModeSelection::default(),
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(codex_home, auth_manager)),
session_header: SessionHeader::new(resolved_model),
@@ -449,6 +812,19 @@ async fn make_chatwidget_manual(
(widget, rx, op_rx)
}
// ChatWidget may emit other `Op`s (e.g. history/logging updates) on the same channel; this helper
// filters until we see a submission op.
/// Drain `op_rx` until the first `Op::UserTurn` submission and return it.
///
/// ChatWidget may emit other `Op`s (e.g. history/logging updates) on the same
/// channel; those are skipped. Panics if the channel empties or closes before
/// a submission arrives.
fn next_submit_op(op_rx: &mut tokio::sync::mpsc::UnboundedReceiver<Op>) -> Op {
    loop {
        match op_rx.try_recv() {
            Ok(op) => {
                if matches!(op, Op::UserTurn { .. }) {
                    return op;
                }
                // Not a submission; keep scanning.
            }
            Err(TryRecvError::Empty) => panic!("expected a submit op but queue was empty"),
            Err(TryRecvError::Disconnected) => panic!("expected submit op but channel closed"),
        }
    }
}
fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
@@ -1069,7 +1445,8 @@ async fn enqueueing_history_prompt_multiple_times_is_stable() {
chat.thread_id = Some(ThreadId::new());
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
chat.bottom_pane
.set_composer_text("repeat me".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
// Simulate an active task so further submissions are queued.
@@ -1103,7 +1480,7 @@ async fn streaming_final_answer_keeps_task_running_state() {
assert!(chat.bottom_pane.status_widget().is_none());
chat.bottom_pane
.set_composer_text("queued submission".to_string());
.set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Tab, KeyModifiers::NONE));
assert_eq!(chat.queued_user_messages.len(), 1);
@@ -1511,6 +1888,107 @@ async fn slash_init_skips_when_project_doc_exists() {
);
}
#[test]
fn parse_collaboration_mode_selection_accepts_common_aliases() {
    // Parsing is case-insensitive, trims whitespace, and accepts short aliases.
    assert_eq!(
        collaboration_modes::parse_selection("plan"),
        Some(CollaborationModeSelection::Plan)
    );
    assert_eq!(
        collaboration_modes::parse_selection("PAIR"),
        Some(CollaborationModeSelection::PairProgramming)
    );
    assert_eq!(
        collaboration_modes::parse_selection("pair_programming"),
        Some(CollaborationModeSelection::PairProgramming)
    );
    assert_eq!(
        collaboration_modes::parse_selection("pp"),
        Some(CollaborationModeSelection::PairProgramming)
    );
    assert_eq!(
        collaboration_modes::parse_selection(" exec "),
        Some(CollaborationModeSelection::Execute)
    );
    assert_eq!(
        collaboration_modes::parse_selection("execute"),
        Some(CollaborationModeSelection::Execute)
    );
    // Unrecognized input yields None rather than a default mode.
    assert_eq!(collaboration_modes::parse_selection("unknown"), None);
}
#[tokio::test]
async fn collab_mode_shift_tab_cycles_only_when_enabled_and_idle() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
    // Feature off: BackTab must leave the selection untouched.
    chat.set_feature_enabled(Feature::CollaborationModes, false);
    let initial = chat.collaboration_mode;
    chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
    assert_eq!(chat.collaboration_mode, initial);
    // Feature on and idle: BackTab cycles through the modes.
    chat.set_feature_enabled(Feature::CollaborationModes, true);
    chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
    assert_eq!(chat.collaboration_mode, CollaborationModeSelection::Execute);
    chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
    assert_eq!(chat.collaboration_mode, CollaborationModeSelection::Plan);
    // While a task is running, BackTab must not cycle the mode.
    chat.on_task_started();
    chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
    assert_eq!(chat.collaboration_mode, CollaborationModeSelection::Plan);
}
#[tokio::test]
async fn collab_slash_command_sets_mode_and_next_submit_sends_user_turn() {
    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
    chat.thread_id = Some(ThreadId::new());
    chat.set_feature_enabled(Feature::CollaborationModes, true);
    // `/collab plan` selects the Plan mode.
    chat.dispatch_command_with_args(SlashCommand::Collab, "plan".to_string());
    assert_eq!(chat.collaboration_mode, CollaborationModeSelection::Plan);
    chat.bottom_pane
        .set_composer_text("hello".to_string(), Vec::new(), Vec::new());
    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
    match next_submit_op(&mut op_rx) {
        Op::UserTurn {
            collaboration_mode: Some(CollaborationMode::Plan(_)),
            ..
        } => {}
        other => panic!("expected Op::UserTurn with plan collab mode, got {other:?}"),
    }
    // The selection is sticky: subsequent submissions carry it too.
    chat.bottom_pane
        .set_composer_text("follow up".to_string(), Vec::new(), Vec::new());
    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
    match next_submit_op(&mut op_rx) {
        Op::UserTurn {
            collaboration_mode: Some(CollaborationMode::Plan(_)),
            ..
        } => {}
        other => panic!("expected Op::UserTurn with plan collab mode, got {other:?}"),
    }
}
#[tokio::test]
async fn collab_mode_defaults_to_pair_programming_when_enabled() {
    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
    chat.thread_id = Some(ThreadId::new());
    chat.set_feature_enabled(Feature::CollaborationModes, true);
    // No explicit selection: the default mode rides along with the first submit.
    chat.bottom_pane
        .set_composer_text("hello".to_string(), Vec::new(), Vec::new());
    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
    match next_submit_op(&mut op_rx) {
        Op::UserTurn {
            collaboration_mode: Some(CollaborationMode::PairProgramming(_)),
            ..
        } => {}
        other => panic!("expected Op::UserTurn with pair programming collab mode, got {other:?}"),
    }
}
#[tokio::test]
async fn slash_quit_requests_exit() {
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;
@@ -2755,7 +3233,7 @@ async fn interrupt_prepends_queued_messages_before_existing_composer_text() {
chat.bottom_pane.set_task_running(true);
chat.bottom_pane
.set_composer_text("current draft".to_string());
.set_composer_text("current draft".to_string(), Vec::new(), Vec::new());
chat.queued_user_messages
.push_back(UserMessage::from("first queued".to_string()));
@@ -3781,8 +4259,11 @@ async fn chatwidget_exec_and_status_layout_vt100_snapshot() {
delta: "**Investigating rendering code**".into(),
}),
});
chat.bottom_pane
.set_composer_text("Summarize recent commits".to_string());
chat.bottom_pane.set_composer_text(
"Summarize recent commits".to_string(),
Vec::new(),
Vec::new(),
);
let width: u16 = 80;
let ui_height: u16 = chat.desired_height(width);

View File

@@ -244,9 +244,14 @@ pub fn paste_image_to_temp_png() -> Result<(PathBuf, PastedImageInfo), PasteImag
/// - shell-escaped single paths (via `shlex`)
pub fn normalize_pasted_path(pasted: &str) -> Option<PathBuf> {
let pasted = pasted.trim();
let unquoted = pasted
.strip_prefix('"')
.and_then(|s| s.strip_suffix('"'))
.or_else(|| pasted.strip_prefix('\'').and_then(|s| s.strip_suffix('\'')))
.unwrap_or(pasted);
// file:// URL → filesystem path
if let Ok(url) = url::Url::parse(pasted)
if let Ok(url) = url::Url::parse(unquoted)
&& url.scheme() == "file"
{
return url.to_file_path().ok();
@@ -258,38 +263,18 @@ pub fn normalize_pasted_path(pasted: &str) -> Option<PathBuf> {
// Detect unquoted Windows paths and bypass POSIX shlex which
// treats backslashes as escapes (e.g., C:\Users\Alice\file.png).
// Also handles UNC paths (\\server\share\path).
let looks_like_windows_path = {
// Drive letter path: C:\ or C:/
let drive = pasted
.chars()
.next()
.map(|c| c.is_ascii_alphabetic())
.unwrap_or(false)
&& pasted.get(1..2) == Some(":")
&& pasted
.get(2..3)
.map(|s| s == "\\" || s == "/")
.unwrap_or(false);
// UNC path: \\server\share
let unc = pasted.starts_with("\\\\");
drive || unc
};
if looks_like_windows_path {
#[cfg(target_os = "linux")]
{
if is_probably_wsl()
&& let Some(converted) = convert_windows_path_to_wsl(pasted)
{
return Some(converted);
}
}
return Some(PathBuf::from(pasted));
if let Some(path) = normalize_windows_path(unquoted) {
return Some(path);
}
// shell-escaped single path → unescaped
let parts: Vec<String> = shlex::Shlex::new(pasted).collect();
if parts.len() == 1 {
return parts.into_iter().next().map(PathBuf::from);
let part = parts.into_iter().next()?;
if let Some(path) = normalize_windows_path(&part) {
return Some(path);
}
return Some(PathBuf::from(part));
}
None
@@ -339,6 +324,36 @@ fn convert_windows_path_to_wsl(input: &str) -> Option<PathBuf> {
Some(result)
}
/// Recognize a Windows-style path (drive-letter or UNC) and normalize it.
///
/// Returns `None` when `input` does not look like a Windows path. On Linux
/// under WSL the path is converted to its WSL equivalent when possible;
/// otherwise the input is wrapped in a `PathBuf` unchanged.
fn normalize_windows_path(input: &str) -> Option<PathBuf> {
    let bytes = input.as_bytes();
    // Drive-letter form: `C:\` or `C:/`. Byte comparisons are safe because
    // `:`, `\`, and `/` are ASCII and never appear as UTF-8 continuation bytes.
    let has_drive_prefix = bytes.len() >= 3
        && bytes[0].is_ascii_alphabetic()
        && bytes[1] == b':'
        && (bytes[2] == b'\\' || bytes[2] == b'/');
    // UNC form: \\server\share\path
    let is_unc = input.starts_with(r"\\");
    if !has_drive_prefix && !is_unc {
        return None;
    }
    #[cfg(target_os = "linux")]
    {
        if is_probably_wsl()
            && let Some(converted) = convert_windows_path_to_wsl(input)
        {
            return Some(converted);
        }
    }
    Some(PathBuf::from(input))
}
/// Infer an image format for the provided path based on its extension.
pub fn pasted_image_format(path: &Path) -> EncodedImageFormat {
match path
@@ -438,9 +453,39 @@ mod pasted_paths_tests {
// Single-quoted Windows paths should have the quotes stripped before
// normalization, with WSL conversion applied on Linux when available.
#[test]
fn normalize_single_quoted_windows_path() {
    let input = r"'C:\\Users\\Alice\\My File.jpeg'";
    let unquoted = r"C:\\Users\\Alice\\My File.jpeg";
    let result =
        normalize_pasted_path(input).expect("should trim single quotes on windows path");
    // Previously this test also asserted `result == PathBuf::from(unquoted)`
    // unconditionally; that contradicts the platform-aware expectation below
    // and would fail under WSL where the path is converted, so it was removed.
    // On Linux under WSL the normalized path may be converted; everywhere else
    // the unquoted path is expected verbatim.
    #[cfg(target_os = "linux")]
    let expected = if is_probably_wsl()
        && let Some(converted) = convert_windows_path_to_wsl(unquoted)
    {
        converted
    } else {
        PathBuf::from(unquoted)
    };
    #[cfg(not(target_os = "linux"))]
    let expected = PathBuf::from(unquoted);
    assert_eq!(result, expected);
}
// Double-quoted Windows paths should have the quotes stripped before
// normalization, with WSL conversion applied on Linux when available.
#[test]
fn normalize_double_quoted_windows_path() {
    let input = r#""C:\\Users\\Alice\\My File.jpeg""#;
    let unquoted = r"C:\\Users\\Alice\\My File.jpeg";
    let result =
        normalize_pasted_path(input).expect("should trim double quotes on windows path");
    // On Linux under WSL the normalized path may be converted; everywhere else
    // the unquoted path is expected verbatim.
    #[cfg(target_os = "linux")]
    let expected = if is_probably_wsl()
        && let Some(converted) = convert_windows_path_to_wsl(unquoted)
    {
        converted
    } else {
        PathBuf::from(unquoted)
    };
    #[cfg(not(target_os = "linux"))]
    let expected = PathBuf::from(unquoted);
    assert_eq!(result, expected);
}
#[test]

View File

@@ -0,0 +1,135 @@
//! Collaboration mode selection + rendering helpers for the TUI.
//!
//! This module is intentionally UI-focused:
//! - It owns the user-facing set of selectable collaboration modes and how they cycle.
//! - It parses `/collab <mode>` arguments into a selection.
//! - It resolves a `Selection` to a concrete `codex_protocol::config_types::CollaborationMode` by
//! picking from the `ModelsManager` builtin collaboration presets.
//! - It builds the small footer "flash" line shown after changing modes.
//!
//! The `ChatWidget` owns the session state and decides *when* selection/mode changes are allowed
//! (feature flag, task running, modals open, etc.). This module just provides the building blocks.
use crate::key_hint;
use codex_core::models_manager::manager::ModelsManager;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;
use crossterm::event::KeyCode;
use ratatui::style::Stylize;
use ratatui::text::Line;
/// The user-facing collaboration mode choices supported by the TUI.
///
/// This is a stable UI-level selection with a fixed cycling order; it is
/// distinct from `CollaborationMode`, which can carry nested settings/prompt
/// configuration.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub(crate) enum Selection {
    Plan,
    #[default]
    PairProgramming,
    Execute,
}

impl Selection {
    /// Advance to the next selection in the fixed cycle:
    /// Plan -> Pair Programming -> Execute -> Plan.
    pub(crate) fn next(self) -> Self {
        use Selection::*;
        match self {
            Plan => PairProgramming,
            PairProgramming => Execute,
            Execute => Plan,
        }
    }

    /// Human-readable name shown in UI surfaces such as `/status` and the
    /// footer flash.
    pub(crate) fn label(self) -> &'static str {
        use Selection::*;
        match self {
            Plan => "Plan",
            PairProgramming => "Pair Programming",
            Execute => "Execute",
        }
    }
}

/// Parse a `/collab` argument (e.g. `plan`, `pair_programming`, `pp`) into a
/// selection.
///
/// Matching is forgiving: ASCII whitespace, `-`, and `_` are stripped and the
/// remainder is compared case-insensitively.
pub(crate) fn parse_selection(input: &str) -> Option<Selection> {
    let mut normalized = String::with_capacity(input.len());
    for ch in input.chars() {
        if ch == '-' || ch == '_' || ch.is_ascii_whitespace() {
            continue;
        }
        normalized.extend(ch.to_lowercase());
    }
    match normalized.as_str() {
        "plan" => Some(Selection::Plan),
        "pair" | "pairprogramming" | "pp" => Some(Selection::PairProgramming),
        "execute" | "exec" => Some(Selection::Execute),
        _ => None,
    }
}
/// Resolve a selection to a concrete collaboration mode preset.
///
/// `ModelsManager::list_collaboration_modes()` is expected to return a builtin
/// set of presets; the first preset whose variant matches the selection wins.
pub(crate) fn resolve_mode(
    models_manager: &ModelsManager,
    selection: Selection,
) -> Option<CollaborationMode> {
    // A single scan suffices: pair the selection with each candidate and keep
    // the first whose variant matches.
    models_manager
        .list_collaboration_modes()
        .into_iter()
        .find(|mode| {
            matches!(
                (selection, mode),
                (Selection::Plan, CollaborationMode::Plan(_))
                    | (Selection::PairProgramming, CollaborationMode::PairProgramming(_))
                    | (Selection::Execute, CollaborationMode::Execute(_))
            )
        })
}
/// Resolve a selection to a concrete collaboration mode preset, synthesizing a
/// mode when the desired preset is unavailable.
///
/// This keeps the TUI behavior stable when collaboration presets are missing
/// (for example, when running in offline/unit-test contexts): if the feature
/// flag is enabled, every submission carries an explicit collaboration mode so
/// core doesn't fall back to `Custom`.
pub(crate) fn resolve_mode_or_fallback(
    models_manager: &ModelsManager,
    selection: Selection,
    fallback_model: &str,
    fallback_effort: Option<ReasoningEffort>,
) -> CollaborationMode {
    if let Some(mode) = resolve_mode(models_manager, selection) {
        return mode;
    }
    // No matching builtin preset: synthesize one from the fallback settings.
    let settings = Settings {
        model: fallback_model.to_string(),
        reasoning_effort: fallback_effort,
        developer_instructions: None,
    };
    match selection {
        Selection::Plan => CollaborationMode::Plan(settings),
        Selection::PairProgramming => CollaborationMode::PairProgramming(settings),
        Selection::Execute => CollaborationMode::Execute(settings),
    }
}
/// Build the one-line footer "flash" shown after the collaboration mode
/// changes.
///
/// The `ChatWidget` controls when this is shown and how long it remains
/// visible.
pub(crate) fn flash_line(selection: Selection) -> Line<'static> {
    let spans = vec![
        selection.label().bold(),
        " (".dim(),
        key_hint::shift(KeyCode::Tab).into(),
        " to change mode)".dim(),
    ];
    Line::from(spans)
}

View File

@@ -47,6 +47,7 @@ use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::plan_tool::UpdatePlanArgs;
use codex_protocol::user_input::TextElement;
use image::DynamicImage;
use image::ImageReader;
use mcp_types::EmbeddedResourceResource;
@@ -54,6 +55,7 @@ use mcp_types::Resource;
use mcp_types::ResourceLink;
use mcp_types::ResourceTemplate;
use ratatui::prelude::*;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Styled;
@@ -158,6 +160,75 @@ impl dyn HistoryCell {
/// History cell holding a message the user submitted.
#[derive(Debug)]
pub(crate) struct UserHistoryCell {
    /// Raw message text; `text_elements` byte ranges index into this string.
    pub message: String,
    /// Styled spans (byte ranges into `message`) highlighted when rendering.
    pub text_elements: Vec<TextElement>,
    /// Locally attached image paths; currently unused (see `allow(dead_code)`).
    #[allow(dead_code)]
    pub local_image_paths: Vec<PathBuf>,
}
/// Build logical lines for a user message with styled text elements.
///
/// `elements` carry byte ranges into `message`; each in-range segment is
/// rendered with `element_style` while surrounding text uses `style`. This
/// preserves explicit newlines (one `Line` per `\n`-separated segment) while
/// interleaving element spans, and skips malformed byte ranges instead of
/// panicking during history rendering.
fn build_user_message_lines_with_elements(
    message: &str,
    elements: &[TextElement],
    style: Style,
    element_style: Style,
) -> Vec<Line<'static>> {
    // Sort by start offset so styled spans are emitted left-to-right.
    let mut elements = elements.to_vec();
    elements.sort_by_key(|e| e.byte_range.start);
    // `offset` tracks the absolute byte position of the current line's start.
    let mut offset = 0usize;
    let mut raw_lines: Vec<Line<'static>> = Vec::new();
    for line_text in message.split('\n') {
        // Absolute byte bounds of this line within `message`.
        let line_start = offset;
        let line_end = line_start + line_text.len();
        let mut spans: Vec<Span<'static>> = Vec::new();
        // Track how much of the line we've emitted to interleave plain and styled spans.
        let mut cursor = line_start;
        for elem in &elements {
            // Clip the element's range to the current line; elements that
            // don't intersect this line are skipped.
            let start = elem.byte_range.start.max(line_start);
            let end = elem.byte_range.end.min(line_end);
            if start >= end {
                continue;
            }
            // Offsets relative to the start of this line's text.
            let rel_start = start - line_start;
            let rel_end = end - line_start;
            // Guard against malformed UTF-8 byte ranges from upstream data; skip
            // invalid elements rather than panicking while rendering history.
            if !line_text.is_char_boundary(rel_start) || !line_text.is_char_boundary(rel_end) {
                continue;
            }
            let rel_cursor = cursor - line_start;
            // Plain text between the previous span and this element.
            if cursor < start
                && line_text.is_char_boundary(rel_cursor)
                && let Some(segment) = line_text.get(rel_cursor..rel_start)
            {
                spans.push(Span::from(segment.to_string()));
            }
            // The element's own text, rendered with the highlight style.
            if let Some(segment) = line_text.get(rel_start..rel_end) {
                spans.push(Span::styled(segment.to_string(), element_style));
                cursor = end;
            }
        }
        let rel_cursor = cursor - line_start;
        // Trailing plain text after the last styled span on this line.
        if cursor < line_end
            && line_text.is_char_boundary(rel_cursor)
            && let Some(segment) = line_text.get(rel_cursor..)
        {
            spans.push(Span::from(segment.to_string()));
        }
        // A line with no spans (e.g. empty line) is emitted as plain text.
        let line = if spans.is_empty() {
            Line::from(line_text.to_string()).style(style)
        } else {
            Line::from(spans).style(style)
        };
        raw_lines.push(line);
        // Split on '\n' so any '\r' stays in the line; advancing by 1 accounts
        // for the separator byte.
        offset = line_end + 1;
    }
    raw_lines
}
impl HistoryCell for UserHistoryCell {
@@ -171,13 +242,28 @@ impl HistoryCell for UserHistoryCell {
.max(1);
let style = user_message_style();
let element_style = style.fg(Color::Cyan);
let wrapped = word_wrap_lines(
self.message.lines().map(|l| Line::from(l).style(style)),
// Wrap algorithm matches textarea.rs.
RtOptions::new(usize::from(wrap_width))
.wrap_algorithm(textwrap::WrapAlgorithm::FirstFit),
);
let wrapped = if self.text_elements.is_empty() {
word_wrap_lines(
self.message.split('\n').map(|l| Line::from(l).style(style)),
// Wrap algorithm matches textarea.rs.
RtOptions::new(usize::from(wrap_width))
.wrap_algorithm(textwrap::WrapAlgorithm::FirstFit),
)
} else {
let raw_lines = build_user_message_lines_with_elements(
&self.message,
&self.text_elements,
style,
element_style,
);
word_wrap_lines(
raw_lines,
RtOptions::new(usize::from(wrap_width))
.wrap_algorithm(textwrap::WrapAlgorithm::FirstFit),
)
};
lines.push(Line::from("").style(style));
lines.extend(prefix_lines(wrapped, " ".bold().dim(), " ".into()));
@@ -886,8 +972,16 @@ pub(crate) fn new_session_info(
SessionInfoCell(CompositeHistoryCell { parts })
}
pub(crate) fn new_user_prompt(message: String) -> UserHistoryCell {
UserHistoryCell { message }
/// Construct a `UserHistoryCell` for a freshly submitted user prompt.
///
/// `text_elements` are byte ranges into `message` that should be highlighted;
/// `local_image_paths` records any locally attached images.
pub(crate) fn new_user_prompt(
    message: String,
    text_elements: Vec<TextElement>,
    local_image_paths: Vec<PathBuf>,
) -> UserHistoryCell {
    UserHistoryCell {
        message,
        text_elements,
        local_image_paths,
    }
}
#[derive(Debug)]
@@ -1331,7 +1425,8 @@ pub(crate) fn empty_mcp_output() -> PlainHistoryCell {
" • No MCP servers configured.".italic().into(),
Line::from(vec![
" See the ".into(),
"\u{1b}]8;;https://github.com/openai/codex/blob/main/docs/config.md#mcp_servers\u{7}MCP docs\u{1b}]8;;\u{7}".underlined(),
"\u{1b}]8;;https://developers.openai.com/codex/mcp\u{7}MCP docs\u{1b}]8;;\u{7}"
.underlined(),
" to configure them.".into(),
])
.style(Style::default().add_modifier(Modifier::DIM)),
@@ -2581,6 +2676,8 @@ mod tests {
let msg = "one two three four five six seven";
let cell = UserHistoryCell {
message: msg.to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
};
// Small width to force wrapping more clearly. Effective wrap width is width-2 due to the ▌ prefix and trailing space.

View File

@@ -47,6 +47,7 @@ mod chatwidget;
mod cli;
mod clipboard_paste;
mod collab;
mod collaboration_modes;
mod color;
pub mod custom_terminal;
mod diff_render;

View File

@@ -48,13 +48,14 @@ impl ComposerInput {
/// Clear the input text.
pub fn clear(&mut self) {
self.inner.set_text_content(String::new());
self.inner
.set_text_content(String::new(), Vec::new(), Vec::new());
}
/// Feed a key event into the composer and return a high-level action.
pub fn input(&mut self, key: KeyEvent) -> ComposerAction {
let action = match self.inner.handle_key_event(key).0 {
InputResult::Submitted(text) => ComposerAction::Submitted(text),
InputResult::Submitted { text, .. } => ComposerAction::Submitted(text),
_ => ComposerAction::None,
};
self.drain_app_events();

View File

@@ -24,6 +24,7 @@ pub enum SlashCommand {
Fork,
Init,
Compact,
Collab,
// Undo,
Diff,
Mention,
@@ -57,6 +58,7 @@ impl SlashCommand {
SlashCommand::Status => "show current session configuration and token usage",
SlashCommand::Ps => "list background terminals",
SlashCommand::Model => "choose what model and reasoning effort to use",
SlashCommand::Collab => "change collaboration mode (experimental)",
SlashCommand::Approvals => "choose what Codex can do without approval",
SlashCommand::ElevateSandbox => "set up elevated agent sandbox",
SlashCommand::Experimental => "toggle beta features",
@@ -99,6 +101,7 @@ impl SlashCommand {
| SlashCommand::Exit => true,
SlashCommand::Rollout => true,
SlashCommand::TestApproval => true,
SlashCommand::Collab => true,
}
}

View File

@@ -63,6 +63,7 @@ struct StatusHistoryCell {
approval: String,
sandbox: String,
agents_summary: String,
collaboration_mode: Option<String>,
model_provider: Option<String>,
account: Option<StatusAccountDisplay>,
session_id: Option<String>,
@@ -83,6 +84,7 @@ pub(crate) fn new_status_output(
plan_type: Option<PlanType>,
now: DateTime<Local>,
model_name: &str,
collaboration_mode: Option<&str>,
) -> CompositeHistoryCell {
let command = PlainHistoryCell::new(vec!["/status".magenta().into()]);
let card = StatusHistoryCell::new(
@@ -96,6 +98,7 @@ pub(crate) fn new_status_output(
plan_type,
now,
model_name,
collaboration_mode,
);
CompositeHistoryCell::new(vec![Box::new(command), Box::new(card)])
@@ -114,6 +117,7 @@ impl StatusHistoryCell {
plan_type: Option<PlanType>,
now: DateTime<Local>,
model_name: &str,
collaboration_mode: Option<&str>,
) -> Self {
let config_entries = create_config_summary_entries(config, model_name);
let (model_name, model_details) = compose_model_display(model_name, &config_entries);
@@ -165,6 +169,7 @@ impl StatusHistoryCell {
approval,
sandbox,
agents_summary,
collaboration_mode: collaboration_mode.map(ToString::to_string),
model_provider,
account,
session_id,
@@ -360,6 +365,9 @@ impl HistoryCell for StatusHistoryCell {
if self.session_id.is_some() && self.forked_from.is_some() {
push_label(&mut labels, &mut seen, "Forked from");
}
if self.collaboration_mode.is_some() {
push_label(&mut labels, &mut seen, "Collaboration mode");
}
push_label(&mut labels, &mut seen, "Token usage");
if self.token_usage.context_window.is_some() {
push_label(&mut labels, &mut seen, "Context window");
@@ -409,6 +417,10 @@ impl HistoryCell for StatusHistoryCell {
lines.push(formatter.line("Account", vec![Span::from(account_value)]));
}
if let Some(collab_mode) = self.collaboration_mode.as_ref() {
lines.push(formatter.line("Collaboration mode", vec![Span::from(collab_mode.clone())]));
}
if let Some(session) = self.session_id.as_ref() {
lines.push(formatter.line("Session", vec![Span::from(session.clone())]));
}

View File

@@ -152,6 +152,7 @@ async fn status_snapshot_includes_reasoning_details() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -203,6 +204,7 @@ async fn status_snapshot_includes_forked_from() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -260,6 +262,7 @@ async fn status_snapshot_includes_monthly_limit() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -305,6 +308,7 @@ async fn status_snapshot_shows_unlimited_credits() {
None,
captured_at,
&model_slug,
None,
);
let rendered = render_lines(&composite.display_lines(120));
assert!(
@@ -349,6 +353,7 @@ async fn status_snapshot_shows_positive_credits() {
None,
captured_at,
&model_slug,
None,
);
let rendered = render_lines(&composite.display_lines(120));
assert!(
@@ -393,6 +398,7 @@ async fn status_snapshot_hides_zero_credits() {
None,
captured_at,
&model_slug,
None,
);
let rendered = render_lines(&composite.display_lines(120));
assert!(
@@ -435,6 +441,7 @@ async fn status_snapshot_hides_when_has_no_credits_flag() {
None,
captured_at,
&model_slug,
None,
);
let rendered = render_lines(&composite.display_lines(120));
assert!(
@@ -477,6 +484,7 @@ async fn status_card_token_usage_excludes_cached_tokens() {
None,
now,
&model_slug,
None,
);
let rendered = render_lines(&composite.display_lines(120));
@@ -534,6 +542,7 @@ async fn status_snapshot_truncates_in_narrow_terminal() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(70));
if cfg!(windows) {
@@ -580,6 +589,7 @@ async fn status_snapshot_shows_missing_limits_message() {
None,
now,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -644,6 +654,7 @@ async fn status_snapshot_includes_credits_and_limits() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -696,6 +707,7 @@ async fn status_snapshot_shows_empty_limits_message() {
None,
captured_at,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -757,6 +769,7 @@ async fn status_snapshot_shows_stale_limits_message() {
None,
now,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -822,6 +835,7 @@ async fn status_snapshot_cached_limits_hide_credits_without_flag() {
None,
now,
&model_slug,
None,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
@@ -877,6 +891,7 @@ async fn status_context_window_uses_last_usage() {
None,
now,
&model_slug,
None,
);
let rendered_lines = render_lines(&composite.display_lines(80));
let context_line = rendered_lines

View File

@@ -19,10 +19,10 @@ pub fn user_message_style_for(terminal_bg: Option<(u8, u8, u8)>) -> Style {
#[allow(clippy::disallowed_methods)]
pub fn user_message_bg(terminal_bg: (u8, u8, u8)) -> Color {
let top = if is_light(terminal_bg) {
(0, 0, 0)
let (top, alpha) = if is_light(terminal_bg) {
((0, 0, 0), 0.04)
} else {
(255, 255, 255)
((255, 255, 255), 0.12)
};
best_color(blend(top, terminal_bg, 0.1))
best_color(blend(top, terminal_bg, alpha))
}