Compare commits

...

2 Commits

Author              SHA1        Message                                                            Date
Charles Cunningham  953cd450a2  tui: defer mode switch for queued plan nudges                      2026-02-02 21:02:21 -08:00
Charles Cunningham  ebe80ee7d9  tui: queue steer/nudged messages while plan generation is active  2026-02-02 21:01:22 -08:00
2 changed files with 165 additions and 4 deletions
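Taken together, the two commits change what happens when a message is sent while plan generation is still streaming in the TUI: instead of submitting immediately (and switching the collaboration mode up front), the message is queued and the requested mode switch travels with it as a `collaboration_mode_override`. The sketch below illustrates only that queue-or-submit decision with heavily simplified stand-in types; field names such as `plan_stream_active` and the method bodies are assumptions for illustration, while `is_plan_streaming_in_tui`, `submit_user_message_with_mode`, and `collaboration_mode_override` come from the diff itself.

use std::collections::VecDeque;

// Simplified stand-ins for the TUI types in the diff below; only the pieces
// needed for the queue-vs-submit decision are modelled here.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ModeKind {
    Plan,
    Code,
}

#[derive(Debug)]
struct UserMessage {
    text: String,
    // Requested mode, applied only when the message is actually submitted.
    collaboration_mode_override: Option<ModeKind>,
}

struct ChatWidget {
    // Assumption: the real widget tracks this via `plan_stream_controller`.
    plan_stream_active: bool,
    queued_user_messages: VecDeque<UserMessage>,
}

impl ChatWidget {
    // Counterpart of the new `is_plan_streaming_in_tui` helper.
    fn is_plan_streaming_in_tui(&self) -> bool {
        self.plan_stream_active
    }

    // Counterpart of the reworked `submit_user_message_with_mode`: the mode
    // switch is no longer applied up front, and the message is held back
    // while plan output is still streaming.
    fn submit_user_message_with_mode(&mut self, text: String, mode: ModeKind) {
        let message = UserMessage {
            text,
            collaboration_mode_override: Some(mode),
        };
        if self.is_plan_streaming_in_tui() {
            self.queued_user_messages.push_back(message);
        } else {
            self.submit_user_message(message);
        }
    }

    // Stand-in for the real submission path.
    fn submit_user_message(&mut self, message: UserMessage) {
        println!("submitting: {message:?}");
    }
}

fn main() {
    let mut chat = ChatWidget {
        plan_stream_active: true,
        queued_user_messages: VecDeque::new(),
    };

    // While the plan is streaming, the nudge is queued rather than sent.
    chat.submit_user_message_with_mode("Implement the plan.".into(), ModeKind::Code);
    assert_eq!(chat.queued_user_messages.len(), 1);

    // Once streaming has finished, the same call submits immediately.
    chat.plan_stream_active = false;
    chat.submit_user_message_with_mode("Add tests.".into(), ModeKind::Code);
    assert_eq!(chat.queued_user_messages.len(), 1);
}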

View File

@@ -607,6 +607,7 @@ pub(crate) struct UserMessage {
local_images: Vec<LocalImageAttachment>,
text_elements: Vec<TextElement>,
mention_paths: HashMap<String, String>,
collaboration_mode_override: Option<CollaborationModeMask>,
}
impl From<String> for UserMessage {
@@ -617,6 +618,7 @@ impl From<String> for UserMessage {
// Plain text conversion has no UI element ranges.
text_elements: Vec::new(),
mention_paths: HashMap::new(),
collaboration_mode_override: None,
}
}
}
@@ -629,6 +631,7 @@ impl From<&str> for UserMessage {
// Plain text conversion has no UI element ranges.
text_elements: Vec::new(),
mention_paths: HashMap::new(),
collaboration_mode_override: None,
}
}
}
@@ -655,6 +658,7 @@ pub(crate) fn create_initial_user_message(
local_images,
text_elements,
mention_paths: HashMap::new(),
collaboration_mode_override: None,
})
}
}
@@ -669,6 +673,7 @@ fn remap_placeholders_for_message(message: UserMessage, next_label: &mut usize)
text_elements,
local_images,
mention_paths,
collaboration_mode_override,
} = message;
if local_images.is_empty() {
return UserMessage {
@@ -676,6 +681,7 @@ fn remap_placeholders_for_message(message: UserMessage, next_label: &mut usize)
text_elements,
local_images,
mention_paths,
collaboration_mode_override,
};
}
@@ -731,6 +737,7 @@ fn remap_placeholders_for_message(message: UserMessage, next_label: &mut usize)
local_images: remapped_images,
text_elements: rebuilt_elements,
mention_paths,
collaboration_mode_override,
}
}
@@ -1437,6 +1444,7 @@ impl ChatWidget {
text_elements: self.bottom_pane.composer_text_elements(),
local_images: self.bottom_pane.composer_local_images(),
mention_paths: HashMap::new(),
collaboration_mode_override: None,
};
let mut to_merge: Vec<UserMessage> = self.queued_user_messages.drain(..).collect();
@@ -1449,6 +1457,7 @@ impl ChatWidget {
text_elements: Vec::new(),
local_images: Vec::new(),
mention_paths: HashMap::new(),
collaboration_mode_override: None,
};
let mut combined_offset = 0usize;
let mut next_image_label = 1usize;
@@ -2713,9 +2722,10 @@ impl ChatWidget {
.take_recent_submission_images_with_placeholders(),
text_elements,
mention_paths: self.bottom_pane.take_mention_paths(),
collaboration_mode_override: None,
};
- if self.is_session_configured() {
- // Submitted is only emitted when steer is enabled (Enter sends immediately).
+ if self.is_session_configured() && !self.is_plan_streaming_in_tui() {
+ // Submitted is only emitted when steer is enabled.
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
@@ -2736,6 +2746,7 @@ impl ChatWidget {
.take_recent_submission_images_with_placeholders(),
text_elements,
mention_paths: self.bottom_pane.take_mention_paths(),
collaboration_mode_override: None,
};
self.queue_user_message(user_message);
}
@@ -3097,6 +3108,7 @@ impl ChatWidget {
.take_recent_submission_images_with_placeholders(),
text_elements: prepared_elements,
mention_paths: self.bottom_pane.take_mention_paths(),
collaboration_mode_override: None,
};
if self.is_session_configured() {
self.reasoning_buffer.clear();
@@ -3234,6 +3246,7 @@ impl ChatWidget {
local_images,
text_elements,
mention_paths,
collaboration_mode_override,
} = user_message;
if text.is_empty() && local_images.is_empty() {
return;
@@ -3304,6 +3317,9 @@ impl ChatWidget {
}
}
if let Some(mask) = collaboration_mode_override {
self.set_collaboration_mask(mask);
}
let effective_mode = self.effective_collaboration_mode();
let collaboration_mode = if self.collaboration_modes_enabled() {
self.active_collaboration_mask
@@ -5860,6 +5876,10 @@ impl ChatWidget {
self.bottom_pane.is_task_running() || self.is_review_mode
}
fn is_plan_streaming_in_tui(&self) -> bool {
self.plan_stream_controller.is_some()
}
pub(crate) fn composer_is_empty(&self) -> bool {
self.bottom_pane.composer_is_empty()
}
@@ -5869,8 +5889,19 @@ impl ChatWidget {
text: String,
collaboration_mode: CollaborationModeMask,
) {
- self.set_collaboration_mask(collaboration_mode);
- self.submit_user_message(text.into());
+ let should_queue = self.is_plan_streaming_in_tui();
+ let user_message = UserMessage {
+ text,
+ local_images: Vec::new(),
+ text_elements: Vec::new(),
+ mention_paths: HashMap::new(),
+ collaboration_mode_override: Some(collaboration_mode),
+ };
+ if should_queue {
+ self.queue_user_message(user_message);
+ } else {
+ self.submit_user_message(user_message);
+ }
}
/// True when the UI is in the regular composer state with no running task,
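The "defer mode switch" half of the change is visible above in two places: `submit_user_message_with_mode` no longer calls `set_collaboration_mask` before submitting, and `submit_user_message` applies `collaboration_mode_override` only when a message is actually sent. The effect, exercised by the tests below, is that the UI stays in Plan mode while the nudge sits in the queue and flips to Code only once the queued message goes out after the plan stream finishes. A minimal sketch of that deferral, using hypothetical stand-in types (`Chat`, `QueuedMessage`, `flush_queued`) rather than the real widget:

use std::collections::VecDeque;

// Hypothetical stand-ins for illustration; `collaboration_mode_override` and
// `ModeKind` mirror names from the diff above, the rest is simplified.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ModeKind {
    Plan,
    Code,
}

struct QueuedMessage {
    text: String,
    // Mode requested at queue time; applied only on real submission.
    collaboration_mode_override: Option<ModeKind>,
}

struct Chat {
    active_mode: ModeKind,
    queued: VecDeque<QueuedMessage>,
}

impl Chat {
    // Deferred application: the active mode changes only when the message
    // actually goes out, mirroring the new `if let Some(mask) = ...` branch.
    fn submit(&mut self, message: QueuedMessage) -> (String, ModeKind) {
        if let Some(mode) = message.collaboration_mode_override {
            self.active_mode = mode;
        }
        (message.text, self.active_mode)
    }

    // Stand-in for draining `queued_user_messages` after the task completes.
    fn flush_queued(&mut self) -> Vec<(String, ModeKind)> {
        let mut sent = Vec::new();
        while let Some(message) = self.queued.pop_front() {
            sent.push(self.submit(message));
        }
        sent
    }
}

fn main() {
    let mut chat = Chat {
        active_mode: ModeKind::Plan,
        queued: VecDeque::new(),
    };
    chat.queued.push_back(QueuedMessage {
        text: "Implement the plan.".into(),
        collaboration_mode_override: Some(ModeKind::Code),
    });

    // Still Plan while the nudge is only queued...
    assert_eq!(chat.active_mode, ModeKind::Plan);

    // ...and Code only once the queued message is actually submitted.
    let sent = chat.flush_queued();
    assert_eq!(sent, vec![("Implement the plan.".to_string(), ModeKind::Code)]);
    assert_eq!(chat.active_mode, ModeKind::Code);
}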

View File

@@ -404,6 +404,7 @@ async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
}],
text_elements: first_elements,
mention_paths: HashMap::new(),
collaboration_mode_override: None,
});
chat.queued_user_messages.push_back(UserMessage {
text: second_text,
@@ -413,6 +414,7 @@ async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
}],
text_elements: second_elements,
mention_paths: HashMap::new(),
collaboration_mode_override: None,
});
chat.refresh_queued_user_messages();
@@ -493,6 +495,7 @@ async fn remap_placeholders_uses_attachment_labels() {
text_elements: elements,
local_images: attachments,
mention_paths: HashMap::new(),
collaboration_mode_override: None,
};
let mut next_label = 3usize;
let remapped = remap_placeholders_for_message(message, &mut next_label);
@@ -554,6 +557,7 @@ async fn remap_placeholders_uses_byte_ranges_when_placeholder_missing() {
text_elements: elements,
local_images: attachments,
mention_paths: HashMap::new(),
collaboration_mode_override: None,
};
let mut next_label = 3usize;
let remapped = remap_placeholders_for_message(message, &mut next_label);
@@ -1284,6 +1288,83 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
}
}
#[tokio::test]
async fn submit_user_message_with_mode_queues_while_plan_stream_is_active() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
chat.on_plan_delta("- Step 1".to_string());
let code_mode = collaboration_modes::code_mask(chat.models_manager.as_ref())
.expect("expected code collaboration mode");
chat.submit_user_message_with_mode("Implement the plan.".to_string(), code_mode);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.queued_user_messages.len(), 1);
assert_eq!(
chat.queued_user_messages.front().unwrap().text,
"Implement the plan."
);
assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
chat.on_task_complete(None, false);
assert!(chat.queued_user_messages.is_empty());
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Code);
match next_submit_op(&mut op_rx) {
Op::UserTurn {
collaboration_mode:
Some(CollaborationMode {
mode: ModeKind::Code,
..
}),
personality: None,
..
} => {}
other => {
panic!("expected Op::UserTurn with code collab mode, got {other:?}")
}
}
}
#[tokio::test]
async fn submit_user_message_with_mode_submits_when_plan_stream_is_not_active() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
let code_mode = collaboration_modes::code_mask(chat.models_manager.as_ref())
.expect("expected code collaboration mode");
chat.submit_user_message_with_mode("Implement the plan.".to_string(), code_mode);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Code);
assert!(chat.queued_user_messages.is_empty());
match next_submit_op(&mut op_rx) {
Op::UserTurn {
collaboration_mode:
Some(CollaborationMode {
mode: ModeKind::Code,
..
}),
personality: None,
..
} => {}
other => {
panic!("expected Op::UserTurn with code collab mode, got {other:?}")
}
}
}
#[tokio::test]
async fn plan_implementation_popup_skips_replayed_turn_complete() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
@@ -1849,6 +1930,55 @@ async fn exec_begin_restores_status_indicator_after_preamble() {
assert_eq!(chat.bottom_pane.status_indicator_visible(), true);
}
#[tokio::test]
async fn steer_enter_queues_while_plan_stream_is_active() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
chat.on_plan_delta("- Step 1".to_string());
chat.bottom_pane
.set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.queued_user_messages.len(), 1);
assert_eq!(
chat.queued_user_messages.front().unwrap().text,
"queued submission"
);
assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
}
#[tokio::test]
async fn steer_enter_submits_when_plan_stream_is_not_active() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
chat.bottom_pane
.set_composer_text("submitted immediately".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
assert!(chat.queued_user_messages.is_empty());
match next_submit_op(&mut op_rx) {
Op::UserTurn {
personality: None, ..
} => {}
other => panic!("expected Op::UserTurn, got {other:?}"),
}
}
#[tokio::test]
async fn ctrl_c_shutdown_works_with_caps_lock() {
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;