Mirror of https://github.com/openai/codex.git, synced 2026-02-06 08:53:41 +00:00
Compare commits: latest-alp ... queue-nudg (2 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 595016b0c0 |  |
|  | 3c29c56401 |  |
@@ -1643,16 +1643,7 @@ impl ChatWidget {
         }
 
         if let Some(combined) = self.drain_queued_messages_for_restore() {
-            let combined_local_image_paths = combined
-                .local_images
-                .iter()
-                .map(|img| img.path.clone())
-                .collect();
-            self.bottom_pane.set_composer_text(
-                combined.text,
-                combined.text_elements,
-                combined_local_image_paths,
-            );
+            self.restore_user_message_to_composer(combined);
             self.refresh_queued_user_messages();
         }
 
@@ -1715,6 +1706,18 @@ impl ChatWidget {
         Some(combined)
     }
 
+    fn restore_user_message_to_composer(&mut self, user_message: UserMessage) {
+        let UserMessage {
+            text,
+            local_images,
+            text_elements,
+            mention_paths: _,
+        } = user_message;
+        let local_image_paths = local_images.into_iter().map(|img| img.path).collect();
+        self.bottom_pane
+            .set_composer_text(text, text_elements, local_image_paths);
+    }
+
     fn on_plan_update(&mut self, update: UpdatePlanArgs) {
         self.saw_plan_update_this_turn = true;
         self.add_to_history(history_cell::new_plan_update(update));
@@ -3003,16 +3006,7 @@ impl ChatWidget {
             } if !self.queued_user_messages.is_empty() => {
                 // Prefer the most recently queued item.
                 if let Some(user_message) = self.queued_user_messages.pop_back() {
-                    let local_image_paths = user_message
-                        .local_images
-                        .iter()
-                        .map(|img| img.path.clone())
-                        .collect();
-                    self.bottom_pane.set_composer_text(
-                        user_message.text,
-                        user_message.text_elements,
-                        local_image_paths,
-                    );
+                    self.restore_user_message_to_composer(user_message);
                     self.refresh_queued_user_messages();
                     self.request_redraw();
                 }
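The two hunks above, together with the new helper in the previous hunk, route both composer-restore paths through `restore_user_message_to_composer` instead of duplicating the image-path collection and `set_composer_text` call. A minimal, self-contained sketch of that restore flow follows; `Widget`, `Composer`, and the plain-`String` queue are illustrative stand-ins, not the real `ChatWidget` types, and `drain_queued_for_restore` only approximates what `drain_queued_messages_for_restore` does.

```rust
use std::collections::VecDeque;

#[derive(Default)]
struct Composer {
    text: String,
}

#[derive(Default)]
struct Widget {
    queued: VecDeque<String>,
    composer: Composer,
}

impl Widget {
    // Rough analogue of drain_queued_messages_for_restore(): combine everything queued.
    fn drain_queued_for_restore(&mut self) -> Option<String> {
        if self.queued.is_empty() {
            return None;
        }
        Some(self.queued.drain(..).collect::<Vec<_>>().join("\n"))
    }

    // Rough analogue of restore_user_message_to_composer().
    fn restore_to_composer(&mut self, combined: String) {
        self.composer.text = combined;
    }

    // Rough analogue of the interrupted-turn handler in the first hunk.
    fn on_interrupted(&mut self) {
        if let Some(combined) = self.drain_queued_for_restore() {
            self.restore_to_composer(combined);
        }
    }
}

fn main() {
    let mut widget = Widget::default();
    widget.queued.push_back("Implement the plan.".to_string());
    widget.on_interrupted();
    assert_eq!(widget.composer.text, "Implement the plan.");
    assert!(widget.queued.is_empty());
}
```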
@@ -3030,8 +3024,8 @@ impl ChatWidget {
             text_elements,
             mention_paths: self.bottom_pane.take_mention_paths(),
         };
-        if self.is_session_configured() {
-            // Submitted is only emitted when steer is enabled (Enter sends immediately).
+        if self.is_session_configured() && !self.is_plan_streaming_in_tui() {
+            // Submitted is only emitted when steer is enabled.
             // Reset any reasoning header only when we are actually submitting a turn.
             self.reasoning_buffer.clear();
             self.full_reasoning_buffer.clear();
@@ -6417,6 +6411,10 @@ impl ChatWidget {
         self.bottom_pane.is_task_running() || self.is_review_mode
     }
 
+    fn is_plan_streaming_in_tui(&self) -> bool {
+        self.plan_stream_controller.is_some()
+    }
+
     pub(crate) fn composer_is_empty(&self) -> bool {
         self.bottom_pane.composer_is_empty()
     }
@@ -6426,8 +6424,27 @@ impl ChatWidget {
         text: String,
         collaboration_mode: CollaborationModeMask,
     ) {
+        if self.agent_turn_running
+            && self.active_collaboration_mask.as_ref() != Some(&collaboration_mode)
+        {
+            self.add_error_message(
+                "Cannot switch collaboration mode while a turn is running.".to_string(),
+            );
+            return;
+        }
         self.set_collaboration_mask(collaboration_mode);
-        self.submit_user_message(text.into());
+        let should_queue = self.is_plan_streaming_in_tui();
+        let user_message = UserMessage {
+            text,
+            local_images: Vec::new(),
+            text_elements: Vec::new(),
+            mention_paths: HashMap::new(),
+        };
+        if should_queue {
+            self.queue_user_message(user_message);
+        } else {
+            self.submit_user_message(user_message);
+        }
     }
 
     /// True when the UI is in the regular composer state with no running task,
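The `submit_user_message_with_mode` hunk above now queues a submission while a plan is still streaming in the TUI and submits it immediately otherwise. A minimal sketch of that decision under made-up stand-ins (`Widget`, `plan_stream_active`, `submitted` are illustrative only, not the real `ChatWidget`/`BottomPane` types):

```rust
use std::collections::VecDeque;

struct UserMessage {
    text: String,
}

#[derive(Default)]
struct Widget {
    // Stand-in for plan_stream_controller.is_some().
    plan_stream_active: bool,
    queued_user_messages: VecDeque<UserMessage>,
    submitted: Vec<UserMessage>,
}

impl Widget {
    fn is_plan_streaming_in_tui(&self) -> bool {
        self.plan_stream_active
    }

    // Mirrors the new branch: queue while a plan is streaming, submit otherwise.
    fn submit_user_message_with_mode(&mut self, text: String) {
        let message = UserMessage { text };
        if self.is_plan_streaming_in_tui() {
            self.queued_user_messages.push_back(message);
        } else {
            self.submitted.push(message);
        }
    }
}

fn main() {
    let mut widget = Widget::default();

    widget.plan_stream_active = true;
    widget.submit_user_message_with_mode("Implement the plan.".to_string());
    assert_eq!(widget.queued_user_messages.len(), 1);

    widget.plan_stream_active = false;
    widget.submit_user_message_with_mode("Continue.".to_string());
    assert_eq!(widget.submitted.len(), 1);

    println!("queued: {}", widget.queued_user_messages[0].text);
}
```

The tests added further below (`steer_enter_queues_while_plan_stream_is_active` and `steer_enter_submits_when_plan_stream_is_not_active`) exercise the real version of this branch.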
@@ -527,6 +527,54 @@ async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
     );
 }
 
+#[tokio::test]
+async fn interrupted_turn_restore_keeps_active_mode_for_resubmission() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+
+    let plan_mask = collaboration_modes::plan_mask(chat.models_manager.as_ref())
+        .expect("expected plan collaboration mode");
+    let expected_mode = plan_mask
+        .mode
+        .expect("expected mode kind on plan collaboration mode");
+
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+    chat.queued_user_messages.push_back(UserMessage {
+        text: "Implement the plan.".to_string(),
+        local_images: Vec::new(),
+        text_elements: Vec::new(),
+        mention_paths: HashMap::new(),
+    });
+    chat.refresh_queued_user_messages();
+
+    chat.handle_codex_event(Event {
+        id: "interrupt".into(),
+        msg: EventMsg::TurnAborted(codex_core::protocol::TurnAbortedEvent {
+            reason: TurnAbortReason::Interrupted,
+        }),
+    });
+
+    assert_eq!(chat.bottom_pane.composer_text(), "Implement the plan.");
+    assert!(chat.queued_user_messages.is_empty());
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+
+    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
+
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode: Some(CollaborationMode { mode, .. }),
+            personality: None,
+            ..
+        } => assert_eq!(mode, expected_mode),
+        other => {
+            panic!("expected Op::UserTurn with active mode, got {other:?}")
+        }
+    }
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+}
+
 #[tokio::test]
 async fn remap_placeholders_uses_attachment_labels() {
     let placeholder_one = "[Image #1]";
@@ -1358,6 +1406,97 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
     }
 }
 
+#[tokio::test]
+async fn submit_user_message_with_mode_errors_when_mode_changes_during_running_turn() {
+    let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+
+    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
+        .expect("expected default collaboration mode");
+    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert!(chat.queued_user_messages.is_empty());
+    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
+    let rendered = drain_insert_history(&mut rx)
+        .iter()
+        .map(|lines| lines_to_single_string(lines))
+        .collect::<Vec<_>>()
+        .join("\n");
+    assert!(
+        rendered.contains("Cannot switch collaboration mode while a turn is running."),
+        "expected running-turn error message, got: {rendered:?}"
+    );
+}
+
+#[tokio::test]
+async fn submit_user_message_with_mode_allows_same_mode_during_running_turn() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask.clone());
+    chat.on_task_started();
+
+    chat.submit_user_message_with_mode("Continue planning.".to_string(), plan_mask);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode:
+                Some(CollaborationMode {
+                    mode: ModeKind::Plan,
+                    ..
+                }),
+            personality: None,
+            ..
+        } => {}
+        other => {
+            panic!("expected Op::UserTurn with plan collab mode, got {other:?}")
+        }
+    }
+}
+
+#[tokio::test]
+async fn submit_user_message_with_mode_submits_when_plan_stream_is_not_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+
+    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
+        .expect("expected default collaboration mode");
+    let expected_mode = default_mode
+        .mode
+        .expect("expected default collaboration mode kind");
+    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode: Some(CollaborationMode { mode, .. }),
+            personality: None,
+            ..
+        } => assert_eq!(mode, expected_mode),
+        other => {
+            panic!("expected Op::UserTurn with default collab mode, got {other:?}")
+        }
+    }
+}
+
 #[tokio::test]
 async fn plan_implementation_popup_skips_replayed_turn_complete() {
     let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
@@ -1986,6 +2125,55 @@ async fn unified_exec_begin_restores_working_status_snapshot() {
     );
 }
 
+#[tokio::test]
+async fn steer_enter_queues_while_plan_stream_is_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+    chat.on_plan_delta("- Step 1".to_string());
+
+    chat.bottom_pane
+        .set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
+    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert_eq!(chat.queued_user_messages.len(), 1);
+    assert_eq!(
+        chat.queued_user_messages.front().unwrap().text,
+        "queued submission"
+    );
+    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
+}
+
+#[tokio::test]
+async fn steer_enter_submits_when_plan_stream_is_not_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+
+    chat.bottom_pane
+        .set_composer_text("submitted immediately".to_string(), Vec::new(), Vec::new());
+    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            personality: None, ..
+        } => {}
+        other => panic!("expected Op::UserTurn, got {other:?}"),
+    }
+}
+
 #[tokio::test]
 async fn ctrl_c_shutdown_works_with_caps_lock() {
     let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;