mirror of
https://github.com/openai/codex.git
synced 2026-02-06 08:53:41 +00:00
Compare commits: main...queue-nudg (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 595016b0c0 |  |
|  | 3c29c56401 |  |

@@ -29,6 +29,7 @@ use codex_app_server_protocol::TurnStartParams;
 use codex_app_server_protocol::TurnStartResponse;
 use codex_app_server_protocol::UserInput as V2UserInput;
 use codex_core::auth::AuthCredentialsStoreMode;
+use codex_core::features::Feature;
 use codex_protocol::models::ContentItem;
 use codex_protocol::models::ResponseItem;
 use core_test_support::responses;

@@ -142,10 +143,12 @@ async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()
     .await;

     let codex_home = TempDir::new()?;
+    let mut features = BTreeMap::default();
+    features.insert(Feature::RemoteCompaction, true);
     write_mock_responses_config_toml(
         codex_home.path(),
         &server.uri(),
-        &BTreeMap::default(),
+        &features,
         AUTO_COMPACT_LIMIT,
         Some(true),
         "openai",

@@ -199,6 +199,9 @@
     "powershell_utf8": {
       "type": "boolean"
     },
+    "remote_compaction": {
+      "type": "boolean"
+    },
     "remote_models": {
       "type": "boolean"
     },

@@ -1229,6 +1232,9 @@
     "powershell_utf8": {
       "type": "boolean"
     },
+    "remote_compaction": {
+      "type": "boolean"
+    },
     "remote_models": {
       "type": "boolean"
     },

@@ -3877,7 +3877,7 @@ pub(crate) async fn run_turn(
 }

 async fn run_auto_compact(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
-    if should_use_remote_compact_task(&turn_context.provider) {
+    if should_use_remote_compact_task(sess.as_ref(), &turn_context.provider) {
         run_inline_remote_auto_compact_task(Arc::clone(sess), Arc::clone(turn_context)).await;
     } else {
         run_inline_auto_compact_task(Arc::clone(sess), Arc::clone(turn_context)).await;

@@ -9,6 +9,7 @@ use crate::codex::TurnContext;
 use crate::codex::get_last_assistant_message_from_turn;
 use crate::error::CodexErr;
 use crate::error::Result as CodexResult;
+use crate::features::Feature;
 use crate::protocol::CompactedItem;
 use crate::protocol::EventMsg;
 use crate::protocol::TurnContextItem;

@@ -33,8 +34,11 @@ pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt
 pub const SUMMARY_PREFIX: &str = include_str!("../templates/compact/summary_prefix.md");
 const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;

-pub(crate) fn should_use_remote_compact_task(provider: &ModelProviderInfo) -> bool {
-    provider.is_openai()
+pub(crate) fn should_use_remote_compact_task(
+    session: &Session,
+    provider: &ModelProviderInfo,
+) -> bool {
+    provider.is_openai() && session.enabled(Feature::RemoteCompaction)
 }

 pub(crate) async fn run_inline_auto_compact_task(
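
Note: taken together with the run_auto_compact hunk above, remote compaction is now gated on both the provider and a per-session feature flag. A minimal, self-contained sketch of the gate (the `Session` and `ModelProviderInfo` stand-ins below are simplified placeholders, not the codex-core types):

```rust
// Simplified stand-ins for the codex-core types, for illustration only.
struct Session {
    remote_compaction: bool,
}

struct ModelProviderInfo {
    name: &'static str,
}

impl Session {
    // Mirrors `session.enabled(Feature::RemoteCompaction)` in the diff.
    fn remote_compaction_enabled(&self) -> bool {
        self.remote_compaction
    }
}

impl ModelProviderInfo {
    fn is_openai(&self) -> bool {
        self.name == "openai"
    }
}

// The gate after this change: both conditions must hold.
fn should_use_remote_compact_task(session: &Session, provider: &ModelProviderInfo) -> bool {
    provider.is_openai() && session.remote_compaction_enabled()
}

fn main() {
    let session = Session { remote_compaction: true };
    let provider = ModelProviderInfo { name: "openai" };
    assert!(should_use_remote_compact_task(&session, &provider));

    // An OpenAI provider alone is no longer sufficient.
    let disabled = Session { remote_compaction: false };
    assert!(!should_use_remote_compact_task(&disabled, &provider));
}
```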

@@ -774,7 +774,7 @@ unified_exec = true
     service
         .write_value(ConfigValueWriteParams {
             file_path: Some(tmp.path().join(CONFIG_TOML_FILE).display().to_string()),
-            key_path: "features.remote_models".to_string(),
+            key_path: "features.remote_compaction".to_string(),
             value: serde_json::json!(true),
             merge_strategy: MergeStrategy::Replace,
             expected_version: None,

@@ -794,7 +794,7 @@ hide_full_access_warning = true

 [features]
 unified_exec = true
-remote_models = true
+remote_compaction = true
 "#;
     assert_eq!(updated, expected);
     Ok(())

@@ -97,6 +97,8 @@ pub enum Feature {
     WindowsSandbox,
     /// Use the elevated Windows sandbox pipeline (setup + runner).
     WindowsSandboxElevated,
+    /// Remote compaction enabled (only for ChatGPT auth)
+    RemoteCompaction,
     /// Refresh remote models and emit AppReady once the list is available.
     RemoteModels,
     /// Experimental shell snapshotting.

@@ -497,6 +499,12 @@ pub const FEATURES: &[FeatureSpec] = &[
         stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
+    FeatureSpec {
+        id: Feature::RemoteCompaction,
+        key: "remote_compaction",
+        stage: Stage::UnderDevelopment,
+        default_enabled: true,
+    },
     FeatureSpec {
         id: Feature::RemoteModels,
         key: "remote_models",

@@ -19,7 +19,6 @@ use tokio::sync::broadcast;
 use tokio::sync::mpsc;
 use tokio::time::Instant;
 use tokio::time::sleep_until;
-use tracing::info;
 use tracing::warn;

 use crate::config::Config;

@@ -163,12 +162,6 @@ impl FileWatcher {
            res = raw_rx.recv() => {
                match res {
                    Some(Ok(event)) => {
-                        info!(
-                            event_kind = ?event.kind,
-                            event_paths = ?event.paths,
-                            event_attrs = ?event.attrs,
-                            "file watcher received filesystem event"
-                        );
                        let skills_paths = classify_event(&event, &state);
                        let now = Instant::now();
                        skills.add(skills_paths);

@@ -25,7 +25,7 @@ impl SessionTask for CompactTask {
         _cancellation_token: CancellationToken,
     ) -> Option<String> {
         let session = session.clone_session();
-        if crate::compact::should_use_remote_compact_task(&ctx.provider) {
+        if crate::compact::should_use_remote_compact_task(session.as_ref(), &ctx.provider) {
             let _ = session.services.otel_manager.counter(
                 "codex.task.compact",
                 1,

@@ -5,6 +5,7 @@ use codex_core::built_in_model_providers;
 use codex_core::compact::SUMMARIZATION_PROMPT;
 use codex_core::compact::SUMMARY_PREFIX;
 use codex_core::config::Config;
+use codex_core::features::Feature;
 use codex_core::protocol::AskForApproval;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ItemCompletedEvent;

@@ -1463,6 +1464,7 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
     let mut builder = test_codex().with_config(move |config| {
         set_test_compact_prompt(config);
         config.model_auto_compact_token_limit = Some(limit);
+        config.features.enable(Feature::RemoteCompaction);
     });
     let initial = builder.build(&server).await.unwrap();
     let home = initial.home.clone();

@@ -1491,6 +1493,7 @@
     let mut resume_builder = test_codex().with_config(move |config| {
         set_test_compact_prompt(config);
         config.model_auto_compact_token_limit = Some(limit);
+        config.features.enable(Feature::RemoteCompaction);
     });
     let resumed = resume_builder
         .resume(&server, home, rollout_path)

@@ -2287,6 +2290,7 @@ async fn auto_compact_counts_encrypted_reasoning_before_last_user() {
         .with_config(|config| {
             set_test_compact_prompt(config);
             config.model_auto_compact_token_limit = Some(300);
+            config.features.enable(Feature::RemoteCompaction);
         })
         .build(&server)
         .await

@@ -2407,6 +2411,7 @@ async fn auto_compact_runs_when_reasoning_header_clears_between_turns() {
         .with_config(|config| {
             set_test_compact_prompt(config);
             config.model_auto_compact_token_limit = Some(300);
+            config.features.enable(Feature::RemoteCompaction);
         })
         .build(&server)
         .await

@@ -4,6 +4,7 @@ use std::fs;

 use anyhow::Result;
 use codex_core::CodexAuth;
+use codex_core::features::Feature;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ItemCompletedEvent;
 use codex_core::protocol::ItemStartedEvent;

@@ -44,7 +45,11 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
     skip_if_no_network!(Ok(()));

     let harness = TestCodexHarness::with_builder(
-        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
+        test_codex()
+            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
+            .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
+            }),
     )
     .await?;
     let codex = harness.test().codex.clone();

@@ -161,7 +166,11 @@ async fn remote_compact_runs_automatically() -> Result<()> {
     skip_if_no_network!(Ok(()));

     let harness = TestCodexHarness::with_builder(
-        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
+        test_codex()
+            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
+            .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
+            }),
     )
     .await?;
     let codex = harness.test().codex.clone();

@@ -244,6 +253,7 @@ async fn remote_compact_trims_function_call_history_to_fit_context_window() -> R
         test_codex()
             .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
             .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(2_000);
             }),
     )

@@ -374,6 +384,7 @@ async fn remote_compact_trim_estimate_uses_session_base_instructions() -> Result
         test_codex()
             .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
             .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(200_000);
             }),
     )

@@ -471,6 +482,7 @@ async fn remote_compact_trim_estimate_uses_session_base_instructions() -> Result
         .with_config({
             let override_base_instructions = override_base_instructions.clone();
             move |config| {
+                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(override_context_window);
                 config.base_instructions = Some(override_base_instructions);
             }

@@ -563,7 +575,11 @@ async fn remote_manual_compact_emits_context_compaction_items() -> Result<()> {
     skip_if_no_network!(Ok(()));

     let harness = TestCodexHarness::with_builder(
-        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
+        test_codex()
+            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
+            .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
+            }),
     )
     .await?;
     let codex = harness.test().codex.clone();

@@ -655,7 +671,11 @@ async fn remote_compact_persists_replacement_history_in_rollout() -> Result<()>
     skip_if_no_network!(Ok(()));

     let harness = TestCodexHarness::with_builder(
-        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
+        test_codex()
+            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
+            .with_config(|config| {
+                config.features.enable(Feature::RemoteCompaction);
+            }),
     )
     .await?;
     let codex = harness.test().codex.clone();

@@ -1643,16 +1643,7 @@ impl ChatWidget {
         }

         if let Some(combined) = self.drain_queued_messages_for_restore() {
-            let combined_local_image_paths = combined
-                .local_images
-                .iter()
-                .map(|img| img.path.clone())
-                .collect();
-            self.bottom_pane.set_composer_text(
-                combined.text,
-                combined.text_elements,
-                combined_local_image_paths,
-            );
+            self.restore_user_message_to_composer(combined);
             self.refresh_queued_user_messages();
         }

@@ -1715,6 +1706,18 @@ impl ChatWidget {
         Some(combined)
     }

+    fn restore_user_message_to_composer(&mut self, user_message: UserMessage) {
+        let UserMessage {
+            text,
+            local_images,
+            text_elements,
+            mention_paths: _,
+        } = user_message;
+        let local_image_paths = local_images.into_iter().map(|img| img.path).collect();
+        self.bottom_pane
+            .set_composer_text(text, text_elements, local_image_paths);
+    }
+
     fn on_plan_update(&mut self, update: UpdatePlanArgs) {
         self.saw_plan_update_this_turn = true;
         self.add_to_history(history_cell::new_plan_update(update));

@@ -3003,16 +3006,7 @@ impl ChatWidget {
            } if !self.queued_user_messages.is_empty() => {
                // Prefer the most recently queued item.
                if let Some(user_message) = self.queued_user_messages.pop_back() {
-                    let local_image_paths = user_message
-                        .local_images
-                        .iter()
-                        .map(|img| img.path.clone())
-                        .collect();
-                    self.bottom_pane.set_composer_text(
-                        user_message.text,
-                        user_message.text_elements,
-                        local_image_paths,
-                    );
+                    self.restore_user_message_to_composer(user_message);
                    self.refresh_queued_user_messages();
                    self.request_redraw();
                }

@@ -3030,8 +3024,8 @@ impl ChatWidget {
             text_elements,
             mention_paths: self.bottom_pane.take_mention_paths(),
         };
-        if self.is_session_configured() {
-            // Submitted is only emitted when steer is enabled (Enter sends immediately).
+        if self.is_session_configured() && !self.is_plan_streaming_in_tui() {
+            // Submitted is only emitted when steer is enabled.
             // Reset any reasoning header only when we are actually submitting a turn.
             self.reasoning_buffer.clear();
             self.full_reasoning_buffer.clear();

@@ -4572,7 +4566,9 @@ impl ChatWidget {

         let mut header = ColumnRenderable::new();
         header.push(Line::from("Select Personality".bold()));
-        header.push(Line::from("Choose a communication style for Codex.".dim()));
+        header.push(Line::from(
+            "Choose a communication style for Codex. Disable in /experimental.".dim(),
+        ));

         self.bottom_pane.show_selection_view(SelectionViewParams {
             header: Box::new(header),

@@ -6415,6 +6411,10 @@ impl ChatWidget {
         self.bottom_pane.is_task_running() || self.is_review_mode
     }

+    fn is_plan_streaming_in_tui(&self) -> bool {
+        self.plan_stream_controller.is_some()
+    }
+
     pub(crate) fn composer_is_empty(&self) -> bool {
         self.bottom_pane.composer_is_empty()
     }

@@ -6424,8 +6424,27 @@
         text: String,
         collaboration_mode: CollaborationModeMask,
     ) {
+        if self.agent_turn_running
+            && self.active_collaboration_mask.as_ref() != Some(&collaboration_mode)
+        {
+            self.add_error_message(
+                "Cannot switch collaboration mode while a turn is running.".to_string(),
+            );
+            return;
+        }
         self.set_collaboration_mask(collaboration_mode);
-        self.submit_user_message(text.into());
+        let should_queue = self.is_plan_streaming_in_tui();
+        let user_message = UserMessage {
+            text,
+            local_images: Vec::new(),
+            text_elements: Vec::new(),
+            mention_paths: HashMap::new(),
+        };
+        if should_queue {
+            self.queue_user_message(user_message);
+        } else {
+            self.submit_user_message(user_message);
+        }
     }

     /// True when the UI is in the regular composer state with no running task,
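
Note: the hunk above changes submit_user_message_with_mode from always submitting to (1) rejecting mode switches while a turn is running and (2) queueing instead of submitting while a plan stream is active. A compressed, illustrative reduction of that control flow (fields and types are simplified placeholders, not the TUI's real ones):

```rust
// Illustrative reduction of the new submit-with-mode flow.
#[derive(Clone, PartialEq, Debug)]
struct CollaborationModeMask(&'static str);

#[derive(Default)]
struct Widget {
    agent_turn_running: bool,
    active_mask: Option<CollaborationModeMask>,
    plan_stream_active: bool, // stands in for plan_stream_controller.is_some()
    queued: Vec<String>,
    submitted: Vec<String>,
    errors: Vec<String>,
}

impl Widget {
    fn submit_with_mode(&mut self, text: String, mask: CollaborationModeMask) {
        // Guard: switching modes mid-turn is rejected with an error message.
        if self.agent_turn_running && self.active_mask.as_ref() != Some(&mask) {
            self.errors
                .push("Cannot switch collaboration mode while a turn is running.".into());
            return;
        }
        self.active_mask = Some(mask);
        // While a plan is still streaming, queue instead of submitting.
        if self.plan_stream_active {
            self.queued.push(text);
        } else {
            self.submitted.push(text);
        }
    }
}

fn main() {
    let mut w = Widget { plan_stream_active: true, ..Default::default() };
    w.submit_with_mode("queued".into(), CollaborationModeMask("plan"));
    assert_eq!(w.queued.len(), 1);

    w.plan_stream_active = false;
    w.submit_with_mode("sent".into(), CollaborationModeMask("plan"));
    assert_eq!(w.submitted.len(), 1);
}
```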

@@ -3,7 +3,7 @@ source: tui/src/chatwidget/tests.rs
 expression: popup
 ---
 Select Personality
-Choose a communication style for Codex.
+Choose a communication style for Codex. Disable in /experimental.

 1. Friendly Warm, collaborative, and helpful.
 › 2. Pragmatic (current) Concise, task-focused, and direct.

@@ -527,6 +527,54 @@ async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
     );
 }

+#[tokio::test]
+async fn interrupted_turn_restore_keeps_active_mode_for_resubmission() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+
+    let plan_mask = collaboration_modes::plan_mask(chat.models_manager.as_ref())
+        .expect("expected plan collaboration mode");
+    let expected_mode = plan_mask
+        .mode
+        .expect("expected mode kind on plan collaboration mode");
+
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+    chat.queued_user_messages.push_back(UserMessage {
+        text: "Implement the plan.".to_string(),
+        local_images: Vec::new(),
+        text_elements: Vec::new(),
+        mention_paths: HashMap::new(),
+    });
+    chat.refresh_queued_user_messages();
+
+    chat.handle_codex_event(Event {
+        id: "interrupt".into(),
+        msg: EventMsg::TurnAborted(codex_core::protocol::TurnAbortedEvent {
+            reason: TurnAbortReason::Interrupted,
+        }),
+    });
+
+    assert_eq!(chat.bottom_pane.composer_text(), "Implement the plan.");
+    assert!(chat.queued_user_messages.is_empty());
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+
+    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
+
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode: Some(CollaborationMode { mode, .. }),
+            personality: None,
+            ..
+        } => assert_eq!(mode, expected_mode),
+        other => {
+            panic!("expected Op::UserTurn with active mode, got {other:?}")
+        }
+    }
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+}
+
 #[tokio::test]
 async fn remap_placeholders_uses_attachment_labels() {
     let placeholder_one = "[Image #1]";

@@ -1358,6 +1406,97 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
     }
 }

+#[tokio::test]
+async fn submit_user_message_with_mode_errors_when_mode_changes_during_running_turn() {
+    let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+
+    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
+        .expect("expected default collaboration mode");
+    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert!(chat.queued_user_messages.is_empty());
+    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
+    let rendered = drain_insert_history(&mut rx)
+        .iter()
+        .map(|lines| lines_to_single_string(lines))
+        .collect::<Vec<_>>()
+        .join("\n");
+    assert!(
+        rendered.contains("Cannot switch collaboration mode while a turn is running."),
+        "expected running-turn error message, got: {rendered:?}"
+    );
+}
+
+#[tokio::test]
+async fn submit_user_message_with_mode_allows_same_mode_during_running_turn() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask.clone());
+    chat.on_task_started();
+
+    chat.submit_user_message_with_mode("Continue planning.".to_string(), plan_mask);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode:
+                Some(CollaborationMode {
+                    mode: ModeKind::Plan,
+                    ..
+                }),
+            personality: None,
+            ..
+        } => {}
+        other => {
+            panic!("expected Op::UserTurn with plan collab mode, got {other:?}")
+        }
+    }
+}
+
+#[tokio::test]
+async fn submit_user_message_with_mode_submits_when_plan_stream_is_not_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+
+    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
+        .expect("expected default collaboration mode");
+    let expected_mode = default_mode
+        .mode
+        .expect("expected default collaboration mode kind");
+    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
+
+    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            collaboration_mode: Some(CollaborationMode { mode, .. }),
+            personality: None,
+            ..
+        } => assert_eq!(mode, expected_mode),
+        other => {
+            panic!("expected Op::UserTurn with default collab mode, got {other:?}")
+        }
+    }
+}
+
 #[tokio::test]
 async fn plan_implementation_popup_skips_replayed_turn_complete() {
     let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;

@@ -1986,6 +2125,55 @@ async fn unified_exec_begin_restores_working_status_snapshot() {
     );
 }

+#[tokio::test]
+async fn steer_enter_queues_while_plan_stream_is_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+    chat.on_plan_delta("- Step 1".to_string());
+
+    chat.bottom_pane
+        .set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
+    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
+    assert_eq!(chat.queued_user_messages.len(), 1);
+    assert_eq!(
+        chat.queued_user_messages.front().unwrap().text,
+        "queued submission"
+    );
+    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
+}
+
+#[tokio::test]
+async fn steer_enter_submits_when_plan_stream_is_not_active() {
+    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
+    chat.thread_id = Some(ThreadId::new());
+    chat.set_feature_enabled(Feature::CollaborationModes, true);
+    let plan_mask =
+        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
+            .expect("expected plan collaboration mask");
+    chat.set_collaboration_mask(plan_mask);
+    chat.on_task_started();
+
+    chat.bottom_pane
+        .set_composer_text("submitted immediately".to_string(), Vec::new(), Vec::new());
+    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+    assert!(chat.queued_user_messages.is_empty());
+    match next_submit_op(&mut op_rx) {
+        Op::UserTurn {
+            personality: None, ..
+        } => {}
+        other => panic!("expected Op::UserTurn, got {other:?}"),
+    }
+}
+
 #[tokio::test]
 async fn ctrl_c_shutdown_works_with_caps_lock() {
     let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;

@@ -6,13 +6,9 @@ use rand::Rng;
 const ANNOUNCEMENT_TIP_URL: &str =
     "https://raw.githubusercontent.com/openai/codex/main/announcement_tip.toml";

-const IS_MACOS: bool = cfg!(target_os = "macos");
-
 const PAID_TOOLTIP: &str = "*New* Try the **Codex App** with 2x rate limits until *April 2nd*. Run 'codex app' or visit https://chatgpt.com/codex";
-const PAID_TOOLTIP_NON_MAC: &str = "*New* 2x rate limits until *April 2nd*.";
 const OTHER_TOOLTIP: &str =
     "*New* Build faster with the **Codex App**. Run 'codex app' or visit https://chatgpt.com/codex";
-const OTHER_TOOLTIP_NON_MAC: &str = "*New* Build faster with Codex.";
 const FREE_GO_TOOLTIP: &str =
     "*New* Codex is included in your plan for free through *March 2nd* – let’s build together.";

@@ -22,15 +18,7 @@ lazy_static! {
     static ref TOOLTIPS: Vec<&'static str> = RAW_TOOLTIPS
         .lines()
         .map(str::trim)
-        .filter(|line| {
-            if line.is_empty() || line.starts_with('#') {
-                return false;
-            }
-            if !IS_MACOS && line.contains("codex app") {
-                return false;
-            }
-            true
-        })
+        .filter(|line| !line.is_empty() && !line.starts_with('#'))
         .collect();
     static ref ALL_TOOLTIPS: Vec<&'static str> = {
         let mut tips = Vec::new();

@@ -63,24 +51,12 @@ pub(crate) fn get_tooltip(plan: Option<PlanType>) -> Option<String> {
         | Some(PlanType::Team)
         | Some(PlanType::Enterprise)
         | Some(PlanType::Pro) => {
-            let tooltip = if IS_MACOS {
-                PAID_TOOLTIP
-            } else {
-                PAID_TOOLTIP_NON_MAC
-            };
-            return Some(tooltip.to_string());
+            return Some(PAID_TOOLTIP.to_string());
         }
         Some(PlanType::Go) | Some(PlanType::Free) => {
             return Some(FREE_GO_TOOLTIP.to_string());
        }
-        _ => {
-            let tooltip = if IS_MACOS {
-                OTHER_TOOLTIP
-            } else {
-                OTHER_TOOLTIP_NON_MAC
-            };
-            return Some(tooltip.to_string());
-        }
+        _ => return Some(OTHER_TOOLTIP.to_string()),
     }
 }