Compare commits

..

1 Commit

Author SHA1 Message Date
Ahmed Ibrahim
ad1a8040c7 fix: use platform python for notify test 2026-03-02 10:22:12 -07:00
15 changed files with 67 additions and 1305 deletions

View File

@@ -146,8 +146,9 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone https://git.savannah.gnu.org/git/bash /tmp/bash
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc
@@ -187,8 +188,9 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone https://git.savannah.gnu.org/git/bash /tmp/bash
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc

View File

@@ -207,12 +207,13 @@ tmp_path.replace(payload_path)
let notify_script = notify_script
.to_str()
.expect("notify script path should be valid UTF-8");
let notify_command = if cfg!(windows) { "python" } else { "python3" };
create_config_toml_with_extra(
codex_home.path(),
&server.uri(),
"never",
&format!(
"notify = [\"python3\", {}]",
"notify = [\"{notify_command}\", {}]",
toml_basic_string(notify_script)
),
)?;
@@ -261,7 +262,12 @@ tmp_path.replace(payload_path)
)
.await??;
fs_wait::wait_for_path_exists(&notify_file, Duration::from_secs(5)).await?;
let notify_timeout = if cfg!(windows) {
Duration::from_secs(15)
} else {
Duration::from_secs(5)
};
fs_wait::wait_for_path_exists(&notify_file, notify_timeout).await?;
let payload_raw = tokio::fs::read_to_string(&notify_file).await?;
let payload: Value = serde_json::from_str(&payload_raw)?;
assert_eq!(payload["client"], "xcode");

View File

@@ -6,6 +6,7 @@ mod macos;
mod tests;
use crate::config::ConfigToml;
use crate::config::deserialize_config_toml_with_base;
use crate::config_loader::layer_io::LoadedConfigLayers;
use crate::git_info::resolve_root_git_project_for_trust;
use codex_app_server_protocol::ConfigLayerSource;
@@ -575,11 +576,6 @@ struct ProjectTrustContext {
user_config_file: AbsolutePathBuf,
}
#[derive(Deserialize)]
struct ProjectTrustConfigToml {
projects: Option<std::collections::HashMap<String, crate::config::ProjectConfig>>,
}
struct ProjectTrustDecision {
trust_level: Option<TrustLevel>,
trust_key: String,
@@ -670,16 +666,10 @@ async fn project_trust_context(
config_base_dir: &Path,
user_config_file: &AbsolutePathBuf,
) -> io::Result<ProjectTrustContext> {
let project_trust_config: ProjectTrustConfigToml = {
let _guard = AbsolutePathBufGuard::new(config_base_dir);
merged_config
.clone()
.try_into()
.map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))?
};
let config_toml = deserialize_config_toml_with_base(merged_config.clone(), config_base_dir)?;
let project_root = find_project_root(cwd, project_root_markers).await?;
let projects = project_trust_config.projects.unwrap_or_default();
let projects = config_toml.projects.unwrap_or_default();
let project_root_key = project_root.as_path().to_string_lossy().to_string();
let repo_root = resolve_root_git_project_for_trust(cwd.as_path());

View File

@@ -1114,91 +1114,6 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
Ok(())
}
#[tokio::test]
async fn cli_override_can_update_project_local_mcp_server_when_project_is_trusted()
-> std::io::Result<()> {
let tmp = tempdir()?;
let project_root = tmp.path().join("project");
let nested = project_root.join("child");
let dot_codex = project_root.join(".codex");
let codex_home = tmp.path().join("home");
tokio::fs::create_dir_all(&nested).await?;
tokio::fs::create_dir_all(&dot_codex).await?;
tokio::fs::create_dir_all(&codex_home).await?;
tokio::fs::write(project_root.join(".git"), "gitdir: here").await?;
tokio::fs::write(
dot_codex.join(CONFIG_TOML_FILE),
r#"
[mcp_servers.sentry]
url = "https://mcp.sentry.dev/mcp"
enabled = false
"#,
)
.await?;
make_config_for_test(&codex_home, &project_root, TrustLevel::Trusted, None).await?;
let config = ConfigBuilder::default()
.codex_home(codex_home)
.cli_overrides(vec![(
"mcp_servers.sentry.enabled".to_string(),
TomlValue::Boolean(true),
)])
.fallback_cwd(Some(nested))
.build()
.await?;
let server = config
.mcp_servers
.get()
.get("sentry")
.expect("trusted project MCP server should load");
assert!(server.enabled);
Ok(())
}
#[tokio::test]
async fn cli_override_for_disabled_project_local_mcp_server_returns_invalid_transport()
-> std::io::Result<()> {
let tmp = tempdir()?;
let project_root = tmp.path().join("project");
let nested = project_root.join("child");
let dot_codex = project_root.join(".codex");
let codex_home = tmp.path().join("home");
tokio::fs::create_dir_all(&nested).await?;
tokio::fs::create_dir_all(&dot_codex).await?;
tokio::fs::create_dir_all(&codex_home).await?;
tokio::fs::write(project_root.join(".git"), "gitdir: here").await?;
tokio::fs::write(
dot_codex.join(CONFIG_TOML_FILE),
r#"
[mcp_servers.sentry]
url = "https://mcp.sentry.dev/mcp"
enabled = false
"#,
)
.await?;
let err = ConfigBuilder::default()
.codex_home(codex_home)
.cli_overrides(vec![(
"mcp_servers.sentry.enabled".to_string(),
TomlValue::Boolean(true),
)])
.fallback_cwd(Some(nested))
.build()
.await
.expect_err("untrusted project layer should not provide MCP transport");
assert!(
err.to_string().contains("invalid transport")
&& err.to_string().contains("mcp_servers.sentry"),
"unexpected error: {err}"
);
Ok(())
}
#[tokio::test]
async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::Result<()> {
let tmp = tempdir()?;

View File

@@ -20,7 +20,7 @@ decision to the shell-escalation protocol over a shared file descriptor (specifi
We carry a small patch to `execute_cmd.c` (see `patches/bash-exec-wrapper.patch`) that adds support for `EXEC_WRAPPER`. The original commit message is “add support for BASH_EXEC_WRAPPER” and the patch applies cleanly to `a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b` from https://github.com/bminor/bash. To rebuild manually:
```bash
git clone https://git.savannah.gnu.org/git/bash
git clone https://github.com/bminor/bash
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply /path/to/patches/bash-exec-wrapper.patch
./configure --without-bash-malloc

View File

@@ -12,7 +12,6 @@ use crate::bottom_pane::SelectionViewParams;
use crate::bottom_pane::popup_consts::standard_popup_hint_line;
use crate::chatwidget::ChatWidget;
use crate::chatwidget::ExternalEditorState;
use crate::chatwidget::ThreadInputState;
use crate::cwd_prompt::CwdPromptAction;
use crate::diff_render::DiffSummary;
use crate::exec_command::strip_bash_lc_and_escape;
@@ -255,7 +254,6 @@ struct SessionSummary {
struct ThreadEventSnapshot {
session_configured: Option<Event>,
events: Vec<Event>,
input_state: Option<ThreadInputState>,
}
#[derive(Debug)]
@@ -264,7 +262,6 @@ struct ThreadEventStore {
buffer: VecDeque<Event>,
user_message_ids: HashSet<String>,
pending_interactive_replay: PendingInteractiveReplayState,
input_state: Option<ThreadInputState>,
capacity: usize,
active: bool,
}
@@ -276,7 +273,6 @@ impl ThreadEventStore {
buffer: VecDeque::new(),
user_message_ids: HashSet::new(),
pending_interactive_replay: PendingInteractiveReplayState::default(),
input_state: None,
capacity,
active: false,
}
@@ -346,7 +342,6 @@ impl ThreadEventStore {
})
.cloned()
.collect(),
input_state: self.input_state.clone(),
}
}
@@ -922,15 +917,13 @@ impl App {
let Some(active_id) = self.active_thread_id else {
return;
};
let input_state = self.chat_widget.capture_thread_input_state();
let Some(receiver) = self.active_thread_rx.take() else {
return;
};
if let Some(channel) = self.thread_event_channels.get_mut(&active_id) {
let receiver = self.active_thread_rx.take();
let mut store = channel.store.lock().await;
store.active = false;
store.input_state = input_state;
if let Some(receiver) = receiver {
channel.receiver = Some(receiver);
}
channel.receiver = Some(receiver);
}
}
@@ -1208,15 +1201,6 @@ impl App {
}
}
let has_non_primary_agent_thread = self
.agent_picker_threads
.keys()
.any(|thread_id| Some(*thread_id) != self.primary_thread_id);
if !self.config.features.enabled(Feature::Collab) && !has_non_primary_agent_thread {
self.chat_widget.open_multi_agent_enable_prompt();
return;
}
if self.agent_picker_threads.is_empty() {
self.chat_widget
.add_info_message("No agents available yet.".to_string(), None);
@@ -1342,7 +1326,7 @@ impl App {
self.chat_widget = ChatWidget::new_with_op_sender(init, codex_op_tx);
self.reset_for_thread_switch(tui)?;
self.replay_thread_snapshot(snapshot, !is_replay_only);
self.replay_thread_snapshot(snapshot);
if is_replay_only {
self.chat_widget.add_info_message(
format!("Agent thread {thread_id} is closed. Replaying saved transcript."),
@@ -1473,24 +1457,13 @@ impl App {
(active_thread_id != primary_thread_id).then_some((active_thread_id, primary_thread_id))
}
fn replay_thread_snapshot(
&mut self,
snapshot: ThreadEventSnapshot,
resume_restored_queue: bool,
) {
fn replay_thread_snapshot(&mut self, snapshot: ThreadEventSnapshot) {
if let Some(event) = snapshot.session_configured {
self.handle_codex_event_replay(event);
}
self.chat_widget.set_queue_autosend_suppressed(true);
self.chat_widget
.restore_thread_input_state(snapshot.input_state);
for event in snapshot.events {
self.handle_codex_event_replay(event);
}
self.chat_widget.set_queue_autosend_suppressed(false);
if resume_restored_queue {
self.chat_widget.maybe_send_next_queued_input();
}
self.refresh_status_line();
}
@@ -3628,19 +3601,13 @@ mod tests {
use crate::history_cell::HistoryCell;
use crate::history_cell::UserHistoryCell;
use crate::history_cell::new_session_info;
use assert_matches::assert_matches;
use codex_core::CodexAuth;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::types::ModelAvailabilityNuxConfig;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ModelAvailabilityNux;
use codex_protocol::protocol::AgentMessageDeltaEvent;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
@@ -3648,10 +3615,6 @@ mod tests {
use codex_protocol::protocol::SessionConfiguredEvent;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::ThreadRolledBackEvent;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::TurnCompleteEvent;
use codex_protocol::protocol::TurnStartedEvent;
use codex_protocol::protocol::UserMessageEvent;
use codex_protocol::user_input::TextElement;
use codex_protocol::user_input::UserInput;
@@ -3953,755 +3916,6 @@ mod tests {
Ok(())
}
#[tokio::test]
async fn replay_thread_snapshot_restores_draft_and_queued_input() {
let mut app = make_test_app().await;
let thread_id = ThreadId::new();
app.thread_event_channels.insert(
thread_id,
ThreadEventChannel::new_with_session_configured(
THREAD_EVENT_CHANNEL_CAPACITY,
Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
},
),
);
app.activate_thread_channel(thread_id).await;
app.chat_widget
.apply_external_edit("draft prompt".to_string());
app.chat_widget.submit_user_message_with_mode(
"queued follow-up".to_string(),
CollaborationModeMask {
name: "Default".to_string(),
mode: None,
model: None,
reasoning_effort: None,
developer_instructions: None,
},
);
let expected_input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected thread input state");
app.store_active_thread_receiver().await;
let snapshot = {
let channel = app
.thread_event_channels
.get(&thread_id)
.expect("thread channel should exist");
let store = channel.store.lock().await;
assert_eq!(store.input_state, Some(expected_input_state));
store.snapshot()
};
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.replay_thread_snapshot(snapshot, true);
assert_eq!(app.chat_widget.composer_text_with_pending(), "draft prompt");
assert!(app.chat_widget.queued_user_message_texts().is_empty());
match next_user_turn_op(&mut new_op_rx) {
Op::UserTurn { items, .. } => assert_eq!(
items,
vec![UserInput::Text {
text: "queued follow-up".to_string(),
text_elements: Vec::new(),
}]
),
other => panic!("expected queued follow-up submission, got {other:?}"),
}
}
#[tokio::test]
async fn replayed_turn_complete_submits_restored_queued_follow_up() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget.handle_codex_event(Event {
id: "turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
});
app.chat_widget.handle_codex_event(Event {
id: "agent-delta".to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent {
delta: "streaming".to_string(),
}),
});
app.chat_widget
.apply_external_edit("queued follow-up".to_string());
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected queued follow-up state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![Event {
id: "turn-complete".to_string(),
msg: EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: "turn-1".to_string(),
last_agent_message: None,
}),
}],
input_state: Some(input_state),
},
true,
);
match next_user_turn_op(&mut new_op_rx) {
Op::UserTurn { items, .. } => assert_eq!(
items,
vec![UserInput::Text {
text: "queued follow-up".to_string(),
text_elements: Vec::new(),
}]
),
other => panic!("expected queued follow-up submission, got {other:?}"),
}
}
#[tokio::test]
async fn replay_only_thread_keeps_restored_queue_visible() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget.handle_codex_event(Event {
id: "turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
});
app.chat_widget.handle_codex_event(Event {
id: "agent-delta".to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent {
delta: "streaming".to_string(),
}),
});
app.chat_widget
.apply_external_edit("queued follow-up".to_string());
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected queued follow-up state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![Event {
id: "turn-complete".to_string(),
msg: EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: "turn-1".to_string(),
last_agent_message: None,
}),
}],
input_state: Some(input_state),
},
false,
);
assert_eq!(
app.chat_widget.queued_user_message_texts(),
vec!["queued follow-up".to_string()]
);
assert!(
new_op_rx.try_recv().is_err(),
"replay-only threads should not auto-submit restored queue"
);
}
#[tokio::test]
async fn replay_thread_snapshot_keeps_queue_when_running_state_only_comes_from_snapshot() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget.handle_codex_event(Event {
id: "turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
});
app.chat_widget.handle_codex_event(Event {
id: "agent-delta".to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent {
delta: "streaming".to_string(),
}),
});
app.chat_widget
.apply_external_edit("queued follow-up".to_string());
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected queued follow-up state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![],
input_state: Some(input_state),
},
true,
);
assert_eq!(
app.chat_widget.queued_user_message_texts(),
vec!["queued follow-up".to_string()]
);
assert!(
new_op_rx.try_recv().is_err(),
"restored queue should stay queued when replay did not prove the turn finished"
);
}
#[tokio::test]
async fn replay_thread_snapshot_does_not_submit_queue_before_replay_catches_up() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget.handle_codex_event(Event {
id: "turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
});
app.chat_widget.handle_codex_event(Event {
id: "agent-delta".to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent {
delta: "streaming".to_string(),
}),
});
app.chat_widget
.apply_external_edit("queued follow-up".to_string());
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected queued follow-up state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![
Event {
id: "older-turn-complete".to_string(),
msg: EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: "turn-0".to_string(),
last_agent_message: None,
}),
},
Event {
id: "latest-turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
},
],
input_state: Some(input_state),
},
true,
);
assert!(
new_op_rx.try_recv().is_err(),
"queued follow-up should stay queued until the latest turn completes"
);
assert_eq!(
app.chat_widget.queued_user_message_texts(),
vec!["queued follow-up".to_string()]
);
app.chat_widget.handle_codex_event(Event {
id: "latest-turn-complete".to_string(),
msg: EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: "turn-1".to_string(),
last_agent_message: None,
}),
});
match next_user_turn_op(&mut new_op_rx) {
Op::UserTurn { items, .. } => assert_eq!(
items,
vec![UserInput::Text {
text: "queued follow-up".to_string(),
text_elements: Vec::new(),
}]
),
other => panic!("expected queued follow-up submission, got {other:?}"),
}
}
#[tokio::test]
async fn replay_thread_snapshot_restores_pending_pastes_for_submit() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
app.thread_event_channels.insert(
thread_id,
ThreadEventChannel::new_with_session_configured(
THREAD_EVENT_CHANNEL_CAPACITY,
Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
},
),
);
app.activate_thread_channel(thread_id).await;
let large = "x".repeat(1005);
app.chat_widget.handle_paste(large.clone());
let expected_input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected thread input state");
app.store_active_thread_receiver().await;
let snapshot = {
let channel = app
.thread_event_channels
.get(&thread_id)
.expect("thread channel should exist");
let store = channel.store.lock().await;
assert_eq!(store.input_state, Some(expected_input_state));
store.snapshot()
};
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.replay_thread_snapshot(snapshot, true);
assert_eq!(app.chat_widget.composer_text_with_pending(), large);
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
match next_user_turn_op(&mut new_op_rx) {
Op::UserTurn { items, .. } => assert_eq!(
items,
vec![UserInput::Text {
text: large,
text_elements: Vec::new(),
}]
),
other => panic!("expected restored paste submission, got {other:?}"),
}
}
#[tokio::test]
async fn replay_thread_snapshot_restores_collaboration_mode_for_draft_submit() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::High));
app.chat_widget
.set_collaboration_mask(CollaborationModeMask {
name: "Plan".to_string(),
mode: Some(ModeKind::Plan),
model: Some("gpt-restored".to_string()),
reasoning_effort: Some(Some(ReasoningEffortConfig::High)),
developer_instructions: None,
});
app.chat_widget
.apply_external_edit("draft prompt".to_string());
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected draft input state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::Low));
app.chat_widget
.set_collaboration_mask(CollaborationModeMask {
name: "Default".to_string(),
mode: Some(ModeKind::Default),
model: Some("gpt-replacement".to_string()),
reasoning_effort: Some(Some(ReasoningEffortConfig::Low)),
developer_instructions: None,
});
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![],
input_state: Some(input_state),
},
true,
);
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
match next_user_turn_op(&mut new_op_rx) {
Op::UserTurn {
items,
model,
effort,
collaboration_mode,
..
} => {
assert_eq!(
items,
vec![UserInput::Text {
text: "draft prompt".to_string(),
text_elements: Vec::new(),
}]
);
assert_eq!(model, "gpt-restored".to_string());
assert_eq!(effort, Some(ReasoningEffortConfig::High));
assert_eq!(
collaboration_mode,
Some(CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: "gpt-restored".to_string(),
reasoning_effort: Some(ReasoningEffortConfig::High),
developer_instructions: None,
},
})
);
}
other => panic!("expected restored draft submission, got {other:?}"),
}
}
#[tokio::test]
async fn replay_thread_snapshot_restores_collaboration_mode_without_input() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::High));
app.chat_widget
.set_collaboration_mask(CollaborationModeMask {
name: "Plan".to_string(),
mode: Some(ModeKind::Plan),
model: Some("gpt-restored".to_string()),
reasoning_effort: Some(Some(ReasoningEffortConfig::High)),
developer_instructions: None,
});
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected collaboration-only input state");
let (chat_widget, _app_event_tx, _rx, _new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::Low));
app.chat_widget
.set_collaboration_mask(CollaborationModeMask {
name: "Default".to_string(),
mode: Some(ModeKind::Default),
model: Some("gpt-replacement".to_string()),
reasoning_effort: Some(Some(ReasoningEffortConfig::Low)),
developer_instructions: None,
});
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![],
input_state: Some(input_state),
},
true,
);
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Plan
);
assert_eq!(app.chat_widget.current_model(), "gpt-restored");
assert_eq!(
app.chat_widget.current_reasoning_effort(),
Some(ReasoningEffortConfig::High)
);
}
#[tokio::test]
async fn replayed_interrupted_turn_restores_queued_input_to_composer() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
let thread_id = ThreadId::new();
let session_configured = Event {
id: "session-configured".to_string(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
cwd: PathBuf::from("/tmp/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
network_proxy: None,
rollout_path: Some(PathBuf::new()),
}),
};
app.chat_widget
.handle_codex_event(session_configured.clone());
app.chat_widget.handle_codex_event(Event {
id: "turn-started".to_string(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
turn_id: "turn-1".to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
});
app.chat_widget.handle_codex_event(Event {
id: "agent-delta".to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent {
delta: "streaming".to_string(),
}),
});
app.chat_widget
.apply_external_edit("queued follow-up".to_string());
app.chat_widget
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let input_state = app
.chat_widget
.capture_thread_input_state()
.expect("expected queued follow-up state");
let (chat_widget, _app_event_tx, _rx, mut new_op_rx) =
make_chatwidget_manual_with_sender().await;
app.chat_widget = chat_widget;
app.chat_widget.handle_codex_event(session_configured);
while new_op_rx.try_recv().is_ok() {}
app.replay_thread_snapshot(
ThreadEventSnapshot {
session_configured: None,
events: vec![Event {
id: "turn-aborted".to_string(),
msg: EventMsg::TurnAborted(TurnAbortedEvent {
turn_id: Some("turn-1".to_string()),
reason: TurnAbortReason::ReviewEnded,
}),
}],
input_state: Some(input_state),
},
true,
);
assert_eq!(
app.chat_widget.composer_text_with_pending(),
"queued follow-up"
);
assert!(app.chat_widget.queued_user_message_texts().is_empty());
assert!(
new_op_rx.try_recv().is_err(),
"replayed interrupted turns should restore queued input for editing, not submit it"
);
}
#[tokio::test]
async fn open_agent_picker_keeps_missing_threads_for_replay() -> Result<()> {
let mut app = make_test_app().await;
@@ -4752,51 +3966,6 @@ mod tests {
Ok(())
}
#[tokio::test]
async fn open_agent_picker_prompts_to_enable_multi_agent_when_disabled() -> Result<()> {
    // With multi-agent disabled, the picker surfaces an enable prompt.
    // Confirming it should (1) flip the feature flag and (2) insert a
    // history cell warning that a new session is required.
    let (mut app, mut events, _op_rx) = make_test_app_with_channels().await;
    app.open_agent_picker().await;

    // Accept the default selection ("Yes, enable").
    let enter = KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE);
    app.chat_widget.handle_key_event(enter);

    // First event: the collab feature flag is turned on.
    assert_matches!(
        events.try_recv(),
        Ok(AppEvent::UpdateFeatureFlags { updates }) if updates == vec![(Feature::Collab, true)]
    );

    // Second event: the inserted notice cell mentions the restart requirement.
    let cell = match events.try_recv() {
        Ok(AppEvent::InsertHistoryCell(cell)) => cell,
        other => panic!("expected InsertHistoryCell event, got {other:?}"),
    };
    let lines: Vec<String> = cell
        .display_lines(120)
        .into_iter()
        .map(|line| line.to_string())
        .collect();
    let rendered = lines.join("\n");
    assert!(rendered.contains("Multi-agent will be enabled in the next session."));
    Ok(())
}
#[tokio::test]
async fn open_agent_picker_allows_existing_agent_threads_when_feature_is_disabled() -> Result<()>
{
    // Even with the feature off, a thread that already exists must remain
    // selectable from the picker instead of triggering the enable prompt.
    let (mut app, mut events, _op_rx) = make_test_app_with_channels().await;

    // Register one pre-existing agent thread before opening the picker.
    let existing = ThreadId::new();
    app.thread_event_channels
        .insert(existing, ThreadEventChannel::new(1));

    app.open_agent_picker().await;
    let enter = KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE);
    app.chat_widget.handle_key_event(enter);

    // Selecting it should emit a SelectAgentThread event for that thread.
    assert_matches!(
        events.try_recv(),
        Ok(AppEvent::SelectAgentThread(selected_thread_id)) if selected_thread_id == existing
    );
    Ok(())
}
#[tokio::test]
async fn refresh_pending_thread_approvals_only_lists_inactive_threads() {
let mut app = make_test_app().await;
@@ -5287,17 +4456,6 @@ mod tests {
)
}
/// Drain `op_rx` until the first `Op::UserTurn` and return it.
///
/// Any other ops encountered along the way are recorded (as their `Debug`
/// renderings) so the panic message is useful when no user turn arrives.
fn next_user_turn_op(op_rx: &mut tokio::sync::mpsc::UnboundedReceiver<Op>) -> Op {
    let mut seen: Vec<String> = Vec::new();
    loop {
        match op_rx.try_recv() {
            // Found the op we were waiting for; hand it back.
            Ok(op @ Op::UserTurn { .. }) => return op,
            // Remember everything else for the diagnostic below.
            Ok(other) => seen.push(format!("{other:?}")),
            // Channel exhausted without a UserTurn: fail loudly.
            Err(_) => panic!("expected UserTurn op, saw: {seen:?}"),
        }
    }
}
fn test_otel_manager(config: &Config, model: &str) -> OtelManager {
let model_info = codex_core::test_support::construct_model_info_offline(model, config);
OtelManager::new(

View File

@@ -4108,10 +4108,10 @@ impl ChatComposer {
!footer_props.is_task_running && self.collaboration_mode_indicator.is_some();
let show_shortcuts_hint = match footer_props.mode {
FooterMode::ComposerEmpty => !self.is_in_paste_burst(),
FooterMode::ComposerHasDraft => false,
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
| FooterMode::EscHint
| FooterMode::ComposerHasDraft => false,
};
let show_queue_hint = match footer_props.mode {
FooterMode::ComposerHasDraft => footer_props.is_task_running,
@@ -4141,13 +4141,10 @@ impl ChatComposer {
.as_ref()
.map(|line| line.clone().dim());
let status_line_candidate = footer_props.status_line_enabled
&& match footer_props.mode {
FooterMode::ComposerEmpty => true,
FooterMode::ComposerHasDraft => !footer_props.is_task_running,
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
};
&& matches!(
footer_props.mode,
FooterMode::ComposerEmpty | FooterMode::ComposerHasDraft
);
let mut truncated_status_line = if status_line_candidate {
status_line.as_ref().map(|line| {
truncate_line_with_ellipsis_if_overflow(line.clone(), available_width)
@@ -4213,7 +4210,7 @@ impl ChatComposer {
can_show_left_with_context(hint_rect, left_width, right_width);
let has_override =
self.footer_flash_visible() || self.footer_hint_override.is_some();
let single_line_layout = if has_override || status_line_active {
let single_line_layout = if has_override {
None
} else {
match footer_props.mode {

View File

@@ -172,10 +172,10 @@ pub(crate) fn reset_mode_after_activity(current: FooterMode) -> FooterMode {
pub(crate) fn footer_height(props: &FooterProps) -> u16 {
let show_shortcuts_hint = match props.mode {
FooterMode::ComposerEmpty => true,
FooterMode::ComposerHasDraft => false,
FooterMode::QuitShortcutReminder | FooterMode::ShortcutOverlay | FooterMode::EscHint => {
false
}
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint
| FooterMode::ComposerHasDraft => false,
};
let show_queue_hint = match props.mode {
FooterMode::ComposerHasDraft => props.is_task_running,
@@ -562,18 +562,13 @@ fn footer_from_props_lines(
show_shortcuts_hint: bool,
show_queue_hint: bool,
) -> Vec<Line<'static>> {
// If status line content is present, show it for passive composer states.
// Active draft states still prefer the queue hint over the passive status
// line so the footer stays actionable while a task is running.
// If status line content is present, show it for base modes.
if props.status_line_enabled
&& let Some(status_line) = &props.status_line_value
&& match props.mode {
FooterMode::ComposerEmpty => true,
FooterMode::ComposerHasDraft => !props.is_task_running,
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
}
&& matches!(
props.mode,
FooterMode::ComposerEmpty | FooterMode::ComposerHasDraft
)
{
return vec![status_line.clone().dim()];
}
@@ -606,8 +601,6 @@ fn footer_from_props_lines(
let state = LeftSideState {
hint: if show_queue_hint {
SummaryHintKind::QueueMessage
} else if show_shortcuts_hint {
SummaryHintKind::Shortcuts
} else {
SummaryHintKind::None
},
@@ -1020,10 +1013,10 @@ mod tests {
let show_cycle_hint = !props.is_task_running;
let show_shortcuts_hint = match props.mode {
FooterMode::ComposerEmpty => true,
FooterMode::ComposerHasDraft => false,
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
| FooterMode::EscHint
| FooterMode::ComposerHasDraft => false,
};
let show_queue_hint = match props.mode {
FooterMode::ComposerHasDraft => props.is_task_running,
@@ -1032,21 +1025,13 @@ mod tests {
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
};
let status_line_active = props.status_line_enabled
&& match props.mode {
FooterMode::ComposerEmpty => true,
FooterMode::ComposerHasDraft => !props.is_task_running,
FooterMode::QuitShortcutReminder
| FooterMode::ShortcutOverlay
| FooterMode::EscHint => false,
};
let left_mode_indicator = if status_line_active {
let left_mode_indicator = if props.status_line_enabled {
None
} else {
collaboration_mode_indicator
};
let available_width = area.width.saturating_sub(FOOTER_INDENT_COLS as u16) as usize;
let mut truncated_status_line = if status_line_active
let mut truncated_status_line = if props.status_line_enabled
&& matches!(
props.mode,
FooterMode::ComposerEmpty | FooterMode::ComposerHasDraft
@@ -1059,7 +1044,7 @@ mod tests {
} else {
None
};
let mut left_width = if status_line_active {
let mut left_width = if props.status_line_enabled {
truncated_status_line
.as_ref()
.map(|line| line.width() as u16)
@@ -1073,7 +1058,7 @@ mod tests {
show_queue_hint,
)
};
let right_line = if status_line_active {
let right_line = if props.status_line_enabled {
let full = mode_indicator_line(collaboration_mode_indicator, show_cycle_hint);
let compact = mode_indicator_line(collaboration_mode_indicator, false);
let full_width = full.as_ref().map(|line| line.width() as u16).unwrap_or(0);
@@ -1092,7 +1077,7 @@ mod tests {
.as_ref()
.map(|line| line.width() as u16)
.unwrap_or(0);
if status_line_active
if props.status_line_enabled
&& let Some(max_left) = max_left_width_for_right(area, right_width)
&& left_width > max_left
&& let Some(line) = props
@@ -1112,24 +1097,21 @@ mod tests {
props.mode,
FooterMode::ComposerEmpty | FooterMode::ComposerHasDraft
) {
if status_line_active {
if let Some(line) = truncated_status_line.clone() {
render_footer_line(area, f.buffer_mut(), line);
}
if can_show_left_and_context && let Some(line) = &right_line {
render_context_right(area, f.buffer_mut(), line);
}
} else {
let (summary_left, show_context) = single_line_footer_layout(
area,
right_width,
left_mode_indicator,
show_cycle_hint,
show_shortcuts_hint,
show_queue_hint,
);
match summary_left {
SummaryLeft::Default => {
let (summary_left, show_context) = single_line_footer_layout(
area,
right_width,
left_mode_indicator,
show_cycle_hint,
show_shortcuts_hint,
show_queue_hint,
);
match summary_left {
SummaryLeft::Default => {
if props.status_line_enabled {
if let Some(line) = truncated_status_line.clone() {
render_footer_line(area, f.buffer_mut(), line);
}
} else {
render_footer_from_props(
area,
f.buffer_mut(),
@@ -1140,14 +1122,14 @@ mod tests {
show_queue_hint,
);
}
SummaryLeft::Custom(line) => {
render_footer_line(area, f.buffer_mut(), line);
}
SummaryLeft::None => {}
}
if show_context && let Some(line) = &right_line {
render_context_right(area, f.buffer_mut(), line);
SummaryLeft::Custom(line) => {
render_footer_line(area, f.buffer_mut(), line);
}
SummaryLeft::None => {}
}
if show_context && let Some(line) = &right_line {
render_context_right(area, f.buffer_mut(), line);
}
} else {
render_footer_from_props(
@@ -1434,38 +1416,6 @@ mod tests {
snapshot_footer("footer_status_line_overrides_shortcuts", props);
let props = FooterProps {
mode: FooterMode::ComposerHasDraft,
esc_backtrack_hint: false,
use_shift_enter_hint: false,
is_task_running: true,
collaboration_modes_enabled: false,
is_wsl: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
status_line_value: Some(Line::from("Status line content".to_string())),
status_line_enabled: true,
};
snapshot_footer("footer_status_line_yields_to_queue_hint", props);
let props = FooterProps {
mode: FooterMode::ComposerHasDraft,
esc_backtrack_hint: false,
use_shift_enter_hint: false,
is_task_running: false,
collaboration_modes_enabled: false,
is_wsl: false,
quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
context_window_percent: None,
context_window_used_tokens: None,
status_line_value: Some(Line::from("Status line content".to_string())),
status_line_enabled: true,
};
snapshot_footer("footer_status_line_overrides_draft_idle", props);
let props = FooterProps {
mode: FooterMode::ComposerEmpty,
esc_backtrack_hint: false,

View File

@@ -574,10 +574,6 @@ impl BottomPane {
self.composer.current_text_with_pending()
}
/// Current pending-paste pairs held by the composer.
/// Pure delegate to `ChatComposer::pending_pastes`; used when snapshotting
/// thread input state.
pub(crate) fn composer_pending_pastes(&self) -> Vec<(String, String)> {
    self.composer.pending_pastes()
}
pub(crate) fn apply_external_edit(&mut self, text: String) {
self.composer.apply_external_edit(text);
self.request_redraw();
@@ -603,11 +599,6 @@ impl BottomPane {
urls
}
/// Replace the composer's pending-paste pairs, then request a redraw so the
/// restored state becomes visible immediately.
pub(crate) fn set_composer_pending_pastes(&mut self, pending_pastes: Vec<(String, String)>) {
    self.composer.set_pending_pastes(pending_pastes);
    self.request_redraw();
}
/// Update the status indicator header (defaults to "Working") and details below it.
///
/// Passing `None` clears any existing details. No-ops if the status indicator is not active.

View File

@@ -1,5 +0,0 @@
---
source: tui/src/bottom_pane/footer.rs
expression: terminal.backend()
---
" Status line content "

View File

@@ -1,5 +0,0 @@
---
source: tui/src/bottom_pane/footer.rs
expression: terminal.backend()
---
" tab to queue message 100% context left "

View File

@@ -164,10 +164,6 @@ const PLAN_IMPLEMENTATION_TITLE: &str = "Implement this plan?";
const PLAN_IMPLEMENTATION_YES: &str = "Yes, implement this plan";
const PLAN_IMPLEMENTATION_NO: &str = "No, stay in Plan mode";
const PLAN_IMPLEMENTATION_CODING_MESSAGE: &str = "Implement the plan.";
// Strings for the selection prompt offering to enable the multi-agent
// (collab) feature when it is currently disabled in the user's config.
const MULTI_AGENT_ENABLE_TITLE: &str = "Enable multi-agent?";
const MULTI_AGENT_ENABLE_YES: &str = "Yes, enable";
const MULTI_AGENT_ENABLE_NO: &str = "Not now";
// Warning inserted after enabling: the flag only applies to the next session.
const MULTI_AGENT_ENABLE_NOTICE: &str = "Multi-agent will be enabled in the next session.";
const PLAN_MODE_REASONING_SCOPE_TITLE: &str = "Apply reasoning change";
const PLAN_MODE_REASONING_SCOPE_PLAN_ONLY: &str = "Apply to Plan mode override";
const PLAN_MODE_REASONING_SCOPE_ALL_MODES: &str = "Apply to global default and Plan mode override";
@@ -604,7 +600,6 @@ pub(crate) struct ChatWidget {
retry_status_header: Option<String>,
// Set when commentary output completes; once stream queues go idle we restore the status row.
pending_status_indicator_restore: bool,
suppress_queue_autosend: bool,
thread_id: Option<ThreadId>,
thread_name: Option<String>,
forked_from: Option<ThreadId>,
@@ -712,7 +707,6 @@ pub(crate) struct ActiveCellTranscriptKey {
pub(crate) animation_tick: Option<u64>,
}
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct UserMessage {
text: String,
local_images: Vec<LocalImageAttachment>,
@@ -726,36 +720,6 @@ pub(crate) struct UserMessage {
mention_bindings: Vec<MentionBinding>,
}
/// Snapshot of the composer's draft contents for one thread, captured so the
/// draft can be restored verbatim when the user switches back to the thread.
#[derive(Debug, Clone, PartialEq, Default)]
struct ThreadComposerState {
    // Raw composer text as typed.
    text: String,
    // Locally attached images.
    local_images: Vec<LocalImageAttachment>,
    // URLs of remotely hosted images referenced by the draft.
    remote_image_urls: Vec<String>,
    // Structured text elements backing the draft.
    text_elements: Vec<TextElement>,
    // Mention bindings (@-references) present in the draft.
    mention_bindings: Vec<MentionBinding>,
    // Pending paste pairs not yet expanded in the composer.
    pending_pastes: Vec<(String, String)>,
}
impl ThreadComposerState {
    /// True when any captured field holds data worth restoring; a snapshot
    /// for which this is false can be dropped entirely.
    fn has_content(&self) -> bool {
        // Equivalent to "not everything is empty" — written via De Morgan.
        let all_empty = self.text.is_empty()
            && self.local_images.is_empty()
            && self.remote_image_urls.is_empty()
            && self.text_elements.is_empty()
            && self.mention_bindings.is_empty()
            && self.pending_pastes.is_empty();
        !all_empty
    }
}
/// Everything needed to restore a thread's input UI when switching back to
/// it: the composer draft (if any), queued messages, and collaboration/turn
/// state.
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct ThreadInputState {
    // Draft contents; `None` when the composer had nothing to restore.
    composer: Option<ThreadComposerState>,
    // Messages queued behind the currently running turn, in send order.
    queued_user_messages: VecDeque<UserMessage>,
    // Collaboration mode active when the snapshot was taken.
    current_collaboration_mode: CollaborationMode,
    // Active collaboration mask, if one was applied.
    active_collaboration_mask: Option<CollaborationModeMask>,
    // Whether an agent turn was in flight for this thread.
    agent_turn_running: bool,
}
impl From<String> for UserMessage {
fn from(text: String) -> Self {
Self {
@@ -1604,41 +1568,6 @@ impl ChatWidget {
});
}
/// Show a two-option prompt offering to enable the multi-agent feature.
///
/// Choosing "Yes, enable" sends a feature-flag update for `Feature::Collab`
/// and inserts a warning cell noting that the change only takes effect in a
/// new session; "Not now" just dismisses the prompt.
pub(crate) fn open_multi_agent_enable_prompt(&mut self) {
    let enable = SelectionItem {
        name: MULTI_AGENT_ENABLE_YES.to_string(),
        description: Some(
            "Save the setting now. You will need a new session to use it.".to_string(),
        ),
        actions: vec![Box::new(|tx| {
            // Persist the flag immediately…
            tx.send(AppEvent::UpdateFeatureFlags {
                updates: vec![(Feature::Collab, true)],
            });
            // …and warn that it only applies to the next session.
            tx.send(AppEvent::InsertHistoryCell(Box::new(
                history_cell::new_warning_event(MULTI_AGENT_ENABLE_NOTICE.to_string()),
            )));
        })],
        dismiss_on_select: true,
        ..Default::default()
    };
    let decline = SelectionItem {
        name: MULTI_AGENT_ENABLE_NO.to_string(),
        description: Some("Keep multi-agent disabled.".to_string()),
        dismiss_on_select: true,
        ..Default::default()
    };
    self.bottom_pane.show_selection_view(SelectionViewParams {
        title: Some(MULTI_AGENT_ENABLE_TITLE.to_string()),
        subtitle: Some("Multi-agent is currently disabled in your config.".to_string()),
        footer_hint: Some(standard_popup_hint_line()),
        items: vec![enable, decline],
        ..Default::default()
    });
}
pub(crate) fn set_token_info(&mut self, info: Option<TokenUsageInfo>) {
match info {
Some(info) => self.apply_token_info(info),
@@ -1994,80 +1923,6 @@ impl ChatWidget {
);
}
/// Snapshot everything needed to later restore this thread's input UI:
/// the composer draft (kept only when it has content), queued messages,
/// and the collaboration/turn state.
pub(crate) fn capture_thread_input_state(&self) -> Option<ThreadInputState> {
    let pane = &self.bottom_pane;
    let draft = ThreadComposerState {
        text: pane.composer_text(),
        local_images: pane.composer_local_images(),
        remote_image_urls: pane.remote_image_urls(),
        text_elements: pane.composer_text_elements(),
        mention_bindings: pane.composer_mention_bindings(),
        pending_pastes: pane.composer_pending_pastes(),
    };
    // Drop the draft entirely when there is nothing to restore.
    let composer = if draft.has_content() {
        Some(draft)
    } else {
        None
    };
    Some(ThreadInputState {
        composer,
        queued_user_messages: self.queued_user_messages.clone(),
        current_collaboration_mode: self.current_collaboration_mode.clone(),
        active_collaboration_mask: self.active_collaboration_mask.clone(),
        agent_turn_running: self.agent_turn_running,
    })
}
/// Restore (or, when `input_state` is `None`, fully reset) this thread's
/// input UI: composer draft, queued messages, collaboration mode, and the
/// agent-turn-running flag.
///
/// Regardless of which branch is taken, the sleep inhibitor, task-running
/// state, queued-message display, and a redraw are refreshed at the end so
/// the widget always ends up consistent with `agent_turn_running`.
pub(crate) fn restore_thread_input_state(&mut self, input_state: Option<ThreadInputState>) {
    if let Some(input_state) = input_state {
        self.current_collaboration_mode = input_state.current_collaboration_mode;
        self.active_collaboration_mask = input_state.active_collaboration_mask;
        self.agent_turn_running = input_state.agent_turn_running;
        self.update_collaboration_mode_indicator();
        self.refresh_model_display();
        if let Some(composer) = input_state.composer {
            let local_image_paths = composer
                .local_images
                .into_iter()
                .map(|img| img.path)
                .collect();
            self.set_remote_image_urls(composer.remote_image_urls);
            self.bottom_pane.set_composer_text_with_mention_bindings(
                composer.text,
                composer.text_elements,
                local_image_paths,
                composer.mention_bindings,
            );
            self.bottom_pane
                .set_composer_pending_pastes(composer.pending_pastes);
        } else {
            // Snapshot carried no draft content; start from a blank composer.
            self.clear_composer_input();
        }
        self.queued_user_messages = input_state.queued_user_messages;
    } else {
        // No snapshot at all: reset every piece of per-thread input state.
        self.agent_turn_running = false;
        self.clear_composer_input();
        self.queued_user_messages.clear();
    }
    self.turn_sleep_inhibitor
        .set_turn_running(self.agent_turn_running);
    self.update_task_running_state();
    self.refresh_queued_user_messages();
    self.request_redraw();
}

/// Reset the composer to a blank state: no remote image URLs, no text,
/// no attachments or mention bindings, and no pending pastes.
/// (Shared by both reset paths of `restore_thread_input_state`, which
/// previously duplicated this sequence verbatim.)
fn clear_composer_input(&mut self) {
    self.set_remote_image_urls(Vec::new());
    self.bottom_pane.set_composer_text_with_mention_bindings(
        String::new(),
        Vec::new(),
        Vec::new(),
        Vec::new(),
    );
    self.bottom_pane.set_composer_pending_pastes(Vec::new());
}
/// Toggle suppression of automatic submission of queued user messages.
/// While suppressed, `maybe_send_next_queued_input` returns early, so
/// restored queued input stays editable instead of being auto-sent.
pub(crate) fn set_queue_autosend_suppressed(&mut self, suppressed: bool) {
    self.suppress_queue_autosend = suppressed;
}
fn on_plan_update(&mut self, update: UpdatePlanArgs) {
self.saw_plan_update_this_turn = true;
self.add_to_history(history_cell::new_plan_update(update));
@@ -3015,7 +2870,6 @@ impl ChatWidget {
current_status_header: String::from("Working"),
retry_status_header: None,
pending_status_indicator_restore: false,
suppress_queue_autosend: false,
thread_id: None,
thread_name: None,
forked_from: None,
@@ -3195,7 +3049,6 @@ impl ChatWidget {
current_status_header: String::from("Working"),
retry_status_header: None,
pending_status_indicator_restore: false,
suppress_queue_autosend: false,
thread_id: None,
thread_name: None,
forked_from: None,
@@ -3364,7 +3217,6 @@ impl ChatWidget {
current_status_header: String::from("Working"),
retry_status_header: None,
pending_status_indicator_restore: false,
suppress_queue_autosend: false,
thread_id: None,
thread_name: None,
forked_from: None,
@@ -4814,10 +4666,7 @@ impl ChatWidget {
}
// If idle and there are queued inputs, submit exactly one to start the next turn.
pub(crate) fn maybe_send_next_queued_input(&mut self) {
if self.suppress_queue_autosend {
return;
}
fn maybe_send_next_queued_input(&mut self) {
if self.bottom_pane.is_task_running() {
return;
}
@@ -7710,14 +7559,6 @@ impl ChatWidget {
self.bottom_pane.remote_image_urls()
}
/// Test-only helper: the text of each queued user message, in queue order.
#[cfg(test)]
pub(crate) fn queued_user_message_texts(&self) -> Vec<String> {
    let mut texts = Vec::with_capacity(self.queued_user_messages.len());
    for message in &self.queued_user_messages {
        texts.push(message.text.clone());
    }
    texts
}
#[cfg(test)]
pub(crate) fn pending_thread_approvals(&self) -> &[String] {
self.bottom_pane.pending_thread_approvals()

View File

@@ -1,12 +0,0 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 6001
expression: popup
---
Enable multi-agent?
Multi-agent is currently disabled in your config.
1. Yes, enable Save the setting now. You will need a new session to use it.
2. Not now Keep multi-agent disabled.
Press enter to confirm or esc to go back

View File

@@ -1701,7 +1701,6 @@ async fn make_chatwidget_manual(
current_status_header: String::from("Working"),
retry_status_header: None,
pending_status_indicator_restore: false,
suppress_queue_autosend: false,
thread_id: None,
thread_name: None,
forked_from: None,
@@ -3140,30 +3139,6 @@ async fn empty_enter_during_task_does_not_queue() {
assert!(chat.queued_user_messages.is_empty());
}
// Restoring thread input state must keep the sleep inhibitor, the widget's
// turn flag, and the bottom pane's task-running state in lockstep — both
// when a running turn is restored and when the state is cleared.
#[tokio::test]
async fn restore_thread_input_state_syncs_sleep_inhibitor_state() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
    // Enable the feature so set_turn_running actually engages the inhibitor.
    chat.set_feature_enabled(Feature::PreventIdleSleep, true);
    // Restore a snapshot that claims an agent turn is in flight.
    chat.restore_thread_input_state(Some(ThreadInputState {
        composer: None,
        queued_user_messages: VecDeque::new(),
        current_collaboration_mode: chat.current_collaboration_mode.clone(),
        active_collaboration_mask: chat.active_collaboration_mask.clone(),
        agent_turn_running: true,
    }));
    // All three pieces of state should report "running".
    assert!(chat.agent_turn_running);
    assert!(chat.turn_sleep_inhibitor.is_turn_running());
    assert!(chat.bottom_pane.is_task_running());
    // Clearing the snapshot (None) must reset all three back to idle.
    chat.restore_thread_input_state(None);
    assert!(!chat.agent_turn_running);
    assert!(!chat.turn_sleep_inhibitor.is_turn_running());
    assert!(!chat.bottom_pane.is_task_running());
}
#[tokio::test]
async fn alt_up_edits_most_recent_queued_message() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
@@ -6016,35 +5991,6 @@ async fn experimental_popup_shows_js_repl_node_requirement() {
);
}
// Pin the rendered appearance of the multi-agent enable prompt with an
// insta snapshot (80-column popup render).
#[tokio::test]
async fn multi_agent_enable_prompt_snapshot() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
    chat.open_multi_agent_enable_prompt();
    let popup = render_bottom_popup(&chat, 80);
    assert_snapshot!("multi_agent_enable_prompt", popup);
}
#[tokio::test]
async fn multi_agent_enable_prompt_updates_feature_and_emits_notice() {
    // Confirming the enable prompt must emit, in order: a feature-flag
    // update for collab, then a history cell carrying the restart notice.
    let (mut chat, mut events, _op_rx) = make_chatwidget_manual(None).await;
    chat.open_multi_agent_enable_prompt();
    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));

    assert_matches!(
        events.try_recv(),
        Ok(AppEvent::UpdateFeatureFlags { updates }) if updates == vec![(Feature::Collab, true)]
    );

    let cell = match events.try_recv() {
        Ok(AppEvent::InsertHistoryCell(cell)) => cell,
        other => panic!("expected InsertHistoryCell event, got {other:?}"),
    };
    let display = cell.display_lines(120);
    let rendered = lines_to_single_string(&display);
    assert!(rendered.contains("Multi-agent will be enabled in the next session."));
}
#[tokio::test]
async fn model_selection_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5-codex")).await;

View File

@@ -29,7 +29,6 @@ use windows_inhibitor as imp;
/// Prevents the system from sleeping while an agent turn is running, via a
/// platform-specific backend (`imp` is selected by cfg at the top of the
/// module).
#[derive(Debug)]
pub struct SleepInhibitor {
    // Feature gate: when false, set_turn_running releases and never acquires.
    enabled: bool,
    // Last turn-running value requested by the caller, stored regardless of
    // `enabled` so is_turn_running() reflects the request, not the effect.
    turn_running: bool,
    // Platform-specific implementation doing the actual OS calls.
    platform: imp::SleepInhibitor,
}
@@ -37,14 +36,12 @@ impl SleepInhibitor {
/// Create an inhibitor with no turn running. `enabled` gates whether
/// subsequent `set_turn_running` calls may engage platform sleep prevention.
pub fn new(enabled: bool) -> Self {
    let platform = imp::SleepInhibitor::new();
    Self {
        platform,
        enabled,
        turn_running: false,
    }
}
/// Update the active turn state; turns sleep prevention on/off as needed.
pub fn set_turn_running(&mut self, turn_running: bool) {
self.turn_running = turn_running;
if !self.enabled {
self.release();
return;
@@ -64,11 +61,6 @@ impl SleepInhibitor {
/// Forward to the platform backend to stop preventing system sleep.
fn release(&mut self) {
    self.platform.release();
}
/// Return the latest turn-running state requested by the caller.
///
/// Note: this reflects the *requested* state, not whether a platform sleep
/// assertion is actually held — `set_turn_running` records the value even
/// when the inhibitor is disabled.
pub fn is_turn_running(&self) -> bool {
    self.turn_running
}
}
#[cfg(test)]
@@ -79,18 +71,14 @@ mod tests {
fn sleep_inhibitor_toggles_without_panicking() {
    // An enabled inhibitor must track each requested turn state verbatim
    // and survive being toggled in both directions.
    let mut inhibitor = SleepInhibitor::new(true);
    for running in [true, false] {
        inhibitor.set_turn_running(running);
        assert_eq!(inhibitor.is_turn_running(), running);
    }
}
#[test]
fn sleep_inhibitor_disabled_does_not_panic() {
    // Even when disabled, the requested turn state is still recorded —
    // only the platform side effect is skipped — and toggling is safe.
    let mut inhibitor = SleepInhibitor::new(false);
    for running in [true, false] {
        inhibitor.set_turn_running(running);
        assert_eq!(inhibitor.is_turn_running(), running);
    }
}
#[test]