mirror of
https://github.com/openai/codex.git
synced 2026-05-02 18:37:01 +00:00
Show spawned agent model and effort in TUI (#14273)
- Include the requested sub-agent model and reasoning effort in the spawn-begin event.
- Render that metadata next to the spawned agent name and role in the TUI transcript.

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Committed by: Michael Bolin
parent
8a099b3dfb
commit
285b3a5143
@@ -58,9 +58,12 @@ use codex_protocol::protocol::AgentMessageDeltaEvent;
|
||||
use codex_protocol::protocol::AgentMessageEvent;
|
||||
use codex_protocol::protocol::AgentReasoningDeltaEvent;
|
||||
use codex_protocol::protocol::AgentReasoningEvent;
|
||||
use codex_protocol::protocol::AgentStatus;
|
||||
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_protocol::protocol::BackgroundEventEvent;
|
||||
use codex_protocol::protocol::CodexErrorInfo;
|
||||
use codex_protocol::protocol::CollabAgentSpawnBeginEvent;
|
||||
use codex_protocol::protocol::CollabAgentSpawnEndEvent;
|
||||
use codex_protocol::protocol::CreditsSnapshot;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
@@ -1838,6 +1841,7 @@ async fn make_chatwidget_manual(
|
||||
plan_stream_controller: None,
|
||||
last_copyable_output: None,
|
||||
running_commands: HashMap::new(),
|
||||
pending_collab_spawn_requests: HashMap::new(),
|
||||
suppressed_exec_calls: HashSet::new(),
|
||||
skills_all: Vec::new(),
|
||||
skills_initial_state: None,
|
||||
@@ -2011,6 +2015,48 @@ fn lines_to_single_string(lines: &[ratatui::text::Line<'static>]) -> String {
|
||||
s
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn collab_spawn_end_shows_requested_model_and_effort() {
|
||||
let (mut chat, mut rx, _ops) = make_chatwidget_manual(None).await;
|
||||
let sender_thread_id = ThreadId::new();
|
||||
let spawned_thread_id = ThreadId::new();
|
||||
|
||||
chat.handle_codex_event(Event {
|
||||
id: "spawn-begin".into(),
|
||||
msg: EventMsg::CollabAgentSpawnBegin(CollabAgentSpawnBeginEvent {
|
||||
call_id: "call-spawn".to_string(),
|
||||
sender_thread_id,
|
||||
prompt: "Explore the repo".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
reasoning_effort: ReasoningEffortConfig::High,
|
||||
}),
|
||||
});
|
||||
chat.handle_codex_event(Event {
|
||||
id: "spawn-end".into(),
|
||||
msg: EventMsg::CollabAgentSpawnEnd(CollabAgentSpawnEndEvent {
|
||||
call_id: "call-spawn".to_string(),
|
||||
sender_thread_id,
|
||||
new_thread_id: Some(spawned_thread_id),
|
||||
new_agent_nickname: Some("Robie".to_string()),
|
||||
new_agent_role: Some("explorer".to_string()),
|
||||
prompt: "Explore the repo".to_string(),
|
||||
status: AgentStatus::PendingInit,
|
||||
}),
|
||||
});
|
||||
|
||||
let cells = drain_insert_history(&mut rx);
|
||||
let rendered = cells
|
||||
.iter()
|
||||
.map(|lines| lines_to_single_string(lines))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
assert!(
|
||||
rendered.contains("Spawned Robie [explorer] (gpt-5 high)"),
|
||||
"expected spawn line to include agent metadata and requested model, got {rendered:?}"
|
||||
);
|
||||
}
|
||||
|
||||
fn status_line_text(chat: &ChatWidget) -> Option<String> {
|
||||
chat.status_line_text()
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user