Files
codex/prs/bolinfest/PR-1647.md
2025-09-02 15:17:45 -07:00

1068 lines
40 KiB
Markdown

# PR #1647: Introducing shutdown operation to terminate gracefully
- URL: https://github.com/openai/codex/pull/1647
- Author: aibrahim-oai
- Created: 2025-07-22 03:04:38 UTC
- Updated: 2025-07-23 22:03:33 UTC
- Changes: +153/-47, Files changed: 10, Commits: 16
## Description
Introducing a shutdown operation so the Codex instance can terminate gracefully.
Currently, we only flush the rollout file on shutdown, without waiting for the background writer to finish.
This fixes the flaky CI tests that race against the background writer.
## Full Diff
```diff
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index 73bb714971..4cc888b62e 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -812,6 +812,37 @@ async fn submission_loop(
}
});
}
+ Op::Shutdown => {
+ info!("Shutting down Codex instance");
+
+ // Gracefully flush and shutdown rollout recorder on session end so tests
+ // that inspect the rollout file do not race with the background writer.
+ if let Some(sess_arc) = sess {
+ let recorder_opt = sess_arc.rollout.lock().unwrap().take();
+ if let Some(rec) = recorder_opt {
+ if let Err(e) = rec.shutdown().await {
+ warn!("failed to shutdown rollout recorder: {e}");
+ let event = Event {
+ id: sub.id.clone(),
+ msg: EventMsg::Error(ErrorEvent {
+ message: "Failed to shutdown rollout recorder".to_string(),
+ }),
+ };
+ if let Err(e) = tx_event.send(event).await {
+ warn!("failed to send error message: {e:?}");
+ }
+ }
+ }
+ }
+ let event = Event {
+ id: sub.id.clone(),
+ msg: EventMsg::ShutdownComplete,
+ };
+ if let Err(e) = tx_event.send(event).await {
+ warn!("failed to send Shutdown event: {e}");
+ }
+ break;
+ }
}
}
debug!("Agent loop exited");
diff --git a/codex-rs/core/src/protocol.rs b/codex-rs/core/src/protocol.rs
index cc201bc7ea..0c375e455d 100644
--- a/codex-rs/core/src/protocol.rs
+++ b/codex-rs/core/src/protocol.rs
@@ -116,6 +116,9 @@ pub enum Op {
/// Request a single history entry identified by `log_id` + `offset`.
GetHistoryEntryRequest { offset: usize, log_id: u64 },
+
+ /// Request to shut down codex instance.
+ Shutdown,
}
/// Determines the conditions under which the user is consulted to approve
@@ -326,6 +329,9 @@ pub enum EventMsg {
/// Response to GetHistoryEntryRequest.
GetHistoryEntryResponse(GetHistoryEntryResponseEvent),
+
+ /// Notification that the agent is shutting down.
+ ShutdownComplete,
}
// Individual event payload types matching each `EventMsg` variant.
diff --git a/codex-rs/core/src/rollout.rs b/codex-rs/core/src/rollout.rs
index 0b19d13397..7f0f61b9eb 100644
--- a/codex-rs/core/src/rollout.rs
+++ b/codex-rs/core/src/rollout.rs
@@ -14,6 +14,7 @@ use time::macros::format_description;
use tokio::io::AsyncWriteExt;
use tokio::sync::mpsc::Sender;
use tokio::sync::mpsc::{self};
+use tokio::sync::oneshot;
use tracing::info;
use tracing::warn;
use uuid::Uuid;
@@ -57,10 +58,10 @@ pub(crate) struct RolloutRecorder {
tx: Sender<RolloutCmd>,
}
-#[derive(Clone)]
enum RolloutCmd {
AddItems(Vec<ResponseItem>),
UpdateState(SessionStateSnapshot),
+ Shutdown { ack: oneshot::Sender<()> },
}
impl RolloutRecorder {
@@ -204,6 +205,21 @@ impl RolloutRecorder {
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
+
+ pub async fn shutdown(&self) -> std::io::Result<()> {
+ let (tx_done, rx_done) = oneshot::channel();
+ match self.tx.send(RolloutCmd::Shutdown { ack: tx_done }).await {
+ Ok(_) => rx_done
+ .await
+ .map_err(|e| IoError::other(format!("failed waiting for rollout shutdown: {e}"))),
+ Err(e) => {
+ warn!("failed to send rollout shutdown command: {e}");
+ Err(IoError::other(format!(
+ "failed to send rollout shutdown command: {e}"
+ )))
+ }
+ }
+ }
}
struct LogFileInfo {
@@ -299,6 +315,9 @@ async fn rollout_writer(
let _ = file.flush().await;
}
}
+ RolloutCmd::Shutdown { ack } => {
+ let _ = ack.send(());
+ }
}
}
}
diff --git a/codex-rs/exec/src/event_processor.rs b/codex-rs/exec/src/event_processor.rs
index 56db651a83..a7edb96af2 100644
--- a/codex-rs/exec/src/event_processor.rs
+++ b/codex-rs/exec/src/event_processor.rs
@@ -1,15 +1,23 @@
+use std::path::Path;
+
use codex_common::summarize_sandbox_policy;
use codex_core::WireApi;
use codex_core::config::Config;
use codex_core::model_supports_reasoning_summaries;
use codex_core::protocol::Event;
+pub(crate) enum CodexStatus {
+ Running,
+ InitiateShutdown,
+ Shutdown,
+}
+
pub(crate) trait EventProcessor {
/// Print summary of effective configuration and user prompt.
fn print_config_summary(&mut self, config: &Config, prompt: &str);
/// Handle a single event emitted by the agent.
- fn process_event(&mut self, event: Event);
+ fn process_event(&mut self, event: Event) -> CodexStatus;
}
pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static str, String)> {
@@ -35,3 +43,28 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
entries
}
+
+pub(crate) fn handle_last_message(
+ last_agent_message: Option<&str>,
+ last_message_path: Option<&Path>,
+) {
+ match (last_message_path, last_agent_message) {
+ (Some(path), Some(msg)) => write_last_message_file(msg, Some(path)),
+ (Some(path), None) => {
+ write_last_message_file("", Some(path));
+ eprintln!(
+ "Warning: no last agent message; wrote empty content to {}",
+ path.display()
+ );
+ }
+ (None, _) => eprintln!("Warning: no file to write last message to."),
+ }
+}
+
+fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) {
+ if let Some(path) = last_message_path {
+ if let Err(e) = std::fs::write(path, contents) {
+ eprintln!("Failed to write last message file {path:?}: {e}");
+ }
+ }
+}
diff --git a/codex-rs/exec/src/event_processor_with_human_output.rs b/codex-rs/exec/src/event_processor_with_human_output.rs
index 7b39071116..bc647c683e 100644
--- a/codex-rs/exec/src/event_processor_with_human_output.rs
+++ b/codex-rs/exec/src/event_processor_with_human_output.rs
@@ -15,16 +15,20 @@ use codex_core::protocol::McpToolCallEndEvent;
use codex_core::protocol::PatchApplyBeginEvent;
use codex_core::protocol::PatchApplyEndEvent;
use codex_core::protocol::SessionConfiguredEvent;
+use codex_core::protocol::TaskCompleteEvent;
use codex_core::protocol::TokenUsage;
use owo_colors::OwoColorize;
use owo_colors::Style;
use shlex::try_join;
use std::collections::HashMap;
use std::io::Write;
+use std::path::PathBuf;
use std::time::Instant;
+use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use crate::event_processor::create_config_summary_entries;
+use crate::event_processor::handle_last_message;
/// This should be configurable. When used in CI, users may not want to impose
/// a limit so they can see the full transcript.
@@ -54,10 +58,15 @@ pub(crate) struct EventProcessorWithHumanOutput {
show_agent_reasoning: bool,
answer_started: bool,
reasoning_started: bool,
+ last_message_path: Option<PathBuf>,
}
impl EventProcessorWithHumanOutput {
- pub(crate) fn create_with_ansi(with_ansi: bool, config: &Config) -> Self {
+ pub(crate) fn create_with_ansi(
+ with_ansi: bool,
+ config: &Config,
+ last_message_path: Option<PathBuf>,
+ ) -> Self {
let call_id_to_command = HashMap::new();
let call_id_to_patch = HashMap::new();
let call_id_to_tool_call = HashMap::new();
@@ -77,6 +86,7 @@ impl EventProcessorWithHumanOutput {
show_agent_reasoning: !config.hide_agent_reasoning,
answer_started: false,
reasoning_started: false,
+ last_message_path,
}
} else {
Self {
@@ -93,6 +103,7 @@ impl EventProcessorWithHumanOutput {
show_agent_reasoning: !config.hide_agent_reasoning,
answer_started: false,
reasoning_started: false,
+ last_message_path,
}
}
}
@@ -158,7 +169,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
);
}
- fn process_event(&mut self, event: Event) {
+ fn process_event(&mut self, event: Event) -> CodexStatus {
let Event { id: _, msg } = event;
match msg {
EventMsg::Error(ErrorEvent { message }) => {
@@ -168,9 +179,16 @@ impl EventProcessor for EventProcessorWithHumanOutput {
EventMsg::BackgroundEvent(BackgroundEventEvent { message }) => {
ts_println!(self, "{}", message.style(self.dimmed));
}
- EventMsg::TaskStarted | EventMsg::TaskComplete(_) => {
+ EventMsg::TaskStarted => {
// Ignore.
}
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ handle_last_message(
+ last_agent_message.as_deref(),
+ self.last_message_path.as_deref(),
+ );
+ return CodexStatus::InitiateShutdown;
+ }
EventMsg::TokenCount(TokenUsage { total_tokens, .. }) => {
ts_println!(self, "tokens used: {total_tokens}");
}
@@ -185,7 +203,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
}
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
if !self.show_agent_reasoning {
- return;
+ return CodexStatus::Running;
}
if !self.reasoning_started {
ts_println!(
@@ -498,7 +516,9 @@ impl EventProcessor for EventProcessorWithHumanOutput {
EventMsg::GetHistoryEntryResponse(_) => {
// Currently ignored in exec output.
}
+ EventMsg::ShutdownComplete => return CodexStatus::Shutdown,
}
+ CodexStatus::Running
}
}
diff --git a/codex-rs/exec/src/event_processor_with_json_output.rs b/codex-rs/exec/src/event_processor_with_json_output.rs
index 699460bbed..e7a658b76f 100644
--- a/codex-rs/exec/src/event_processor_with_json_output.rs
+++ b/codex-rs/exec/src/event_processor_with_json_output.rs
@@ -1,18 +1,24 @@
use std::collections::HashMap;
+use std::path::PathBuf;
use codex_core::config::Config;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
+use codex_core::protocol::TaskCompleteEvent;
use serde_json::json;
+use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use crate::event_processor::create_config_summary_entries;
+use crate::event_processor::handle_last_message;
-pub(crate) struct EventProcessorWithJsonOutput;
+pub(crate) struct EventProcessorWithJsonOutput {
+ last_message_path: Option<PathBuf>,
+}
impl EventProcessorWithJsonOutput {
- pub fn new() -> Self {
- Self {}
+ pub fn new(last_message_path: Option<PathBuf>) -> Self {
+ Self { last_message_path }
}
}
@@ -33,15 +39,25 @@ impl EventProcessor for EventProcessorWithJsonOutput {
println!("{prompt_json}");
}
- fn process_event(&mut self, event: Event) {
+ fn process_event(&mut self, event: Event) -> CodexStatus {
match event.msg {
EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_) => {
// Suppress streaming events in JSON mode.
+ CodexStatus::Running
+ }
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ handle_last_message(
+ last_agent_message.as_deref(),
+ self.last_message_path.as_deref(),
+ );
+ CodexStatus::InitiateShutdown
}
+ EventMsg::ShutdownComplete => CodexStatus::Shutdown,
_ => {
if let Ok(line) = serde_json::to_string(&event) {
println!("{line}");
}
+ CodexStatus::Running
}
}
}
diff --git a/codex-rs/exec/src/lib.rs b/codex-rs/exec/src/lib.rs
index 620ab82327..126e92f597 100644
--- a/codex-rs/exec/src/lib.rs
+++ b/codex-rs/exec/src/lib.rs
@@ -5,7 +5,6 @@ mod event_processor_with_json_output;
use std::io::IsTerminal;
use std::io::Read;
-use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@@ -28,6 +27,7 @@ use tracing::error;
use tracing::info;
use tracing_subscriber::EnvFilter;
+use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
@@ -123,11 +123,12 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides)?;
let mut event_processor: Box<dyn EventProcessor> = if json_mode {
- Box::new(EventProcessorWithJsonOutput::new())
+ Box::new(EventProcessorWithJsonOutput::new(last_message_file.clone()))
} else {
Box::new(EventProcessorWithHumanOutput::create_with_ansi(
stdout_with_ansi,
&config,
+ last_message_file.clone(),
))
};
@@ -224,40 +225,17 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Run the loop until the task is complete.
while let Some(event) = rx.recv().await {
- let (is_last_event, last_assistant_message) = match &event.msg {
- EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
- (true, last_agent_message.clone())
+ let shutdown: CodexStatus = event_processor.process_event(event);
+ match shutdown {
+ CodexStatus::Running => continue,
+ CodexStatus::InitiateShutdown => {
+ codex.submit(Op::Shutdown).await?;
+ }
+ CodexStatus::Shutdown => {
+ break;
}
- _ => (false, None),
- };
- event_processor.process_event(event);
- if is_last_event {
- handle_last_message(last_assistant_message, last_message_file.as_deref())?;
- break;
}
}
Ok(())
}
-
-fn handle_last_message(
- last_agent_message: Option<String>,
- last_message_file: Option<&Path>,
-) -> std::io::Result<()> {
- match (last_agent_message, last_message_file) {
- (Some(last_agent_message), Some(last_message_file)) => {
- // Last message and a file to write to.
- std::fs::write(last_message_file, last_agent_message)?;
- }
- (None, Some(last_message_file)) => {
- eprintln!(
- "Warning: No last message to write to file: {}",
- last_message_file.to_string_lossy()
- );
- }
- (_, None) => {
- // No last message and no file to write to.
- }
- }
- Ok(())
-}
diff --git a/codex-rs/mcp-server/src/codex_tool_runner.rs b/codex-rs/mcp-server/src/codex_tool_runner.rs
index 4e10d158cf..f2cacf6c8e 100644
--- a/codex-rs/mcp-server/src/codex_tool_runner.rs
+++ b/codex-rs/mcp-server/src/codex_tool_runner.rs
@@ -246,7 +246,8 @@ async fn run_codex_tool_session_inner(
| EventMsg::BackgroundEvent(_)
| EventMsg::PatchApplyBegin(_)
| EventMsg::PatchApplyEnd(_)
- | EventMsg::GetHistoryEntryResponse(_) => {
+ | EventMsg::GetHistoryEntryResponse(_)
+ | EventMsg::ShutdownComplete => {
// For now, we do not do anything extra for these
// events. Note that
// send(codex_event_to_notification(&event)) above has
diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs
index 37c2616d5b..377b5d6f0b 100644
--- a/codex-rs/tui/src/app.rs
+++ b/codex-rs/tui/src/app.rs
@@ -223,9 +223,7 @@ impl App<'_> {
} => {
match &mut self.app_state {
AppState::Chat { widget } => {
- if widget.on_ctrl_c() {
- self.app_event_tx.send(AppEvent::ExitRequest);
- }
+ widget.on_ctrl_c();
}
AppState::Login { .. } | AppState::GitWarning { .. } => {
// No-op.
diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs
index 3856587010..081a406f29 100644
--- a/codex-rs/tui/src/chatwidget.rs
+++ b/codex-rs/tui/src/chatwidget.rs
@@ -419,6 +419,9 @@ impl ChatWidget<'_> {
self.bottom_pane
.on_history_entry_response(log_id, offset, entry.map(|e| e.text));
}
+ EventMsg::ShutdownComplete => {
+ self.app_event_tx.send(AppEvent::ExitRequest);
+ }
event => {
self.conversation_history
.add_background_event(format!("{event:?}"));
@@ -471,6 +474,7 @@ impl ChatWidget<'_> {
self.reasoning_buffer.clear();
false
} else if self.bottom_pane.ctrl_c_quit_hint_visible() {
+ self.submit_op(Op::Shutdown);
true
} else {
self.bottom_pane.show_ctrl_c_quit_hint();
```
## Review Comments
### codex-rs/core/src/codex.rs
- Created: 2025-07-22 23:06:28 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223988420
```diff
@@ -841,6 +841,31 @@ async fn submission_loop(
}
});
}
+ Op::Shutdown => {
+ info!("Shutting down Codex instance");
+
+ // Gracefully flush and shutdown rollout recorder on session end so tests
+ // that inspect the rollout file do not race with the background writer.
+ if let Some(sess_arc) = sess {
+ let recorder_opt = {
+ let mut guard = sess_arc.rollout.lock().unwrap();
+ guard.take()
```
> Why do we have to `take()` it?
### codex-rs/core/src/protocol.rs
- Created: 2025-07-23 19:07:44 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226420148
```diff
@@ -326,6 +329,9 @@ pub enum EventMsg {
/// Response to GetHistoryEntryRequest.
GetHistoryEntryResponse(GetHistoryEntryResponseEvent),
+
+ /// Notification that the agent is shutting down.
+ Shutdown,
```
> Would `ShutdownComplete` be an appropriate name?
### codex-rs/core/src/rollout.rs
- Created: 2025-07-22 06:48:47 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2221396859
```diff
@@ -294,6 +335,15 @@ async fn rollout_writer(
let _ = file.flush().await;
}
}
+ RolloutCmd::Sync { exit, ack } => {
+ if let Err(e) = file.flush().await {
```
> Since the other two cases always `flush()`, there should never be anything new to `flush()` in this case, no?
- Created: 2025-07-22 07:24:27 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2221479432
```diff
@@ -200,6 +205,42 @@ impl RolloutRecorder {
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
+
+ pub async fn sync(&self) -> std::io::Result<()> {
```
> I think something is still not quite right here.
>
> As shown on https://play.rust-lang.org/?version=stable&mode=debug&edition=2024&gist=e6786e1e7d4fcd40e0b0c39236a15ac6, even once `tx` is dropped, all the messages that were sent via `tx` will still be read from `rx` running in another tokio task.
>
> Though one thing that I think is possible is:
>
> https://github.com/openai/codex/blob/710f728124989b26e7c2e8910a31b198826d1025/codex-rs/core/src/rollout.rs#L98-L109
>
> At the end of the `RolloutRecorder` constructor, `RolloutRecorder` has taken ownership of `tx`, but there is no guarantee that the `rollout_writer()` scheduled by that `tokio::task::spawn()` invocation has started yet.
>
> Indeed, IIUC, we are only starting the test with two threads:
>
> https://github.com/openai/codex/blob/710f728124989b26e7c2e8910a31b198826d1025/codex-rs/core/tests/cli_stream.rs#L126-L127
>
> so depending on how many tokio tasks were already scheduled, it might be some time before `rollout_writer()` starts.
>
> Is it possible that `RolloutRecorder` is dropped before `tx.send()` is ever called such that by the time `rollout_writer()` starts there is nothing to read? Tracing through the code, I don't think so...
>
> ...but it is the case that if `main()` completes (and the Tokio runtime shuts down), then any outstanding Tokio tasks that are not finished are dropped and will not complete.
>
> All that is to say, I believe that the new `shutdown()` method needs to exist, but the `sync()` method does not.
>
> Perhaps a more accurate name would be `drain()` (and maybe `RolloutCmd::Drain` should be the enum value name) because what you are trying to do is ensure that all of the enqueued items get processed before proceeding. The `oneshot` is a good way to do this, but then I think the `exit` boolean becomes unnecessary.
- Created: 2025-07-22 07:27:01 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2221484989
```diff
@@ -294,6 +335,15 @@ async fn rollout_writer(
let _ = file.flush().await;
}
}
+ RolloutCmd::Sync { exit, ack } => {
+ if let Err(e) = file.flush().await {
+ warn!("Failed to flush on sync: {e}");
+ }
+ let _ = ack.send(());
+ if exit {
```
> Assuming we move to `Drain`, I don't think you ever exit here: you just wait for `tx` to be dropped such that this happens naturally.
- Created: 2025-07-22 22:50:20 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223971644
```diff
@@ -200,6 +202,17 @@ impl RolloutRecorder {
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
+
+ pub async fn shutdown(&self) -> std::io::Result<()> {
+ let (tx_done, rx_done) = oneshot::channel();
+ if let Err(e) = self.tx.send(RolloutCmd::Shutdown { ack: tx_done }).await {
+ warn!("failed to send rollout shutdown command: {e}");
+ return Ok(());
```
> Why not forward the `Err`?
- Created: 2025-07-22 22:53:15 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223974642
```diff
@@ -200,6 +202,17 @@ impl RolloutRecorder {
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
+
+ pub async fn shutdown(&self) -> std::io::Result<()> {
+ let (tx_done, rx_done) = oneshot::channel();
+ if let Err(e) = self.tx.send(RolloutCmd::Shutdown { ack: tx_done }).await {
```
> This is admittedly a nit, but personally, I would rewrite this as a `match self.tx.send(RolloutCmd::Shutdown { ack: tx_done }).await` to avoid using the `return` keyword. I try to reserve the use of `return` when you really need an "early return" in a long function. But if using a single expression is an option, I find that to be cleaner because then it's more "straight line" code (though admittedly `match` branches...).
- Created: 2025-07-22 22:54:11 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223975605
```diff
@@ -294,6 +335,15 @@ async fn rollout_writer(
let _ = file.flush().await;
}
}
+ RolloutCmd::Sync { exit, ack } => {
+ if let Err(e) = file.flush().await {
```
> I would skip the `flush()`: all this needs to do is `ack.send(())`.
- Created: 2025-07-23 19:18:41 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226439487
```diff
@@ -204,6 +205,21 @@ impl RolloutRecorder {
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
+
+ pub async fn shutdown(&self) -> std::io::Result<()> {
```
> I know this has been a long review, but I feel much better that the changes to `RolloutRecorder.rs` are much smaller now.
### codex-rs/exec/src/event_processor.rs
- Created: 2025-07-22 23:00:26 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223981742
```diff
@@ -1,15 +1,19 @@
+use std::path::Path;
+
use codex_common::summarize_sandbox_policy;
use codex_core::WireApi;
use codex_core::config::Config;
use codex_core::model_supports_reasoning_summaries;
use codex_core::protocol::Event;
+use crate::event_processor_with_human_output::CodexStatus;
+
pub(crate) trait EventProcessor {
/// Print summary of effective configuration and user prompt.
fn print_config_summary(&mut self, config: &Config, prompt: &str);
/// Handle a single event emitted by the agent.
- fn process_event(&mut self, event: Event);
+ fn process_event(&mut self, event: Event, last_message_file: Option<&Path>) -> CodexStatus;
```
> `last_message_file` feels like something that should be passed to the constructor rather than this method?
>
> Also, it looks like the logic that uses it is similar across both implementations? A trait can have a default implementation for a method as a way to share code, FYI.
- Created: 2025-07-23 19:13:33 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226430368
```diff
@@ -1,15 +1,35 @@
+use std::path::Path;
+
use codex_common::summarize_sandbox_policy;
use codex_core::WireApi;
use codex_core::config::Config;
use codex_core::model_supports_reasoning_summaries;
use codex_core::protocol::Event;
+pub(crate) enum CodexStatus {
+ Running,
+ InitiateShutdown,
+ Shutdown,
+}
+
pub(crate) trait EventProcessor {
/// Print summary of effective configuration and user prompt.
fn print_config_summary(&mut self, config: &Config, prompt: &str);
/// Handle a single event emitted by the agent.
- fn process_event(&mut self, event: Event);
+ fn process_event(&mut self, event: Event) -> CodexStatus;
+
+ /// Get the path to the last message file.
+ fn last_message_path(&self) -> Option<&Path>;
+
+ /// Write the last message to the last message file.
+ fn write_last_message_file(&self, contents: &str) {
+ if let Some(path) = self.last_message_path() {
+ if let Err(e) = std::fs::write(path, contents) {
+ eprintln!("Failed to write last message file {path:?}: {e}");
+ }
+ }
+ }
```
> Now that both implementations of `EventProcessor` have `last_message_path` as a field, I don't think it makes sense to extend the trait in this way.
- Created: 2025-07-23 21:23:51 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226718109
```diff
@@ -35,3 +43,29 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
entries
}
+
+pub(crate) fn handle_last_message(
+ last_agent_message: Option<&str>,
+ last_message_path: Option<&Path>,
+) -> CodexStatus {
+ match (last_message_path, last_agent_message) {
+ (Some(path), Some(msg)) => write_last_message_file(msg, Some(path)),
+ (Some(path), None) => {
+ write_last_message_file("", Some(path));
+ eprintln!(
+ "Warning: no last agent message; wrote empty content to {}",
+ path.display()
+ );
+ }
+ (None, _) => eprintln!("Warning: no file to write last message to."),
+ }
+ CodexStatus::InitiateShutdown
```
> This return value really has nothing to do with writing the file. I think this function should not have a return value.
### codex-rs/exec/src/event_processor_with_human_output.rs
- Created: 2025-07-22 23:01:24 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223983217
```diff
@@ -158,21 +166,45 @@ impl EventProcessor for EventProcessorWithHumanOutput {
);
}
- fn process_event(&mut self, event: Event) {
+ fn process_event(&mut self, event: Event, last_message_file: Option<&Path>) -> CodexStatus {
let Event { id: _, msg } = event;
match msg {
EventMsg::Error(ErrorEvent { message }) => {
let prefix = "ERROR:".style(self.red);
ts_println!(self, "{prefix} {message}");
+ CodexStatus::Running
```
> If it is important to return this value, I would early return for the non-`Running` cases and then default to returning `Running` otherwise.
- Created: 2025-07-23 19:16:30 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226435635
```diff
@@ -168,9 +183,13 @@ impl EventProcessor for EventProcessorWithHumanOutput {
EventMsg::BackgroundEvent(BackgroundEventEvent { message }) => {
ts_println!(self, "{}", message.style(self.dimmed));
}
- EventMsg::TaskStarted | EventMsg::TaskComplete(_) => {
+ EventMsg::TaskStarted => {
// Ignore.
}
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ self.write_last_message_file(last_agent_message.as_deref().unwrap_or(""));
```
> I would just call the two-arg, top-level version of `handle_last_message()` and pass `self.last_message_path` in (and so the same in the other implementation). Then you can get rid of `fn last_message_path(&self) -> Option<&Path>`.
- Created: 2025-07-23 21:24:24 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226718806
```diff
@@ -168,9 +179,15 @@ impl EventProcessor for EventProcessorWithHumanOutput {
EventMsg::BackgroundEvent(BackgroundEventEvent { message }) => {
ts_println!(self, "{}", message.style(self.dimmed));
}
- EventMsg::TaskStarted | EventMsg::TaskComplete(_) => {
+ EventMsg::TaskStarted => {
// Ignore.
}
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ return handle_last_message(
+ last_agent_message.as_deref(),
+ self.last_message_path.as_deref(),
+ );
```
> And then I would do this:
>
> ```suggestion
> handle_last_message(
> last_agent_message.as_deref(),
> self.last_message_path.as_deref(),
> );
> return CodexStatus::InitiateShutdown;
> ```
### codex-rs/exec/src/event_processor_with_json_output.rs
- Created: 2025-07-23 21:11:30 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226699046
```diff
@@ -33,15 +38,35 @@ impl EventProcessor for EventProcessorWithJsonOutput {
println!("{prompt_json}");
}
- fn process_event(&mut self, event: Event) {
+ fn process_event(&mut self, event: Event) -> CodexStatus {
match event.msg {
EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_) => {
// Suppress streaming events in JSON mode.
+ CodexStatus::Running
+ }
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ match (
+ self.last_message_path.clone(),
+ last_agent_message.as_deref(),
+ ) {
+ (Some(path), Some(msg)) => self.write_last_message_file(msg, Some(&path)),
+ (Some(path), None) => {
+ self.write_last_message_file("", Some(&path));
+ eprintln!(
+ "Warning: no last agent message; wrote empty content to {}",
+ path.display()
+ );
+ }
+ (None, _) => eprintln!("Warning: no file to write last message to."),
+ }
+ return CodexStatus::InitiateShutdown;
```
> Why the copy/paste? This was already in its own function: `handle_last_message()`.
- Created: 2025-07-23 21:24:33 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226719036
```diff
@@ -33,15 +39,24 @@ impl EventProcessor for EventProcessorWithJsonOutput {
println!("{prompt_json}");
}
- fn process_event(&mut self, event: Event) {
+ fn process_event(&mut self, event: Event) -> CodexStatus {
match event.msg {
EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_) => {
// Suppress streaming events in JSON mode.
+ CodexStatus::Running
+ }
+ EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
+ handle_last_message(
```
> Same here.
### codex-rs/exec/src/lib.rs
- Created: 2025-07-22 23:03:00 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223985193
```diff
@@ -224,40 +224,18 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Run the loop until the task is complete.
while let Some(event) = rx.recv().await {
- let (is_last_event, last_assistant_message) = match &event.msg {
- EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
- (true, last_agent_message.clone())
+ let shutdown: CodexStatus =
+ event_processor.process_event(event, last_message_file.as_deref());
```
> Alternatively, the return value is purely a function of `event`, right? In which case you can have a function that just maps `Event` to `CodexStatus`.
- Created: 2025-07-22 23:04:01 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223986073
```diff
@@ -224,40 +224,18 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Run the loop until the task is complete.
while let Some(event) = rx.recv().await {
- let (is_last_event, last_assistant_message) = match &event.msg {
- EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
- (true, last_agent_message.clone())
+ let shutdown: CodexStatus =
+ event_processor.process_event(event, last_message_file.as_deref());
+ match shutdown {
+ CodexStatus::Running => continue,
+ CodexStatus::InitiateShutdown => {
+ codex.submit(Op::Shutdown).await?;
+ }
+ CodexStatus::Shutdown => {
+ break;
}
- _ => (false, None),
- };
- event_processor.process_event(event);
- if is_last_event {
- handle_last_message(last_assistant_message, last_message_file.as_deref())?;
- break;
}
}
Ok(())
}
-
-fn handle_last_message(
```
> This seems more appropriate as before when it was a standalone function?
- Created: 2025-07-23 19:13:57 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226431000
```diff
@@ -224,40 +224,18 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Run the loop until the task is complete.
while let Some(event) = rx.recv().await {
- let (is_last_event, last_assistant_message) = match &event.msg {
- EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
- (true, last_agent_message.clone())
+ let shutdown: CodexStatus =
+ event_processor.process_event(event, last_message_file.as_deref());
+ match shutdown {
+ CodexStatus::Running => continue,
+ CodexStatus::InitiateShutdown => {
+ codex.submit(Op::Shutdown).await?;
+ }
+ CodexStatus::Shutdown => {
+ break;
}
- _ => (false, None),
- };
- event_processor.process_event(event);
- if is_last_event {
- handle_last_message(last_assistant_message, last_message_file.as_deref())?;
- break;
}
}
Ok(())
}
-
-fn handle_last_message(
```
> I would move this function to `event_processor.rs`.
- Created: 2025-07-23 19:18:04 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226438434
```diff
@@ -224,40 +225,17 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Run the loop until the task is complete.
while let Some(event) = rx.recv().await {
- let (is_last_event, last_assistant_message) = match &event.msg {
- EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
- (true, last_agent_message.clone())
+ let shutdown: CodexStatus = event_processor.process_event(event);
+ match shutdown {
+ CodexStatus::Running => continue,
+ CodexStatus::InitiateShutdown => {
+ codex.submit(Op::Shutdown).await?;
+ }
+ CodexStatus::Shutdown => {
+ break;
}
- _ => (false, None),
- };
- event_processor.process_event(event);
- if is_last_event {
- handle_last_message(last_assistant_message, last_message_file.as_deref())?;
- break;
}
}
Ok(())
}
-
-fn handle_last_message(
- last_agent_message: Option<String>,
- last_message_file: Option<&Path>,
-) -> std::io::Result<()> {
- match (last_agent_message, last_message_file) {
- (Some(last_agent_message), Some(last_message_file)) => {
- // Last message and a file to write to.
- std::fs::write(last_message_file, last_agent_message)?;
- }
- (None, Some(last_message_file)) => {
- eprintln!(
```
> In your new version, we print the empty string in this case, which I suppose is fine, though perhaps we should still print this warning to stderr?
### codex-rs/tui/src/app.rs
- Created: 2025-07-23 19:12:09 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226427700
```diff
@@ -223,12 +223,10 @@ impl App<'_> {
} => {
match &mut self.app_state {
AppState::Chat { widget } => {
- if widget.on_ctrl_c() {
- self.app_event_tx.send(AppEvent::ExitRequest);
- }
+ widget.on_ctrl_c();
}
AppState::Login { .. } | AppState::GitWarning { .. } => {
- // No-op.
+ // No-op. This is a no-op.
```
> Remove this change?
- Created: 2025-07-23 21:26:56 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2226722541
```diff
@@ -223,9 +223,7 @@ impl App<'_> {
} => {
match &mut self.app_state {
AppState::Chat { widget } => {
- if widget.on_ctrl_c() {
- self.app_event_tx.send(AppEvent::ExitRequest);
- }
+ widget.on_ctrl_c();
```
> Wait, why are we changing Ctrl-C behavior. Did you manually test this? What is different now?
### codex-rs/tui/src/chatwidget.rs
- Created: 2025-07-22 23:05:35 UTC | Link: https://github.com/openai/codex/pull/1647#discussion_r2223987472
```diff
@@ -469,6 +469,7 @@ impl ChatWidget<'_> {
self.reasoning_buffer.clear();
false
} else if self.bottom_pane.ctrl_c_quit_hint_visible() {
+ self.submit_op(Op::Shutdown);
```
> Though nothing in the TUI waits for `Shutdown` to be processed, correct?