28 KiB
PR #2575: fork conversation from a previous message
- URL: https://github.com/openai/codex/pull/2575
- Author: aibrahim-oai
- Created: 2025-08-22 04:12:12 UTC
- Updated: 2025-08-23 00:06:18 UTC
- Changes: +174/-4, Files changed: 6, Commits: 8
Description
This can be the underlying logic in order to start a conversation from a previous message. It will need some love in the UI.
Base for building this: #2588
Full Diff
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index 79b73a3335..7fd2b93a10 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -14,6 +14,7 @@ use codex_apply_patch::ApplyPatchAction;
use codex_apply_patch::MaybeApplyPatchVerified;
use codex_apply_patch::maybe_parse_apply_patch_verified;
use codex_login::AuthManager;
+use codex_protocol::protocol::ConversationHistoryResponseEvent;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::protocol::TurnAbortedEvent;
use futures::prelude::*;
@@ -147,6 +148,7 @@ impl Codex {
pub async fn spawn(
config: Config,
auth_manager: Arc<AuthManager>,
+ initial_history: Option<Vec<ResponseItem>>,
) -> CodexResult<CodexSpawnOk> {
let (tx_sub, rx_sub) = async_channel::bounded(64);
let (tx_event, rx_event) = async_channel::unbounded();
@@ -177,6 +179,7 @@ impl Codex {
config.clone(),
auth_manager.clone(),
tx_event.clone(),
+ initial_history,
)
.await
.map_err(|e| {
@@ -186,7 +189,12 @@ impl Codex {
let session_id = session.session_id;
// This task will run until Op::Shutdown is received.
- tokio::spawn(submission_loop(session, turn_context, config, rx_sub));
+ tokio::spawn(submission_loop(
+ session.clone(),
+ turn_context,
+ config,
+ rx_sub,
+ ));
let codex = Codex {
next_id: AtomicU64::new(0),
tx_sub,
@@ -332,6 +340,7 @@ impl Session {
config: Arc<Config>,
auth_manager: Arc<AuthManager>,
tx_event: Sender<Event>,
+ initial_history: Option<Vec<ResponseItem>>,
) -> anyhow::Result<(Arc<Self>, TurnContext)> {
let ConfigureSession {
provider,
@@ -391,14 +400,15 @@ impl Session {
}
let rollout_result = match rollout_res {
Ok((session_id, maybe_saved, recorder)) => {
- let restored_items: Option<Vec<ResponseItem>> =
+ let restored_items: Option<Vec<ResponseItem>> = initial_history.or_else(|| {
maybe_saved.and_then(|saved_session| {
if saved_session.items.is_empty() {
None
} else {
Some(saved_session.items)
}
- });
+ })
+ });
RolloutResult {
session_id,
rollout_recorder: Some(recorder),
@@ -1285,6 +1295,21 @@ async fn submission_loop(
}
break;
}
+ Op::GetHistory => {
+ let tx_event = sess.tx_event.clone();
+ let sub_id = sub.id.clone();
+
+ let event = Event {
+ id: sub_id.clone(),
+ msg: EventMsg::ConversationHistory(ConversationHistoryResponseEvent {
+ conversation_id: sess.session_id,
+ entries: sess.state.lock_unchecked().history.contents(),
+ }),
+ };
+ if let Err(e) = tx_event.send(event).await {
+ warn!("failed to send ConversationHistory event: {e}");
+ }
+ }
_ => {
// Ignore unknown ops; enum is non_exhaustive to allow extensions.
}
diff --git a/codex-rs/core/src/conversation_manager.rs b/codex-rs/core/src/conversation_manager.rs
index b553843125..fd90f54660 100644
--- a/codex-rs/core/src/conversation_manager.rs
+++ b/codex-rs/core/src/conversation_manager.rs
@@ -16,6 +16,7 @@ use crate::error::Result as CodexResult;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use crate::protocol::SessionConfiguredEvent;
+use codex_protocol::models::ResponseItem;
/// Represents a newly created Codex conversation, including the first event
/// (which is [`EventMsg::SessionConfigured`]).
@@ -59,8 +60,18 @@ impl ConversationManager {
let CodexSpawnOk {
codex,
session_id: conversation_id,
- } = Codex::spawn(config, auth_manager).await?;
+ } = {
+ let initial_history = None;
+ Codex::spawn(config, auth_manager, initial_history).await?
+ };
+ self.finalize_spawn(codex, conversation_id).await
+ }
+ async fn finalize_spawn(
+ &self,
+ codex: Codex,
+ conversation_id: Uuid,
+ ) -> CodexResult<NewConversation> {
// The first event must be `SessionInitialized`. Validate and forward it
// to the caller so that they can display it in the conversation
// history.
@@ -98,4 +109,120 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `num_messages_to_drop`
+ /// user messages (and everything that follows them) from its transcript and
+ /// starting a new conversation with identical configuration (unless overridden
+ /// by the caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ conversation_history: Vec<ResponseItem>,
+ num_messages_to_drop: usize,
+ config: Config,
+ ) -> CodexResult<NewConversation> {
+ // Compute the prefix up to the cut point.
+ let truncated_history =
+ truncate_after_dropping_last_messages(conversation_history, num_messages_to_drop);
+
+ // Spawn a new conversation with the computed initial history.
+ let auth_manager = self.auth_manager.clone();
+ let CodexSpawnOk {
+ codex,
+ session_id: conversation_id,
+ } = Codex::spawn(config, auth_manager, Some(truncated_history)).await?;
+
+ self.finalize_spawn(codex, conversation_id).await
+ }
+}
+
+/// Return a prefix of `items` obtained by dropping the last `n` user messages
+/// and all items that follow them.
+fn truncate_after_dropping_last_messages(items: Vec<ResponseItem>, n: usize) -> Vec<ResponseItem> {
+ if n == 0 || items.is_empty() {
+ return items;
+ }
+
+ // Walk backwards counting only `user` Message items, find cut index.
+ let mut count = 0usize;
+ let mut cut_index = 0usize;
+ for (idx, item) in items.iter().enumerate().rev() {
+ if let ResponseItem::Message { role, .. } = item
+ && role == "user"
+ {
+ count += 1;
+ if count == n {
+ // Cut everything from this user message to the end.
+ cut_index = idx;
+ break;
+ }
+ }
+ }
+ if count < n {
+ // If fewer than n messages exist, drop everything.
+ Vec::new()
+ } else {
+ items.into_iter().take(cut_index).collect()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use codex_protocol::models::ContentItem;
+ use codex_protocol::models::ReasoningItemReasoningSummary;
+ use codex_protocol::models::ResponseItem;
+
+ fn user_msg(text: &str) -> ResponseItem {
+ ResponseItem::Message {
+ id: None,
+ role: "user".to_string(),
+ content: vec![ContentItem::OutputText {
+ text: text.to_string(),
+ }],
+ }
+ }
+ fn assistant_msg(text: &str) -> ResponseItem {
+ ResponseItem::Message {
+ id: None,
+ role: "assistant".to_string(),
+ content: vec![ContentItem::OutputText {
+ text: text.to_string(),
+ }],
+ }
+ }
+
+ #[test]
+ fn drops_from_last_user_only() {
+ let items = vec![
+ user_msg("u1"),
+ assistant_msg("a1"),
+ assistant_msg("a2"),
+ user_msg("u2"),
+ assistant_msg("a3"),
+ ResponseItem::Reasoning {
+ id: "r1".to_string(),
+ summary: vec![ReasoningItemReasoningSummary::SummaryText {
+ text: "s".to_string(),
+ }],
+ content: None,
+ encrypted_content: None,
+ },
+ ResponseItem::FunctionCall {
+ id: None,
+ name: "tool".to_string(),
+ arguments: "{}".to_string(),
+ call_id: "c1".to_string(),
+ },
+ assistant_msg("a4"),
+ ];
+
+ let truncated = truncate_after_dropping_last_messages(items.clone(), 1);
+ assert_eq!(
+ truncated,
+ vec![items[0].clone(), items[1].clone(), items[2].clone()]
+ );
+
+ let truncated2 = truncate_after_dropping_last_messages(items, 2);
+ assert!(truncated2.is_empty());
+ }
}
diff --git a/codex-rs/exec/src/event_processor_with_human_output.rs b/codex-rs/exec/src/event_processor_with_human_output.rs
index 9a562cbd4d..427f7f39ce 100644
--- a/codex-rs/exec/src/event_processor_with_human_output.rs
+++ b/codex-rs/exec/src/event_processor_with_human_output.rs
@@ -539,6 +539,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
}
},
EventMsg::ShutdownComplete => return CodexStatus::Shutdown,
+ EventMsg::ConversationHistory(_) => {}
}
CodexStatus::Running
}
diff --git a/codex-rs/mcp-server/src/codex_tool_runner.rs b/codex-rs/mcp-server/src/codex_tool_runner.rs
index 36845d895c..c6d65bc89d 100644
--- a/codex-rs/mcp-server/src/codex_tool_runner.rs
+++ b/codex-rs/mcp-server/src/codex_tool_runner.rs
@@ -275,6 +275,7 @@ async fn run_codex_tool_session_inner(
| EventMsg::GetHistoryEntryResponse(_)
| EventMsg::PlanUpdate(_)
| EventMsg::TurnAborted(_)
+ | EventMsg::ConversationHistory(_)
| EventMsg::ShutdownComplete => {
// For now, we do not do anything extra for these
// events. Note that
diff --git a/codex-rs/protocol/src/protocol.rs b/codex-rs/protocol/src/protocol.rs
index fbe052bf95..178eb2f845 100644
--- a/codex-rs/protocol/src/protocol.rs
+++ b/codex-rs/protocol/src/protocol.rs
@@ -22,6 +22,7 @@ use uuid::Uuid;
use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::message_history::HistoryEntry;
+use crate::models::ResponseItem;
use crate::parse_command::ParsedCommand;
use crate::plan_tool::UpdatePlanArgs;
@@ -137,6 +138,10 @@ pub enum Op {
/// Request a single history entry identified by `log_id` + `offset`.
GetHistoryEntryRequest { offset: usize, log_id: u64 },
+ /// Request the full in-memory conversation transcript for the current session.
+ /// Reply is delivered via `EventMsg::ConversationHistory`.
+ GetHistory,
+
/// Request the list of MCP tools available across all configured servers.
/// Reply is delivered via `EventMsg::McpListToolsResponse`.
ListMcpTools,
@@ -471,6 +476,8 @@ pub enum EventMsg {
/// Notification that the agent is shutting down.
ShutdownComplete,
+
+ ConversationHistory(ConversationHistoryResponseEvent),
}
// Individual event payload types matching each `EventMsg` variant.
@@ -651,6 +658,14 @@ impl McpToolCallEndEvent {
}
}
+/// Response payload for `Op::GetHistory` containing the current session's
+/// in-memory transcript.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ConversationHistoryResponseEvent {
+ pub conversation_id: Uuid,
+ pub entries: Vec<ResponseItem>,
+}
+
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ExecCommandBeginEvent {
/// Identifier so this can be paired with the ExecCommandEnd event.
diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs
index 0e4bd85664..54f66c0685 100644
--- a/codex-rs/tui/src/chatwidget.rs
+++ b/codex-rs/tui/src/chatwidget.rs
@@ -818,6 +818,7 @@ impl ChatWidget {
self.on_background_event(message)
}
EventMsg::StreamError(StreamErrorEvent { message }) => self.on_stream_error(message),
+ EventMsg::ConversationHistory(_) => {}
}
// Coalesce redraws: issue at most one after handling the event
if self.needs_redraw {
Review Comments
codex-rs/core/src/codex.rs
- Created: 2025-08-22 19:41:46 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294551602
@@ -169,21 +174,32 @@ impl Codex {
};
// Generate a unique ID for the lifetime of this Codex session.
- let (session, turn_context) =
- Session::new(configure_session, config.clone(), auth, tx_event.clone())
- .await
- .map_err(|e| {
- error!("Failed to create session: {e:#}");
- CodexErr::InternalAgentDied
- })?;
+ let (session, turn_context) = Session::new(
+ configure_session,
+ config.clone(),
+ auth,
+ tx_event.clone(),
+ initial_history,
+ )
+ .await
+ .map_err(|e| {
+ error!("Failed to create session: {e:#}");
+ CodexErr::InternalAgentDied
+ })?;
let session_id = session.session_id;
// This task will run until Op::Shutdown is received.
- tokio::spawn(submission_loop(session, turn_context, config, rx_sub));
+ tokio::spawn(submission_loop(
+ Arc::clone(&session),
Again, can you please prefer `session.clone()`? This appears to be the only file in the codebase that uses `Arc::clone()`.
- Created: 2025-08-22 19:44:54 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294561296
@@ -130,6 +130,7 @@ pub struct Codex {
next_id: AtomicU64,
tx_sub: Sender<Submission>,
rx_event: Receiver<Event>,
+ session: Arc<Session>,
This seems off to me: `Codex` is primarily designed to submit and receive events. This changes its responsibility in an unexpected way.
- Created: 2025-08-22 23:02:50 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294908529
@@ -169,21 +174,32 @@ impl Codex {
};
// Generate a unique ID for the lifetime of this Codex session.
- let (session, turn_context) =
- Session::new(configure_session, config.clone(), auth, tx_event.clone())
- .await
- .map_err(|e| {
- error!("Failed to create session: {e:#}");
- CodexErr::InternalAgentDied
- })?;
+ let (session, turn_context) = Session::new(
+ configure_session,
+ config.clone(),
+ auth,
+ tx_event.clone(),
+ initial_history,
+ )
+ .await
+ .map_err(|e| {
+ error!("Failed to create session: {e:#}");
+ CodexErr::InternalAgentDied
+ })?;
let session_id = session.session_id;
// This task will run until Op::Shutdown is received.
- tokio::spawn(submission_loop(session, turn_context, config, rx_sub));
+ tokio::spawn(submission_loop(
+ Arc::clone(&session),
still this please
- Created: 2025-08-22 23:04:48 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294909967
@@ -391,14 +400,16 @@ impl Session {
}
let rollout_result = match rollout_res {
Ok((session_id, maybe_saved, recorder)) => {
- let restored_items: Option<Vec<ResponseItem>> =
- maybe_saved.and_then(|saved_session| {
+ let restored_items: Option<Vec<ResponseItem>> = match initial_history {
FYI, instead of `match`, I would consider `items.or_else()`.
codex-rs/core/src/conversation_manager.rs
- Created: 2025-08-22 23:06:44 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294911201
@@ -59,7 +60,7 @@ impl ConversationManager {
let CodexSpawnOk {
codex,
session_id: conversation_id,
- } = Codex::spawn(config, auth_manager).await?;
+ } = Codex::spawn(config, auth_manager, None).await?;
Consider adding this above: `let initial_history = None;` and then passing `initial_history` instead of `None` here, just to be clear about what is being omitted.
- Created: 2025-08-22 23:10:31 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294916024
@@ -98,4 +99,142 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `drop_last_messages`
+ /// user/assistant messages from its transcript and starting a new
+ /// conversation with identical configuration (unless overridden by the
+ /// caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ drop_last_messages: usize,
+ config: Config,
+ items: Vec<ResponseItem>,
Ordering/naming feels a bit weird to me. Consider something like: `conversation_history: Vec<ResponseItem>, num_messages_to_drop: usize, config: Config`. Because the method is about a "conversation," I would make that the first arg, followed by parameters to modify that input (`num_messages_to_drop`), and then required-to-make-things-work-but-not-the-focus-of-the-method last (e.g., `config`).
- Created: 2025-08-22 23:10:52 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294916330
@@ -98,4 +99,142 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `drop_last_messages`
+ /// user/assistant messages from its transcript and starting a new
+ /// conversation with identical configuration (unless overridden by the
+ /// caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ drop_last_messages: usize,
+ config: Config,
+ items: Vec<ResponseItem>,
+ ) -> CodexResult<NewConversation> {
+ // Compute the prefix up to the cut point.
+ let fork_items = truncate_after_dropping_last_messages(items, drop_last_messages);
truncated_history?
- Created: 2025-08-22 23:12:17 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294917483
@@ -98,4 +99,142 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `drop_last_messages`
+ /// user/assistant messages from its transcript and starting a new
+ /// conversation with identical configuration (unless overridden by the
+ /// caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ drop_last_messages: usize,
+ config: Config,
+ items: Vec<ResponseItem>,
+ ) -> CodexResult<NewConversation> {
+ // Compute the prefix up to the cut point.
+ let fork_items = truncate_after_dropping_last_messages(items, drop_last_messages);
+
+ // Spawn a new conversation with the computed initial history.
+ let auth_manager = self.auth_manager.clone();
+ let CodexSpawnOk {
+ codex,
+ session_id: conversation_id,
+ } = Codex::spawn(config, auth_manager, Some(fork_items)).await?;
+
+ // The first event must be `SessionInitialized`. Validate and forward it
+ // to the caller so that they can display it in the conversation
+ // history.
+ let event = codex.next_event().await?;
Can we avoid the duplication with `spawn_conversation()` here?
- Created: 2025-08-22 23:13:39 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294918334
@@ -98,4 +99,142 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `drop_last_messages`
+ /// user/assistant messages from its transcript and starting a new
+ /// conversation with identical configuration (unless overridden by the
+ /// caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ drop_last_messages: usize,
+ config: Config,
+ items: Vec<ResponseItem>,
+ ) -> CodexResult<NewConversation> {
+ // Compute the prefix up to the cut point.
+ let fork_items = truncate_after_dropping_last_messages(items, drop_last_messages);
+
+ // Spawn a new conversation with the computed initial history.
+ let auth_manager = self.auth_manager.clone();
+ let CodexSpawnOk {
+ codex,
+ session_id: conversation_id,
+ } = Codex::spawn(config, auth_manager, Some(fork_items)).await?;
+
+ // The first event must be `SessionInitialized`. Validate and forward it
+ // to the caller so that they can display it in the conversation
+ // history.
+ let event = codex.next_event().await?;
+ let session_configured = match event {
+ Event {
+ id,
+ msg: EventMsg::SessionConfigured(session_configured),
+ } if id == INITIAL_SUBMIT_ID => session_configured,
+ _ => {
+ return Err(CodexErr::SessionConfiguredNotFirstEvent);
+ }
+ };
+
+ let conversation = Arc::new(CodexConversation::new(codex));
+ self.conversations
+ .write()
+ .await
+ .insert(conversation_id, conversation.clone());
+
+ Ok(NewConversation {
+ conversation_id,
+ conversation,
+ session_configured,
+ })
+ }
+}
+
+/// Return a prefix of `items` obtained by dropping the last `n` user messages
+/// and all items that follow them.
+fn truncate_after_dropping_last_messages(items: Vec<ResponseItem>, n: usize) -> Vec<ResponseItem> {
+ if n == 0 || items.is_empty() {
+ return items;
+ }
+
+ // Walk backwards counting only `user` Message items, find cut index.
+ let mut count = 0usize;
+ let mut cut_index = 0usize;
+ for (idx, item) in items.iter().enumerate().rev() {
+ if let ResponseItem::Message { role, .. } = item
+ && role == "user"
+ {
+ count += 1;
+ if count == n {
+ // Cut everything from this user message to the end.
+ cut_index = idx;
+ break;
+ }
+ }
+ }
+ if count < n {
+ // If fewer than n messages exist, drop everything.
+ return Vec::new();
+ }
+ items.into_iter().take(cut_index).collect()
if count < n { // If fewer than n messages exist, drop everything. Vec::new() } else { items.into_iter().take(cut_index).collect() }
- Created: 2025-08-22 23:14:25 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294918780
@@ -98,4 +99,142 @@ impl ConversationManager {
.cloned()
.ok_or_else(|| CodexErr::ConversationNotFound(conversation_id))
}
+
+ /// Fork an existing conversation by dropping the last `drop_last_messages`
+ /// user/assistant messages from its transcript and starting a new
+ /// conversation with identical configuration (unless overridden by the
+ /// caller's `config`). The new conversation will have a fresh id.
+ pub async fn fork_conversation(
+ &self,
+ drop_last_messages: usize,
+ config: Config,
+ items: Vec<ResponseItem>,
+ ) -> CodexResult<NewConversation> {
+ // Compute the prefix up to the cut point.
+ let fork_items = truncate_after_dropping_last_messages(items, drop_last_messages);
+
+ // Spawn a new conversation with the computed initial history.
+ let auth_manager = self.auth_manager.clone();
+ let CodexSpawnOk {
+ codex,
+ session_id: conversation_id,
+ } = Codex::spawn(config, auth_manager, Some(fork_items)).await?;
+
+ // The first event must be `SessionInitialized`. Validate and forward it
+ // to the caller so that they can display it in the conversation
+ // history.
+ let event = codex.next_event().await?;
+ let session_configured = match event {
+ Event {
+ id,
+ msg: EventMsg::SessionConfigured(session_configured),
+ } if id == INITIAL_SUBMIT_ID => session_configured,
+ _ => {
+ return Err(CodexErr::SessionConfiguredNotFirstEvent);
+ }
+ };
+
+ let conversation = Arc::new(CodexConversation::new(codex));
+ self.conversations
+ .write()
+ .await
+ .insert(conversation_id, conversation.clone());
+
+ Ok(NewConversation {
+ conversation_id,
+ conversation,
+ session_configured,
+ })
+ }
+}
+
+/// Return a prefix of `items` obtained by dropping the last `n` user messages
+/// and all items that follow them.
+fn truncate_after_dropping_last_messages(items: Vec<ResponseItem>, n: usize) -> Vec<ResponseItem> {
+ if n == 0 || items.is_empty() {
+ return items;
+ }
+
+ // Walk backwards counting only `user` Message items, find cut index.
+ let mut count = 0usize;
+ let mut cut_index = 0usize;
+ for (idx, item) in items.iter().enumerate().rev() {
+ if let ResponseItem::Message { role, .. } = item
+ && role == "user"
+ {
+ count += 1;
+ if count == n {
+ // Cut everything from this user message to the end.
+ cut_index = idx;
+ break;
+ }
+ }
+ }
+ if count < n {
+ // If fewer than n messages exist, drop everything.
+ return Vec::new();
+ }
+ items.into_iter().take(cut_index).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use codex_protocol::models::ContentItem;
+ use codex_protocol::models::ReasoningItemReasoningSummary;
+ use codex_protocol::models::ResponseItem;
+
+ fn user_msg(text: &str) -> ResponseItem {
+ ResponseItem::Message {
+ id: None,
+ role: "user".to_string(),
+ content: vec![ContentItem::OutputText {
+ text: text.to_string(),
+ }],
+ }
+ }
+ fn assistant_msg(text: &str) -> ResponseItem {
+ ResponseItem::Message {
+ id: None,
+ role: "assistant".to_string(),
+ content: vec![ContentItem::OutputText {
+ text: text.to_string(),
+ }],
+ }
+ }
+
+ #[test]
+ fn drops_from_last_user_only() {
nice test!
codex-rs/protocol/src/protocol.rs
- Created: 2025-08-22 23:15:53 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294919660
@@ -147,6 +151,8 @@ pub enum Op {
Compact,
/// Request to shut down codex instance.
Shutdown,
+
+ GetHistory,
Docstring.
Also, why not list this closer to `GetHistoryEntryRequest`?
- Created: 2025-08-22 23:16:28 UTC | Link: https://github.com/openai/codex/pull/2575#discussion_r2294919990
@@ -651,6 +659,12 @@ impl McpToolCallEndEvent {
}
}
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ConversationHistoryEvent {
Is this an "event" or a "response"?