Compare commits

...

37 Commits

Author SHA1 Message Date
Charles Cunningham
8c3c405ea7 Update app-server schema fixtures after protocol changes 2026-02-02 22:29:41 -08:00
Charles Cunningham
19498742cd Skip first-turn collaboration update after initial seeding 2026-02-02 22:29:41 -08:00
Charles Cunningham
3dd4a613cb Sync base mode when collaboration UI disabled 2026-02-02 22:29:41 -08:00
Charles Cunningham
18bda76b99 Keep custom base when applying rollback mode 2026-02-02 22:29:41 -08:00
Charles Cunningham
e45544fa93 Defer override context updates to next user turn 2026-02-02 22:29:41 -08:00
Charles Cunningham
7239785077 Left-pad turn context history after rollback 2026-02-02 22:29:41 -08:00
Charles Cunningham
d5652614f6 Fix redundant clone in collaboration mode tests 2026-02-02 22:29:41 -08:00
Charles Cunningham
bea06a5895 Restore mode sync context after rebase 2026-02-02 22:29:41 -08:00
Charles Cunningham
96f94b02be Count only real user turns for turn-context sync 2026-02-02 22:29:41 -08:00
Charles Cunningham
cf50fbf2b4 Fix TS optional-nullable export and backtrack clippy lint 2026-02-02 22:29:41 -08:00
Charles Cunningham
0174a3db48 Simplify backtrack mode sync and generalize pending sync naming 2026-02-02 22:29:40 -08:00
Charles Cunningham
4b7b5cd7cc Sync rollback model-visible mode state across core and tui 2026-02-02 22:29:40 -08:00
Charles Cunningham
c6dca12acf Handle compaction in turn context rebuild 2026-02-02 22:29:40 -08:00
Charles Cunningham
a569296f3d Preserve collaboration inject flag on overrides 2026-02-02 22:29:40 -08:00
Charles Cunningham
b0e5300c7e Fix SessionState import 2026-02-02 22:29:40 -08:00
Charles Cunningham
a580669627 Clarify rollback mode flow 2026-02-02 22:29:40 -08:00
Charles Cunningham
6e1d0fd263 doc 2026-02-02 22:29:40 -08:00
Charles Cunningham
a70e23ac5a Document rollback turn context helper 2026-02-02 22:29:40 -08:00
Charles Cunningham
ec06fccbb7 Extract rollback turn context helper 2026-02-02 22:29:40 -08:00
Charles Cunningham
4c46f80eb4 Restore mcp cancel formatting 2026-02-02 22:29:40 -08:00
Charles Cunningham
954b0884d5 Remove request_user_input call_id plumbing 2026-02-02 22:29:40 -08:00
Charles Cunningham
7af9dce46b Revert request_user_input serialization change 2026-02-02 22:29:40 -08:00
Charles Cunningham
466b4b704f Document turn context history reset 2026-02-02 22:29:40 -08:00
Charles Cunningham
b9fc83dd06 Document turn context placeholders 2026-02-02 22:29:40 -08:00
Charles Cunningham
5b4550d7d7 Rename force collaboration flag 2026-02-02 22:29:40 -08:00
Charles Cunningham
98179dd3ff Avoid stale collaboration mode on rollback 2026-02-02 22:29:39 -08:00
Charles Cunningham
37ef9a1904 Fix rollback lock and propagate user input call_id 2026-02-02 22:29:39 -08:00
Charles Cunningham
a057ee0d21 Remove collaboration mode fallback parsing 2026-02-02 22:29:39 -08:00
Charles Cunningham
e91826feb6 Test rollback uses turn context history 2026-02-02 22:29:39 -08:00
Charles Cunningham
7200e1e47a Use TurnContextItem for rollback modes 2026-02-02 22:29:39 -08:00
Charles Cunningham
883e57d10e Revert "Persist collaboration mode masks in history"
This reverts commit bc9757c41ec3701e196d6b6da1a2e894ec3e19ad.
2026-02-02 22:29:39 -08:00
Charles Cunningham
3341646eb8 Persist collaboration mode masks in history 2026-02-02 22:29:39 -08:00
Charles Cunningham
ef8575c24a Drop redundant clones in backtrack tests 2026-02-02 22:29:39 -08:00
Charles Cunningham
a1a0a2be6f Restore collaboration mode on backtrack failure 2026-02-02 22:29:39 -08:00
Charles Cunningham
0efb5292f3 Add backtrack collaboration mode tests 2026-02-02 22:29:39 -08:00
Charles Cunningham
e39c9e3b64 Sync collaboration mode on rollback 2026-02-02 22:29:39 -08:00
Charles Cunningham
f30151cc18 Fix backtrack to restore collaboration mode and keep developer instructions in sync 2026-02-02 22:29:39 -08:00
20 changed files with 1966 additions and 132 deletions

View File

@@ -310,6 +310,22 @@
}
]
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"ContentItem": {
"oneOf": [
{
@@ -521,6 +537,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -3129,6 +3156,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -4253,6 +4295,34 @@
}
]
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {
@@ -5048,6 +5118,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",

View File

@@ -630,6 +630,22 @@
],
"type": "string"
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"CommandAction": {
"oneOf": [
{
@@ -1099,6 +1115,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -3908,6 +3935,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -5370,6 +5412,34 @@
}
]
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {

View File

@@ -1893,6 +1893,22 @@
}
]
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"CommandExecutionApprovalDecision": {
"oneOf": [
{
@@ -2361,6 +2377,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -5857,6 +5884,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -8366,6 +8408,34 @@
"title": "SetDefaultModelResponse",
"type": "object"
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {

View File

@@ -310,6 +310,22 @@
}
]
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"ContentItem": {
"oneOf": [
{
@@ -521,6 +537,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -3129,6 +3156,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -4253,6 +4295,34 @@
}
]
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {

View File

@@ -310,6 +310,22 @@
}
]
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"ContentItem": {
"oneOf": [
{
@@ -521,6 +537,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -3129,6 +3156,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -4253,6 +4295,34 @@
}
]
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {

View File

@@ -310,6 +310,22 @@
}
]
},
"CollaborationMode": {
"description": "Collaboration mode for a Codex session.",
"properties": {
"mode": {
"$ref": "#/definitions/ModeKind"
},
"settings": {
"$ref": "#/definitions/Settings"
}
},
"required": [
"mode",
"settings"
],
"type": "object"
},
"ContentItem": {
"oneOf": [
{
@@ -521,6 +537,17 @@
{
"description": "Conversation history was rolled back by dropping the last N user turns.",
"properties": {
"model_visible_state": {
"anyOf": [
{
"$ref": "#/definitions/ModelVisibleState"
},
{
"type": "null"
}
],
"description": "Model-visible session state after rollback.\n\nThis lets clients synchronize UI mode/model indicators with core after history rewind. Optional for backward compatibility with older persisted events."
},
"num_turns": {
"description": "Number of user turns that were removed from context.",
"format": "uint32",
@@ -3129,6 +3156,21 @@
],
"type": "string"
},
"ModelVisibleState": {
"properties": {
"collaboration_mode": {
"anyOf": [
{
"$ref": "#/definitions/CollaborationMode"
},
{
"type": "null"
}
]
}
},
"type": "object"
},
"NetworkAccess": {
"description": "Represents whether outbound network access is available to the agent.",
"enum": [
@@ -4253,6 +4295,34 @@
}
]
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"model": {
"type": "string"
},
"reasoning_effort": {
"anyOf": [
{
"$ref": "#/definitions/ReasoningEffort"
},
{
"type": "null"
}
]
}
},
"required": [
"model"
],
"type": "object"
},
"SkillDependencies": {
"properties": {
"tools": {

View File

@@ -0,0 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CollaborationMode } from "./CollaborationMode";
export type ModelVisibleState = { collaboration_mode?: CollaborationMode, };

View File

@@ -1,9 +1,17 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ModelVisibleState } from "./ModelVisibleState";
export type ThreadRolledBackEvent = {
/**
* Number of user turns that were removed from context.
*/
num_turns: number, };
num_turns: number,
/**
* Model-visible session state after rollback.
*
* This lets clients synchronize UI mode/model indicators with core after history rewind.
* Optional for backward compatibility with older persisted events.
*/
model_visible_state?: ModelVisibleState, };

View File

@@ -118,6 +118,7 @@ export type { McpToolCallBeginEvent } from "./McpToolCallBeginEvent";
export type { McpToolCallEndEvent } from "./McpToolCallEndEvent";
export type { MessagePhase } from "./MessagePhase";
export type { ModeKind } from "./ModeKind";
export type { ModelVisibleState } from "./ModelVisibleState";
export type { NetworkAccess } from "./NetworkAccess";
export type { NewConversationParams } from "./NewConversationParams";
export type { NewConversationResponse } from "./NewConversationResponse";

View File

@@ -490,7 +490,10 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A2".into(),
}),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 1,
model_visible_state: None,
}),
EventMsg::UserMessage(UserMessageEvent {
message: "Third".into(),
images: None,
@@ -565,7 +568,10 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A2".into(),
}),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 99,
model_visible_state: None,
}),
];
let turns = build_turns_from_event_msgs(&events);

File diff suppressed because it is too large Load Diff

View File

@@ -173,6 +173,7 @@ mod tests {
RolloutItem::ResponseItem(assistant_msg("a2")),
RolloutItem::EventMsg(EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 1,
model_visible_state: None,
})),
RolloutItem::ResponseItem(user_msg("u3")),
RolloutItem::ResponseItem(assistant_msg("a3")),

View File

@@ -3,6 +3,7 @@ mod session;
mod turn;
pub(crate) use service::SessionServices;
pub(crate) use session::PendingModelVisibleStateSync;
pub(crate) use session::SessionState;
pub(crate) use turn::ActiveTurn;
pub(crate) use turn::RunningTask;

View File

@@ -1,20 +1,38 @@
//! Session-wide mutable state.
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::ModelVisibleState;
use codex_protocol::protocol::TurnContextItem;
use std::collections::HashMap;
use std::collections::HashSet;
use crate::codex::SessionConfiguration;
use crate::context_manager::ContextManager;
use crate::context_manager::is_user_turn_boundary;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::TokenUsage;
use crate::protocol::TokenUsageInfo;
use crate::truncate::TruncationPolicy;
/// One-shot synchronization state for model-visible settings after rollback/backtrack.
///
/// Stored on `SessionState` and consumed (reset to `None`) the first time a
/// model-visible update is actually built; see
/// `take_pending_model_visible_state_sync`.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub(crate) enum PendingModelVisibleStateSync {
    /// No synchronization is pending.
    #[default]
    None,
    /// Compare the next submitted model-visible state against this rollback snapshot.
    Snapshot(ModelVisibleState),
    /// Snapshot is missing/unreliable, so emit all tracked model-visible updates once.
    ForceEmitAll,
}
/// Persistent, session-scoped state previously stored directly on `Session`.
pub(crate) struct SessionState {
pub(crate) session_configuration: SessionConfiguration,
pub(crate) history: ContextManager,
pub(crate) turn_context_history: Vec<Option<TurnContextItem>>,
/// Pending one-shot sync for model-visible state after rollback/backtrack.
pub(crate) pending_model_visible_state_sync: PendingModelVisibleStateSync,
pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
pub(crate) server_reasoning_included: bool,
pub(crate) dependency_env: HashMap<String, String>,
@@ -33,6 +51,8 @@ impl SessionState {
Self {
session_configuration,
history,
turn_context_history: Vec::new(),
pending_model_visible_state_sync: PendingModelVisibleStateSync::None,
latest_rate_limits: None,
server_reasoning_included: false,
dependency_env: HashMap::new(),
@@ -50,6 +70,69 @@ impl SessionState {
self.history.record_items(items, policy);
}
/// Push a `None` placeholder into `turn_context_history` for every user-turn
/// boundary found in `items`.
///
/// This keeps the history aligned with user turns even when no
/// `TurnContextItem` exists yet (e.g., legacy rollouts or model-sourced user
/// messages), so later rollback/backtrack logic can safely index the latest
/// user turn and optionally fill the slot with a `TurnContextItem`.
pub(crate) fn record_user_turn_placeholders(&mut self, items: &[ResponseItem]) {
    let placeholders = items
        .iter()
        .filter(|&item| is_user_turn_boundary(item))
        .map(|_| None);
    self.turn_context_history.extend(placeholders);
}
pub(crate) fn set_last_turn_context(&mut self, turn_context: TurnContextItem) {
if let Some(last) = self.turn_context_history.last_mut()
&& last.is_none()
{
*last = Some(turn_context);
return;
}
self.turn_context_history.push(Some(turn_context));
}
/// Trim or left-pad `turn_context_history` to align with the current number of user turns.
///
/// When history is replaced (e.g., during replay/rollback), we need the last N entries
/// corresponding to the surviving user turns. If the stored list is shorter than the
/// user-turn count, we pad with `None` so indices remain aligned.
pub(crate) fn reset_turn_context_history(&mut self, user_turn_count: usize) {
let existing_len = self.turn_context_history.len();
if existing_len >= user_turn_count {
let start = existing_len - user_turn_count;
self.turn_context_history = self.turn_context_history.split_off(start);
} else {
let mut new_history = Vec::with_capacity(user_turn_count);
let padding = user_turn_count - existing_len;
new_history.resize_with(padding, || None);
new_history.append(&mut self.turn_context_history);
self.turn_context_history = new_history;
}
}
/// Replace the per-user-turn context history wholesale (e.g., when restoring
/// state during session replay or after a rollback rebuild).
pub(crate) fn set_turn_context_history(
    &mut self,
    turn_context_history: Vec<Option<TurnContextItem>>,
) {
    self.turn_context_history = turn_context_history;
}
/// Consume the pending model-visible state sync, but only when the caller is
/// actually building a model-visible update.
///
/// If the next operation does not carry any model-visible update, the pending
/// sync is left in place for a later turn that does.
pub(crate) fn take_pending_model_visible_state_sync(
    &mut self,
    has_model_visible_state_update: bool,
) -> PendingModelVisibleStateSync {
    // No model-visible update in flight: keep the pending sync untouched.
    if !has_model_visible_state_update {
        return PendingModelVisibleStateSync::None;
    }
    // One-shot: taking resets the stored value to `None` (the enum default).
    std::mem::take(&mut self.pending_model_visible_state_sync)
}
/// Return an independent copy of the conversation history.
///
/// NOTE(review): cloning a `ContextManager` presumably copies all recorded
/// items and may be costly for long sessions — confirm before calling in hot paths.
pub(crate) fn clone_history(&self) -> ContextManager {
    self.history.clone()
}

View File

@@ -1979,6 +1979,20 @@ pub struct UndoCompletedEvent {
// Event payload notifying clients that conversation history was rolled back.
// (Plain `//` comment on purpose: a `///` doc would change the schemars/ts-rs
// generated schema description.)
pub struct ThreadRolledBackEvent {
    /// Number of user turns that were removed from context.
    pub num_turns: u32,
    /// Model-visible session state after rollback.
    ///
    /// This lets clients synchronize UI mode/model indicators with core after history rewind.
    /// Optional for backward compatibility with older persisted events.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub model_visible_state: Option<ModelVisibleState>,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, JsonSchema, TS)]
// Snapshot of session state that the model can observe. Every field is
// optional so events persisted before a field existed still deserialize.
// (Plain `//` comment on purpose: a `///` doc would change the generated schema.)
pub struct ModelVisibleState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub collaboration_mode: Option<CollaborationMode>,
}
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]

View File

@@ -2524,13 +2524,21 @@ mod tests {
use codex_core::config::ConfigOverrides;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::CodexErrorInfo;
use codex_core::protocol::ErrorEvent;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::ModelVisibleState;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::SessionSource;
use codex_core::protocol::ThreadRolledBackEvent;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::user_input::TextElement;
use insta::assert_snapshot;
use pretty_assertions::assert_eq;
@@ -2705,6 +2713,50 @@ mod tests {
)
}
// Drain already-queued ops until the first `Op::UserTurn`; panic if the queue
// empties or the channel closes before one shows up.
fn next_user_turn(op_rx: &mut tokio::sync::mpsc::UnboundedReceiver<Op>) -> Op {
    loop {
        let op = match op_rx.try_recv() {
            Ok(op) => op,
            Err(TryRecvError::Empty) => panic!("expected Op::UserTurn but queue was empty"),
            Err(TryRecvError::Disconnected) => {
                panic!("expected Op::UserTurn but channel closed")
            }
        };
        if matches!(op, Op::UserTurn { .. }) {
            return op;
        }
        // Anything else (e.g. interrupts, overrides) is skipped.
    }
}
// Mask that switches only the mode kind to Plan, leaving model, reasoning
// effort, and developer instructions untouched.
fn plan_mask() -> CollaborationModeMask {
CollaborationModeMask {
name: "Plan".to_string(),
mode: Some(ModeKind::Plan),
model: None,
reasoning_effort: None,
developer_instructions: None,
}
}
// Mask that switches only the mode kind to Code, leaving model, reasoning
// effort, and developer instructions untouched.
fn code_mask() -> CollaborationModeMask {
CollaborationModeMask {
name: "Code".to_string(),
mode: Some(ModeKind::Code),
model: None,
reasoning_effort: None,
developer_instructions: None,
}
}
// Build a full `CollaborationMode` for the given kind with fixed test
// settings (model "gpt-test", no effort/instructions overrides).
fn collaboration_mode(mode: ModeKind) -> CollaborationMode {
CollaborationMode {
mode,
settings: Settings {
model: "gpt-test".to_string(),
reasoning_effort: None,
developer_instructions: None,
},
}
}
fn test_otel_manager(config: &Config, model: &str) -> OtelManager {
let model_info = ModelsManager::construct_model_info_offline(model, config);
OtelManager::new(
@@ -2911,6 +2963,7 @@ mod tests {
message: text.to_string(),
text_elements,
local_image_paths,
collaboration_mode: None,
}) as Arc<dyn HistoryCell>
};
let agent_cell = |text: &str| -> Arc<dyn HistoryCell> {
@@ -3016,6 +3069,427 @@ mod tests {
assert_eq!(rollback_turns, Some(1));
}
// Scenario: after backtracking to a Plan-mode turn, the core-reported
// model-visible state (Plan) must win over the locally-set Code mask and be
// sent with the next submitted user turn.
#[tokio::test]
async fn backtrack_to_plan_preserves_plan_mode_on_submit() {
let (mut app, _app_event_rx, mut op_rx) = make_test_app_with_channels().await;
app.chat_widget
.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask = plan_mask();
let code_mask = code_mask();
let thread_id = ThreadId::new();
// Configure a session so the widget has a thread to backtrack within.
app.chat_widget.handle_codex_event(Event {
id: String::new(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: Some(PathBuf::new()),
}),
});
// Active mask is Code, but the first transcript turn was made in Plan mode.
app.chat_widget.set_collaboration_mask(code_mask.clone());
app.transcript_cells = vec![
Arc::new(UserHistoryCell {
message: "plan".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(plan_mask),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(
vec![Line::from("plan response")],
true,
)) as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "code".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(code_mask),
}) as Arc<dyn HistoryCell>,
];
// Prime a backtrack to the first (nth = 0) user message and apply it.
app.backtrack.base_id = Some(thread_id);
app.backtrack.primed = true;
app.backtrack.nth_user_message = 0;
let selection = app
.confirm_backtrack_from_main()
.expect("backtrack selection");
app.apply_backtrack_rollback(selection);
// Before core confirms, the local mask (Code) is still active.
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Code
);
// Core reports authoritative post-rollback state: Plan.
app.handle_backtrack_event(&EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 1,
model_visible_state: Some(ModelVisibleState {
collaboration_mode: Some(collaboration_mode(ModeKind::Plan)),
}),
}));
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Plan
);
// Submitting a follow-up must carry the restored Plan mode.
app.chat_widget
.set_composer_text("follow-up".to_string(), Vec::new(), Vec::new());
app.chat_widget
.handle_key_event(KeyEvent::from(KeyCode::Enter));
let op = next_user_turn(&mut op_rx);
let Op::UserTurn {
collaboration_mode, ..
} = op
else {
panic!("expected Op::UserTurn");
};
let collaboration_mode = collaboration_mode.expect("expected collaboration mode");
assert_eq!(collaboration_mode.mode, ModeKind::Plan);
}
// Scenario: after a backtrack restores Plan mode from core, the user can
// still cycle modes with Shift+Tab, and the next submit must carry the
// user-chosen Code mode rather than the restored Plan mode.
#[tokio::test]
async fn backtrack_plan_then_shift_tab_sends_code_mode() {
let (mut app, _app_event_rx, mut op_rx) = make_test_app_with_channels().await;
app.chat_widget
.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask = plan_mask();
let code_mask = code_mask();
let thread_id = ThreadId::new();
// Configure a session so the widget has a thread to backtrack within.
app.chat_widget.handle_codex_event(Event {
id: String::new(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: Some(PathBuf::new()),
}),
});
// Active mask is Code; first transcript turn was made in Plan mode.
app.chat_widget.set_collaboration_mask(code_mask.clone());
app.transcript_cells = vec![
Arc::new(UserHistoryCell {
message: "plan".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(plan_mask),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(
vec![Line::from("plan response")],
true,
)) as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "code".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(code_mask),
}) as Arc<dyn HistoryCell>,
];
// Backtrack to the first user message and apply the rollback.
app.backtrack.base_id = Some(thread_id);
app.backtrack.primed = true;
app.backtrack.nth_user_message = 0;
let selection = app
.confirm_backtrack_from_main()
.expect("backtrack selection");
app.apply_backtrack_rollback(selection);
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Code
);
// Core restores Plan as the authoritative post-rollback mode.
app.handle_backtrack_event(&EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 1,
model_visible_state: Some(ModelVisibleState {
collaboration_mode: Some(collaboration_mode(ModeKind::Plan)),
}),
}));
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Plan
);
// User explicitly cycles back to Code via Shift+Tab.
app.chat_widget
.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Code
);
// Submit: the user's explicit Code choice must win.
app.chat_widget
.set_composer_text("follow-up".to_string(), Vec::new(), Vec::new());
app.chat_widget
.handle_key_event(KeyEvent::from(KeyCode::Enter));
let op = next_user_turn(&mut op_rx);
let Op::UserTurn {
collaboration_mode, ..
} = op
else {
panic!("expected Op::UserTurn");
};
let collaboration_mode = collaboration_mode.expect("expected collaboration mode");
assert_eq!(collaboration_mode.mode, ModeKind::Code);
}
// Scenario: when the backtracked-to turn has no recorded collaboration mode
// (and core sends no ThreadRolledBack state), the currently active mode must
// be preserved and sent with the next submit.
#[tokio::test]
async fn backtrack_missing_mode_data_preserves_current_mode() {
let (mut app, _app_event_rx, mut op_rx) = make_test_app_with_channels().await;
app.chat_widget
.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask = plan_mask();
let thread_id = ThreadId::new();
// Configure a session so the widget has a thread to backtrack within.
app.chat_widget.handle_codex_event(Event {
id: String::new(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: Some(PathBuf::new()),
}),
});
// Active mask is Plan; the first transcript turn carries no mode data.
app.chat_widget.set_collaboration_mask(plan_mask.clone());
app.transcript_cells = vec![
Arc::new(UserHistoryCell {
message: "custom".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(
vec![Line::from("custom response")],
true,
)) as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "plan".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(plan_mask),
}) as Arc<dyn HistoryCell>,
];
// Backtrack to the mode-less first user message.
app.backtrack.base_id = Some(thread_id);
app.backtrack.primed = true;
app.backtrack.nth_user_message = 0;
let selection = app
.confirm_backtrack_from_main()
.expect("backtrack selection");
app.apply_backtrack_rollback(selection);
// No mode data to restore: current Plan mode stays active.
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Plan
);
// Submit: the preserved Plan mode must be sent.
app.chat_widget
.set_composer_text("follow-up".to_string(), Vec::new(), Vec::new());
app.chat_widget
.handle_key_event(KeyEvent::from(KeyCode::Enter));
let op = next_user_turn(&mut op_rx);
let Op::UserTurn {
collaboration_mode, ..
} = op
else {
panic!("expected Op::UserTurn");
};
let collaboration_mode = collaboration_mode.expect("expected collaboration mode");
assert_eq!(collaboration_mode.mode, ModeKind::Plan);
}
// Scenario: a successful rollback applies core's authoritative
// model-visible mode (Plan) over the local mask (Code) and clears the
// pending-rollback bookkeeping.
#[tokio::test]
async fn backtrack_success_applies_authoritative_mode_from_core() {
let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
app.chat_widget
.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask = plan_mask();
let code_mask = code_mask();
let thread_id = ThreadId::new();
// Configure a session so the widget has a thread to backtrack within.
app.chat_widget.handle_codex_event(Event {
id: String::new(),
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: thread_id,
forked_from_id: None,
thread_name: None,
model: "gpt-test".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: None,
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: Some(PathBuf::new()),
}),
});
// Active mask is Code; first transcript turn was made in Plan mode.
app.chat_widget.set_collaboration_mask(code_mask.clone());
app.transcript_cells = vec![
Arc::new(UserHistoryCell {
message: "plan".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(plan_mask),
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(
vec![Line::from("plan response")],
true,
)) as Arc<dyn HistoryCell>,
Arc::new(UserHistoryCell {
message: "code".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: Some(code_mask),
}) as Arc<dyn HistoryCell>,
];
// Backtrack to the first user message and apply the rollback.
app.backtrack.base_id = Some(thread_id);
app.backtrack.primed = true;
app.backtrack.nth_user_message = 0;
let selection = app
.confirm_backtrack_from_main()
.expect("backtrack selection");
app.apply_backtrack_rollback(selection);
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Code
);
// Core confirms the rollback with authoritative Plan state.
app.handle_backtrack_event(&EventMsg::ThreadRolledBack(ThreadRolledBackEvent {
num_turns: 1,
model_visible_state: Some(ModelVisibleState {
collaboration_mode: Some(collaboration_mode(ModeKind::Plan)),
}),
}));
assert_eq!(
app.chat_widget.active_collaboration_mode_kind(),
ModeKind::Plan
);
// The completed rollback must no longer be pending.
assert!(app.backtrack.pending_rollback.is_none());
}
#[tokio::test]
async fn backtrack_failed_keeps_current_collaboration_mode() {
    let (mut app, _app_event_rx, _op_rx) = make_test_app_with_channels().await;
    app.chat_widget
        .set_feature_enabled(Feature::CollaborationModes, true);
    let plan = plan_mask();
    let code = code_mask();
    let session_id = ThreadId::new();

    // Configure a session so the backtrack machinery has a live thread id.
    app.chat_widget.handle_codex_event(Event {
        id: String::new(),
        msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
            session_id,
            forked_from_id: None,
            thread_name: None,
            model: "gpt-test".to_string(),
            model_provider_id: "test-provider".to_string(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::ReadOnly,
            cwd: PathBuf::from("/home/user/project"),
            reasoning_effort: None,
            history_log_id: 0,
            history_entry_count: 0,
            initial_messages: None,
            rollout_path: Some(PathBuf::new()),
        }),
    });
    app.chat_widget.set_collaboration_mask(code.clone());

    // Transcript: Plan-mode user turn, agent reply, Code-mode user turn.
    let user_cell = |message: &str, mask| {
        Arc::new(UserHistoryCell {
            message: message.to_string(),
            text_elements: Vec::new(),
            local_image_paths: Vec::new(),
            collaboration_mode: mask,
        }) as Arc<dyn HistoryCell>
    };
    app.transcript_cells = vec![
        user_cell("plan", Some(plan)),
        Arc::new(AgentMessageCell::new(
            vec![Line::from("plan response")],
            true,
        )) as Arc<dyn HistoryCell>,
        user_cell("code", Some(code)),
    ];

    // Prime and confirm a backtrack onto the first user message.
    app.backtrack.base_id = Some(session_id);
    app.backtrack.primed = true;
    app.backtrack.nth_user_message = 0;
    let rollback = app
        .confirm_backtrack_from_main()
        .expect("backtrack selection");
    app.apply_backtrack_rollback(rollback);
    assert_eq!(
        app.chat_widget.active_collaboration_mode_kind(),
        ModeKind::Code
    );

    // Core rejects the rollback: the mode must stay as-is and the pending
    // guard must be cleared so the user can retry.
    app.handle_backtrack_event(&EventMsg::Error(ErrorEvent {
        message: "rollback rejected".to_string(),
        codex_error_info: Some(CodexErrorInfo::ThreadRollbackFailed),
    }));
    assert_eq!(
        app.chat_widget.active_collaboration_mode_kind(),
        ModeKind::Code
    );
    assert!(app.backtrack.pending_rollback.is_none());
}
#[tokio::test]
async fn new_session_requests_shutdown_for_previous_conversation() {
let (mut app, mut app_event_rx, mut op_rx) = make_test_app_with_channels().await;

View File

@@ -37,6 +37,7 @@ use codex_core::protocol::CodexErrorInfo;
use codex_core::protocol::ErrorEvent;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_core::protocol::ThreadRolledBackEvent;
use codex_protocol::ThreadId;
use codex_protocol::user_input::TextElement;
use color_eyre::eyre::Result;
@@ -453,13 +454,14 @@ impl App {
pub(crate) fn handle_backtrack_event(&mut self, event: &EventMsg) {
match event {
EventMsg::ThreadRolledBack(_) => self.finish_pending_backtrack(),
EventMsg::ThreadRolledBack(rollback) => self.finish_pending_backtrack(rollback),
EventMsg::Error(ErrorEvent {
codex_error_info: Some(CodexErrorInfo::ThreadRollbackFailed),
..
}) => {
// Core rejected the rollback; clear the guard so the user can retry.
self.backtrack.pending_rollback = None;
let Some(_pending) = self.backtrack.pending_rollback.take() else {
return;
};
}
_ => {}
}
@@ -469,7 +471,7 @@ impl App {
///
/// We ignore events that do not correspond to the currently active thread to avoid applying
/// stale updates after a session switch.
fn finish_pending_backtrack(&mut self) {
fn finish_pending_backtrack(&mut self, rollback: &ThreadRolledBackEvent) {
let Some(pending) = self.backtrack.pending_rollback.take() else {
return;
};
@@ -477,6 +479,12 @@ impl App {
// Ignore rollbacks targeting a prior thread.
return;
}
if let Some(model_visible_state) = rollback.model_visible_state.as_ref() {
self.chat_widget
.apply_model_visible_state(model_visible_state);
}
self.trim_transcript_for_backtrack(pending.selection.nth_user_message);
self.backtrack_render_pending = true;
}
@@ -492,6 +500,7 @@ impl App {
.and_then(|idx| self.transcript_cells.get(idx))
.and_then(|cell| cell.as_any().downcast_ref::<UserHistoryCell>())
.map(|cell| {
let _historical_mode = cell.collaboration_mode.as_ref();
(
cell.message.clone(),
cell.text_elements.clone(),
@@ -574,6 +583,7 @@ mod tests {
message: "first user".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("assistant")], true))
as Arc<dyn HistoryCell>,
@@ -592,6 +602,7 @@ mod tests {
message: "first".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("after")], false))
as Arc<dyn HistoryCell>,
@@ -622,6 +633,7 @@ mod tests {
message: "first".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("between")], false))
as Arc<dyn HistoryCell>,
@@ -629,6 +641,7 @@ mod tests {
message: "second".to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
}) as Arc<dyn HistoryCell>,
Arc::new(AgentMessageCell::new(vec![Line::from("tail")], false))
as Arc<dyn HistoryCell>,

View File

@@ -1459,7 +1459,7 @@ mod tests {
overlay.submit_answers();
let event = rx.try_recv().expect("expected AppEvent");
let AppEvent::CodexOp(Op::UserInputAnswer { id, response, .. }) = event else {
let AppEvent::CodexOp(Op::UserInputAnswer { id, response }) = event else {
panic!("expected UserInputAnswer");
};
assert_eq!(id, "turn-1");

View File

@@ -69,6 +69,7 @@ use codex_core::protocol::McpStartupStatus;
use codex_core::protocol::McpStartupUpdateEvent;
use codex_core::protocol::McpToolCallBeginEvent;
use codex_core::protocol::McpToolCallEndEvent;
use codex_core::protocol::ModelVisibleState;
use codex_core::protocol::Op;
use codex_core::protocol::PatchApplyBeginEvent;
use codex_core::protocol::RateLimitSnapshot;
@@ -3350,6 +3351,7 @@ impl ChatWidget {
text,
text_elements,
local_image_paths,
self.active_collaboration_mask.clone(),
));
}
@@ -3639,6 +3641,7 @@ impl ChatWidget {
event.message,
event.text_elements,
event.local_images,
None,
));
}
@@ -5513,6 +5516,64 @@ impl ChatWidget {
self.request_redraw();
}
/// Synchronize collaboration-mode UI state from core's model-visible snapshot.
///
/// This is primarily used after rollback/backtrack so the composer mode and model display
/// reflect core's authoritative session state.
pub(crate) fn apply_model_visible_state(&mut self, state: &ModelVisibleState) {
let Some(collaboration_mode) = state.collaboration_mode.clone() else {
return;
};
if collaboration_mode.mode == ModeKind::Custom {
self.current_collaboration_mode = collaboration_mode;
if self.collaboration_modes_enabled() {
self.active_collaboration_mask = None;
self.update_collaboration_mode_indicator();
}
} else if self.collaboration_modes_enabled() {
self.active_collaboration_mask = self.collaboration_mask_from_mode(&collaboration_mode);
self.update_collaboration_mode_indicator();
} else {
self.current_collaboration_mode = collaboration_mode;
}
self.refresh_model_display();
self.request_redraw();
}
fn collaboration_mask_from_mode(
&self,
collaboration_mode: &CollaborationMode,
) -> Option<CollaborationModeMask> {
if collaboration_mode.mode == ModeKind::Custom {
return None;
}
let mut mask = collaboration_modes::mask_for_kind(
self.models_manager.as_ref(),
collaboration_mode.mode,
)
.unwrap_or_else(|| CollaborationModeMask {
name: match collaboration_mode.mode {
ModeKind::Plan => "Plan",
ModeKind::Code => "Code",
ModeKind::PairProgramming => "Pair Programming",
ModeKind::Execute => "Execute",
ModeKind::Custom => "Custom",
}
.to_string(),
mode: Some(collaboration_mode.mode),
model: None,
reasoning_effort: None,
developer_instructions: None,
});
mask.mode = Some(collaboration_mode.mode);
mask.model = Some(collaboration_mode.settings.model.clone());
mask.reasoning_effort = Some(collaboration_mode.settings.reasoning_effort);
mask.developer_instructions =
Some(collaboration_mode.settings.developer_instructions.clone());
Some(mask)
}
fn connectors_enabled(&self) -> bool {
self.config.features.enabled(Feature::Apps)
}

View File

@@ -47,6 +47,7 @@ use codex_core::protocol::SessionConfiguredEvent;
use codex_core::web_search::web_search_detail;
use codex_otel::RuntimeMetricsSummary;
use codex_protocol::account::PlanType;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::mcp::Resource;
use codex_protocol::mcp::ResourceTemplate;
use codex_protocol::models::WebSearchAction;
@@ -168,6 +169,7 @@ pub(crate) struct UserHistoryCell {
pub text_elements: Vec<TextElement>,
#[allow(dead_code)]
pub local_image_paths: Vec<PathBuf>,
pub collaboration_mode: Option<CollaborationModeMask>,
}
/// Build logical lines for a user message with styled text elements.
@@ -1018,11 +1020,13 @@ pub(crate) fn new_user_prompt(
message: String,
text_elements: Vec<TextElement>,
local_image_paths: Vec<PathBuf>,
collaboration_mode: Option<CollaborationModeMask>,
) -> UserHistoryCell {
UserHistoryCell {
message,
text_elements,
local_image_paths,
collaboration_mode,
}
}
@@ -3311,6 +3315,7 @@ mod tests {
message: msg.to_string(),
text_elements: Vec::new(),
local_image_paths: Vec::new(),
collaboration_mode: None,
};
// Small width to force wrapping more clearly. Effective wrap width is width-2 due to the ▌ prefix and trailing space.