Use collaboration mode masks without mutating base settings (#9806)

Keep an unmasked base collaboration mode and apply the active mask on
demand. Simplify the TUI mask helpers and update tests/docs to match the
mask contract.
This commit is contained in:
Ahmed Ibrahim
2026-01-24 23:35:31 -08:00
committed by GitHub
parent 24230c066b
commit 58450ba2a1
12 changed files with 472 additions and 310 deletions

View File

@@ -5,6 +5,7 @@ use crate::protocol::common::AuthMode;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::ReasoningSummary;
@@ -937,7 +938,7 @@ pub struct CollaborationModeListParams {}
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CollaborationModeListResponse {
pub data: Vec<CollaborationMode>,
pub data: Vec<CollaborationModeMask>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View File

@@ -16,7 +16,7 @@ use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::models_manager::test_builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
@@ -45,13 +45,23 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
let CollaborationModeListResponse { data: items } =
to_response::<CollaborationModeListResponse>(response)?;
let expected = vec![
let expected = [
plan_preset(),
code_preset(),
pair_programming_preset(),
execute_preset(),
];
assert_eq!(expected, items);
assert_eq!(expected.len(), items.len());
for (expected_mask, actual_mask) in expected.iter().zip(items.iter()) {
assert_eq!(expected_mask.name, actual_mask.name);
assert_eq!(expected_mask.mode, actual_mask.mode);
assert_eq!(expected_mask.model, actual_mask.model);
assert_eq!(expected_mask.reasoning_effort, actual_mask.reasoning_effort);
assert_eq!(
expected_mask.developer_instructions,
actual_mask.developer_instructions
);
}
Ok(())
}
@@ -59,11 +69,11 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
///
/// If the defaults change in the app server, this helper should be updated alongside the
/// contract, or the test will fail in ways that imply a regression in the API.
fn plan_preset() -> CollaborationMode {
fn plan_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Plan)
.find(|p| p.mode == Some(ModeKind::Plan))
.unwrap()
}
@@ -71,20 +81,20 @@ fn plan_preset() -> CollaborationMode {
///
/// The helper keeps the expected model and reasoning defaults co-located with the test
/// so that mismatches point directly at the API contract being exercised.
fn pair_programming_preset() -> CollaborationMode {
fn pair_programming_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::PairProgramming)
.find(|p| p.mode == Some(ModeKind::PairProgramming))
.unwrap()
}
/// Builds the code preset that the list response is expected to return.
fn code_preset() -> CollaborationMode {
fn code_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Code)
.find(|p| p.mode == Some(ModeKind::Code))
.unwrap()
}
@@ -92,10 +102,10 @@ fn code_preset() -> CollaborationMode {
///
/// The execute preset uses a different reasoning effort to capture the higher-effort
/// execution contract the server currently exposes.
fn execute_preset() -> CollaborationMode {
fn execute_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Execute)
.find(|p| p.mode == Some(ModeKind::Execute))
.unwrap()
}

View File

@@ -1,6 +1,5 @@
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;
const COLLABORATION_MODE_PLAN: &str = include_str!("../../templates/collaboration_mode/plan.md");
@@ -10,7 +9,7 @@ const COLLABORATION_MODE_PAIR_PROGRAMMING: &str =
const COLLABORATION_MODE_EXECUTE: &str =
include_str!("../../templates/collaboration_mode/execute.md");
pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
vec![
plan_preset(),
code_preset(),
@@ -20,50 +19,46 @@ pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
}
#[cfg(any(test, feature = "test-support"))]
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
}
fn plan_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some(COLLABORATION_MODE_PLAN.to_string()),
},
fn plan_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Plan".to_string(),
mode: Some(ModeKind::Plan),
model: Some("gpt-5.2-codex".to_string()),
reasoning_effort: Some(Some(ReasoningEffort::High)),
developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
}
}
fn code_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::Medium),
developer_instructions: Some(COLLABORATION_MODE_CODE.to_string()),
},
fn code_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Code".to_string(),
mode: Some(ModeKind::Code),
model: Some("gpt-5.2-codex".to_string()),
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
developer_instructions: Some(Some(COLLABORATION_MODE_CODE.to_string())),
}
}
fn pair_programming_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::PairProgramming,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::Medium),
developer_instructions: Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string()),
},
fn pair_programming_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Pair Programming".to_string(),
mode: Some(ModeKind::PairProgramming),
model: Some("gpt-5.2-codex".to_string()),
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
developer_instructions: Some(Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string())),
}
}
fn execute_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Execute,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some(COLLABORATION_MODE_EXECUTE.to_string()),
},
fn execute_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Execute".to_string(),
mode: Some(ModeKind::Execute),
model: Some("gpt-5.2-codex".to_string()),
reasoning_effort: Some(Some(ReasoningEffort::High)),
developer_instructions: Some(Some(COLLABORATION_MODE_EXECUTE.to_string())),
}
}

View File

@@ -14,7 +14,7 @@ use crate::models_manager::model_presets::builtin_model_presets;
use codex_api::ModelsClient;
use codex_api::ReqwestTransport;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelsResponse;
@@ -91,7 +91,7 @@ impl ModelsManager {
/// List collaboration mode presets.
///
/// Returns a static set of preset masks (partial settings applied on top of the base mode).
pub fn list_collaboration_modes(&self) -> Vec<CollaborationMode> {
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
}

View File

@@ -19,7 +19,7 @@ use crate::rollout::RolloutRecorder;
use crate::rollout::truncation;
use crate::skills::SkillsManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::McpServerRefreshConfig;
@@ -158,7 +158,7 @@ impl ThreadManager {
.await
}
pub fn list_collaboration_modes(&self) -> Vec<CollaborationMode> {
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
self.state.models_manager.list_collaboration_modes()
}

View File

@@ -101,7 +101,8 @@ Each response yields:
Fetch the built-in collaboration mode presets with `collaborationMode/list`. This endpoint does not accept pagination and returns the full list in one response:
- `data` ordered list of collaboration mode presets
- `data` ordered list of collaboration mode masks (partial settings to apply on top of the base mode)
- For tri-state fields like `reasoning_effort` and `developer_instructions`, omit the field to keep the current value, set it to `null` to clear it, or set a concrete value to update it.
## Event stream

View File

@@ -188,21 +188,21 @@ impl CollaborationMode {
///
/// - `model`: `Some(s)` to update the model, `None` to keep the current model
/// - `effort`: `Some(Some(e))` to set effort to `e`, `Some(None)` to clear effort, `None` to keep current effort
/// - `developer_instructions`: `Some(s)` to update developer instructions, `None` to keep current
/// - `developer_instructions`: `Some(Some(s))` to set instructions, `Some(None)` to clear them, `None` to keep current
///
/// Returns a new `CollaborationMode` with updated values, preserving the mode.
pub fn with_updates(
&self,
model: Option<String>,
effort: Option<Option<ReasoningEffort>>,
developer_instructions: Option<String>,
developer_instructions: Option<Option<String>>,
) -> Self {
let settings = self.settings_ref();
let updated_settings = Settings {
model: model.unwrap_or_else(|| settings.model.clone()),
reasoning_effort: effort.unwrap_or(settings.reasoning_effort),
developer_instructions: developer_instructions
.or_else(|| settings.developer_instructions.clone()),
.unwrap_or_else(|| settings.developer_instructions.clone()),
};
CollaborationMode {
@@ -210,6 +210,26 @@ impl CollaborationMode {
settings: updated_settings,
}
}
/// Applies a mask to this collaboration mode, returning a new collaboration mode
/// with the mask values applied. Fields in the mask that are `Some` will override
/// the corresponding fields, while `None` values will preserve the original values.
///
/// The `name` field in the mask is ignored as it's metadata for the mask itself.
pub fn apply_mask(&self, mask: &CollaborationModeMask) -> Self {
let settings = self.settings_ref();
CollaborationMode {
mode: mask.mode.unwrap_or(self.mode),
settings: Settings {
model: mask.model.clone().unwrap_or_else(|| settings.model.clone()),
reasoning_effort: mask.reasoning_effort.unwrap_or(settings.reasoning_effort),
developer_instructions: mask
.developer_instructions
.clone()
.unwrap_or_else(|| settings.developer_instructions.clone()),
},
}
}
}
/// Settings for a collaboration mode.
@@ -219,3 +239,49 @@ pub struct Settings {
pub reasoning_effort: Option<ReasoningEffort>,
pub developer_instructions: Option<String>,
}
/// A mask for collaboration mode settings, allowing partial updates.
/// All fields except `name` are optional, enabling selective updates.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, JsonSchema, TS)]
pub struct CollaborationModeMask {
pub name: String,
pub mode: Option<ModeKind>,
pub model: Option<String>,
pub reasoning_effort: Option<Option<ReasoningEffort>>,
pub developer_instructions: Option<Option<String>>,
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn apply_mask_can_clear_optional_fields() {
let mode = CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some("stay focused".to_string()),
},
};
let mask = CollaborationModeMask {
name: "Clear".to_string(),
mode: None,
model: None,
reasoning_effort: Some(None),
developer_instructions: Some(None),
};
let expected = CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
assert_eq!(expected, mode.apply_mask(&mask));
}
}

View File

@@ -1502,10 +1502,8 @@ impl App {
AppEvent::UpdateModel(model) => {
self.chat_widget.set_model(&model);
}
AppEvent::UpdateCollaborationMode(mode) => {
let model = mode.model().to_string();
self.chat_widget.set_collaboration_mode(mode);
self.chat_widget.set_model(&model);
AppEvent::UpdateCollaborationMode(mask) => {
self.chat_widget.set_collaboration_mask(mask);
}
AppEvent::OpenReasoningPopup { model } => {
self.chat_widget.open_reasoning_popup(model);

View File

@@ -23,7 +23,7 @@ use crate::history_cell::HistoryCell;
use codex_core::features::Feature;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ReasoningEffort;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -103,8 +103,8 @@ pub(crate) enum AppEvent {
/// Update the current model slug in the running app and widget.
UpdateModel(String),
/// Update the current collaboration mode in the running app and widget.
UpdateCollaborationMode(CollaborationMode),
/// Update the active collaboration mask in the running app and widget.
UpdateCollaborationMode(CollaborationModeMask),
/// Persist the selected model and reasoning effort to the appropriate config.
PersistModelSelection {
@@ -240,10 +240,10 @@ pub(crate) enum AppEvent {
/// Open the custom prompt option from the review popup.
OpenReviewCustomPrompt,
/// Submit a user message with an explicit collaboration mode.
/// Submit a user message with an explicit collaboration mask.
SubmitUserMessageWithMode {
text: String,
collaboration_mode: CollaborationMode,
collaboration_mode: CollaborationModeMask,
},
/// Open the approval popup.

View File

@@ -93,6 +93,7 @@ use codex_protocol::ThreadId;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ElicitationRequestEvent;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::models::local_image_label_text;
@@ -415,12 +416,12 @@ pub(crate) struct ChatWidget {
/// where the overlay may briefly treat new tail content as already cached.
active_cell_revision: u64,
config: Config,
/// Stored collaboration mode with model and reasoning effort.
/// The unmasked collaboration mode settings (always Custom mode).
///
/// When collaboration modes feature is enabled, this is initialized to the first preset.
/// When disabled, this is Custom. The model and reasoning effort are stored here instead of
/// being read from config or current_model.
stored_collaboration_mode: CollaborationMode,
/// Masks are applied on top of this base mode to derive the effective mode.
current_collaboration_mode: CollaborationMode,
/// The currently active collaboration mask, if any.
active_collaboration_mask: Option<CollaborationModeMask>,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
otel_manager: OtelManager,
@@ -731,16 +732,12 @@ impl ChatWidget {
let initial_messages = event.initial_messages.clone();
let model_for_header = event.model.clone();
self.session_header.set_model(&model_for_header);
// Only update stored collaboration settings when collaboration modes are disabled.
// When enabled, we preserve the selected variant (Plan/Pair/Execute/Custom) and its
// instructions as-is; the session configured event should not override it.
if !self.collaboration_modes_enabled() {
self.stored_collaboration_mode = self.stored_collaboration_mode.with_updates(
Some(model_for_header.clone()),
Some(event.reasoning_effort),
None,
);
}
self.current_collaboration_mode = self.current_collaboration_mode.with_updates(
Some(model_for_header.clone()),
Some(event.reasoning_effort),
None,
);
self.refresh_model_display();
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -910,7 +907,7 @@ impl ChatWidget {
if !self.queued_user_messages.is_empty() {
return;
}
if self.stored_collaboration_mode.mode != ModeKind::Plan {
if self.active_mode_kind() != ModeKind::Plan {
return;
}
let has_message = last_agent_message.is_some_and(|message| !message.trim().is_empty());
@@ -932,14 +929,14 @@ impl ChatWidget {
}
fn open_plan_implementation_prompt(&mut self) {
let code_mode = collaboration_modes::code_mode(self.models_manager.as_ref());
let (implement_actions, implement_disabled_reason) = match code_mode {
Some(collaboration_mode) => {
let code_mask = collaboration_modes::code_mask(self.models_manager.as_ref());
let (implement_actions, implement_disabled_reason) = match code_mask {
Some(mask) => {
let user_text = PLAN_IMPLEMENTATION_CODING_MESSAGE.to_string();
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
tx.send(AppEvent::SubmitUserMessageWithMode {
text: user_text.clone(),
collaboration_mode: collaboration_mode.clone(),
collaboration_mode: mask.clone(),
});
})];
(actions, None)
@@ -1930,23 +1927,25 @@ impl ChatWidget {
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), thread_manager);
let model_for_header = model.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let model_override = model.as_deref();
let model_for_header = model
.clone()
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let active_collaboration_mask =
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
let header_model = active_collaboration_mask
.as_ref()
.and_then(|mask| mask.model.clone())
.unwrap_or_else(|| model_for_header.clone());
let fallback_custom = Settings {
model: model_for_header.clone(),
model: header_model.clone(),
reasoning_effort: None,
developer_instructions: None,
};
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
initial_collaboration_mode(
models_manager.as_ref(),
fallback_custom,
config.experimental_mode,
)
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
}
// Collaboration modes start in Custom mode (not activated).
let current_collaboration_mode = CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
};
let active_cell = Some(Self::placeholder_session_header_cell(&config));
@@ -1970,11 +1969,12 @@ impl ChatWidget {
config,
skills_all: Vec::new(),
skills_initial_state: None,
stored_collaboration_mode,
current_collaboration_mode,
active_collaboration_mask,
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(model_for_header),
session_header: SessionHeader::new(header_model),
initial_user_message,
token_info: None,
rate_limit_snapshot: None,
@@ -2051,23 +2051,25 @@ impl ChatWidget {
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let model_for_header = model.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let model_override = model.as_deref();
let model_for_header = model
.clone()
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let active_collaboration_mask =
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
let header_model = active_collaboration_mask
.as_ref()
.and_then(|mask| mask.model.clone())
.unwrap_or_else(|| model_for_header.clone());
let fallback_custom = Settings {
model: model_for_header.clone(),
model: header_model.clone(),
reasoning_effort: None,
developer_instructions: None,
};
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
initial_collaboration_mode(
models_manager.as_ref(),
fallback_custom,
config.experimental_mode,
)
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
}
// Collaboration modes start in Custom mode (not activated).
let current_collaboration_mode = CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
};
let active_cell = Some(Self::placeholder_session_header_cell(&config));
@@ -2091,11 +2093,12 @@ impl ChatWidget {
config,
skills_all: Vec::new(),
skills_initial_state: None,
stored_collaboration_mode,
current_collaboration_mode,
active_collaboration_mask,
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(model_for_header),
session_header: SessionHeader::new(header_model),
initial_user_message,
token_info: None,
rate_limit_snapshot: None,
@@ -2171,7 +2174,16 @@ impl ChatWidget {
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let header_model = model.unwrap_or_else(|| session_configured.model.clone());
let model_override = model.as_deref();
let header_model = model
.clone()
.unwrap_or_else(|| session_configured.model.clone());
let active_collaboration_mask =
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
let header_model = active_collaboration_mask
.as_ref()
.and_then(|mask| mask.model.clone())
.unwrap_or(header_model);
let codex_op_tx =
spawn_agent_from_existing(conversation, session_configured, app_event_tx.clone());
@@ -2181,17 +2193,10 @@ impl ChatWidget {
reasoning_effort: None,
developer_instructions: None,
};
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
initial_collaboration_mode(
models_manager.as_ref(),
fallback_custom,
config.experimental_mode,
)
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
}
// Collaboration modes start in Custom mode (not activated).
let current_collaboration_mode = CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
};
let mut widget = Self {
@@ -2213,7 +2218,8 @@ impl ChatWidget {
config,
skills_all: Vec::new(),
skills_initial_state: None,
stored_collaboration_mode,
current_collaboration_mode,
active_collaboration_mask,
auth_manager,
models_manager,
otel_manager,
@@ -2816,18 +2822,24 @@ impl ChatWidget {
}
}
let effective_mode = self.effective_collaboration_mode();
let collaboration_mode = if self.collaboration_modes_enabled() {
self.active_collaboration_mask
.as_ref()
.map(|_| effective_mode.clone())
} else {
None
};
let op = Op::UserTurn {
items,
cwd: self.config.cwd.clone(),
approval_policy: self.config.approval_policy.value(),
sandbox_policy: self.config.sandbox_policy.get().clone(),
model: self.stored_collaboration_mode.model().to_string(),
effort: self.stored_collaboration_mode.reasoning_effort(),
model: effective_mode.model().to_string(),
effort: effective_mode.reasoning_effort(),
summary: self.config.model_reasoning_summary,
final_output_json_schema: None,
collaboration_mode: self
.collaboration_modes_enabled()
.then(|| self.stored_collaboration_mode.clone()),
collaboration_mode,
personality: None,
};
@@ -3180,7 +3192,7 @@ impl ChatWidget {
.map(|ti| &ti.total_token_usage)
.unwrap_or(&default_usage);
let collaboration_mode = self.collaboration_mode_label();
let reasoning_effort_override = Some(self.stored_collaboration_mode.reasoning_effort());
let reasoning_effort_override = Some(self.effective_reasoning_effort());
self.add_to_history(crate::status::new_status_output(
&self.config,
self.auth_manager.as_ref(),
@@ -3558,23 +3570,24 @@ impl ChatWidget {
return;
}
let current_kind = self
.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.mode)
.or_else(|| {
collaboration_modes::default_mask(self.models_manager.as_ref())
.and_then(|mask| mask.mode)
});
let items: Vec<SelectionItem> = presets
.into_iter()
.map(|preset| {
let name = match preset.mode {
ModeKind::Plan => "Plan",
ModeKind::Code => "Code",
ModeKind::PairProgramming => "Pair Programming",
ModeKind::Execute => "Execute",
ModeKind::Custom => "Custom",
};
let is_current =
collaboration_modes::same_variant(&self.stored_collaboration_mode, &preset);
.map(|mask| {
let name = mask.name.clone();
let is_current = current_kind == mask.mode;
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
tx.send(AppEvent::UpdateCollaborationMode(preset.clone()));
tx.send(AppEvent::UpdateCollaborationMode(mask.clone()));
})];
SelectionItem {
name: name.to_string(),
name,
is_current,
actions,
dismiss_on_select: true,
@@ -3690,7 +3703,7 @@ impl ChatWidget {
let model_slug = preset.model.to_string();
let is_current_model = self.current_model() == preset.model.as_str();
let highlight_choice = if is_current_model {
self.stored_collaboration_mode.reasoning_effort()
self.effective_reasoning_effort()
} else {
default_choice
};
@@ -4548,21 +4561,15 @@ impl ChatWidget {
}
if feature == Feature::CollaborationModes {
self.bottom_pane.set_collaboration_modes_enabled(enabled);
let settings = self.stored_collaboration_mode.settings.clone();
let fallback_custom = settings.clone();
self.stored_collaboration_mode = if enabled {
initial_collaboration_mode(
self.models_manager.as_ref(),
fallback_custom,
self.config.experimental_mode,
)
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings,
}
let settings = self.current_collaboration_mode.settings.clone();
self.current_collaboration_mode = CollaborationMode {
mode: ModeKind::Custom,
settings,
};
self.active_collaboration_mask = None;
self.update_collaboration_mode_indicator();
self.refresh_model_display();
self.request_redraw();
}
}
@@ -4591,31 +4598,52 @@ impl ChatWidget {
/// Set the reasoning effort on the base collaboration mode, mirroring it into the active mask when one is set.
pub(crate) fn set_reasoning_effort(&mut self, effort: Option<ReasoningEffortConfig>) {
self.stored_collaboration_mode =
self.stored_collaboration_mode
self.current_collaboration_mode =
self.current_collaboration_mode
.with_updates(None, Some(effort), None);
if self.collaboration_modes_enabled()
&& let Some(mask) = self.active_collaboration_mask.as_mut()
{
mask.reasoning_effort = Some(effort);
}
}
/// Set the model in the widget's config copy and the base collaboration mode, mirroring it into the active mask when one is set.
pub(crate) fn set_model(&mut self, model: &str) {
self.session_header.set_model(model);
self.stored_collaboration_mode =
self.stored_collaboration_mode
self.current_collaboration_mode =
self.current_collaboration_mode
.with_updates(Some(model.to_string()), None, None);
if self.collaboration_modes_enabled()
&& let Some(mask) = self.active_collaboration_mask.as_mut()
{
mask.model = Some(model.to_string());
}
self.refresh_model_display();
}
pub(crate) fn current_model(&self) -> &str {
self.stored_collaboration_mode.model()
if !self.collaboration_modes_enabled() {
return self.current_collaboration_mode.model();
}
self.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.model.as_deref())
.unwrap_or_else(|| self.current_collaboration_mode.model())
}
#[allow(dead_code)] // Used in tests
pub(crate) fn stored_collaboration_mode(&self) -> &CollaborationMode {
&self.stored_collaboration_mode
pub(crate) fn current_collaboration_mode(&self) -> &CollaborationMode {
&self.current_collaboration_mode
}
#[cfg(test)]
pub(crate) fn current_reasoning_effort(&self) -> Option<ReasoningEffortConfig> {
self.stored_collaboration_mode.reasoning_effort()
self.effective_reasoning_effort()
}
#[cfg(test)]
pub(crate) fn active_collaboration_mode_kind(&self) -> ModeKind {
self.active_mode_kind()
}
fn is_session_configured(&self) -> bool {
@@ -4626,6 +4654,55 @@ impl ChatWidget {
self.config.features.enabled(Feature::CollaborationModes)
}
fn initial_collaboration_mask(
config: &Config,
models_manager: &ModelsManager,
model_override: Option<&str>,
) -> Option<CollaborationModeMask> {
if !config.features.enabled(Feature::CollaborationModes) {
return None;
}
let kind = config.experimental_mode?;
let mut mask = collaboration_modes::mask_for_kind(models_manager, kind)?;
if let Some(model_override) = model_override {
mask.model = Some(model_override.to_string());
}
Some(mask)
}
fn active_mode_kind(&self) -> ModeKind {
self.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.mode)
.unwrap_or(ModeKind::Custom)
}
fn effective_reasoning_effort(&self) -> Option<ReasoningEffortConfig> {
if !self.collaboration_modes_enabled() {
return self.current_collaboration_mode.reasoning_effort();
}
let current_effort = self.current_collaboration_mode.reasoning_effort();
self.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.reasoning_effort)
.unwrap_or(current_effort)
}
fn effective_collaboration_mode(&self) -> CollaborationMode {
if !self.collaboration_modes_enabled() {
return self.current_collaboration_mode.clone();
}
self.active_collaboration_mask.as_ref().map_or_else(
|| self.current_collaboration_mode.clone(),
|mask| self.current_collaboration_mode.apply_mask(mask),
)
}
fn refresh_model_display(&mut self) {
let effective = self.effective_collaboration_mode();
self.session_header.set_model(effective.model());
}
fn model_display_name(&self) -> &str {
let model = self.current_model();
if model.is_empty() {
@@ -4640,7 +4717,7 @@ impl ChatWidget {
if !self.collaboration_modes_enabled() {
return None;
}
match self.stored_collaboration_mode.mode {
match self.active_mode_kind() {
ModeKind::Plan => Some("Plan"),
ModeKind::Code => Some("Code"),
ModeKind::PairProgramming => Some("Pair Programming"),
@@ -4653,7 +4730,7 @@ impl ChatWidget {
if !self.collaboration_modes_enabled() {
return None;
}
match self.stored_collaboration_mode.mode {
match self.active_mode_kind() {
ModeKind::Plan => Some(CollaborationModeIndicator::Plan),
ModeKind::Code => Some(CollaborationModeIndicator::Code),
ModeKind::PairProgramming => Some(CollaborationModeIndicator::PairProgramming),
@@ -4673,26 +4750,25 @@ impl ChatWidget {
return;
}
if let Some(next_mode) = collaboration_modes::next_mode(
if let Some(next_mask) = collaboration_modes::next_mask(
self.models_manager.as_ref(),
&self.stored_collaboration_mode,
self.active_collaboration_mask.as_ref(),
) {
self.set_collaboration_mode(next_mode);
self.set_collaboration_mask(next_mask);
}
}
/// Update the stored collaboration mode.
/// Update the active collaboration mask.
///
/// When collaboration modes are enabled, the current mode is attached to *every*
/// submission as `Op::UserTurn { collaboration_mode: Some(...) }`.
pub(crate) fn set_collaboration_mode(&mut self, mode: CollaborationMode) {
/// When collaboration modes are enabled and a preset is selected (not Custom),
/// the current mode is attached to submissions as `Op::UserTurn { collaboration_mode: Some(...) }`.
pub(crate) fn set_collaboration_mask(&mut self, mask: CollaborationModeMask) {
if !self.collaboration_modes_enabled() {
return;
}
let old_model = self.stored_collaboration_mode.model().to_string();
let mode = mode.with_updates(Some(old_model), None, None);
self.stored_collaboration_mode = mode;
self.active_collaboration_mask = Some(mask);
self.update_collaboration_mode_indicator();
self.refresh_model_display();
self.request_redraw();
}
@@ -4875,11 +4951,9 @@ impl ChatWidget {
pub(crate) fn submit_user_message_with_mode(
&mut self,
text: String,
collaboration_mode: CollaborationMode,
collaboration_mode: CollaborationModeMask,
) {
let model = collaboration_mode.model().to_string();
self.set_collaboration_mode(collaboration_mode);
self.set_model(&model);
self.set_collaboration_mask(collaboration_mode);
self.submit_user_message(text.into());
}
@@ -5306,29 +5380,6 @@ fn extract_first_bold(s: &str) -> Option<String> {
None
}
/// Resolve the collaboration mode to use at startup.
///
/// Precedence: an explicitly requested `desired_mode` (Custom is honored
/// directly with `fallback_custom` settings; a preset kind is looked up via
/// the models manager), then the manager's default mode, and finally a
/// Custom mode built from `fallback_custom` when nothing else resolves.
fn initial_collaboration_mode(
    models_manager: &ModelsManager,
    fallback_custom: Settings,
    desired_mode: Option<ModeKind>,
) -> CollaborationMode {
    match desired_mode {
        // An explicit Custom request bypasses preset lookup entirely.
        Some(ModeKind::Custom) => CollaborationMode {
            mode: ModeKind::Custom,
            settings: fallback_custom,
        },
        // A requested preset kind falls back to the default mode, then Custom.
        Some(kind) => collaboration_modes::mode_for_kind(models_manager, kind)
            .or_else(|| collaboration_modes::default_mode(models_manager))
            .unwrap_or(CollaborationMode {
                mode: ModeKind::Custom,
                settings: fallback_custom,
            }),
        // No preference: take the default mode, or Custom as a last resort.
        None => collaboration_modes::default_mode(models_manager).unwrap_or(CollaborationMode {
            mode: ModeKind::Custom,
            settings: fallback_custom,
        }),
    }
}
async fn fetch_rate_limits(base_url: String, auth: CodexAuth) -> Option<RateLimitSnapshot> {
match BackendClient::from_auth(base_url, &auth) {
Ok(client) => match client.get_rate_limits().await {

View File

@@ -89,6 +89,7 @@ use tempfile::NamedTempFile;
use tempfile::tempdir;
use tokio::sync::mpsc::error::TryRecvError;
use tokio::sync::mpsc::unbounded_channel;
use toml::Value as TomlValue;
#[cfg(target_os = "windows")]
fn set_windows_sandbox_enabled(enabled: bool) {
@@ -777,29 +778,16 @@ async fn make_chatwidget_manual(
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let codex_home = cfg.codex_home.clone();
let models_manager = Arc::new(ModelsManager::new(codex_home, auth_manager.clone()));
let collaboration_modes_enabled = cfg.features.enabled(Feature::CollaborationModes);
let reasoning_effort = None;
let stored_collaboration_mode = if collaboration_modes_enabled {
collaboration_modes::default_mode(models_manager.as_ref()).unwrap_or_else(|| {
CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
}
})
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
}
let base_mode = CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
};
let current_collaboration_mode = base_mode;
let widget = ChatWidget {
app_event_tx,
codex_op_tx: op_tx,
@@ -807,7 +795,8 @@ async fn make_chatwidget_manual(
active_cell: None,
active_cell_revision: 0,
config: cfg,
stored_collaboration_mode,
current_collaboration_mode,
active_collaboration_mask: None,
auth_manager,
models_manager,
otel_manager,
@@ -1214,7 +1203,7 @@ async fn plan_implementation_popup_yes_emits_submit_message_event() {
panic!("expected SubmitUserMessageWithMode, got {event:?}");
};
assert_eq!(text, PLAN_IMPLEMENTATION_CODING_MESSAGE);
assert_eq!(collaboration_mode.mode, ModeKind::Code);
assert_eq!(collaboration_mode.mode, Some(ModeKind::Code));
}
#[tokio::test]
@@ -1223,7 +1212,7 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
let code_mode = collaboration_modes::code_mode(chat.models_manager.as_ref())
let code_mode = collaboration_modes::code_mask(chat.models_manager.as_ref())
.expect("expected code collaboration mode");
chat.submit_user_message_with_mode("Implement the plan.".to_string(), code_mode);
@@ -1247,14 +1236,10 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
async fn plan_implementation_popup_skips_replayed_turn_complete() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.replay_initial_messages(vec![EventMsg::TurnComplete(TurnCompleteEvent {
last_agent_message: Some("Plan details".to_string()),
@@ -1271,14 +1256,10 @@ async fn plan_implementation_popup_skips_replayed_turn_complete() {
async fn plan_implementation_popup_skips_when_messages_queued() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.bottom_pane.set_task_running(true);
chat.queue_user_message("Queued message".into());
@@ -1295,14 +1276,10 @@ async fn plan_implementation_popup_skips_when_messages_queued() {
async fn plan_implementation_popup_shows_on_plan_update_without_message() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
chat.on_plan_update(UpdatePlanArgs {
@@ -1327,14 +1304,10 @@ async fn plan_implementation_popup_skips_when_rate_limit_prompt_pending() {
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.on_task_started();
chat.on_plan_update(UpdatePlanArgs {
@@ -2222,22 +2195,25 @@ async fn collab_mode_shift_tab_cycles_only_when_enabled_and_idle() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.set_feature_enabled(Feature::CollaborationModes, false);
let initial = chat.stored_collaboration_mode.clone();
let initial = chat.current_collaboration_mode().clone();
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode, initial);
assert_eq!(chat.current_collaboration_mode(), &initial);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Custom);
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Plan);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.current_collaboration_mode(), &initial);
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Code);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Code);
assert_eq!(chat.current_collaboration_mode(), &initial);
chat.on_task_started();
let before = chat.stored_collaboration_mode.clone();
let before = chat.active_collaboration_mode_kind();
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode, before);
assert_eq!(chat.active_collaboration_mode_kind(), before);
}
#[tokio::test]
@@ -2254,11 +2230,11 @@ async fn collab_slash_command_opens_picker_and_updates_mode() {
);
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
let selected_mode = match rx.try_recv() {
Ok(AppEvent::UpdateCollaborationMode(mode)) => mode,
let selected_mask = match rx.try_recv() {
Ok(AppEvent::UpdateCollaborationMode(mask)) => mask,
other => panic!("expected UpdateCollaborationMode event, got {other:?}"),
};
chat.set_collaboration_mode(selected_mode);
chat.set_collaboration_mask(selected_mask);
chat.bottom_pane
.set_composer_text("hello".to_string(), Vec::new(), Vec::new());
@@ -2298,7 +2274,81 @@ async fn collab_slash_command_opens_picker_and_updates_mode() {
}
#[tokio::test]
async fn collab_mode_defaults_to_coding_when_enabled() {
async fn experimental_mode_plan_applies_on_startup() {
let codex_home = tempdir().expect("tempdir");
let cfg = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.cli_overrides(vec![
(
"features.collaboration_modes".to_string(),
TomlValue::Boolean(true),
),
(
"tui.experimental_mode".to_string(),
TomlValue::String("plan".to_string()),
),
])
.build()
.await
.expect("config");
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
app_event_tx: AppEventSender::new(unbounded_channel::<AppEvent>().0),
initial_user_message: None,
enhanced_keys_supported: false,
auth_manager,
models_manager: thread_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model: Some(resolved_model.clone()),
otel_manager,
};
let chat = ChatWidget::new(init, thread_manager);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.current_model(), resolved_model);
}
#[tokio::test]
async fn set_model_updates_active_collaboration_mask() {
    // Switching the model must not drop the currently active collaboration mask.
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1")).await;
    chat.set_feature_enabled(Feature::CollaborationModes, true);

    let mask = collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
        .expect("expected plan collaboration mask");
    chat.set_collaboration_mask(mask);
    chat.set_model("gpt-5.1-codex-mini");

    assert_eq!(chat.current_model(), "gpt-5.1-codex-mini");
    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
}
#[tokio::test]
async fn set_reasoning_effort_updates_active_collaboration_mask() {
    // Clearing the reasoning effort must not drop the active collaboration mask.
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1")).await;
    chat.set_feature_enabled(Feature::CollaborationModes, true);

    let mask = collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
        .expect("expected plan collaboration mask");
    chat.set_collaboration_mask(mask);
    chat.set_reasoning_effort(None);

    assert_eq!(chat.current_reasoning_effort(), None);
    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
}
#[tokio::test]
async fn collab_mode_is_not_sent_until_selected() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
@@ -2308,25 +2358,24 @@ async fn collab_mode_defaults_to_coding_when_enabled() {
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
match next_submit_op(&mut op_rx) {
Op::UserTurn {
collaboration_mode:
Some(CollaborationMode {
mode: ModeKind::Code,
..
}),
collaboration_mode,
personality: None,
..
} => {}
} => {
assert_eq!(collaboration_mode, None);
}
other => {
panic!("expected Op::UserTurn with code collab mode, got {other:?}")
panic!("expected Op::UserTurn, got {other:?}")
}
}
}
#[tokio::test]
async fn collab_mode_enabling_sets_coding_default() {
async fn collab_mode_enabling_keeps_custom_until_selected() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Code);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Custom);
assert_eq!(chat.current_collaboration_mode().mode, ModeKind::Custom);
}
#[tokio::test]

View File

@@ -1,70 +1,61 @@
use codex_core::models_manager::manager::ModelsManager;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
fn mode_kind(mode: &CollaborationMode) -> ModeKind {
mode.mode
}
/// Whether the TUI surfaces this collaboration mode kind (Plan and Code only).
fn is_tui_mode(kind: ModeKind) -> bool {
    match kind {
        ModeKind::Plan | ModeKind::Code => true,
        _ => false,
    }
}
fn filtered_presets(models_manager: &ModelsManager) -> Vec<CollaborationMode> {
fn filtered_presets(models_manager: &ModelsManager) -> Vec<CollaborationModeMask> {
models_manager
.list_collaboration_modes()
.into_iter()
.filter(|preset| is_tui_mode(mode_kind(preset)))
.filter(|mask| mask.mode.is_some_and(is_tui_mode))
.collect()
}
pub(crate) fn presets_for_tui(models_manager: &ModelsManager) -> Vec<CollaborationMode> {
pub(crate) fn presets_for_tui(models_manager: &ModelsManager) -> Vec<CollaborationModeMask> {
filtered_presets(models_manager)
}
pub(crate) fn default_mode(models_manager: &ModelsManager) -> Option<CollaborationMode> {
pub(crate) fn default_mask(models_manager: &ModelsManager) -> Option<CollaborationModeMask> {
let presets = filtered_presets(models_manager);
presets
.iter()
.find(|preset| preset.mode == ModeKind::Code)
.find(|mask| mask.mode == Some(ModeKind::Code))
.cloned()
.or_else(|| presets.into_iter().next())
}
pub(crate) fn mode_for_kind(
pub(crate) fn mask_for_kind(
models_manager: &ModelsManager,
kind: ModeKind,
) -> Option<CollaborationMode> {
) -> Option<CollaborationModeMask> {
if !is_tui_mode(kind) {
return None;
}
let presets = filtered_presets(models_manager);
presets.into_iter().find(|preset| mode_kind(preset) == kind)
}
pub(crate) fn same_variant(a: &CollaborationMode, b: &CollaborationMode) -> bool {
mode_kind(a) == mode_kind(b)
filtered_presets(models_manager)
.into_iter()
.find(|mask| mask.mode == Some(kind))
}
/// Cycle to the next collaboration mode preset in list order.
pub(crate) fn next_mode(
pub(crate) fn next_mask(
models_manager: &ModelsManager,
current: &CollaborationMode,
) -> Option<CollaborationMode> {
current: Option<&CollaborationModeMask>,
) -> Option<CollaborationModeMask> {
let presets = filtered_presets(models_manager);
if presets.is_empty() {
return None;
}
let current_kind = mode_kind(current);
let current_kind = current.and_then(|mask| mask.mode);
let next_index = presets
.iter()
.position(|preset| mode_kind(preset) == current_kind)
.position(|mask| mask.mode == current_kind)
.map_or(0, |idx| (idx + 1) % presets.len());
presets.get(next_index).cloned()
}
pub(crate) fn code_mode(models_manager: &ModelsManager) -> Option<CollaborationMode> {
filtered_presets(models_manager)
.into_iter()
.find(|preset| mode_kind(preset) == ModeKind::Code)
pub(crate) fn code_mask(models_manager: &ModelsManager) -> Option<CollaborationModeMask> {
mask_for_kind(models_manager, ModeKind::Code)
}