Use backend collaboration modes in TUI picker

This commit is contained in:
canvrno-oai
2026-05-04 13:33:04 -07:00
parent 87d2235b54
commit f52e187048
5 changed files with 66 additions and 7 deletions

View File

@@ -686,7 +686,10 @@ impl App {
if let Some(updated_model) = config.model.clone() {
model = updated_model;
}
let model_catalog = Arc::new(ModelCatalog::new(available_models.clone()));
let model_catalog = Arc::new(ModelCatalog::new(
available_models.clone(),
bootstrap.collaboration_modes,
));
let feedback_audience = bootstrap.feedback_audience;
let auth_mode = bootstrap.auth_mode;
let has_chatgpt_account = bootstrap.has_chatgpt_account;

View File

@@ -20,6 +20,9 @@ use codex_app_server_protocol::Account;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::CollaborationModeListParams;
use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::CollaborationModeMask as ApiCollaborationModeMask;
use codex_app_server_protocol::ConfigBatchWriteParams;
use codex_app_server_protocol::ConfigWriteResponse;
use codex_app_server_protocol::ExternalAgentConfigDetectParams;
@@ -105,6 +108,7 @@ use codex_app_server_protocol::UserInput;
use codex_otel::TelemetryAuthMode;
use codex_protocol::ThreadId;
use codex_protocol::approvals::GuardianAssessmentEvent;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::models::ActivePermissionProfile;
use codex_protocol::models::ActivePermissionProfileModification;
use codex_protocol::models::PermissionProfile;
@@ -143,6 +147,7 @@ pub(crate) struct AppServerBootstrap {
pub(crate) feedback_audience: FeedbackAudience,
pub(crate) has_chatgpt_account: bool,
pub(crate) available_models: Vec<ModelPreset>,
pub(crate) collaboration_modes: Vec<CollaborationModeMask>,
}
pub(crate) struct AppServerSession {
@@ -215,6 +220,22 @@ impl AppServerSession {
.into_iter()
.map(model_preset_from_api_model)
.collect::<Vec<_>>();
let collaboration_modes_request_id = self.next_request_id();
let collaboration_modes: CollaborationModeListResponse = self
.client
.request_typed(ClientRequest::CollaborationModeList {
request_id: collaboration_modes_request_id,
params: CollaborationModeListParams::default(),
})
.await
.map_err(|err| {
bootstrap_request_error("collaborationMode/list failed during TUI bootstrap", err)
})?;
let collaboration_modes = collaboration_modes
.data
.into_iter()
.map(collaboration_mode_mask_from_api_mask)
.collect::<Vec<_>>();
let default_model = config
.model
.clone()
@@ -276,6 +297,7 @@ impl AppServerSession {
feedback_audience,
has_chatgpt_account,
available_models,
collaboration_modes,
})
}
@@ -1058,6 +1080,16 @@ fn model_preset_from_api_model(model: ApiModel) -> ModelPreset {
}
}
fn collaboration_mode_mask_from_api_mask(mask: ApiCollaborationModeMask) -> CollaborationModeMask {
CollaborationModeMask {
name: mask.name,
mode: mask.mode,
model: mask.model,
reasoning_effort: mask.reasoning_effort,
developer_instructions: None,
}
}
fn approvals_reviewer_override_from_config(
config: &Config,
) -> Option<codex_app_server_protocol::ApprovalsReviewer> {

View File

@@ -1,5 +1,6 @@
use super::*;
use codex_app_server_protocol::PluginAvailability;
use codex_protocol::config_types::CollaborationModeMask;
use pretty_assertions::assert_eq;
pub(super) async fn test_config() -> Config {
@@ -136,9 +137,20 @@ pub(super) fn test_session_telemetry(config: &Config, model: &str) -> SessionTel
/// Builds the `ModelCatalog` used by tests: every builtin model preset plus
/// the collaboration-mode presets with developer instructions stripped.
pub(super) fn test_model_catalog(_config: &Config) -> Arc<ModelCatalog> {
    let models = crate::legacy_core::test_support::all_model_presets().clone();
    let collaboration_modes = test_collaboration_mode_presets();
    Arc::new(ModelCatalog::new(models, collaboration_modes))
}
fn test_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
crate::legacy_core::test_support::builtin_collaboration_mode_presets()
.into_iter()
.map(|mut preset| {
preset.developer_instructions = None;
preset
})
.collect()
}
// --- Helpers for tests that need direct construction and event draining ---
pub(super) async fn make_chatwidget_manual(
model_override: Option<&str>,
@@ -437,7 +449,7 @@ pub(crate) fn set_fast_mode_test_catalog(chat: &mut ChatWidget) {
.map(Into::into)
.collect();
chat.model_catalog = Arc::new(ModelCatalog::new(models));
chat.model_catalog = Arc::new(ModelCatalog::new(models, test_collaboration_mode_presets()));
}
pub(crate) async fn make_chatwidget_manual_with_sender() -> (

View File

@@ -1,11 +1,11 @@
use codex_models_manager::collaboration_mode_presets::builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use crate::model_catalog::ModelCatalog;
fn filtered_presets(_model_catalog: &ModelCatalog) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
fn filtered_presets(model_catalog: &ModelCatalog) -> Vec<CollaborationModeMask> {
model_catalog
.list_collaboration_modes()
.into_iter()
.filter(|mask| mask.mode.is_some_and(ModeKind::is_tui_visible))
.collect()

View File

@@ -1,17 +1,29 @@
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelPreset;
use std::convert::Infallible;
/// Holds the model presets and collaboration-mode masks delivered at
/// bootstrap, presumably for the TUI picker (per commit title) — confirm.
#[derive(Debug, Clone)]
pub(crate) struct ModelCatalog {
// Model presets, sourced from `available_models` at construction.
models: Vec<ModelPreset>,
// Collaboration-mode masks, sourced from the app-server bootstrap.
collaboration_modes: Vec<CollaborationModeMask>,
}
impl ModelCatalog {
pub(crate) fn new(models: Vec<ModelPreset>) -> Self {
Self { models }
pub(crate) fn new(
models: Vec<ModelPreset>,
collaboration_modes: Vec<CollaborationModeMask>,
) -> Self {
Self {
models,
collaboration_modes,
}
}
pub(crate) fn try_list_models(&self) -> Result<Vec<ModelPreset>, Infallible> {
Ok(self.models.clone())
}
pub(crate) fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
self.collaboration_modes.clone()
}
}