mirror of https://github.com/openai/codex.git
synced 2026-02-02 15:03:38 +00:00

Compare commits: dev/cc/new...collab-mod (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 181615cab0 | |
| | 8472942485 | |
| | 0666e95234 | |
```diff
@@ -1303,6 +1303,7 @@ impl Session {
     fn build_collaboration_mode_update_item(
         &self,
+        current_context: &TurnContext,
         previous_collaboration_mode: &CollaborationMode,
         next_collaboration_mode: Option<&CollaborationMode>,
     ) -> Option<ResponseItem> {
@@ -1312,7 +1313,8 @@ impl Session {
         }
         // If the next mode has empty developer instructions, this returns None and we emit no
         // update, so prior collaboration instructions remain in the prompt history.
-        Some(DeveloperInstructions::from_collaboration_mode(next_mode)?.into())
+        let model_info = current_context.client.get_model_info();
+        Some(DeveloperInstructions::from_collaboration_mode(next_mode, &model_info)?.into())
     } else {
         None
     }
@@ -1337,6 +1339,7 @@ impl Session {
             update_items.push(permissions_item);
         }
         if let Some(collaboration_mode_item) = self.build_collaboration_mode_update_item(
+            current_context,
             previous_collaboration_mode,
             next_collaboration_mode,
         ) {
@@ -1838,13 +1841,23 @@ impl Session {
             items.push(DeveloperInstructions::new(developer_instructions.to_string()).into());
         }
         // Add developer instructions from collaboration_mode if they exist and are non-empty
-        let collaboration_mode = {
+        let session_configuration = {
             let state = self.state.lock().await;
-            state.session_configuration.collaboration_mode.clone()
+            state.session_configuration.clone()
         };
-        if let Some(collab_instructions) =
-            DeveloperInstructions::from_collaboration_mode(&collaboration_mode)
-        {
+        let per_turn_config = Self::build_per_turn_config(&session_configuration);
+        let model_info = self
+            .services
+            .models_manager
+            .get_model_info(
+                session_configuration.collaboration_mode.model(),
+                &per_turn_config,
+            )
+            .await;
+        if let Some(collab_instructions) = DeveloperInstructions::from_collaboration_mode(
+            &session_configuration.collaboration_mode,
+            &model_info,
+        ) {
             items.push(collab_instructions.into());
         }
         if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
```
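One detail worth noting in the last hunk: the whole `session_configuration` is cloned out of the state lock in a short scope before the async `get_model_info` lookup, so the guard is not held across an await. A minimal sketch of that pattern, with hypothetical stand-in types (`State`, `SessionConfiguration`, `get_model_info` are simplified placeholders, not the codex-rs definitions; assumes tokio with the "full" feature set):

```rust
use std::sync::Arc;

use tokio::sync::Mutex;

// Stand-in for the configuration cloned out of session state.
#[derive(Clone)]
struct SessionConfiguration {
    model: String,
}

struct State {
    session_configuration: SessionConfiguration,
}

async fn get_model_info(model: &str) -> String {
    // Hypothetical async lookup standing in for models_manager.get_model_info.
    format!("info for {model}")
}

async fn build_items(state: Arc<Mutex<State>>) -> String {
    // Clone the configuration inside a short critical section...
    let session_configuration = {
        let state = state.lock().await;
        state.session_configuration.clone()
    };
    // ...so the guard is already dropped before awaiting the model-info lookup.
    get_model_info(&session_configuration.model).await
}

#[tokio::main]
async fn main() {
    let state = Arc::new(Mutex::new(State {
        session_configuration: SessionConfiguration {
            model: "gpt-5.1".to_string(),
        },
    }));
    println!("{}", build_items(state).await);
}
```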
```diff
@@ -178,6 +178,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
                 Personality::Pragmatic,
                 PERSONALITY_PRAGMATIC.to_string(),
             )]))),
+            collaboration_modes_messages: None,
         }),
         apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
         shell_type: ConfigShellToolType::ShellCommand,
@@ -222,6 +223,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
                 Personality::Pragmatic,
                 PERSONALITY_PRAGMATIC.to_string(),
             )]))),
+            collaboration_modes_messages: None,
         }),
     )
 } else if slug.starts_with("gpt-5.1-codex-max") {
```
```diff
@@ -1,4 +1,11 @@
+use std::collections::BTreeMap;
+use std::path::Path;
+
+use anyhow::Result;
+use chrono::DateTime;
+use chrono::Utc;
 use codex_core::CodexAuth;
+use codex_core::features::Feature;
 use codex_core::protocol::COLLABORATION_MODE_CLOSE_TAG;
 use codex_core::protocol::COLLABORATION_MODE_OPEN_TAG;
 use codex_core::protocol::EventMsg;
@@ -6,6 +13,14 @@ use codex_core::protocol::Op;
 use codex_protocol::config_types::CollaborationMode;
 use codex_protocol::config_types::ModeKind;
 use codex_protocol::config_types::Settings;
+use codex_protocol::openai_models::CollaborationModesMessages;
+use codex_protocol::openai_models::ConfigShellToolType;
+use codex_protocol::openai_models::ModelInfo;
+use codex_protocol::openai_models::ModelInstructionsTemplate;
+use codex_protocol::openai_models::ModelVisibility;
+use codex_protocol::openai_models::ReasoningEffort;
+use codex_protocol::openai_models::ReasoningEffortPreset;
+use codex_protocol::openai_models::TruncationPolicyConfig;
 use codex_protocol::user_input::UserInput;
 use core_test_support::responses::ev_completed;
 use core_test_support::responses::ev_response_created;
@@ -13,20 +28,102 @@ use core_test_support::responses::mount_sse_once;
 use core_test_support::responses::sse;
 use core_test_support::responses::start_mock_server;
 use core_test_support::skip_if_no_network;
+use core_test_support::test_codex::TestCodexBuilder;
 use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
 use pretty_assertions::assert_eq;
+use serde::Serialize;
 use serde_json::Value;
 
 fn sse_completed(id: &str) -> String {
     sse(vec![ev_response_created(id), ev_completed(id)])
 }
 
+const TEST_COLLAB_MODEL: &str = "test-collab-template";
+const MODEL_FALLBACK_TEXT: &str = "model fallback";
+const CACHE_FILE: &str = "models_cache.json";
+
+#[allow(clippy::expect_used)]
+fn cached_collab_builder() -> TestCodexBuilder {
+    let mut builder = test_codex()
+        .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
+        .with_model(TEST_COLLAB_MODEL)
+        .with_config(|config| {
+            config.features.enable(Feature::RemoteModels);
+            config.model_provider.request_max_retries = Some(0);
+            config.model_provider.stream_max_retries = Some(0);
+        });
+    builder = builder.with_pre_build_hook(|home| {
+        write_models_cache(home).expect("models cache should be written");
+    });
+    builder
+}
+
+fn write_models_cache(home: &Path) -> Result<()> {
+    let cache = ModelsCache {
+        fetched_at: Utc::now(),
+        etag: None,
+        models: vec![test_collab_model(TEST_COLLAB_MODEL)],
+    };
+    let contents = serde_json::to_vec_pretty(&cache)?;
+    std::fs::write(home.join(CACHE_FILE), contents)?;
+    Ok(())
+}
+
+fn test_collab_model(slug: &str) -> ModelInfo {
+    ModelInfo {
+        slug: slug.to_string(),
+        display_name: "Test collab model".to_string(),
+        description: Some("test collab model".to_string()),
+        default_reasoning_level: Some(ReasoningEffort::Medium),
+        supported_reasoning_levels: vec![ReasoningEffortPreset {
+            effort: ReasoningEffort::Medium,
+            description: "medium".to_string(),
+        }],
+        shell_type: ConfigShellToolType::ShellCommand,
+        visibility: ModelVisibility::List,
+        supported_in_api: true,
+        priority: 1,
+        upgrade: None,
+        base_instructions: "base instructions".to_string(),
+        model_instructions_template: Some(ModelInstructionsTemplate {
+            template: "template".to_string(),
+            personality_messages: None,
+            collaboration_modes_messages: Some(CollaborationModesMessages(BTreeMap::from([(
+                ModeKind::Custom,
+                MODEL_FALLBACK_TEXT.to_string(),
+            )]))),
+        }),
+        supports_reasoning_summaries: false,
+        support_verbosity: false,
+        default_verbosity: None,
+        apply_patch_tool_type: None,
+        truncation_policy: TruncationPolicyConfig::bytes(10_000),
+        supports_parallel_tool_calls: false,
+        context_window: Some(272_000),
+        auto_compact_token_limit: None,
+        effective_context_window_percent: 95,
+        experimental_supported_tools: Vec::new(),
+    }
+}
+
+#[derive(Debug, Clone, Serialize)]
+struct ModelsCache {
+    fetched_at: DateTime<Utc>,
+    #[serde(default)]
+    etag: Option<String>,
+    models: Vec<ModelInfo>,
+}
+
 fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
+    collab_mode_with_model("gpt-5.1", instructions)
+}
+
+fn collab_mode_with_model(model: &str, instructions: Option<&str>) -> CollaborationMode {
     CollaborationMode {
         mode: ModeKind::Custom,
         settings: Settings {
-            model: "gpt-5.1".to_string(),
+            model: model.to_string(),
             reasoning_effort: None,
             developer_instructions: instructions.map(str::to_string),
         },
@@ -527,3 +624,106 @@ async fn empty_collaboration_instructions_are_ignored() -> Result<()> {
 
     Ok(())
 }
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn collaboration_instructions_precedence_mode_overrides_model_template() -> Result<()> {
+    skip_if_no_network!(Ok(()));
+
+    let server = start_mock_server().await;
+    let req = mount_sse_once(&server, sse_completed("resp-1")).await;
+
+    let mut builder = cached_collab_builder();
+    let test = builder.build(&server).await?;
+
+    let mode_text = "mode instructions";
+    let collaboration_mode = collab_mode_with_model(TEST_COLLAB_MODEL, Some(mode_text));
+    test.codex
+        .submit(Op::OverrideTurnContext {
+            cwd: None,
+            approval_policy: None,
+            sandbox_policy: None,
+            windows_sandbox_level: None,
+            model: None,
+            effort: None,
+            summary: None,
+            collaboration_mode: Some(collaboration_mode),
+            personality: None,
+        })
+        .await?;
+
+    test.codex
+        .submit(Op::UserInput {
+            items: vec![UserInput::Text {
+                text: "hello".into(),
+                text_elements: Vec::new(),
+            }],
+            final_output_json_schema: None,
+        })
+        .await?;
+    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
+
+    let input = req.single_request().input();
+    let dev_texts = developer_texts(&input);
+    let mode_text = collab_xml(mode_text);
+    assert_eq!(count_exact(&dev_texts, &mode_text), 1);
+    let last_collab = dev_texts
+        .iter()
+        .rev()
+        .find(|text| text.starts_with(COLLABORATION_MODE_OPEN_TAG))
+        .cloned();
+    assert_eq!(last_collab, Some(mode_text));
+
+    Ok(())
+}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn collaboration_instructions_fall_back_to_model_template_when_mode_empty() -> Result<()> {
+    skip_if_no_network!(Ok(()));
+
+    let server = start_mock_server().await;
+    let req = mount_sse_once(&server, sse_completed("resp-1")).await;
+
+    let mut builder = cached_collab_builder();
+    let test = builder.build(&server).await?;
+
+    let collaboration_mode = collab_mode_with_model(TEST_COLLAB_MODEL, None);
+    test.codex
+        .submit(Op::OverrideTurnContext {
+            cwd: None,
+            approval_policy: None,
+            sandbox_policy: None,
+            windows_sandbox_level: None,
+            model: None,
+            effort: None,
+            summary: None,
+            collaboration_mode: Some(collaboration_mode),
+            personality: None,
+        })
+        .await?;
+
+    test.codex
+        .submit(Op::UserInput {
+            items: vec![UserInput::Text {
+                text: "hello".into(),
+                text_elements: Vec::new(),
+            }],
+            final_output_json_schema: None,
+        })
+        .await?;
+    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
+
+    let request = req.single_request();
+    let model = request
+        .body_json()
+        .get("model")
+        .and_then(Value::as_str)
+        .unwrap_or_default()
+        .to_string();
+    assert_eq!(model, TEST_COLLAB_MODEL.to_string());
+    let input = request.input();
+    let dev_texts = developer_texts(&input);
+    let model_text = collab_xml(MODEL_FALLBACK_TEXT);
+    assert_eq!(count_exact(&dev_texts, &model_text), 1);
+
+    Ok(())
+}
```
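The pre-build hook above seeds `models_cache.json` so the RemoteModels feature resolves `TEST_COLLAB_MODEL` from disk instead of the network. A simplified sketch of that write, with the `ModelInfo` field set reduced to two fields for illustration (the real struct carries many more; assumes the anyhow, serde, serde_json, and chrono-with-serde-feature crates):

```rust
use std::path::Path;

use chrono::{DateTime, Utc};
use serde::Serialize;

// Reduced stand-ins for the cache structures used in the test.
#[derive(Serialize)]
struct ModelInfo {
    slug: String,
    display_name: String,
}

#[derive(Serialize)]
struct ModelsCache {
    fetched_at: DateTime<Utc>,
    etag: Option<String>,
    models: Vec<ModelInfo>,
}

fn write_models_cache(home: &Path) -> anyhow::Result<()> {
    let cache = ModelsCache {
        fetched_at: Utc::now(),
        etag: None,
        models: vec![ModelInfo {
            slug: "test-collab-template".to_string(),
            display_name: "Test collab model".to_string(),
        }],
    };
    // Pretty-printed JSON, written where the models manager looks for its cache.
    let contents = serde_json::to_vec_pretty(&cache)?;
    std::fs::write(home.join("models_cache.json"), contents)?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    let dir = std::env::temp_dir();
    write_models_cache(&dir)?;
    println!("wrote {}", dir.join("models_cache.json").display());
    Ok(())
}
```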
```diff
@@ -296,6 +296,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
                 Personality::Friendly,
                 remote_personality_message.to_string(),
             )]))),
+            collaboration_modes_messages: None,
         }),
         supports_reasoning_summaries: false,
         support_verbosity: false,
```
```diff
@@ -166,7 +166,9 @@ pub enum AltScreenMode {
 }
 
 /// Initial collaboration mode to use when the TUI starts.
-#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash, JsonSchema, TS)]
+#[derive(
+    Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash, JsonSchema, TS,
+)]
 #[serde(rename_all = "snake_case")]
 pub enum ModeKind {
     Plan,
```
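The new `PartialOrd`/`Ord` derives are what let `ModeKind` serve as a `BTreeMap` key in the `CollaborationModesMessages` newtype introduced later in this diff, since `BTreeMap` requires `Ord` on its keys. A minimal illustration with a local enum (not the real type):

```rust
use std::collections::BTreeMap;

// A BTreeMap key must implement Ord; without PartialOrd/Ord this enum
// could not be used the way CollaborationModesMessages uses ModeKind.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum ModeKind {
    Plan,
    Custom,
}

fn main() {
    let messages: BTreeMap<ModeKind, String> = BTreeMap::from([
        (ModeKind::Plan, "plan message".to_string()),
        (ModeKind::Custom, "custom message".to_string()),
    ]);
    assert_eq!(
        messages.get(&ModeKind::Plan).map(String::as_str),
        Some("plan message")
    );
}
```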
```diff
@@ -12,6 +12,7 @@ use ts_rs::TS;
 
 use crate::config_types::CollaborationMode;
 use crate::config_types::SandboxMode;
+use crate::openai_models::ModelInfo;
 use crate::protocol::AskForApproval;
 use crate::protocol::COLLABORATION_MODE_CLOSE_TAG;
 use crate::protocol::COLLABORATION_MODE_OPEN_TAG;
@@ -301,17 +302,30 @@ impl DeveloperInstructions {
     }
 
     /// Returns developer instructions from a collaboration mode if they exist and are non-empty.
-    pub fn from_collaboration_mode(collaboration_mode: &CollaborationMode) -> Option<Self> {
-        collaboration_mode
+    /// Precedence:
+    /// 1. collaboration_mode.settings.developer_instructions if it exists
+    /// 2. model.collaboration_modes_messages for the collaboration_mode.mode if it exists
+    /// 3. None
+    pub fn from_collaboration_mode(
+        collaboration_mode: &CollaborationMode,
+        model: &ModelInfo,
+    ) -> Option<Self> {
+        let instructions = collaboration_mode
             .settings
             .developer_instructions
             .as_ref()
             .filter(|instructions| !instructions.is_empty())
-            .map(|instructions| {
-                DeveloperInstructions::new(format!(
-                    "{COLLABORATION_MODE_OPEN_TAG}{instructions}{COLLABORATION_MODE_CLOSE_TAG}"
-                ))
-            })
+            .or_else(|| {
+                model
+                    .model_instructions_template
+                    .as_ref()
+                    .and_then(|template| template.collaboration_modes_messages.as_ref())
+                    .and_then(|messages| messages.0.get(&collaboration_mode.mode))
+            })?;
+
+        Some(DeveloperInstructions::new(format!(
+            "{COLLABORATION_MODE_OPEN_TAG}{instructions}{COLLABORATION_MODE_CLOSE_TAG}"
+        )))
    }
 
     fn from_permissions_with_network(
```
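Taken on its own, this precedence is easy to get wrong: an empty string at the mode level must fall through to the model template, and only a total miss yields `None`. A self-contained sketch of the same resolution order, with simplified stand-in types (`Mode`, `Model`, and the literal tag strings are hypothetical placeholders, not the codex-rs `CollaborationMode`, `ModelInfo`, or `COLLABORATION_MODE_*` constants):

```rust
use std::collections::BTreeMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum ModeKind {
    Plan,
    Custom,
}

struct Mode {
    kind: ModeKind,
    developer_instructions: Option<String>,
}

struct Model {
    collaboration_modes_messages: Option<BTreeMap<ModeKind, String>>,
}

/// Mirrors the precedence in from_collaboration_mode: mode-level instructions
/// win, an empty string falls through to the model template, and the result
/// is wrapped in placeholder collaboration-mode tags.
fn resolve(mode: &Mode, model: &Model) -> Option<String> {
    let instructions = mode
        .developer_instructions
        .as_ref()
        .filter(|s| !s.is_empty())
        .or_else(|| {
            model
                .collaboration_modes_messages
                .as_ref()
                .and_then(|messages| messages.get(&mode.kind))
        })?;
    Some(format!("<collaboration_mode>{instructions}</collaboration_mode>"))
}

fn main() {
    let model = Model {
        collaboration_modes_messages: Some(BTreeMap::from([(
            ModeKind::Custom,
            "model fallback".to_string(),
        )])),
    };
    // 1. Mode instructions take precedence over the model template.
    let explicit = Mode {
        kind: ModeKind::Custom,
        developer_instructions: Some("mode instructions".to_string()),
    };
    assert_eq!(
        resolve(&explicit, &model).as_deref(),
        Some("<collaboration_mode>mode instructions</collaboration_mode>")
    );
    // 2. Empty mode instructions fall back to the model template.
    let empty = Mode {
        kind: ModeKind::Custom,
        developer_instructions: Some(String::new()),
    };
    assert_eq!(
        resolve(&empty, &model).as_deref(),
        Some("<collaboration_mode>model fallback</collaboration_mode>")
    );
    // 3. No instructions anywhere yields None.
    let bare = Mode {
        kind: ModeKind::Plan,
        developer_instructions: None,
    };
    assert_eq!(resolve(&bare, &model), None);
}
```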
```diff
@@ -11,6 +11,7 @@ use strum_macros::EnumIter;
 use tracing::warn;
 use ts_rs::TS;
 
+use crate::config_types::ModeKind;
 use crate::config_types::Personality;
 use crate::config_types::Verbosity;
@@ -252,6 +253,7 @@ impl ModelInfo {
 pub struct ModelInstructionsTemplate {
     pub template: String,
     pub personality_messages: Option<PersonalityMessages>,
+    pub collaboration_modes_messages: Option<CollaborationModesMessages>,
 }
 
 impl ModelInstructionsTemplate {
@@ -272,6 +274,11 @@ impl ModelInstructionsTemplate {
 #[serde(transparent)]
 pub struct PersonalityMessages(pub BTreeMap<Personality, String>);
 
+// serializes as a dictionary from collaboration mode to message
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, TS, JsonSchema)]
+#[serde(transparent)]
+pub struct CollaborationModesMessages(pub BTreeMap<ModeKind, String>);
+
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, TS, JsonSchema)]
 pub struct ModelInfoUpgrade {
     pub model: String,
@@ -446,6 +453,7 @@ mod tests {
         let model = test_model(Some(ModelInstructionsTemplate {
             template: "Hello {{ personality_message }}".to_string(),
             personality_messages: Some(personality_messages()),
+            collaboration_modes_messages: None,
         }));
 
         let instructions = model.get_model_instructions(Some(Personality::Friendly));
@@ -458,6 +466,7 @@ mod tests {
         let model = test_model(Some(ModelInstructionsTemplate {
             template: "Hello there".to_string(),
             personality_messages: Some(personality_messages()),
+            collaboration_modes_messages: None,
         }));
 
         let instructions = model.get_model_instructions(Some(Personality::Friendly));
```
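As the comment on the new struct says, `#[serde(transparent)]` makes `CollaborationModesMessages` serialize as a bare dictionary from mode to message rather than as a wrapper object. A sketch with simplified local types showing the resulting JSON shape (assumes serde and serde_json; `rename_all = "snake_case"` matches the `ModeKind` definition earlier in this diff):

```rust
use std::collections::BTreeMap;

use serde::Serialize;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize)]
#[serde(rename_all = "snake_case")]
enum ModeKind {
    Plan,
    Custom,
}

// Transparent newtype: serializes exactly like the inner BTreeMap.
#[derive(Serialize)]
#[serde(transparent)]
struct CollaborationModesMessages(BTreeMap<ModeKind, String>);

fn main() -> serde_json::Result<()> {
    let messages = CollaborationModesMessages(BTreeMap::from([(
        ModeKind::Custom,
        "model fallback".to_string(),
    )]));
    // Renders as a plain dictionary keyed by mode, with no wrapping field name.
    assert_eq!(
        serde_json::to_string(&messages)?,
        r#"{"custom":"model fallback"}"#
    );
    Ok(())
}
```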