Compare commits

...

1 Commit

Author SHA1 Message Date
Ahmed Ibrahim
bef7887733 Remove personality feature flag behavior 2026-02-17 10:18:52 -08:00
6 changed files with 15 additions and 36 deletions

View File

@@ -1999,7 +1999,6 @@ impl Session {
current_context,
shell.as_ref(),
exec_policy.as_ref(),
self.features.enabled(Feature::Personality),
)
}
@@ -2552,9 +2551,7 @@ impl Session {
{
items.push(collab_instructions.into());
}
if self.features.enabled(Feature::Personality)
&& let Some(personality) = turn_context.personality
{
if let Some(personality) = turn_context.personality {
let model_info = turn_context.model_info.clone();
let has_baked_personality = model_info.supports_personality()
&& base_instructions == model_info.get_model_instructions(Some(personality));
@@ -7977,7 +7974,6 @@ mod tests {
}))
})
&& let Some(p) = reconstruction_turn.personality
&& session.features.enabled(Feature::Personality)
&& let Some(personality_message) = reconstruction_turn
.model_info
.model_messages

View File

@@ -1656,11 +1656,7 @@ impl Config {
let personality = personality
.or(config_profile.personality)
.or(cfg.personality)
.or_else(|| {
features
.enabled(Feature::Personality)
.then_some(Personality::Pragmatic)
});
.or(Some(Personality::Pragmatic));
let experimental_compact_prompt_path = config_profile
.experimental_compact_prompt_file

View File

@@ -62,11 +62,7 @@ fn build_collaboration_mode_update_item(
fn build_personality_update_item(
previous: Option<&TurnContext>,
next: &TurnContext,
personality_feature_enabled: bool,
) -> Option<ResponseItem> {
if !personality_feature_enabled {
return None;
}
let previous = previous?;
if next.model_info.slug != previous.model_info.slug {
return None;
@@ -120,7 +116,6 @@ pub(crate) fn build_settings_update_items(
next: &TurnContext,
shell: &Shell,
exec_policy: &Policy,
personality_feature_enabled: bool,
) -> Vec<ResponseItem> {
let mut update_items = Vec::new();
@@ -138,9 +133,7 @@ pub(crate) fn build_settings_update_items(
{
update_items.push(model_instructions_item);
}
if let Some(personality_item) =
build_personality_update_item(previous, next, personality_feature_enabled)
{
if let Some(personality_item) = build_personality_update_item(previous, next) {
update_items.push(personality_item);
}

View File

@@ -8,7 +8,6 @@ use codex_protocol::openai_models::TruncationPolicyConfig;
use codex_protocol::openai_models::default_input_modalities;
use crate::config::Config;
use crate::features::Feature;
use crate::truncate::approx_bytes_for_tokens;
use tracing::warn;
@@ -48,8 +47,6 @@ pub(crate) fn with_config_overrides(mut model: ModelInfo, config: &Config) -> Mo
if let Some(base_instructions) = &config.base_instructions {
model.base_instructions = base_instructions.clone();
model.model_messages = None;
} else if !config.features.enabled(Feature::Personality) {
model.model_messages = None;
}
model

View File

@@ -465,7 +465,7 @@ async fn user_turn_personality_same_value_does_not_add_update_message() -> anyho
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
async fn instructions_use_personality_template_even_if_feature_disabled() -> anyhow::Result<()> {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&codex_home).await;
config.features.disable(Feature::Personality);
@@ -473,16 +473,17 @@ async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
let model_info =
codex_core::test_support::construct_model_info_offline("gpt-5.2-codex", &config);
assert_eq!(
model_info.get_model_instructions(config.personality),
model_info.base_instructions
let instructions = model_info.get_model_instructions(config.personality);
assert!(
instructions.contains(LOCAL_FRIENDLY_TEMPLATE),
"expected local friendly personality template, got: {instructions:?}"
);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()> {
async fn user_turn_personality_updates_even_if_feature_disabled() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -529,7 +530,7 @@ async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()>
effort: None,
summary: None,
collaboration_mode: None,
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
})
.await?;
@@ -562,10 +563,11 @@ async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()>
let developer_texts = request.message_input_texts("developer");
let personality_text = developer_texts
.iter()
.find(|text| text.contains("<personality_spec>"));
.find(|text| text.contains("<personality_spec>"))
.expect("expected personality preamble");
assert!(
personality_text.is_none(),
"expected no personality preamble, got {personality_text:?}"
personality_text.contains(LOCAL_FRIENDLY_TEMPLATE),
"expected friendly personality update, got {personality_text:?}"
);
Ok(())
}

View File

@@ -3854,7 +3854,6 @@ impl ChatWidget {
let personality = self
.config
.personality
.filter(|_| self.config.features.enabled(Feature::Personality))
.filter(|_| self.current_model_supports_personality());
let op = Op::UserTurn {
items,
@@ -6043,9 +6042,6 @@ impl ChatWidget {
self.refresh_model_display();
self.request_redraw();
}
if feature == Feature::Personality {
self.sync_personality_command_enabled();
}
if feature == Feature::PreventIdleSleep {
self.turn_sleep_inhibitor = SleepInhibitor::new(enabled);
self.turn_sleep_inhibitor
@@ -6130,8 +6126,7 @@ impl ChatWidget {
}
fn sync_personality_command_enabled(&mut self) {
self.bottom_pane
.set_personality_command_enabled(self.config.features.enabled(Feature::Personality));
self.bottom_pane.set_personality_command_enabled(true);
}
fn current_model_supports_personality(&self) -> bool {