Compare commits

...

10 Commits

Author SHA1 Message Date
Charles Cunningham
fd9005eac8 Keep personality support when remote models are disabled 2026-01-27 22:10:40 -08:00
Charles Cunningham
945f748afd Avoid unknown model warning for remote slugs 2026-01-27 20:55:02 -08:00
Charles Cunningham
41401fbf87 Fix bug 2026-01-27 15:28:34 -08:00
Charles Cunningham
7e91d7e365 Restore local personality fallback 2026-01-27 14:11:13 -08:00
Charles Cunningham
2d87199bcc Refactor personality nudge eligibility 2026-01-27 14:01:23 -08:00
Charles Cunningham
df2e0d7c87 Fix personality override app-server test 2026-01-27 13:48:45 -08:00
Charles Cunningham
479bfe7b52 Gate personality on remote metadata 2026-01-27 13:27:04 -08:00
Charles Cunningham
26ef31a902 Require remote personality metadata 2026-01-27 13:25:37 -08:00
Charles Cunningham
6ac5245854 NUX: show personality nudge on startup/model switch; fix /personality support 2026-01-27 13:25:37 -08:00
Charles Cunningham
d4e9556a67 tui: add one-time personality nudge NUX with persisted hide flag 2026-01-27 13:25:37 -08:00
12 changed files with 644 additions and 73 deletions

View File

@@ -8,6 +8,7 @@ use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell_display;
use app_test_support::to_response;
use app_test_support::write_models_cache_with_models;
use codex_app_server_protocol::ByteRange;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
@@ -41,7 +42,14 @@ use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelInstructionsTemplate;
use codex_protocol::openai_models::ModelVisibility;
use codex_protocol::openai_models::PersonalityMessages;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_protocol::openai_models::TruncationPolicyConfig;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
@@ -421,12 +429,13 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
let response_mock = responses::mount_sse_once(&server, body).await;
let codex_home = TempDir::new()?;
create_config_toml(
write_models_cache_with_models(
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
vec![personality_enabled_model("exp-codex-personality")],
)?;
let mut feature_flags = BTreeMap::new();
feature_flags.insert(Feature::RemoteModels, true);
create_config_toml(codex_home.path(), &server.uri(), "never", &feature_flags)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -1681,3 +1690,45 @@ stream_max_retries = 0
),
)
}
/// Build a `ModelInfo` fixture for `slug` whose instruction template declares
/// personality support: a `{{ personality_message }}` placeholder plus concrete
/// messages for both personalities. Used to exercise personality gating in
/// app-server tests.
fn personality_enabled_model(slug: &str) -> ModelInfo {
    ModelInfo {
        slug: slug.to_string(),
        display_name: slug.to_string(),
        description: Some(format!("{slug} description")),
        default_reasoning_level: Some(ReasoningEffort::Medium),
        supported_reasoning_levels: vec![
            ReasoningEffortPreset {
                effort: ReasoningEffort::Low,
                description: "low".to_string(),
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "medium".to_string(),
            },
        ],
        shell_type: ConfigShellToolType::ShellCommand,
        visibility: ModelVisibility::List,
        supported_in_api: true,
        priority: 0,
        upgrade: None,
        base_instructions: "base instructions".to_string(),
        // Both the template placeholder and per-personality messages are
        // provided, so this model reads as personality-capable.
        model_instructions_template: Some(ModelInstructionsTemplate {
            template: "Base instructions\n{{ personality_message }}\n".to_string(),
            personality_messages: Some(PersonalityMessages(BTreeMap::from([
                (Personality::Friendly, "Friendly message".to_string()),
                (Personality::Pragmatic, "Pragmatic message".to_string()),
            ]))),
        }),
        supports_reasoning_summaries: false,
        support_verbosity: false,
        default_verbosity: None,
        apply_patch_tool_type: None,
        truncation_policy: TruncationPolicyConfig::bytes(10_000),
        supports_parallel_tool_calls: false,
        context_window: Some(128_000),
        auto_compact_token_limit: None,
        effective_context_window_percent: 95,
        experimental_supported_tools: Vec::new(),
    }
}

View File

@@ -456,6 +456,10 @@
"description": "Tracks whether the user has seen the model migration prompt",
"type": "boolean"
},
"hide_personality_nudge": {
"description": "Tracks whether the user has already seen the personality selection nudge.",
"type": "boolean"
},
"hide_rate_limit_model_nudge": {
"description": "Tracks whether the user opted out of the rate limit model switch reminder.",
"type": "boolean"

View File

@@ -33,6 +33,8 @@ pub enum ConfigEdit {
SetNoticeHideWorldWritableWarning(bool),
/// Toggle the rate limit model nudge acknowledgement flag.
SetNoticeHideRateLimitModelNudge(bool),
/// Toggle the personality selection nudge acknowledgement flag.
SetNoticeHidePersonalityNudge(bool),
/// Toggle the Windows onboarding acknowledgement flag.
SetWindowsWslSetupAcknowledged(bool),
/// Toggle the model migration prompt acknowledgement flag.
@@ -296,6 +298,11 @@ impl ConfigDocument {
&[Notice::TABLE_KEY, "hide_rate_limit_model_nudge"],
value(*acknowledged),
)),
ConfigEdit::SetNoticeHidePersonalityNudge(acknowledged) => Ok(self.write_value(
Scope::Global,
&[Notice::TABLE_KEY, "hide_personality_nudge"],
value(*acknowledged),
)),
ConfigEdit::SetNoticeHideModelMigrationPrompt(migration_config, acknowledged) => {
Ok(self.write_value(
Scope::Global,
@@ -748,6 +755,12 @@ impl ConfigEditsBuilder {
self
}
/// Queue an edit that persists `notice.hide_personality_nudge`.
pub fn set_hide_personality_nudge(mut self, acknowledged: bool) -> Self {
    self.edits
        .push(ConfigEdit::SetNoticeHidePersonalityNudge(acknowledged));
    self
}
pub fn set_hide_model_migration_prompt(mut self, model: &str, acknowledged: bool) -> Self {
self.edits
.push(ConfigEdit::SetNoticeHideModelMigrationPrompt(
@@ -1257,6 +1270,34 @@ hide_rate_limit_model_nudge = true
assert_eq!(contents, expected);
}
#[test]
fn blocking_set_hide_personality_nudge_preserves_table() {
    let tmp = tempdir().expect("tmpdir");
    let codex_home = tmp.path();
    // Seed an existing [notice] table to prove the edit appends the new key
    // rather than clobbering unrelated entries.
    std::fs::write(
        codex_home.join(CONFIG_TOML_FILE),
        r#"[notice]
existing = "value"
"#,
    )
    .expect("seed");
    apply_blocking(
        codex_home,
        None,
        &[ConfigEdit::SetNoticeHidePersonalityNudge(true)],
    )
    .expect("persist");
    let contents =
        std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
    // The prior key survives and the new flag is appended to the same table.
    let expected = r#"[notice]
existing = "value"
hide_personality_nudge = true
"#;
    assert_eq!(contents, expected);
}
#[test]
fn blocking_set_hide_gpt5_1_migration_prompt_preserves_table() {
let tmp = tempdir().expect("tmpdir");

View File

@@ -480,6 +480,8 @@ pub struct Notice {
pub hide_world_writable_warning: Option<bool>,
/// Tracks whether the user opted out of the rate limit model switch reminder.
pub hide_rate_limit_model_nudge: Option<bool>,
/// Tracks whether the user has already seen the personality selection nudge.
pub hide_personality_nudge: Option<bool>,
/// Tracks whether the user has seen the model migration prompt
pub hide_gpt5_1_migration_prompt: Option<bool>,
/// Tracks whether the user has seen the gpt-5.1-codex-max migration prompt

View File

@@ -103,6 +103,19 @@ impl ModelsManager {
Ok(self.build_available_models(remote_models))
}
/// Determine whether a model supports personalities based on remote metadata.
///
/// Prefers the remote metadata entry for `model`; when the remote list is
/// unavailable or does not contain the slug, falls back to the bundled
/// model table. Config overrides are applied in either case before the
/// personality check.
pub fn supports_personality(&self, model: &str, config: &Config) -> bool {
    let info = self
        .try_get_remote_models(config)
        .ok()
        .and_then(|remote_models| remote_models.into_iter().find(|info| info.slug == model))
        .unwrap_or_else(|| model_info::find_model_info_for_slug(model));
    model_info::with_config_overrides(info, config).supports_personality()
}
// todo(aibrahim): should be visible to core only and sent on session_configured event
/// Get the model identifier to use, refreshing according to the specified strategy.
///
@@ -141,10 +154,9 @@ impl ModelsManager {
.await
.into_iter()
.find(|m| m.slug == model);
let model = if let Some(remote) = remote {
remote
} else {
model_info::find_model_info_for_slug(model)
let model = match remote {
Some(remote) => remote,
None => model_info::find_model_info_for_slug(model),
};
model_info::with_config_overrides(model, config)
}
@@ -324,6 +336,12 @@ impl ModelsManager {
}
}
#[cfg(any(test, feature = "test-support"))]
/// Override remote model metadata for tests.
///
/// Replaces the cached remote model list wholesale, bypassing any fetch.
pub async fn set_remote_models_for_testing(&self, models: Vec<ModelInfo>) {
    *self.remote_models.write().await = models;
}
#[cfg(any(test, feature = "test-support"))]
/// Get model identifier without consulting remote state or cache.
pub fn get_model_offline(model: Option<&str>) -> String {
@@ -365,10 +383,14 @@ mod tests {
use crate::features::Feature;
use crate::model_provider_info::WireApi;
use chrono::Utc;
use codex_protocol::config_types::Personality;
use codex_protocol::openai_models::ModelInstructionsTemplate;
use codex_protocol::openai_models::ModelsResponse;
use codex_protocol::openai_models::PersonalityMessages;
use core_test_support::responses::mount_models_once;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::collections::BTreeMap;
use tempfile::tempdir;
use wiremock::MockServer;
@@ -699,6 +721,66 @@ mod tests {
assert_eq!(available, vec![expected_hidden, expected_visible]);
}
#[tokio::test]
async fn supports_personality_requires_remote_metadata() {
    let codex_home = tempdir().expect("temp dir");
    let mut config = ConfigBuilder::default()
        .codex_home(codex_home.path().to_path_buf())
        .build()
        .await
        .expect("load default test config");
    config.features.enable(Feature::RemoteModels);
    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    let provider = provider_for("http://example.test".to_string());
    let manager =
        ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
    // Remote metadata must explicitly include personality support.
    // A template placeholder alone (personality_messages: None) is not enough.
    let mut remote = remote_model("gpt-5.2-codex", "Remote gpt-5.2-codex", 0);
    remote.model_instructions_template = Some(ModelInstructionsTemplate {
        template: "{{ personality_message }}".to_string(),
        personality_messages: None,
    });
    *manager.remote_models.write().await = vec![remote];
    // Both the direct query and the resolved model info agree: unsupported.
    assert!(!manager.supports_personality("gpt-5.2-codex", &config));
    let model = manager.get_model_info("gpt-5.2-codex", &config).await;
    assert!(!model.supports_personality());
}
#[tokio::test]
async fn supports_personality_respects_base_instructions_override() {
    let codex_home = tempdir().expect("temp dir");
    let mut config = ConfigBuilder::default()
        .codex_home(codex_home.path().to_path_buf())
        .build()
        .await
        .expect("load default test config");
    config.features.enable(Feature::RemoteModels);
    // A user-supplied base_instructions override is in effect for this test.
    config.base_instructions = Some("override instructions".to_string());
    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    let provider = provider_for("http://example.test".to_string());
    let manager =
        ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
    // Remote metadata declares full personality support...
    let mut remote = remote_model("gpt-5.2-codex", "Remote gpt-5.2-codex", 0);
    remote.model_instructions_template = Some(ModelInstructionsTemplate {
        template: "{{ personality_message }}".to_string(),
        personality_messages: Some(PersonalityMessages(BTreeMap::from([
            (Personality::Friendly, "Friendly template".to_string()),
            (Personality::Pragmatic, "Pragmatic template".to_string()),
        ]))),
    });
    *manager.remote_models.write().await = vec![remote];
    // ...but the override wins: personality is disabled and the overridden
    // instructions are what the resolved model carries.
    assert!(!manager.supports_personality("gpt-5.2-codex", &config));
    let model = manager.get_model_info("gpt-5.2-codex", &config).await;
    assert!(!model.supports_personality());
    assert_eq!(model.base_instructions, "override instructions");
}
#[test]
fn bundled_models_json_roundtrips() {
let file_contents = include_str!("../../models.json");

View File

@@ -34,6 +34,16 @@ const PERSONALITY_PRAGMATIC: &str = include_str!("../../templates/personalities/
pub(crate) const CONTEXT_WINDOW_272K: i64 = 272_000;
/// Shared instruction template for the gpt-5.2-codex family: the bundled base
/// template plus the Friendly/Pragmatic personality messages. Extracted so
/// multiple `model_info!` entries reference one definition.
fn gpt_5_2_codex_personality_template() -> ModelInstructionsTemplate {
    ModelInstructionsTemplate {
        template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
        personality_messages: Some(PersonalityMessages(BTreeMap::from([
            (Personality::Friendly, PERSONALITY_FRIENDLY.to_string()),
            (Personality::Pragmatic, PERSONALITY_PRAGMATIC.to_string()),
        ]))),
    }
}
macro_rules! model_info {
(
$slug:expr $(, $key:ident : $value:expr )* $(,)?
@@ -169,16 +179,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
model_info!(
slug,
base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
model_instructions_template: Some(ModelInstructionsTemplate {
template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
personality_messages: Some(PersonalityMessages(BTreeMap::from([(
Personality::Friendly,
PERSONALITY_FRIENDLY.to_string(),
), (
Personality::Pragmatic,
PERSONALITY_PRAGMATIC.to_string(),
)]))),
}),
model_instructions_template: Some(gpt_5_2_codex_personality_template()),
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
shell_type: ConfigShellToolType::ShellCommand,
supports_parallel_tool_calls: true,
@@ -205,6 +206,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
model_info!(
slug,
base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
model_instructions_template: Some(gpt_5_2_codex_personality_template()),
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
shell_type: ConfigShellToolType::ShellCommand,
supports_parallel_tool_calls: true,

View File

@@ -292,10 +292,16 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
base_instructions: "base instructions".to_string(),
model_instructions_template: Some(ModelInstructionsTemplate {
template: "Base instructions\n{{ personality_message }}\n".to_string(),
personality_messages: Some(PersonalityMessages(BTreeMap::from([(
Personality::Friendly,
remote_personality_message.to_string(),
)]))),
personality_messages: Some(PersonalityMessages(BTreeMap::from([
(
Personality::Friendly,
remote_personality_message.to_string(),
),
(
Personality::Pragmatic,
"Pragmatic from remote template".to_string(),
),
]))),
}),
supports_reasoning_summaries: false,
support_verbosity: false,

View File

@@ -1532,6 +1532,9 @@ impl App {
AppEvent::OpenReasoningPopup { model } => {
self.chat_widget.open_reasoning_popup(model);
}
AppEvent::OpenPersonalityPopup => {
self.chat_widget.open_personality_popup();
}
AppEvent::OpenAllModelsPopup { models } => {
self.chat_widget.open_all_models_popup(models);
}
@@ -1976,6 +1979,9 @@ impl App {
AppEvent::UpdateRateLimitSwitchPromptHidden(hidden) => {
self.chat_widget.set_rate_limit_switch_prompt_hidden(hidden);
}
AppEvent::UpdatePersonalityNudgeHidden(hidden) => {
self.chat_widget.set_personality_nudge_hidden(hidden);
}
AppEvent::PersistFullAccessWarningAcknowledged => {
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
.set_hide_full_access_warning(true)
@@ -2021,6 +2027,21 @@ impl App {
));
}
}
AppEvent::PersistPersonalityNudgeHidden => {
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
.set_hide_personality_nudge(true)
.apply()
.await
{
tracing::error!(
error = %err,
"failed to persist personality nudge preference"
);
self.chat_widget.add_error_message(format!(
"Failed to save personality nudge preference: {err}"
));
}
}
AppEvent::PersistModelMigrationPromptAcknowledged {
from_model,
to_model,

View File

@@ -126,6 +126,9 @@ pub(crate) enum AppEvent {
model: ModelPreset,
},
/// Open the personality selection popup.
OpenPersonalityPopup,
/// Open the full model picker (non-auto models).
OpenAllModelsPopup {
models: Vec<ModelPreset>,
@@ -202,6 +205,9 @@ pub(crate) enum AppEvent {
/// Update whether the rate limit switch prompt has been acknowledged for the session.
UpdateRateLimitSwitchPromptHidden(bool),
/// Update whether the personality nudge has been acknowledged for the session.
UpdatePersonalityNudgeHidden(bool),
/// Persist the acknowledgement flag for the full access warning prompt.
PersistFullAccessWarningAcknowledged,
@@ -212,6 +218,9 @@ pub(crate) enum AppEvent {
/// Persist the acknowledgement flag for the rate limit switch prompt.
PersistRateLimitSwitchPromptHidden,
/// Persist the acknowledgement flag for the personality nudge.
PersistPersonalityNudgeHidden,
/// Persist the acknowledgement flag for the model migration prompt.
PersistModelMigrationPromptAcknowledged {
from_model: String,

View File

@@ -386,6 +386,14 @@ enum RateLimitSwitchPromptState {
Shown,
}
#[derive(Default)]
enum PersonalityNudgeState {
#[default]
Idle,
Pending,
Shown,
}
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub(crate) enum ExternalEditorState {
#[default]
@@ -438,6 +446,7 @@ pub(crate) struct ChatWidget {
plan_type: Option<PlanType>,
rate_limit_warnings: RateLimitWarningState,
rate_limit_switch_prompt: RateLimitSwitchPromptState,
personality_nudge: PersonalityNudgeState,
rate_limit_poller: Option<JoinHandle<()>>,
// Stream lifecycle controller
stream_controller: Option<StreamController>,
@@ -745,6 +754,8 @@ impl ChatWidget {
);
self.refresh_model_display();
self.sync_personality_command_enabled();
self.schedule_personality_nudge_if_needed();
self.maybe_show_pending_personality_nudge();
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -904,7 +915,7 @@ impl ChatWidget {
response: last_agent_message.unwrap_or_default(),
});
self.maybe_show_pending_rate_limit_prompt();
self.maybe_show_post_turn_nudges();
}
fn maybe_prompt_plan_implementation(&mut self, last_agent_message: Option<&str>) {
@@ -1109,7 +1120,7 @@ impl ChatWidget {
self.unified_exec_wait_streak = None;
self.clear_unified_exec_processes();
self.stream_controller = None;
self.maybe_show_pending_rate_limit_prompt();
self.maybe_show_post_turn_nudges();
}
fn on_error(&mut self, message: String) {
@@ -2018,6 +2029,7 @@ impl ChatWidget {
plan_type: None,
rate_limit_warnings: RateLimitWarningState::default(),
rate_limit_switch_prompt: RateLimitSwitchPromptState::default(),
personality_nudge: PersonalityNudgeState::default(),
rate_limit_poller: None,
stream_controller: None,
running_commands: HashMap::new(),
@@ -2151,6 +2163,7 @@ impl ChatWidget {
plan_type: None,
rate_limit_warnings: RateLimitWarningState::default(),
rate_limit_switch_prompt: RateLimitSwitchPromptState::default(),
personality_nudge: PersonalityNudgeState::default(),
rate_limit_poller: None,
stream_controller: None,
running_commands: HashMap::new(),
@@ -2277,6 +2290,7 @@ impl ChatWidget {
plan_type: None,
rate_limit_warnings: RateLimitWarningState::default(),
rate_limit_switch_prompt: RateLimitSwitchPromptState::default(),
personality_nudge: PersonalityNudgeState::default(),
rate_limit_poller: None,
stream_controller: None,
running_commands: HashMap::new(),
@@ -2425,50 +2439,57 @@ impl ChatWidget {
self.request_redraw();
}
}
_ => match self.bottom_pane.handle_key_event(key_event) {
InputResult::Submitted {
text,
text_elements,
} => {
let user_message = UserMessage {
_ => {
let had_modal_or_popup = !self.bottom_pane.no_modal_or_popup_active();
let input_result = self.bottom_pane.handle_key_event(key_event);
if had_modal_or_popup && self.bottom_pane.no_modal_or_popup_active() {
self.on_bottom_pane_view_closed();
}
match input_result {
InputResult::Submitted {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
if self.is_session_configured() {
// Submitted is only emitted when steer is enabled (Enter sends immediately).
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
self.set_status_header(String::from("Working"));
self.submit_user_message(user_message);
} else {
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
if self.is_session_configured() {
// Submitted is only emitted when steer is enabled (Enter sends immediately).
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
self.set_status_header(String::from("Working"));
self.submit_user_message(user_message);
} else {
self.queue_user_message(user_message);
}
}
InputResult::Queued {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
}
InputResult::Queued {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
},
}
}
}
@@ -3331,6 +3352,37 @@ impl ChatWidget {
.unwrap_or(false)
}
/// Surface post-turn nudges in priority order: the rate limit switch prompt
/// wins, and the personality nudge only shows when the rate limit prompt did
/// not just appear.
fn maybe_show_post_turn_nudges(&mut self) {
    let rate_limit_was_pending = matches!(
        self.rate_limit_switch_prompt,
        RateLimitSwitchPromptState::Pending
    );
    self.maybe_show_pending_rate_limit_prompt();
    // If the rate limit prompt transitioned Pending -> Shown just now, defer
    // the personality nudge so the two popups don't compete for the pane.
    if rate_limit_was_pending
        && matches!(
            self.rate_limit_switch_prompt,
            RateLimitSwitchPromptState::Shown
        )
    {
        return;
    }
    self.maybe_show_pending_personality_nudge();
}
/// Invoked when a bottom-pane modal/popup closes during key handling.
fn on_bottom_pane_view_closed(&mut self) {
    // A pending nudge could not show while another view was open; retry now.
    if matches!(self.personality_nudge, PersonalityNudgeState::Pending) {
        self.maybe_show_pending_personality_nudge();
        return;
    }
    // Shown + not hidden + no personality chosen means the user dismissed the
    // nudge with Esc rather than picking an option.
    if matches!(self.personality_nudge, PersonalityNudgeState::Shown)
        && !self.personality_nudge_hidden()
        && self.config.model_personality.is_none()
    {
        // Allow the nudge to re-trigger on future model switches after Esc dismissal.
        self.personality_nudge = PersonalityNudgeState::Idle;
    }
}
fn maybe_show_pending_rate_limit_prompt(&mut self) {
if self.rate_limit_switch_prompt_hidden() {
self.rate_limit_switch_prompt = RateLimitSwitchPromptState::Idle;
@@ -3423,6 +3475,80 @@ impl ChatWidget {
});
}
/// Whether the user has opted out of the personality nudge via the persisted
/// `notice.hide_personality_nudge` flag (absent counts as not hidden).
fn personality_nudge_hidden(&self) -> bool {
    matches!(self.config.notices.hide_personality_nudge, Some(true))
}
/// Arm the personality nudge (Pending) when the session is configured and the
/// nudge is eligible; reset to Idle when eligibility is lost.
fn schedule_personality_nudge_if_needed(&mut self) {
    if !self.is_session_configured() {
        return;
    }
    if !self.personality_nudge_is_eligible() {
        self.personality_nudge = PersonalityNudgeState::Idle;
        return;
    }
    // Don't re-arm a nudge the user has already seen this session.
    if matches!(self.personality_nudge, PersonalityNudgeState::Shown) {
        return;
    }
    self.personality_nudge = PersonalityNudgeState::Pending;
}
/// Show the personality nudge if it is Pending, still eligible, and nothing
/// else occupies the bottom pane.
fn maybe_show_pending_personality_nudge(&mut self) {
    if !self.personality_nudge_is_eligible() {
        self.personality_nudge = PersonalityNudgeState::Idle;
        return;
    }
    if !matches!(self.personality_nudge, PersonalityNudgeState::Pending) {
        return;
    }
    // Defer while another modal/popup is open; retried when the view closes
    // or at the next post-turn nudge pass.
    if !self.bottom_pane.no_modal_or_popup_active() {
        return;
    }
    self.open_personality_nudge();
    self.personality_nudge = PersonalityNudgeState::Shown;
}
/// The nudge is eligible only when it has not been hidden, no personality is
/// configured yet, and the current model supports personalities.
fn personality_nudge_is_eligible(&self) -> bool {
    !self.personality_nudge_hidden()
        && self.config.model_personality.is_none()
        && self.current_model_supports_personality()
}
/// Open the one-time personality nudge as a two-item selection view:
/// "Choose a personality" opens the full personality popup; "Not now" hides
/// the nudge for the session and persists the opt-out.
fn open_personality_nudge(&mut self) {
    let choose_actions: Vec<SelectionAction> = vec![Box::new(|tx| {
        tx.send(AppEvent::OpenPersonalityPopup);
    })];
    // "Not now" updates the in-memory flag AND persists it to config.toml.
    let not_now_actions: Vec<SelectionAction> = vec![Box::new(|tx| {
        tx.send(AppEvent::UpdatePersonalityNudgeHidden(true));
        tx.send(AppEvent::PersistPersonalityNudgeHidden);
    })];
    let items = vec![
        SelectionItem {
            name: "Choose a personality".to_string(),
            description: Some("Pick Friendly or Pragmatic for future responses.".to_string()),
            actions: choose_actions,
            dismiss_on_select: true,
            ..Default::default()
        },
        SelectionItem {
            name: "Not now".to_string(),
            description: Some("You can run /personality any time.".to_string()),
            actions: not_now_actions,
            dismiss_on_select: true,
            ..Default::default()
        },
    ];
    self.bottom_pane.show_selection_view(SelectionViewParams {
        title: Some("New: response personalities".to_string()),
        subtitle: Some("Prefer a different style? Try /personality.".to_string()),
        footer_hint: Some(standard_popup_hint_line()),
        items,
        ..Default::default()
    });
}
/// Open a popup to choose a quick auto model. Selecting "All models"
/// opens the full picker with every available preset.
pub(crate) fn open_model_popup(&mut self) {
@@ -4744,6 +4870,13 @@ impl ChatWidget {
}
}
/// Update the session-local hidden flag for the personality nudge; hiding it
/// also cancels any pending/shown nudge state.
pub(crate) fn set_personality_nudge_hidden(&mut self, hidden: bool) {
    self.config.notices.hide_personality_nudge = Some(hidden);
    if hidden {
        self.personality_nudge = PersonalityNudgeState::Idle;
    }
}
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
pub(crate) fn world_writable_warning_hidden(&self) -> bool {
self.config
@@ -4767,6 +4900,7 @@ impl ChatWidget {
/// Set the personality in the widget's config copy.
pub(crate) fn set_personality(&mut self, personality: Personality) {
self.config.model_personality = Some(personality);
self.personality_nudge = PersonalityNudgeState::Idle;
}
/// Set the model in the widget's config copy and stored collaboration mode.
@@ -4781,6 +4915,8 @@ impl ChatWidget {
}
self.refresh_model_display();
self.sync_personality_command_enabled();
self.schedule_personality_nudge_if_needed();
self.maybe_show_pending_personality_nudge();
}
pub(crate) fn current_model(&self) -> &str {
@@ -4799,17 +4935,8 @@ impl ChatWidget {
}
fn current_model_supports_personality(&self) -> bool {
let model = self.current_model();
self.models_manager
.try_list_models(&self.config)
.ok()
.and_then(|models| {
models
.into_iter()
.find(|preset| preset.model == model)
.map(|preset| preset.supports_personality)
})
.unwrap_or(false)
.supports_personality(self.current_model(), &self.config)
}
#[allow(dead_code)] // Used in tests

View File

@@ -0,0 +1,11 @@
---
source: tui/src/chatwidget/tests.rs
expression: popup
---
New: response personalities
Prefer a different style? Try /personality.
1. Choose a personality Pick Friendly or Pragmatic for future responses.
2. Not now You can run /personality any time.
Press enter to confirm or esc to go back

View File

@@ -67,7 +67,10 @@ use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelInstructionsTemplate;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::PersonalityMessages;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::plan_tool::PlanItemArg;
@@ -82,8 +85,10 @@ use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
use insta::assert_snapshot;
use pretty_assertions::assert_eq;
use serde_json::json;
#[cfg(target_os = "windows")]
use serial_test::serial;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::path::PathBuf;
use tempfile::NamedTempFile;
@@ -124,6 +129,58 @@ fn snapshot(percent: f64) -> RateLimitSnapshot {
}
}
/// Instruction-template fixture with a `{{ personality_message }}` placeholder
/// and messages for both personalities, i.e. full personality support.
fn personality_template() -> ModelInstructionsTemplate {
    ModelInstructionsTemplate {
        template: "Base instructions\n{{ personality_message }}\n".to_string(),
        personality_messages: Some(PersonalityMessages(BTreeMap::from([
            (Personality::Friendly, "Friendly message".to_string()),
            (Personality::Pragmatic, "Pragmatic message".to_string()),
        ]))),
    }
}
/// Build a minimal remote `ModelInfo` for `slug`, then attach the shared
/// personality template so the template fixture lives in one place.
fn remote_model_with_personality(slug: &str) -> ModelInfo {
    // Built via JSON deserialization; fields not listed here take serde
    // defaults. NOTE(review): assumes ModelInfo declares defaults for the
    // omitted fields — confirm against the protocol definition.
    let mut model: ModelInfo = serde_json::from_value(json!({
        "slug": slug,
        "display_name": slug,
        "description": format!("{slug} description"),
        "default_reasoning_level": "medium",
        "supported_reasoning_levels": [
            {"effort": "low", "description": "low"},
            {"effort": "medium", "description": "medium"}
        ],
        "shell_type": "shell_command",
        "visibility": "list",
        "supported_in_api": true,
        "priority": 0,
        "upgrade": null,
        "base_instructions": "base instructions",
        "supports_reasoning_summaries": false,
        "support_verbosity": false,
        "default_verbosity": null,
        "apply_patch_tool_type": null,
        "truncation_policy": {"mode": "bytes", "limit": 10_000},
        "supports_parallel_tool_calls": false,
        "context_window": 128_000,
        "experimental_supported_tools": [],
    }))
    .expect("valid model");
    model.model_instructions_template = Some(personality_template());
    model
}
/// Enable the RemoteModels feature on `chat` and install remote metadata that
/// declares personality support for every slug in `models`.
async fn enable_personality_support(chat: &mut ChatWidget, models: &[&str]) {
    chat.set_feature_enabled(Feature::RemoteModels, true);
    let remote_models = models
        .iter()
        .copied()
        .map(remote_model_with_personality)
        .collect();
    chat.models_manager
        .set_remote_models_for_testing(remote_models)
        .await;
}
#[tokio::test]
async fn resumed_initial_messages_render_history() {
let (mut chat, mut rx, _ops) = make_chatwidget_manual(None).await;
@@ -798,6 +855,7 @@ async fn make_chatwidget_manual(
plan_type: None,
rate_limit_warnings: RateLimitWarningState::default(),
rate_limit_switch_prompt: RateLimitSwitchPromptState::default(),
personality_nudge: PersonalityNudgeState::default(),
rate_limit_poller: None,
stream_controller: None,
running_commands: HashMap::new(),
@@ -1126,6 +1184,150 @@ async fn rate_limit_switch_prompt_respects_hidden_notice() {
));
}
/// Build a `SessionConfigured` event for `model` with minimal test defaults.
///
/// NOTE(review): the `NamedTempFile` guard is dropped when this helper
/// returns, which deletes the file behind `rollout_path` — fine as long as
/// tests only need a plausible path; confirm nothing reads the file.
fn session_configured_event_for(model: &str) -> Event {
    let rollout_file = NamedTempFile::new().expect("rollout file");
    Event {
        id: "session-configured".into(),
        msg: EventMsg::SessionConfigured(codex_core::protocol::SessionConfiguredEvent {
            session_id: ThreadId::new(),
            forked_from_id: None,
            model: model.to_string(),
            model_provider_id: "test-provider".to_string(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::ReadOnly,
            cwd: PathBuf::from("/home/user/project"),
            reasoning_effort: Some(ReasoningEffortConfig::default()),
            history_log_id: 0,
            history_entry_count: 0,
            initial_messages: None,
            rollout_path: Some(rollout_file.path().to_path_buf()),
        }),
    }
}
#[tokio::test]
async fn personality_nudge_respects_hidden_notice() {
    let (mut chat, _rx, _) = make_chatwidget_manual(Some("bengalfox")).await;
    enable_personality_support(&mut chat, &["bengalfox"]).await;
    // Pre-set the persisted opt-out: the nudge must never arm, even across
    // session configuration and a post-turn nudge pass.
    chat.config.notices.hide_personality_nudge = Some(true);
    chat.handle_codex_event(session_configured_event_for("bengalfox"));
    chat.maybe_show_post_turn_nudges();
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Idle
    ));
}
#[tokio::test]
async fn personality_nudge_shows_once_and_hides_after_seen() {
    let (mut chat, mut rx, _) = make_chatwidget_manual(Some("bengalfox")).await;
    enable_personality_support(&mut chat, &["bengalfox"]).await;
    // Session configuration alone should surface the nudge immediately.
    chat.handle_codex_event(session_configured_event_for("bengalfox"));
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Shown
    ));
    // Select "Not now" to persist the hide flag.
    chat.handle_key_event(KeyEvent::from(KeyCode::Down));
    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
    // Drain the app-event channel and check both the in-memory update and the
    // persistence request were emitted.
    let mut saw_update = false;
    let mut saw_persist = false;
    while let Ok(event) = rx.try_recv() {
        match event {
            AppEvent::UpdatePersonalityNudgeHidden(true) => saw_update = true,
            AppEvent::PersistPersonalityNudgeHidden => saw_persist = true,
            _ => {}
        }
    }
    assert!(saw_update, "expected UpdatePersonalityNudgeHidden(true)");
    assert!(saw_persist, "expected PersistPersonalityNudgeHidden");
    // Once hidden, re-scheduling must leave the nudge idle.
    chat.set_personality_nudge_hidden(true);
    chat.schedule_personality_nudge_if_needed();
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Idle
    ));
}
#[tokio::test]
async fn personality_nudge_escape_does_not_persist_hidden() {
    let (mut chat, mut rx, _) = make_chatwidget_manual(Some("bengalfox")).await;
    enable_personality_support(&mut chat, &["bengalfox"]).await;
    chat.handle_codex_event(session_configured_event_for("bengalfox"));
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Shown
    ));
    // Esc dismisses the popup without choosing either option.
    chat.handle_key_event(KeyEvent::from(KeyCode::Esc));
    let mut saw_update = false;
    let mut saw_persist = false;
    while let Ok(event) = rx.try_recv() {
        match event {
            AppEvent::UpdatePersonalityNudgeHidden(true) => saw_update = true,
            AppEvent::PersistPersonalityNudgeHidden => saw_persist = true,
            _ => {}
        }
    }
    assert!(!saw_update, "Esc should not hide the personality nudge");
    assert!(
        !saw_persist,
        "Esc should not persist the personality nudge hide flag"
    );
    // Nothing was persisted and the state machine returned to Idle, so the
    // nudge may re-trigger later (e.g. on a model switch).
    assert_eq!(chat.config.notices.hide_personality_nudge, None);
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Idle
    ));
}
// With remote metadata installed, the current-model personality check should
// report support for gpt-5.2-codex.
#[tokio::test]
async fn gpt_52_codex_supports_personality_command() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.2-codex")).await;
    enable_personality_support(&mut chat, &["gpt-5.2-codex"]).await;
    assert!(chat.current_model_supports_personality());
}
#[tokio::test]
async fn personality_nudge_shows_on_model_switch_after_escape() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
    enable_personality_support(&mut chat, &["bengalfox"]).await;
    chat.handle_codex_event(session_configured_event_for("bengalfox"));
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Shown
    ));
    // Esc dismissal resets the state machine without persisting anything.
    chat.handle_key_event(KeyEvent::from(KeyCode::Esc));
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Idle
    ));
    // Switching to a model without personality support keeps the nudge idle...
    chat.set_model("gpt-5");
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Idle
    ));
    // ...while switching back to a supporting model re-triggers it.
    chat.set_model("bengalfox");
    assert!(matches!(
        chat.personality_nudge,
        PersonalityNudgeState::Shown
    ));
}
#[tokio::test]
async fn rate_limit_switch_prompt_defers_until_task_complete() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@@ -2411,7 +2613,9 @@ async fn collab_mode_enabling_keeps_custom_until_selected() {
#[tokio::test]
async fn user_turn_includes_personality_from_config() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
enable_personality_support(&mut chat, &["bengalfox"]).await;
chat.thread_id = Some(ThreadId::new());
chat.set_personality_nudge_hidden(true);
chat.set_model("bengalfox");
chat.set_personality(Personality::Friendly);
@@ -2974,6 +3178,7 @@ async fn model_selection_popup_snapshot() {
#[tokio::test]
async fn personality_selection_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
enable_personality_support(&mut chat, &["bengalfox"]).await;
chat.thread_id = Some(ThreadId::new());
chat.open_personality_popup();
@@ -2981,6 +3186,16 @@ async fn personality_selection_popup_snapshot() {
assert_snapshot!("personality_selection_popup", popup);
}
// Snapshot the rendered nudge popup (80 columns) so wording/layout changes
// are reviewed deliberately.
#[tokio::test]
async fn personality_nudge_popup_snapshot() {
    let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
    enable_personality_support(&mut chat, &["bengalfox"]).await;
    chat.handle_codex_event(session_configured_event_for("bengalfox"));
    let popup = render_bottom_popup(&chat, 80);
    assert_snapshot!("personality_nudge_popup", popup);
}
#[tokio::test]
async fn model_picker_hides_show_in_picker_false_models_from_cache() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("test-visible-model")).await;