NUX: show personality nudge on startup/model switch; fix /personality support

This commit is contained in:
Charles Cunningham
2026-01-27 12:16:00 -08:00
parent d4e9556a67
commit 6ac5245854
4 changed files with 224 additions and 72 deletions

View File

@@ -103,6 +103,38 @@ impl ModelsManager {
Ok(self.build_available_models(remote_models))
}
/// Determine whether a model supports personalities, with a local fallback when
/// remote metadata omits the instructions template.
/// Report whether `model` supports personalities.
///
/// Checks, in short-circuit order: remote model metadata, the bundled local
/// presets, and finally the listed model presets. The local fallbacks cover
/// the case where remote metadata omits the instructions template.
pub fn supports_personality(&self, model: &str, config: &Config) -> bool {
    // Remote metadata wins when it affirmatively supports personalities.
    let remote_supports = self
        .try_get_remote_models(config)
        .ok()
        .and_then(|remote_models| remote_models.into_iter().find(|info| info.slug == model))
        .is_some_and(|info| info.supports_personality());

    // Lazily evaluated fallbacks; only run when the previous check was false.
    let local_preset_supports = || {
        self.local_models
            .iter()
            .find(|preset| preset.model == model)
            .is_some_and(|preset| preset.supports_personality)
    };
    let listed_preset_supports = || {
        self.try_list_models(config)
            .ok()
            .and_then(|models| models.into_iter().find(|preset| preset.model == model))
            .is_some_and(|preset| preset.supports_personality)
    };

    remote_supports || local_preset_supports() || listed_preset_supports()
}
// todo(aibrahim): should be visible to core only and sent on session_configured event
/// Get the model identifier to use, refreshing according to the specified strategy.
///
@@ -136,15 +168,19 @@ impl ModelsManager {
// todo(aibrahim): look if we can tighten it to pub(crate)
/// Look up model metadata, applying remote overrides and config adjustments.
pub async fn get_model_info(&self, model: &str, config: &Config) -> ModelInfo {
let local = model_info::find_model_info_for_slug(model);
let remote = self
.get_remote_models(config)
.await
.into_iter()
.find(|m| m.slug == model);
let model = if let Some(remote) = remote {
let model = if let Some(mut remote) = remote {
if !remote.supports_personality() && local.supports_personality() {
remote.model_instructions_template = local.model_instructions_template.clone();
}
remote
} else {
model_info::find_model_info_for_slug(model)
local
};
model_info::with_config_overrides(model, config)
}
@@ -365,6 +401,7 @@ mod tests {
use crate::features::Feature;
use crate::model_provider_info::WireApi;
use chrono::Utc;
use codex_protocol::openai_models::ModelInstructionsTemplate;
use codex_protocol::openai_models::ModelsResponse;
use core_test_support::responses::mount_models_once;
use pretty_assertions::assert_eq;
@@ -699,6 +736,34 @@ mod tests {
assert_eq!(available, vec![expected_hidden, expected_visible]);
}
/// Remote metadata whose template carries no personality messages must not
/// disable the support advertised by the bundled local model info.
#[tokio::test]
async fn supports_personality_falls_back_to_local_presets() {
    const SLUG: &str = "gpt-5.2-codex";

    let home = tempdir().expect("temp dir");
    let mut config = ConfigBuilder::default()
        .codex_home(home.path().to_path_buf())
        .build()
        .await
        .expect("load default test config");
    config.features.enable(Feature::RemoteModels);

    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    let provider = provider_for("http://example.test".to_string());
    let manager =
        ModelsManager::with_provider(home.path().to_path_buf(), auth_manager, provider);

    // Seed remote metadata with a template that has no personality messages.
    let mut remote_info = remote_model(SLUG, "Remote gpt-5.2-codex", 0);
    remote_info.model_instructions_template = Some(ModelInstructionsTemplate {
        template: "{{ personality_message }}".to_string(),
        personality_messages: None,
    });
    *manager.remote_models.write().await = vec![remote_info];

    // Both the capability query and the resolved model info should fall back
    // to the local template.
    assert!(manager.supports_personality(SLUG, &config));
    let model_info = manager.get_model_info(SLUG, &config).await;
    assert!(model_info.supports_personality());
}
#[test]
fn bundled_models_json_roundtrips() {
let file_contents = include_str!("../../models.json");

View File

@@ -34,6 +34,16 @@ const PERSONALITY_PRAGMATIC: &str = include_str!("../../templates/personalities/
pub(crate) const CONTEXT_WINDOW_272K: i64 = 272_000;
/// Build the gpt-5.2-codex instructions template, bundling the friendly and
/// pragmatic personality messages shipped with the binary.
fn gpt_5_2_codex_personality_template() -> ModelInstructionsTemplate {
    let mut messages = BTreeMap::new();
    messages.insert(Personality::Friendly, PERSONALITY_FRIENDLY.to_string());
    messages.insert(Personality::Pragmatic, PERSONALITY_PRAGMATIC.to_string());
    ModelInstructionsTemplate {
        template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
        personality_messages: Some(PersonalityMessages(messages)),
    }
}
macro_rules! model_info {
(
$slug:expr $(, $key:ident : $value:expr )* $(,)?
@@ -169,16 +179,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
model_info!(
slug,
base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
model_instructions_template: Some(ModelInstructionsTemplate {
template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
personality_messages: Some(PersonalityMessages(BTreeMap::from([(
Personality::Friendly,
PERSONALITY_FRIENDLY.to_string(),
), (
Personality::Pragmatic,
PERSONALITY_PRAGMATIC.to_string(),
)]))),
}),
model_instructions_template: Some(gpt_5_2_codex_personality_template()),
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
shell_type: ConfigShellToolType::ShellCommand,
supports_parallel_tool_calls: true,
@@ -205,6 +206,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
model_info!(
slug,
base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
model_instructions_template: Some(gpt_5_2_codex_personality_template()),
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
shell_type: ConfigShellToolType::ShellCommand,
supports_parallel_tool_calls: true,

View File

@@ -755,6 +755,7 @@ impl ChatWidget {
self.refresh_model_display();
self.sync_personality_command_enabled();
self.schedule_personality_nudge_if_needed();
self.maybe_show_pending_personality_nudge();
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -2438,50 +2439,57 @@ impl ChatWidget {
self.request_redraw();
}
}
_ => match self.bottom_pane.handle_key_event(key_event) {
InputResult::Submitted {
text,
text_elements,
} => {
let user_message = UserMessage {
_ => {
let had_modal_or_popup = !self.bottom_pane.no_modal_or_popup_active();
let input_result = self.bottom_pane.handle_key_event(key_event);
if had_modal_or_popup && self.bottom_pane.no_modal_or_popup_active() {
self.on_bottom_pane_view_closed();
}
match input_result {
InputResult::Submitted {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
if self.is_session_configured() {
// Submitted is only emitted when steer is enabled (Enter sends immediately).
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
self.set_status_header(String::from("Working"));
self.submit_user_message(user_message);
} else {
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
if self.is_session_configured() {
// Submitted is only emitted when steer is enabled (Enter sends immediately).
// Reset any reasoning header only when we are actually submitting a turn.
self.reasoning_buffer.clear();
self.full_reasoning_buffer.clear();
self.set_status_header(String::from("Working"));
self.submit_user_message(user_message);
} else {
self.queue_user_message(user_message);
}
}
InputResult::Queued {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
}
InputResult::Queued {
text,
text_elements,
} => {
let user_message = UserMessage {
text,
local_images: self
.bottom_pane
.take_recent_submission_images_with_placeholders(),
text_elements,
};
self.queue_user_message(user_message);
}
InputResult::Command(cmd) => {
self.dispatch_command(cmd);
}
InputResult::CommandWithArgs(cmd, args) => {
self.dispatch_command_with_args(cmd, args);
}
InputResult::None => {}
},
}
}
}
@@ -3361,6 +3369,20 @@ impl ChatWidget {
self.maybe_show_pending_personality_nudge();
}
/// React to a bottom-pane modal/popup closing.
fn on_bottom_pane_view_closed(&mut self) {
    match self.personality_nudge {
        // A nudge was waiting for the pane to clear; try to show it now.
        PersonalityNudgeState::Pending => self.maybe_show_pending_personality_nudge(),
        // The nudge was shown but dismissed (e.g. via Esc) without picking a
        // personality or persisting the hide flag; reset to Idle so it can
        // re-trigger on future model switches.
        PersonalityNudgeState::Shown
            if !self.personality_nudge_hidden() && self.config.model_personality.is_none() =>
        {
            self.personality_nudge = PersonalityNudgeState::Idle;
        }
        _ => {}
    }
}
fn maybe_show_pending_rate_limit_prompt(&mut self) {
if self.rate_limit_switch_prompt_hidden() {
self.rate_limit_switch_prompt = RateLimitSwitchPromptState::Idle;
@@ -3487,19 +3509,22 @@ impl ChatWidget {
self.personality_nudge = PersonalityNudgeState::Idle;
return;
}
if !self.bottom_pane.no_modal_or_popup_active() {
return;
}
self.open_personality_nudge();
self.personality_nudge = PersonalityNudgeState::Shown;
self.app_event_tx
.send(AppEvent::UpdatePersonalityNudgeHidden(true));
self.app_event_tx
.send(AppEvent::PersistPersonalityNudgeHidden);
}
fn open_personality_nudge(&mut self) {
let choose_actions: Vec<SelectionAction> = vec![Box::new(|tx| {
tx.send(AppEvent::OpenPersonalityPopup);
})];
let not_now_actions: Vec<SelectionAction> = vec![Box::new(|tx| {
tx.send(AppEvent::UpdatePersonalityNudgeHidden(true));
tx.send(AppEvent::PersistPersonalityNudgeHidden);
})];
let items = vec![
SelectionItem {
name: "Choose a personality".to_string(),
@@ -3511,6 +3536,7 @@ impl ChatWidget {
SelectionItem {
name: "Not now".to_string(),
description: Some("You can run /personality any time.".to_string()),
actions: not_now_actions,
dismiss_on_select: true,
..Default::default()
},
@@ -4892,6 +4918,7 @@ impl ChatWidget {
self.refresh_model_display();
self.sync_personality_command_enabled();
self.schedule_personality_nudge_if_needed();
self.maybe_show_pending_personality_nudge();
}
pub(crate) fn current_model(&self) -> &str {
@@ -4910,17 +4937,8 @@ impl ChatWidget {
}
fn current_model_supports_personality(&self) -> bool {
let model = self.current_model();
self.models_manager
.try_list_models(&self.config)
.ok()
.and_then(|models| {
models
.into_iter()
.find(|preset| preset.model == model)
.map(|preset| preset.supports_personality)
})
.unwrap_or(false)
.supports_personality(self.current_model(), &self.config)
}
#[allow(dead_code)] // Used in tests

View File

@@ -1167,17 +1167,15 @@ async fn personality_nudge_shows_once_and_hides_after_seen() {
let (mut chat, mut rx, _) = make_chatwidget_manual(Some("bengalfox")).await;
chat.handle_codex_event(session_configured_event_for("bengalfox"));
assert!(matches!(
chat.personality_nudge,
PersonalityNudgeState::Pending
));
chat.maybe_show_post_turn_nudges();
assert!(matches!(
chat.personality_nudge,
PersonalityNudgeState::Shown
));
// Select "Not now" to persist the hide flag.
chat.handle_key_event(KeyEvent::from(KeyCode::Down));
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
let mut saw_update = false;
let mut saw_persist = false;
while let Ok(event) = rx.try_recv() {
@@ -1199,6 +1197,75 @@ async fn personality_nudge_shows_once_and_hides_after_seen() {
));
}
/// Esc dismissal must leave the nudge eligible to show again and must not
/// emit or persist the hide flag.
#[tokio::test]
async fn personality_nudge_escape_does_not_persist_hidden() {
    let (mut widget, mut rx, _) = make_chatwidget_manual(Some("bengalfox")).await;
    widget.handle_codex_event(session_configured_event_for("bengalfox"));
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Shown
    ));

    widget.handle_key_event(KeyEvent::from(KeyCode::Esc));

    // Drain the event queue and record whether any hide/persist events fired.
    let mut hidden_update_seen = false;
    let mut persist_seen = false;
    while let Ok(event) = rx.try_recv() {
        if matches!(&event, AppEvent::UpdatePersonalityNudgeHidden(true)) {
            hidden_update_seen = true;
        } else if matches!(&event, AppEvent::PersistPersonalityNudgeHidden) {
            persist_seen = true;
        }
    }
    assert!(
        !hidden_update_seen,
        "Esc should not hide the personality nudge"
    );
    assert!(
        !persist_seen,
        "Esc should not persist the personality nudge hide flag"
    );
    assert_eq!(widget.config.notices.hide_personality_nudge, None);
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Idle
    ));
}
/// The gpt-5.2-codex model should report personality support, enabling the
/// /personality command.
#[tokio::test]
async fn gpt_52_codex_supports_personality_command() {
    let (widget, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.2-codex")).await;
    assert!(widget.current_model_supports_personality());
}
/// After an Esc dismissal, switching to a model without personality support
/// keeps the nudge idle, while switching back to a supporting model
/// re-triggers it.
#[tokio::test]
async fn personality_nudge_shows_on_model_switch_after_escape() {
    let (mut widget, _rx, _op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
    widget.handle_codex_event(session_configured_event_for("bengalfox"));
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Shown
    ));

    // Esc dismisses without persisting the hide flag.
    widget.handle_key_event(KeyEvent::from(KeyCode::Esc));
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Idle
    ));

    // A model without personality support leaves the nudge idle.
    widget.set_model("gpt-5");
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Idle
    ));

    // Switching back to a supporting model shows the nudge again.
    widget.set_model("bengalfox");
    assert!(matches!(
        widget.personality_nudge,
        PersonalityNudgeState::Shown
    ));
}
#[tokio::test]
async fn rate_limit_switch_prompt_defers_until_task_complete() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@@ -2485,6 +2552,7 @@ async fn collab_mode_enabling_keeps_custom_until_selected() {
async fn user_turn_includes_personality_from_config() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
chat.thread_id = Some(ThreadId::new());
chat.set_personality_nudge_hidden(true);
chat.set_model("bengalfox");
chat.set_personality(Personality::Friendly);
@@ -3058,7 +3126,6 @@ async fn personality_selection_popup_snapshot() {
async fn personality_nudge_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("bengalfox")).await;
chat.handle_codex_event(session_configured_event_for("bengalfox"));
chat.maybe_show_post_turn_nudges();
let popup = render_bottom_popup(&chat, 80);
assert_snapshot!("personality_nudge_popup", popup);