Wire with_remote_overrides to construct model families (#7621)

- This PR wires `with_remote_overrides` and makes the
`construct_model_families` function async
- Moves getting model family a level above to keep the function `sync`
- Updates the tests to use a local, offline, `sync` helper for model
families
This commit is contained in:
Ahmed Ibrahim
2025-12-05 10:40:15 -08:00
committed by GitHub
parent 5f80ad6da8
commit d08efb1743
16 changed files with 147 additions and 108 deletions

View File

@@ -302,7 +302,10 @@ impl App {
};
let enhanced_keys_supported = tui.enhanced_keys_supported();
let model_family = conversation_manager
.get_models_manager()
.construct_model_family(&config.model, &config)
.await;
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
let init = crate::chatwidget::ChatWidgetInit {
@@ -317,6 +320,7 @@ impl App {
feedback: feedback.clone(),
skills: skills.clone(),
is_first_run,
model_family,
};
ChatWidget::new(init, conversation_manager.clone())
}
@@ -343,6 +347,7 @@ impl App {
feedback: feedback.clone(),
skills: skills.clone(),
is_first_run,
model_family,
};
ChatWidget::new_from_existing(
init,
@@ -481,6 +486,11 @@ impl App {
}
async fn handle_event(&mut self, tui: &mut tui::Tui, event: AppEvent) -> Result<bool> {
let model_family = self
.server
.get_models_manager()
.construct_model_family(&self.config.model, &self.config)
.await;
match event {
AppEvent::NewSession => {
let summary = session_summary(
@@ -500,6 +510,7 @@ impl App {
feedback: self.feedback.clone(),
skills: self.skills.clone(),
is_first_run: false,
model_family,
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
if let Some(summary) = summary {
@@ -549,6 +560,7 @@ impl App {
feedback: self.feedback.clone(),
skills: self.skills.clone(),
is_first_run: false,
model_family: model_family.clone(),
};
self.chat_widget = ChatWidget::new_from_existing(
init,
@@ -677,7 +689,12 @@ impl App {
self.on_update_reasoning_effort(effort);
}
AppEvent::UpdateModel(model) => {
self.chat_widget.set_model(&model);
let model_family = self
.server
.get_models_manager()
.construct_model_family(&model, &self.config)
.await;
self.chat_widget.set_model(&model, model_family);
self.config.model = model;
}
AppEvent::OpenReasoningPopup { model } => {

View File

@@ -340,6 +340,7 @@ impl App {
let session_configured = new_conv.session_configured;
let init = crate::chatwidget::ChatWidgetInit {
config: cfg,
model_family: self.chat_widget.get_model_family(),
frame_requester: tui.frame_requester(),
app_event_tx: self.app_event_tx.clone(),
initial_prompt: None,

View File

@@ -11,6 +11,7 @@ use codex_core::config::Config;
use codex_core::config::types::Notifications;
use codex_core::git_info::current_branch_name;
use codex_core::git_info::local_git_branches;
use codex_core::openai_models::model_family::ModelFamily;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use codex_core::protocol::AgentMessageDeltaEvent;
@@ -261,6 +262,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) feedback: codex_feedback::CodexFeedback,
pub(crate) skills: Option<Vec<SkillMetadata>>,
pub(crate) is_first_run: bool,
pub(crate) model_family: ModelFamily,
}
#[derive(Default)]
@@ -277,6 +279,7 @@ pub(crate) struct ChatWidget {
bottom_pane: BottomPane,
active_cell: Option<Box<dyn HistoryCell>>,
config: Config,
model_family: ModelFamily,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
@@ -465,15 +468,13 @@ impl ChatWidget {
}
fn on_agent_reasoning_final(&mut self) {
let reasoning_summary_format = self.get_model_family().reasoning_summary_format;
// At the end of a reasoning block, record transcript-only content.
self.full_reasoning_buffer.push_str(&self.reasoning_buffer);
let model_family = self
.models_manager
.construct_model_family(&self.config.model, &self.config);
if !self.full_reasoning_buffer.is_empty() {
let cell = history_cell::new_reasoning_summary_block(
self.full_reasoning_buffer.clone(),
&model_family,
reasoning_summary_format,
);
self.add_boxed_history(cell);
}
@@ -647,6 +648,9 @@ impl ChatWidget {
self.stream_controller = None;
self.maybe_show_pending_rate_limit_prompt();
}
/// Returns a clone of the model family cached on this widget.
///
/// The family is resolved once (async, possibly with remote overrides) by the
/// caller that constructs the widget and stored in `self.model_family`, so this
/// accessor stays synchronous and cheap for render/update paths.
pub(crate) fn get_model_family(&self) -> ModelFamily {
self.model_family.clone()
}
fn on_error(&mut self, message: String) {
self.finalize_turn();
@@ -1249,6 +1253,7 @@ impl ChatWidget {
feedback,
skills,
is_first_run,
model_family,
} = common;
let mut rng = rand::rng();
let placeholder = EXAMPLE_PROMPTS[rng.random_range(0..EXAMPLE_PROMPTS.len())].to_string();
@@ -1270,6 +1275,7 @@ impl ChatWidget {
}),
active_cell: None,
config: config.clone(),
model_family,
auth_manager,
models_manager,
session_header: SessionHeader::new(config.model),
@@ -1329,6 +1335,7 @@ impl ChatWidget {
models_manager,
feedback,
skills,
model_family,
..
} = common;
let mut rng = rand::rng();
@@ -1353,6 +1360,7 @@ impl ChatWidget {
}),
active_cell: None,
config: config.clone(),
model_family,
auth_manager,
models_manager,
session_header: SessionHeader::new(config.model),
@@ -1785,7 +1793,7 @@ impl ChatWidget {
EventMsg::AgentReasoning(AgentReasoningEvent { .. }) => self.on_agent_reasoning_final(),
EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent { text }) => {
self.on_agent_reasoning_delta(text);
self.on_agent_reasoning_final()
self.on_agent_reasoning_final();
}
EventMsg::AgentReasoningSectionBreak(_) => self.on_reasoning_section_break(),
EventMsg::TaskStarted(_) => self.on_task_started(),
@@ -2843,9 +2851,10 @@ impl ChatWidget {
}
/// Set the model in the widget's config copy.
pub(crate) fn set_model(&mut self, model: &str) {
pub(crate) fn set_model(&mut self, model: &str, model_family: ModelFamily) {
self.session_header.set_model(model);
self.config.model = model.to_string();
self.model_family = model_family;
}
pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {

View File

@@ -10,6 +10,7 @@ use codex_core::CodexAuth;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -345,6 +346,7 @@ async fn helpers_are_available_and_do_not_panic() {
let (tx_raw, _rx) = unbounded_channel::<AppEvent>();
let tx = AppEventSender::new(tx_raw);
let cfg = test_config();
let model_family = ModelsManager::construct_model_family_offline(&cfg.model, &cfg);
let conversation_manager = Arc::new(ConversationManager::with_auth(CodexAuth::from_api_key(
"test",
)));
@@ -361,6 +363,7 @@ async fn helpers_are_available_and_do_not_panic() {
feedback: codex_feedback::CodexFeedback::new(),
skills: None,
is_first_run: true,
model_family,
};
let mut w = ChatWidget::new(init, conversation_manager);
// Basic construction sanity.
@@ -394,6 +397,7 @@ fn make_chatwidget_manual() -> (
bottom_pane: bottom,
active_cell: None,
config: cfg.clone(),
model_family: ModelsManager::construct_model_family_offline(&cfg.model, &cfg),
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(auth_manager)),
session_header: SessionHeader::new(cfg.model),

View File

@@ -27,7 +27,6 @@ use codex_common::format_env_display::format_env_display;
use codex_core::config::Config;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::config::types::ReasoningSummaryFormat;
use codex_core::openai_models::model_family::ModelFamily;
use codex_core::protocol::FileChange;
use codex_core::protocol::McpAuthStatus;
use codex_core::protocol::McpInvocation;
@@ -1421,9 +1420,9 @@ pub(crate) fn new_view_image_tool_call(path: PathBuf, cwd: &Path) -> PlainHistor
pub(crate) fn new_reasoning_summary_block(
full_reasoning_buffer: String,
model_family: &ModelFamily,
reasoning_summary_format: ReasoningSummaryFormat,
) -> Box<dyn HistoryCell> {
if model_family.reasoning_summary_format == ReasoningSummaryFormat::Experimental {
if reasoning_summary_format == ReasoningSummaryFormat::Experimental {
// Experimental format is following:
// ** header **
//
@@ -1513,8 +1512,6 @@ mod tests {
use crate::exec_cell::CommandOutput;
use crate::exec_cell::ExecCall;
use crate::exec_cell::ExecCell;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
@@ -1527,7 +1524,6 @@ mod tests {
use pretty_assertions::assert_eq;
use serde_json::json;
use std::collections::HashMap;
use std::sync::Arc;
use codex_core::protocol::ExecCommandSource;
use mcp_types::CallToolResult;
@@ -2326,13 +2322,12 @@ mod tests {
#[test]
fn reasoning_summary_block() {
let config = test_config();
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let reasoning_format =
ModelsManager::construct_model_family_offline(&config.model, &config)
.reasoning_summary_format;
let cell = new_reasoning_summary_block(
"**High level reasoning**\n\nDetailed reasoning goes here.".to_string(),
&model_family,
reasoning_format,
);
let rendered_display = render_lines(&cell.display_lines(80));
@@ -2345,12 +2340,13 @@ mod tests {
#[test]
fn reasoning_summary_block_returns_reasoning_cell_when_feature_disabled() {
let config = test_config();
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let cell =
new_reasoning_summary_block("Detailed reasoning goes here.".to_string(), &model_family);
let reasoning_format =
ModelsManager::construct_model_family_offline(&config.model, &config)
.reasoning_summary_format;
let cell = new_reasoning_summary_block(
"Detailed reasoning goes here.".to_string(),
reasoning_format,
);
let rendered = render_transcript(cell.as_ref());
assert_eq!(rendered, vec!["• Detailed reasoning goes here."]);
@@ -2362,11 +2358,7 @@ mod tests {
config.model = "gpt-3.5-turbo".to_string();
config.model_supports_reasoning_summaries = Some(true);
config.model_reasoning_summary_format = Some(ReasoningSummaryFormat::Experimental);
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let model_family = ModelsManager::construct_model_family_offline(&config.model, &config);
assert_eq!(
model_family.reasoning_summary_format,
ReasoningSummaryFormat::Experimental
@@ -2374,7 +2366,7 @@ mod tests {
let cell = new_reasoning_summary_block(
"**High level reasoning**\n\nDetailed reasoning goes here.".to_string(),
&model_family,
model_family.reasoning_summary_format,
);
let rendered_display = render_lines(&cell.display_lines(80));
@@ -2384,13 +2376,12 @@ mod tests {
#[test]
fn reasoning_summary_block_falls_back_when_header_is_missing() {
let config = test_config();
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let reasoning_format =
ModelsManager::construct_model_family_offline(&config.model, &config)
.reasoning_summary_format;
let cell = new_reasoning_summary_block(
"**High level reasoning without closing".to_string(),
&model_family,
reasoning_format,
);
let rendered = render_transcript(cell.as_ref());
@@ -2400,13 +2391,12 @@ mod tests {
#[test]
fn reasoning_summary_block_falls_back_when_summary_is_missing() {
let config = test_config();
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let reasoning_format =
ModelsManager::construct_model_family_offline(&config.model, &config)
.reasoning_summary_format;
let cell = new_reasoning_summary_block(
"**High level reasoning without closing**".to_string(),
&model_family,
reasoning_format.clone(),
);
let rendered = render_transcript(cell.as_ref());
@@ -2414,7 +2404,7 @@ mod tests {
let cell = new_reasoning_summary_block(
"**High level reasoning without closing**\n\n ".to_string(),
&model_family,
reasoning_format,
);
let rendered = render_transcript(cell.as_ref());
@@ -2424,13 +2414,12 @@ mod tests {
#[test]
fn reasoning_summary_block_splits_header_and_summary_when_present() {
let config = test_config();
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let models_manager = Arc::new(ModelsManager::new(auth_manager));
let model_family = models_manager.construct_model_family(&config.model, &config);
let reasoning_format =
ModelsManager::construct_model_family_offline(&config.model, &config)
.reasoning_summary_format;
let cell = new_reasoning_summary_block(
"**High level plan**\n\nWe should fix the bug next.".to_string(),
&model_family,
reasoning_format,
);
let rendered_display = render_lines(&cell.display_lines(80));