Chore: limit find family visibility (#7891)

a little bit more code quality of life
This commit is contained in:
Ahmed Ibrahim
2025-12-11 13:30:56 -08:00
committed by GitHub
parent 0af7e4a195
commit b9fb3b81e5
5 changed files with 80 additions and 46 deletions

View File

@@ -252,13 +252,15 @@ impl Stream for ResponseStream {
#[cfg(test)]
mod tests {
use crate::openai_models::model_family::find_family_for_model;
use codex_api::ResponsesApiRequest;
use codex_api::common::OpenAiVerbosity;
use codex_api::common::TextControls;
use codex_api::create_text_param_for_request;
use pretty_assertions::assert_eq;
use crate::config::test_config;
use crate::openai_models::models_manager::ModelsManager;
use super::*;
struct InstructionsTestCase {
@@ -309,7 +311,9 @@ mod tests {
},
];
for test_case in test_cases {
let model_family = find_family_for_model(test_case.slug);
let config = test_config();
let model_family =
ModelsManager::construct_model_family_offline(test_case.slug, &config);
let expected = if test_case.expects_apply_patch_instructions {
format!(
"{}\n{}",

View File

@@ -50,6 +50,8 @@ use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
#[cfg(test)]
use tempfile::tempdir;
use crate::config::profile::ConfigProfile;
use toml::Value as TomlValue;
@@ -68,6 +70,17 @@ pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB
pub const CONFIG_TOML_FILE: &str = "config.toml";
#[cfg(test)]
/// Build a default `Config` for unit tests, rooted in a freshly created
/// temporary `CODEX_HOME` so tests never read the developer's real config.
///
/// NOTE(review): the `TempDir` guard (`codex_home`) is dropped when this
/// function returns, which removes the directory from disk — presumably the
/// loaded `Config` only stores the path and does not re-read it; confirm
/// before any test starts writing under `codex_home`.
pub(crate) fn test_config() -> Config {
    let codex_home = tempdir().expect("create temp dir");
    // Defaults only: empty ConfigToml, no overrides, anchored at the temp dir.
    Config::load_from_base_config_with_overrides(
        ConfigToml::default(),
        ConfigOverrides::default(),
        codex_home.path().to_path_buf(),
    )
    .expect("load default test config")
}
/// Application configuration loaded from disk and merged with overrides.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {

View File

@@ -157,10 +157,9 @@ macro_rules! model_family {
}};
}
// todo(aibrahim): remove this function
/// Returns a `ModelFamily` for the given model slug, or `None` if the slug
/// does not match any known model family.
pub fn find_family_for_model(slug: &str) -> ModelFamily {
/// Internal offline helper for `ModelsManager` that returns a `ModelFamily` for the given
/// model slug.
pub(in crate::openai_models) fn find_family_for_model(slug: &str) -> ModelFamily {
if slug.starts_with("o3") {
model_family!(
slug, "o3",
@@ -329,6 +328,7 @@ pub fn find_family_for_model(slug: &str) -> ModelFamily {
}
fn derive_default_model_family(model: &str) -> ModelFamily {
tracing::warn!("Unknown model {model} is used. This will degrade the performance of Codex.");
ModelFamily {
slug: model.to_string(),
family: model.to_string(),

View File

@@ -24,7 +24,6 @@ use crate::error::Result as CoreResult;
use crate::features::Feature;
use crate::model_provider_info::ModelProviderInfo;
use crate::openai_models::model_family::ModelFamily;
use crate::openai_models::model_family::find_family_for_model;
use crate::openai_models::model_presets::builtin_model_presets;
const MODEL_CACHE_FILE: &str = "models_cache.json";
@@ -117,9 +116,13 @@ impl ModelsManager {
.map(|models| models.clone())
}
/// Thin private shim over `super::model_family::find_family_for_model`, so the
/// module-restricted lookup stays reachable only through `ModelsManager`.
fn find_family_for_model(slug: &str) -> ModelFamily {
    super::model_family::find_family_for_model(slug)
}
/// Look up the requested model family while applying remote metadata overrides.
///
/// Resolution order: offline family lookup by slug, then user `config`
/// overrides, then any remotely fetched model metadata cached on `self`.
pub async fn construct_model_family(&self, model: &str, config: &Config) -> ModelFamily {
    Self::find_family_for_model(model)
        .with_config_overrides(config)
        .with_remote_overrides(self.remote_models.read().await.clone())
}
@@ -154,7 +157,7 @@ impl ModelsManager {
#[cfg(any(test, feature = "test-support"))]
/// Offline helper that builds a `ModelFamily` without consulting remote state.
///
/// Unlike `construct_model_family`, this applies only `config` overrides and
/// skips remote metadata, so it needs neither `&self` nor an async context.
pub fn construct_model_family_offline(model: &str, config: &Config) -> ModelFamily {
    Self::find_family_for_model(model).with_config_overrides(config)
}
/// Replace the cached remote models and rebuild the derived presets list.

View File

@@ -1121,7 +1121,8 @@ pub(crate) fn build_specs(
#[cfg(test)]
mod tests {
use crate::client_common::tools::FreeformTool;
use crate::openai_models::model_family::find_family_for_model;
use crate::config::test_config;
use crate::openai_models::models_manager::ModelsManager;
use crate::tools::registry::ConfiguredToolSpec;
use mcp_types::ToolInputSchema;
use pretty_assertions::assert_eq;
@@ -1216,7 +1217,8 @@ mod tests {
#[test]
fn test_full_toolset_specs_for_gpt5_codex_unified_exec_web_search() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
@@ -1274,13 +1276,14 @@ mod tests {
}
}
fn assert_model_tools(model_family: &str, features: &Features, expected_tools: &[&str]) {
let model_family = find_family_for_model(model_family);
let config = ToolsConfig::new(&ToolsConfigParams {
fn assert_model_tools(model_slug: &str, features: &Features, expected_tools: &[&str]) {
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline(model_slug, &config);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features,
});
let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
let (tools, _) = build_specs(&tools_config, Some(HashMap::new())).build();
let tool_names = tools.iter().map(|t| t.spec.name()).collect::<Vec<_>>();
assert_eq!(&tool_names, &expected_tools,);
}
@@ -1467,19 +1470,20 @@ mod tests {
#[test]
fn test_build_specs_default_shell_present() {
let model_family = find_family_for_model("o3");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::WebSearchRequest);
features.enable(Feature::UnifiedExec);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
let (tools, _) = build_specs(&tools_config, Some(HashMap::new())).build();
// Only check the shell variant and a couple of core tools.
let mut subset = vec!["exec_command", "write_stdin", "update_plan"];
if let Some(shell_tool) = shell_tool_name(&config) {
if let Some(shell_tool) = shell_tool_name(&tools_config) {
subset.push(shell_tool);
}
assert_contains_tool_names(&tools, &subset);
@@ -1488,15 +1492,16 @@ mod tests {
#[test]
#[ignore]
fn test_parallel_support_flags() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.disable(Feature::ViewImageTool);
features.enable(Feature::UnifiedExec);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(&config, None).build();
let (tools, _) = build_specs(&tools_config, None).build();
assert!(!find_tool(&tools, "exec_command").supports_parallel_tool_calls);
assert!(!find_tool(&tools, "write_stdin").supports_parallel_tool_calls);
@@ -1507,14 +1512,16 @@ mod tests {
#[test]
fn test_test_model_family_includes_sync_tool() {
let model_family = find_family_for_model("test-gpt-5-codex");
let config = test_config();
let model_family =
ModelsManager::construct_model_family_offline("test-gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.disable(Feature::ViewImageTool);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(&config, None).build();
let (tools, _) = build_specs(&tools_config, None).build();
assert!(
tools
@@ -1536,16 +1543,17 @@ mod tests {
#[test]
fn test_build_specs_mcp_tools_converted() {
let model_family = find_family_for_model("o3");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"test_server/do_something_cool".to_string(),
mcp_types::Tool {
@@ -1630,10 +1638,11 @@ mod tests {
#[test]
fn test_build_specs_mcp_tools_sorted_by_name() {
let model_family = find_family_for_model("o3");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
@@ -1687,7 +1696,7 @@ mod tests {
),
]);
let (tools, _) = build_specs(&config, Some(tools_map)).build();
let (tools, _) = build_specs(&tools_config, Some(tools_map)).build();
// Only assert that the MCP tools themselves are sorted by fully-qualified name.
let mcp_names: Vec<_> = tools
@@ -1705,17 +1714,18 @@ mod tests {
#[test]
fn test_mcp_tool_property_missing_type_defaults_to_string() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"dash/search".to_string(),
mcp_types::Tool {
@@ -1761,17 +1771,18 @@ mod tests {
#[test]
fn test_mcp_tool_integer_normalized_to_number() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"dash/paginate".to_string(),
mcp_types::Tool {
@@ -1813,18 +1824,19 @@ mod tests {
#[test]
fn test_mcp_tool_array_without_items_gets_default_string_items() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
features.enable(Feature::ApplyPatchFreeform);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"dash/tags".to_string(),
mcp_types::Tool {
@@ -1869,17 +1881,18 @@ mod tests {
#[test]
fn test_mcp_tool_anyof_defaults_to_string() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"dash/value".to_string(),
mcp_types::Tool {
@@ -1980,16 +1993,17 @@ Examples of valid command strings:
#[test]
fn test_get_openai_tools_mcp_tools_with_additional_properties_schema() {
let model_family = find_family_for_model("gpt-5-codex");
let config = test_config();
let model_family = ModelsManager::construct_model_family_offline("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::WebSearchRequest);
let config = ToolsConfig::new(&ToolsConfigParams {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
features: &features,
});
let (tools, _) = build_specs(
&config,
&tools_config,
Some(HashMap::from([(
"test_server/do_something_cool".to_string(),
mcp_types::Tool {