chore: migrate from Config::load_from_base_config_with_overrides to ConfigBuilder (#8276)

https://github.com/openai/codex/pull/8235 introduced `ConfigBuilder`, and
this PR updates all non-test call sites to use it instead of
`Config::load_from_base_config_with_overrides()`.
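
To make the change concrete, here is a minimal sketch of the builder-with-async-`build()` shape that call sites migrate toward. All names below (`MyConfig`, `MyConfigBuilder`, their fields and methods) are hypothetical stand-ins rather than the actual codex-rs API, and the example assumes a `tokio` runtime for the async `main`:

```rust
use std::path::PathBuf;

// Hypothetical stand-ins for illustration only; the real codex-rs Config and
// ConfigBuilder have different fields and methods.
#[derive(Debug, Default)]
struct MyConfig {
    model: Option<String>,
    codex_home: PathBuf,
}

#[derive(Default)]
struct MyConfigBuilder {
    model: Option<String>,
    codex_home: Option<PathBuf>,
}

impl MyConfigBuilder {
    fn model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    fn codex_home(mut self, home: impl Into<PathBuf>) -> Self {
        self.codex_home = Some(home.into());
        self
    }

    // Like ConfigBuilder::build(), build() is async because producing a
    // config may require reading files from disk.
    async fn build(self) -> std::io::Result<MyConfig> {
        Ok(MyConfig {
            model: self.model,
            codex_home: self.codex_home.unwrap_or_else(|| PathBuf::from(".")),
        })
    }
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let config = MyConfigBuilder::default()
        .model("example-model")
        .codex_home("/tmp/example-codex-home")
        .build()
        .await?;
    println!("{config:?}");
    Ok(())
}
```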

This is important because `load_from_base_config_with_overrides()` uses
an empty `ConfigRequirements`, which is a reasonable default for tests
(it keeps them from being influenced by settings on the host) but not for
production code. The method is now guarded by `#[cfg(test)]` so it cannot
be called from business logic.
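
As a refresher on what that guard buys us, here is a small sketch (the type and method names are illustrative, not the real ones): a `#[cfg(test)]` item is only compiled into the crate's own test build, so it simply does not exist for non-test callers.

```rust
// Illustrative sketch of a #[cfg(test)] guard; the type and method names are
// hypothetical, not the real codex-rs ones.
pub struct Config {
    pub model: Option<String>,
}

impl Config {
    // Compiled only for this crate's test build, so business logic that
    // tries to call it will not compile.
    #[cfg(test)]
    pub fn load_for_test() -> Config {
        Config { model: None }
    }
}

#[cfg(test)]
mod tests {
    use super::Config;

    #[test]
    fn test_only_constructor_is_visible_here() {
        let config = Config::load_for_test();
        assert!(config.model.is_none());
    }
}
```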

Because `ConfigBuilder::build()` is `async`, many of the test functions
had to become `async` as well. On the bright side, this made it possible
to eliminate a number of `block_on_future()` calls.
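
The shape of that test migration, sketched with a hypothetical stand-in for `load_default_config_for_test` (and assuming `tokio` and `tempfile` as dev-dependencies), looks roughly like this:

```rust
use std::path::{Path, PathBuf};

// Hypothetical stand-in for the async config loader the tests await; the
// real load_default_config_for_test returns a full Config.
async fn load_default_config_for_test(codex_home: &Path) -> PathBuf {
    codex_home.to_path_buf()
}

// Before: a synchronous #[test] would have wrapped the call, e.g.
// `let cwd = block_on_future(load_default_config_for_test(home.path()));`
// After: the test itself is async and awaits the loader directly.
#[tokio::test]
async fn config_uses_isolated_codex_home() {
    let codex_home = tempfile::tempdir().expect("create temp dir");
    let cwd = load_default_config_for_test(codex_home.path()).await;
    assert!(cwd.is_absolute());
}
```
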
Author: Michael Bolin
Date: 2025-12-18 16:12:52 -08:00
Committed by: GitHub
Parent: 2d9826098e
Commit: 3d4ced3ff5
42 changed files with 1081 additions and 1176 deletions

View File

@@ -254,7 +254,7 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
..built_in_model_providers()["openai"].clone()
};
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
// Also configure user instructions to ensure they are NOT delivered on resume.
config.user_instructions = Some("be nice".to_string());
@@ -343,7 +343,7 @@ async fn includes_conversation_id_and_model_headers_in_request() {
// Init session
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@@ -403,7 +403,7 @@ async fn includes_base_instructions_override_in_request() {
..built_in_model_providers()["openai"].clone()
};
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.base_instructions = Some("test instructions".to_string());
config.model_provider = model_provider;
@@ -467,7 +467,7 @@ async fn chatgpt_auth_sends_correct_request() {
// Init session
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
create_dummy_codex_auth(),
@@ -559,7 +559,7 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
Some("acc-123"),
);
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let auth_manager =
@@ -602,7 +602,7 @@ async fn includes_user_instructions_message_in_request() {
};
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.user_instructions = Some("be nice".to_string());
@@ -671,7 +671,7 @@ async fn skills_append_to_instructions() {
)
.expect("write skill");
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.cwd = codex_home.path().to_path_buf();
config.features.enable(Feature::Skills);
@@ -1029,7 +1029,7 @@ async fn includes_developer_instructions_message_in_request() {
};
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.user_instructions = Some("be nice".to_string());
config.developer_instructions = Some("be useful".to_string());
@@ -1119,7 +1119,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
};
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
@@ -1261,7 +1261,7 @@ async fn token_count_includes_rate_limits_snapshot() {
provider.base_url = Some(format!("{}/v1", server.uri()));
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@@ -1616,7 +1616,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
// Init session
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@@ -1698,7 +1698,7 @@ async fn env_var_overrides_loaded_auth() {
// Init session
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@@ -1780,7 +1780,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
// Init session with isolated codex home.
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(

View File

@@ -137,7 +137,7 @@ async fn summarize_context_three_requests_and_instructions() {
// Build config pointing to the mock server and spawn Codex.
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@@ -331,7 +331,7 @@ async fn manual_compact_uses_custom_prompt() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
config.compact_prompt = Some(custom_prompt.to_string());
@@ -411,7 +411,7 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
@@ -1062,7 +1062,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@@ -1285,7 +1285,7 @@ async fn auto_compact_persists_rollout_entries() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@@ -1397,7 +1397,7 @@ async fn manual_compact_retries_after_context_window_error() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@@ -1530,7 +1530,7 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
let codex = ConversationManager::with_models_provider(
@@ -1733,7 +1733,7 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200);
@@ -1844,7 +1844,7 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_context_window = Some(context_window);

View File

@@ -862,7 +862,7 @@ async fn start_test_conversation(
..built_in_model_providers()["openai"].clone()
};
let home = TempDir::new().expect("create temp dir");
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
if let Some(model) = model {

View File

@@ -51,7 +51,7 @@ async fn fork_conversation_twice_drops_to_first_message() {
};
let home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider.clone();
let config_for_fork = config.clone();

View File

@@ -12,7 +12,7 @@ use tempfile::tempdir;
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_api_key_models() -> Result<()> {
let codex_home = tempdir()?;
- let config = load_default_config_for_test(&codex_home);
+ let config = load_default_config_for_test(&codex_home).await;
let manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("sk-test"),
built_in_model_providers()["openai"].clone(),
@@ -28,7 +28,7 @@ async fn list_models_returns_api_key_models() -> Result<()> {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_chatgpt_models() -> Result<()> {
let codex_home = tempdir()?;
- let config = load_default_config_for_test(&codex_home);
+ let config = load_default_config_for_test(&codex_home).await;
let manager = ConversationManager::with_models_provider(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
built_in_model_providers()["openai"].clone(),

View File

@@ -19,7 +19,7 @@ async fn override_turn_context_does_not_persist_when_config_exists() {
.await
.expect("seed config.toml");
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model = Some("gpt-4o".to_string());
let conversation_manager = ConversationManager::with_models_provider(
@@ -62,7 +62,7 @@ async fn override_turn_context_does_not_create_config_file() {
"test setup should start without config"
);
- let config = load_default_config_for_test(&codex_home);
+ let config = load_default_config_for_test(&codex_home).await;
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),

View File

@@ -316,7 +316,7 @@ async fn remote_models_preserve_builtin_presets() -> Result<()> {
.await;
let codex_home = TempDir::new()?;
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@@ -374,7 +374,7 @@ async fn remote_models_hide_picker_only_models() -> Result<()> {
.await;
let codex_home = TempDir::new()?;
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@@ -440,7 +440,7 @@ where
let home = Arc::new(TempDir::new()?);
let cwd = Arc::new(TempDir::new()?);
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.cwd = cwd.path().to_path_buf();
config.features.enable(Feature::RemoteModels);

View File

@@ -42,7 +42,7 @@ fn resume_history(
async fn emits_warning_when_resumed_model_differs() {
// Arrange a config with a current model and a prior rollout recorded under a different model.
let home = TempDir::new().expect("tempdir");
- let mut config = load_default_config_for_test(&home);
+ let mut config = load_default_config_for_test(&home).await;
config.model = Some("current-model".to_string());
// Ensure cwd is absolute (the helper sets it to the temp dir already).
assert!(config.cwd.is_absolute());

View File

@@ -453,7 +453,7 @@ async fn review_input_isolated_from_parent_history() {
// Seed a parent session history via resume file with both user + assistant items.
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
@@ -740,7 +740,7 @@ where
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
- let mut config = load_default_config_for_test(codex_home);
+ let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager = ConversationManager::with_models_provider(
@@ -769,7 +769,7 @@ where
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
- let mut config = load_default_config_for_test(codex_home);
+ let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager = ConversationManager::with_models_provider(

View File

@@ -39,7 +39,7 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
// Load config and pin cwd to the temp dir so ls/cat operate there.
let codex_home = TempDir::new().unwrap();
- let mut config = load_default_config_for_test(&codex_home);
+ let mut config = load_default_config_for_test(&codex_home).await;
config.cwd = cwd.path().to_path_buf();
let conversation_manager = ConversationManager::with_models_provider(
@@ -100,7 +100,7 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
async fn user_shell_cmd_can_be_interrupted() {
// Set up isolated config and conversation.
let codex_home = TempDir::new().unwrap();
- let config = load_default_config_for_test(&codex_home);
+ let config = load_default_config_for_test(&codex_home).await;
let conversation_manager = ConversationManager::with_models_provider(
codex_core::CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),