diff --git a/codex-rs/app-server/src/lib.rs b/codex-rs/app-server/src/lib.rs index d9aaabd1ca..61bcbfe7c8 100644 --- a/codex-rs/app-server/src/lib.rs +++ b/codex-rs/app-server/src/lib.rs @@ -129,7 +129,7 @@ pub async fn run_main( .try_init(); // Task: process incoming messages. - let processor_handle = tokio::spawn({ + let processor_handle = tokio::spawn(async move { let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx); let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone(); let loader_overrides = loader_overrides_for_config_api; @@ -140,8 +140,8 @@ pub async fn run_main( cli_overrides, loader_overrides, feedback.clone(), - ); - async move { + ) + .await; while let Some(msg) = incoming_rx.recv().await { match msg { JSONRPCMessage::Request(r) => processor.process_request(r).await, diff --git a/codex-rs/app-server/src/message_processor.rs b/codex-rs/app-server/src/message_processor.rs index 1f442b9956..6139344379 100644 --- a/codex-rs/app-server/src/message_processor.rs +++ b/codex-rs/app-server/src/message_processor.rs @@ -39,7 +39,7 @@ pub(crate) struct MessageProcessor { impl MessageProcessor { /// Create a new `MessageProcessor`, retaining a handle to the outgoing /// `Sender` so handlers can enqueue messages to be written to stdout. 
- pub(crate) fn new( + pub(crate) async fn new( outgoing: OutgoingMessageSender, codex_linux_sandbox_exe: Option<PathBuf>, config: Arc<Config>, @@ -53,11 +53,9 @@ impl MessageProcessor { false, config.cli_auth_credentials_store_mode, ); - let thread_manager = Arc::new(ThreadManager::new( - config.codex_home.clone(), - auth_manager.clone(), - SessionSource::VSCode, - )); + let thread_manager = Arc::new( + ThreadManager::new((*config).clone(), auth_manager.clone(), SessionSource::VSCode).await, + ); let codex_message_processor = CodexMessageProcessor::new( auth_manager, thread_manager, diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs index ec097b6150..8791d8450b 100644 --- a/codex-rs/core/src/codex.rs +++ b/codex-rs/core/src/codex.rs @@ -3656,10 +3656,9 @@ mod tests { let conversation_id = ThreadId::default(); let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key")); - let models_manager = Arc::new(ModelsManager::new( - config.codex_home.clone(), - auth_manager.clone(), - )); + let models_manager = Arc::new( + ModelsManager::new((*config).clone(), auth_manager.clone()).await, + ); let agent_control = AgentControl::default(); let exec_policy = ExecPolicyManager::default(); let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit); @@ -3751,10 +3750,9 @@ mod tests { let conversation_id = ThreadId::default(); let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key")); - let models_manager = Arc::new(ModelsManager::new( - config.codex_home.clone(), - auth_manager.clone(), - )); + let models_manager = Arc::new( + ModelsManager::new((*config).clone(), auth_manager.clone()).await, + ); let agent_control = AgentControl::default(); let exec_policy = ExecPolicyManager::default(); let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit); diff --git a/codex-rs/core/src/models_manager/manager.rs b/codex-rs/core/src/models_manager/manager.rs index 
e1462f5431..ab47e4aa8b 100644 --- a/codex-rs/core/src/models_manager/manager.rs +++ b/codex-rs/core/src/models_manager/manager.rs @@ -60,18 +60,26 @@ pub struct ModelsManager { impl ModelsManager { /// Construct a manager scoped to the provided `AuthManager`. /// - /// Uses `codex_home` to store cached model metadata and initializes with built-in presets. - pub fn new(codex_home: PathBuf, auth_manager: Arc<AuthManager>) -> Self { - let cache_path = codex_home.join(MODEL_CACHE_FILE); + /// Uses `config.codex_home` to store cached model metadata and initializes with built-in presets. + /// Refreshes available models using the default strategy. + pub async fn new(config: Config, auth_manager: Arc<AuthManager>) -> Self { + let cache_path = config.codex_home.join(MODEL_CACHE_FILE); let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL); - Self { + let manager = Self { local_models: builtin_model_presets(auth_manager.get_auth_mode()), remote_models: RwLock::new(Self::load_remote_models_from_file().unwrap_or_default()), auth_manager, etag: RwLock::new(None), cache_manager, provider: ModelProviderInfo::create_openai_provider(), + }; + if let Err(err) = manager + .refresh_available_models(&config, RefreshStrategy::default()) + .await + { + error!("failed to refresh available models during initialization: {err}"); } + manager } /// List all available models, refreshing according to the specified strategy. 
diff --git a/codex-rs/core/src/thread_manager.rs b/codex-rs/core/src/thread_manager.rs index ab868d46ca..1aab66dc8e 100644 --- a/codex-rs/core/src/thread_manager.rs +++ b/codex-rs/core/src/thread_manager.rs @@ -65,19 +65,18 @@ pub(crate) struct ThreadManagerState { } impl ThreadManager { - pub fn new( - codex_home: PathBuf, + pub async fn new( + config: Config, auth_manager: Arc<AuthManager>, session_source: SessionSource, ) -> Self { Self { state: Arc::new(ThreadManagerState { threads: Arc::new(RwLock::new(HashMap::new())), - models_manager: Arc::new(ModelsManager::new( - codex_home.clone(), - auth_manager.clone(), - )), - skills_manager: Arc::new(SkillsManager::new(codex_home)), + models_manager: Arc::new( + ModelsManager::new(config.clone(), auth_manager.clone()).await, + ), + skills_manager: Arc::new(SkillsManager::new(config.codex_home.clone())), auth_manager, session_source, #[cfg(any(test, feature = "test-support"))] diff --git a/codex-rs/core/tests/suite/client.rs b/codex-rs/core/tests/suite/client.rs index ecb8dcbbf2..28f5174b1a 100644 --- a/codex-rs/core/tests/suite/client.rs +++ b/codex-rs/core/tests/suite/client.rs @@ -574,11 +574,8 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() { Ok(None) => panic!("No CodexAuth found in codex_home"), Err(e) => panic!("Failed to load CodexAuth: {e}"), }; - let thread_manager = ThreadManager::new( - codex_home.path().to_path_buf(), - auth_manager, - SessionSource::Exec, - ); + let thread_manager = ThreadManager::new(config.clone(), auth_manager, SessionSource::Exec) + .await; let NewThread { thread: codex, .. 
} = thread_manager .start_thread(config) .await diff --git a/codex-rs/exec/src/lib.rs b/codex-rs/exec/src/lib.rs index e35bd4525f..bf9118d7ea 100644 --- a/codex-rs/exec/src/lib.rs +++ b/codex-rs/exec/src/lib.rs @@ -304,11 +304,8 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any true, config.cli_auth_credentials_store_mode, ); - let thread_manager = ThreadManager::new( - config.codex_home.clone(), - auth_manager.clone(), - SessionSource::Exec, - ); + let thread_manager = ThreadManager::new(config.clone(), auth_manager.clone(), SessionSource::Exec) + .await; let default_model = thread_manager .get_models_manager() .get_default_model(&config.model, &config, RefreshStrategy::default()) diff --git a/codex-rs/mcp-server/src/lib.rs b/codex-rs/mcp-server/src/lib.rs index dabd7cca0f..176610a2f1 100644 --- a/codex-rs/mcp-server/src/lib.rs +++ b/codex-rs/mcp-server/src/lib.rs @@ -96,14 +96,14 @@ pub async fn run_main( })?; // Task: process incoming messages. - let processor_handle = tokio::spawn({ + let processor_handle = tokio::spawn(async move { let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx); let mut processor = MessageProcessor::new( outgoing_message_sender, codex_linux_sandbox_exe, std::sync::Arc::new(config), - ); - async move { + ) + .await; while let Some(msg) = incoming_rx.recv().await { match msg { JSONRPCMessage::Request(r) => processor.process_request(r).await, diff --git a/codex-rs/mcp-server/src/message_processor.rs b/codex-rs/mcp-server/src/message_processor.rs index dcf5411a09..6cabcf3e1d 100644 --- a/codex-rs/mcp-server/src/message_processor.rs +++ b/codex-rs/mcp-server/src/message_processor.rs @@ -47,7 +47,7 @@ pub(crate) struct MessageProcessor { impl MessageProcessor { /// Create a new `MessageProcessor`, retaining a handle to the outgoing /// `Sender` so handlers can enqueue messages to be written to stdout. 
- pub(crate) fn new( + pub(crate) async fn new( outgoing: OutgoingMessageSender, codex_linux_sandbox_exe: Option<PathBuf>, config: Arc<Config>, @@ -58,11 +58,9 @@ impl MessageProcessor { false, config.cli_auth_credentials_store_mode, ); - let thread_manager = Arc::new(ThreadManager::new( - config.codex_home.clone(), - auth_manager, - SessionSource::Mcp, - )); + let thread_manager = Arc::new( + ThreadManager::new((*config).clone(), auth_manager, SessionSource::Mcp).await, + ); Self { outgoing, initialized: false, diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index d95e89590a..d2853db97a 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -365,11 +365,9 @@ impl App { let app_event_tx = AppEventSender::new(app_event_tx); emit_deprecation_notice(&app_event_tx, ollama_chat_support_notice); - let thread_manager = Arc::new(ThreadManager::new( - config.codex_home.clone(), - auth_manager.clone(), - SessionSource::Cli, - )); + let thread_manager = Arc::new( + ThreadManager::new(config.clone(), auth_manager.clone(), SessionSource::Cli).await, + ); let mut model = thread_manager .get_models_manager() .get_default_model(&config.model, &config, RefreshStrategy::default()) diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs index 4567c56a92..bfafc676c3 100644 --- a/codex-rs/tui/src/chatwidget/tests.rs +++ b/codex-rs/tui/src/chatwidget/tests.rs @@ -394,7 +394,9 @@ async fn make_chatwidget_manual( config: cfg, model: resolved_model.clone(), auth_manager: auth_manager.clone(), - models_manager: Arc::new(ModelsManager::new(codex_home, auth_manager)), + models_manager: Arc::new( + ModelsManager::new(cfg.clone(), auth_manager).await, + ), session_header: SessionHeader::new(resolved_model), initial_user_message: None, token_info: None, @@ -432,13 +434,12 @@ async fn make_chatwidget_manual( (widget, rx, op_rx) } -fn set_chatgpt_auth(chat: &mut ChatWidget) { +async fn set_chatgpt_auth(chat: &mut ChatWidget) { chat.auth_manager = 
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing()); - chat.models_manager = Arc::new(ModelsManager::new( - chat.config.codex_home.clone(), - chat.auth_manager.clone(), - )); + chat.models_manager = Arc::new( + ModelsManager::new(chat.config.clone(), chat.auth_manager.clone()).await, + ); } pub(crate) async fn make_chatwidget_manual_with_sender() -> ( @@ -2220,7 +2221,7 @@ async fn startup_prompts_for_windows_sandbox_when_agent_requested() { async fn model_reasoning_selection_popup_snapshot() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::High); let preset = get_available_model(&chat, "gpt-5.1-codex-max"); @@ -2234,7 +2235,7 @@ async fn model_reasoning_selection_popup_snapshot() { async fn model_reasoning_selection_popup_extra_high_warning_snapshot() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::XHigh); let preset = get_available_model(&chat, "gpt-5.1-codex-max"); @@ -2248,7 +2249,7 @@ async fn model_reasoning_selection_popup_extra_high_warning_snapshot() { async fn reasoning_popup_shows_extra_high_with_space() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; let preset = get_available_model(&chat, "gpt-5.1-codex-max"); chat.open_reasoning_popup(preset); diff --git a/codex-rs/tui2/src/app.rs b/codex-rs/tui2/src/app.rs index b95fd48c7e..ca92f627e9 100644 --- a/codex-rs/tui2/src/app.rs +++ b/codex-rs/tui2/src/app.rs @@ -428,11 +428,9 @@ impl App { let app_event_tx = AppEventSender::new(app_event_tx); emit_deprecation_notice(&app_event_tx, ollama_chat_support_notice); - let 
thread_manager = Arc::new(ThreadManager::new( - config.codex_home.clone(), - auth_manager.clone(), - SessionSource::Cli, - )); + let thread_manager = Arc::new( + ThreadManager::new(config.clone(), auth_manager.clone(), SessionSource::Cli).await, + ); let mut model = thread_manager .get_models_manager() .get_default_model(&config.model, &config, RefreshStrategy::default()) diff --git a/codex-rs/tui2/src/chatwidget/tests.rs b/codex-rs/tui2/src/chatwidget/tests.rs index 32dce6b648..acaf558496 100644 --- a/codex-rs/tui2/src/chatwidget/tests.rs +++ b/codex-rs/tui2/src/chatwidget/tests.rs @@ -383,7 +383,9 @@ async fn make_chatwidget_manual( config: cfg, model: resolved_model.clone(), auth_manager: auth_manager.clone(), - models_manager: Arc::new(ModelsManager::new(codex_home, auth_manager)), + models_manager: Arc::new( + ModelsManager::new(cfg.clone(), auth_manager).await, + ), session_header: SessionHeader::new(resolved_model), initial_user_message: None, token_info: None, @@ -419,13 +421,12 @@ async fn make_chatwidget_manual( (widget, rx, op_rx) } -fn set_chatgpt_auth(chat: &mut ChatWidget) { +async fn set_chatgpt_auth(chat: &mut ChatWidget) { chat.auth_manager = AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing()); - chat.models_manager = Arc::new(ModelsManager::new( - chat.config.codex_home.clone(), - chat.auth_manager.clone(), - )); + chat.models_manager = Arc::new( + ModelsManager::new(chat.config.clone(), chat.auth_manager.clone()).await, + ); } pub(crate) async fn make_chatwidget_manual_with_sender() -> ( @@ -1921,7 +1922,7 @@ async fn startup_prompts_for_windows_sandbox_when_agent_requested() { async fn model_reasoning_selection_popup_snapshot() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::High); let preset = get_available_model(&chat, 
"gpt-5.1-codex-max"); @@ -1935,7 +1936,7 @@ async fn model_reasoning_selection_popup_snapshot() { async fn model_reasoning_selection_popup_extra_high_warning_snapshot() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::XHigh); let preset = get_available_model(&chat, "gpt-5.1-codex-max"); @@ -1949,7 +1950,7 @@ async fn model_reasoning_selection_popup_extra_high_warning_snapshot() { async fn reasoning_popup_shows_extra_high_with_space() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await; - set_chatgpt_auth(&mut chat); + set_chatgpt_auth(&mut chat).await; let preset = get_available_model(&chat, "gpt-5.1-codex-max"); chat.open_reasoning_popup(preset);