Mirror of https://github.com/openai/codex.git, synced 2026-02-01 22:47:52 +00:00
@@ -57,6 +57,7 @@ pub struct TestCodexBuilder {
     config_mutators: Vec<Box<ConfigMutator>>,
     auth: CodexAuth,
     pre_build_hooks: Vec<Box<PreBuildHook>>,
+    home: Option<Arc<TempDir>>,
 }
 
 impl TestCodexBuilder {
@@ -88,8 +89,16 @@ impl TestCodexBuilder {
         self
     }
 
+    pub fn with_home(mut self, home: Arc<TempDir>) -> Self {
+        self.home = Some(home);
+        self
+    }
+
     pub async fn build(&mut self, server: &wiremock::MockServer) -> anyhow::Result<TestCodex> {
-        let home = Arc::new(TempDir::new()?);
+        let home = match self.home.clone() {
+            Some(home) => home,
+            None => Arc::new(TempDir::new()?),
+        };
         self.build_with_home(server, home, None).await
     }
 
@@ -98,7 +107,10 @@ impl TestCodexBuilder {
         server: &StreamingSseServer,
     ) -> anyhow::Result<TestCodex> {
         let base_url = server.uri();
-        let home = Arc::new(TempDir::new()?);
+        let home = match self.home.clone() {
+            Some(home) => home,
+            None => Arc::new(TempDir::new()?),
+        };
         self.build_with_home_and_base_url(format!("{base_url}/v1"), home, None)
             .await
     }
@@ -108,7 +120,10 @@ impl TestCodexBuilder {
         server: &WebSocketTestServer,
     ) -> anyhow::Result<TestCodex> {
         let base_url = format!("{}/v1", server.uri());
-        let home = Arc::new(TempDir::new()?);
+        let home = match self.home.clone() {
+            Some(home) => home,
+            None => Arc::new(TempDir::new()?),
+        };
         let base_url_clone = base_url.clone();
         self.config_mutators.push(Box::new(move |config| {
             config.model_provider.base_url = Some(base_url_clone);
@@ -432,5 +447,6 @@ pub fn test_codex() -> TestCodexBuilder {
         config_mutators: vec![],
         auth: CodexAuth::from_api_key("dummy"),
        pre_build_hooks: vec![],
+        home: None,
     }
 }

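Not part of the patch: a minimal sketch of how a test is expected to drive the extended builder, using only pieces visible in this diff (test_codex, with_home, with_auth, with_config, build, and the TestCodex fields codex and session_configured). The helper name builder_usage_sketch and the std::sync::Arc / tempfile::TempDir paths are assumptions.

// Sketch only: exercises the new `with_home` hook next to the existing
// `with_auth` / `with_config` hooks; helper names follow the tests below.
async fn builder_usage_sketch() -> anyhow::Result<()> {
    let server = wiremock::MockServer::start().await;
    mount_sse_once(&server, sse_completed("resp1")).await;

    // Keep a handle on the CODEX_HOME temp dir so the test can inspect it after build().
    let codex_home = std::sync::Arc::new(tempfile::TempDir::new()?);
    let mut builder = test_codex()
        .with_home(codex_home.clone())
        .with_auth(CodexAuth::from_api_key("Test API Key"))
        .with_config(|config| {
            config.user_instructions = Some("be nice".to_string());
        });
    let test = builder.build(&server).await?;

    // The returned handle exposes the spawned thread plus the SessionConfigured payload.
    let _codex = test.codex.clone();
    let _session_id = test.session_configured.session_id;
    let _config_toml = codex_home.path().join("config.toml");
    Ok(())
}
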
@@ -257,31 +257,19 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    // Configure Codex to resume from our file
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-    // Also configure user instructions to ensure they are NOT delivered on resume.
-    config.user_instructions = Some("be nice".to_string());
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let auth_manager =
-        codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
-    let NewThread {
-        thread: codex,
-        session_configured,
-        ..
-    } = thread_manager
-        .resume_thread_from_rollout(config, session_path.clone(), auth_manager)
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let mut builder = test_codex()
+        .with_home(codex_home.clone())
+        .with_config(|config| {
+            // Ensure user instructions are NOT delivered on resume.
+            config.user_instructions = Some("be nice".to_string());
+        });
+    let test = builder
+        .resume(&server, codex_home, session_path.clone())
         .await
         .expect("resume conversation");
+    let codex = test.codex.clone();
+    let session_configured = test.session_configured;
 
     // 1) Assert initial_messages only includes existing EventMsg entries; response items are not converted
     let initial_msgs = session_configured
@@ -367,30 +355,13 @@ async fn includes_conversation_id_and_model_headers_in_request() {
 
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    // Init session
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let NewThread {
-        thread: codex,
-        thread_id: session_id,
-        session_configured: _,
-        ..
-    } = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex().with_auth(CodexAuth::from_api_key("Test API Key"));
+    let test = builder
+        .build(&server)
         .await
         .expect("create new conversation");
+    let codex = test.codex.clone();
+    let session_id = test.session_configured.session_id;
 
     codex
         .submit(Op::UserInput {
@@ -425,26 +396,16 @@ async fn includes_base_instructions_override_in_request() {
     let server = MockServer::start().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-
-    config.base_instructions = Some("test instructions".to_string());
-    config.model_provider = model_provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(CodexAuth::from_api_key("Test API Key"))
+        .with_config(|config| {
+            config.base_instructions = Some("test instructions".to_string());
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -479,29 +440,19 @@ async fn chatgpt_auth_sends_correct_request() {
 
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/api/codex", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    // Init session
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        create_dummy_codex_auth(),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let NewThread {
-        thread: codex,
-        thread_id,
-        session_configured: _,
-        ..
-    } = thread_manager
-        .start_thread(config)
+    let mut model_provider = built_in_model_providers()["openai"].clone();
+    model_provider.base_url = Some(format!("{}/api/codex", server.uri()));
+    let mut builder = test_codex()
+        .with_auth(create_dummy_codex_auth())
+        .with_config(move |config| {
+            config.model_provider = model_provider;
+        });
+    let test = builder
+        .build(&server)
         .await
         .expect("create new conversation");
+    let codex = test.codex.clone();
+    let thread_id = test.session_configured.session_id;
 
     codex
         .submit(Op::UserInput {
@@ -617,26 +568,16 @@ async fn includes_user_instructions_message_in_request() {
 
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-    config.user_instructions = Some("be nice".to_string());
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(CodexAuth::from_api_key("Test API Key"))
+        .with_config(|config| {
+            config.user_instructions = Some("be nice".to_string());
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -689,12 +630,7 @@ async fn skills_append_to_instructions() {
 
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    let codex_home = TempDir::new().unwrap();
+    let codex_home = Arc::new(TempDir::new().unwrap());
     let skill_dir = codex_home.path().join("skills/demo");
     std::fs::create_dir_all(&skill_dir).expect("create skill dir");
     std::fs::write(
@@ -703,20 +639,18 @@
     )
     .expect("write skill");
 
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-    config.cwd = codex_home.path().to_path_buf();
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let codex_home_path = codex_home.path().to_path_buf();
+    let mut builder = test_codex()
+        .with_home(codex_home.clone())
+        .with_auth(CodexAuth::from_api_key("Test API Key"))
+        .with_config(move |config| {
+            config.cwd = codex_home_path;
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -1131,28 +1065,17 @@ async fn includes_developer_instructions_message_in_request() {
     let server = MockServer::start().await;
 
     let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
 
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-    config.user_instructions = Some("be nice".to_string());
-    config.developer_instructions = Some("be useful".to_string());
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(CodexAuth::from_api_key("Test API Key"))
+        .with_config(|config| {
+            config.user_instructions = Some("be nice".to_string());
+            config.developer_instructions = Some("be useful".to_string());
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -1390,20 +1313,16 @@ async fn token_count_includes_rate_limits_snapshot() {
     let mut provider = built_in_model_providers()["openai"].clone();
     provider.base_url = Some(format!("{}/v1", server.uri()));
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("test"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(CodexAuth::from_api_key("test"))
+        .with_config(move |config| {
+            config.model_provider = provider;
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -1753,20 +1672,16 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
     };
 
-    // Init session
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        create_dummy_codex_auth(),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(create_dummy_codex_auth())
+        .with_config(move |config| {
+            config.model_provider = provider;
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -1837,20 +1752,16 @@ async fn env_var_overrides_loaded_auth() {
     };
 
-    // Init session
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        create_dummy_codex_auth(),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex()
+        .with_auth(create_dummy_codex_auth())
+        .with_config(move |config| {
+            config.model_provider = provider;
+        });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create new conversation")
-        .thread;
+        .codex;
 
     codex
         .submit(Op::UserInput {
@@ -1905,26 +1816,12 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
 
     let request_log = mount_sse_sequence(&server, vec![sse1.clone(), sse1.clone(), sse1]).await;
 
-    // Configure provider to point to mock server (Responses API) and use API key auth.
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    // Init session with isolated codex home.
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = model_provider;
-
-    let thread_manager = ThreadManager::with_models_provider_and_home(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-        config.codex_home.clone(),
-    );
-    let NewThread { thread: codex, .. } = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex().with_auth(CodexAuth::from_api_key("Test API Key"));
+    let codex = builder
+        .build(&server)
         .await
-        .expect("create new conversation");
+        .expect("create new conversation")
+        .codex;
 
     // Turn 1: user sends U1; wait for completion.
     codex

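Also not part of the patch: a sketch of the resume flow the rewritten resume test above relies on, assuming the resume(&MockServer, Arc<TempDir>, PathBuf) shape implied by the call sites in this diff; session_path stands in for a rollout file recorded by an earlier session.

// Sketch only: resuming a recorded rollout through the builder, mirroring the
// call shape in `resume_includes_initial_messages_and_sends_prior_items`.
async fn resume_usage_sketch(session_path: std::path::PathBuf) -> anyhow::Result<()> {
    let server = wiremock::MockServer::start().await;
    mount_sse_once(&server, sse_completed("resp1")).await;

    // Share one CODEX_HOME between the builder and the resume call.
    let codex_home = std::sync::Arc::new(tempfile::TempDir::new()?);
    let mut builder = test_codex()
        .with_home(codex_home.clone())
        .with_config(|config| {
            // User instructions should not be re-delivered when resuming.
            config.user_instructions = Some("be nice".to_string());
        });
    let test = builder.resume(&server, codex_home, session_path).await?;

    // The resumed thread and its SessionConfigured event are available as usual.
    let _codex = test.codex.clone();
    let _session_configured = test.session_configured;
    Ok(())
}
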
@@ -1,8 +1,6 @@
 #![allow(clippy::expect_used)]
 use codex_core::CodexAuth;
 use codex_core::ModelProviderInfo;
-use codex_core::NewThread;
-use codex_core::ThreadManager;
 use codex_core::built_in_model_providers;
 use codex_core::compact::SUMMARIZATION_PROMPT;
 use codex_core::compact::SUMMARY_PREFIX;
@@ -17,7 +15,6 @@ use codex_core::protocol::SandboxPolicy;
 use codex_core::protocol::WarningEvent;
 use codex_protocol::config_types::ReasoningSummary;
 use codex_protocol::user_input::UserInput;
-use core_test_support::load_default_config_for_test;
 use core_test_support::responses::ev_local_shell_call;
 use core_test_support::responses::ev_reasoning_item;
 use core_test_support::skip_if_no_network;
@@ -25,7 +22,6 @@ use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
 use core_test_support::wait_for_event_match;
 use std::collections::VecDeque;
-use tempfile::TempDir;
 
 use core_test_support::responses::ev_assistant_message;
 use core_test_support::responses::ev_completed;
@@ -140,21 +136,14 @@ async fn summarize_context_three_requests_and_instructions() {
 
     // Build config pointing to the mock server and spawn Codex.
     let model_provider = non_openai_model_provider(&server);
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_auto_compact_token_limit = Some(200_000);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread {
-        thread: codex,
-        session_configured,
-        ..
-    } = thread_manager.start_thread(config).await.unwrap();
-    let rollout_path = session_configured.rollout_path.expect("rollout path");
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_auto_compact_token_limit = Some(200_000);
+    });
+    let test = builder.build(&server).await.unwrap();
+    let codex = test.codex.clone();
+    let rollout_path = test.session_configured.rollout_path.expect("rollout path");
 
     // 1) Normal user input – should hit server once.
     codex
@@ -338,20 +327,15 @@ async fn manual_compact_uses_custom_prompt() {
     let custom_prompt = "Use this compact prompt instead";
 
     let model_provider = non_openai_model_provider(&server);
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    config.compact_prompt = Some(custom_prompt.to_string());
-
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        config.compact_prompt = Some(custom_prompt.to_string());
+    });
+    let codex = builder
+        .build(&server)
         .await
         .expect("create conversation")
-        .thread;
+        .codex;
 
     codex.submit(Op::Compact).await.expect("trigger compact");
     let warning_event = wait_for_event(&codex, |ev| matches!(ev, EventMsg::Warning(_))).await;
@@ -414,16 +398,11 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
     mount_sse_once(&server, sse_compact).await;
 
     let model_provider = non_openai_model_provider(&server);
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread { thread: codex, .. } = thread_manager.start_thread(config).await.unwrap();
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     // Trigger manual compact and collect TokenCount events for the compact turn.
     codex.submit(Op::Compact).await.unwrap();
@@ -1039,16 +1018,12 @@ async fn auto_compact_runs_after_token_limit_hit() {
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_auto_compact_token_limit = Some(200_000);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let codex = thread_manager.start_thread(config).await.unwrap().thread;
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_auto_compact_token_limit = Some(200_000);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     codex
         .submit(Op::UserInput {
@@ -1379,20 +1354,14 @@ async fn auto_compact_persists_rollout_entries() {
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_auto_compact_token_limit = Some(200_000);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread {
-        thread: codex,
-        session_configured,
-        ..
-    } = thread_manager.start_thread(config).await.unwrap();
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_auto_compact_token_limit = Some(200_000);
+    });
+    let test = builder.build(&server).await.unwrap();
+    let codex = test.codex.clone();
+    let session_configured = test.session_configured;
 
     codex
         .submit(Op::UserInput {
@@ -1497,19 +1466,12 @@ async fn manual_compact_retries_after_context_window_error() {
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_auto_compact_token_limit = Some(200_000);
-    let codex = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    )
-    .start_thread(config)
-    .await
-    .unwrap()
-    .thread;
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_auto_compact_token_limit = Some(200_000);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     codex
         .submit(Op::UserInput {
@@ -1632,18 +1594,11 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    let codex = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    )
-    .start_thread(config)
-    .await
-    .unwrap()
-    .thread;
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     codex
         .submit(Op::UserInput {
@@ -1700,12 +1655,11 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
                 && item
                     .get("content")
                     .and_then(|v| v.as_array())
-                    .map(|arr| {
+                    .is_some_and(|arr| {
                         arr.iter().any(|entry| {
                             entry.get("text").and_then(|v| v.as_str()) == Some(expected)
                         })
                     })
-                    .unwrap_or(false)
         })
     };
 
@@ -1843,16 +1797,12 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_auto_compact_token_limit = Some(200);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let codex = thread_manager.start_thread(config).await.unwrap().thread;
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_auto_compact_token_limit = Some(200);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     let mut auto_compact_lifecycle_events = Vec::new();
     for user in [MULTI_AUTO_MSG, follow_up_user, final_user] {
@@ -1954,21 +1904,13 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
 
     let model_provider = non_openai_model_provider(&server);
 
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    set_test_compact_prompt(&mut config);
-    config.model_context_window = Some(context_window);
-    config.model_auto_compact_token_limit = Some(limit);
-
-    let codex = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    )
-    .start_thread(config)
-    .await
-    .unwrap()
-    .thread;
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider = model_provider;
+        set_test_compact_prompt(config);
+        config.model_context_window = Some(context_window);
+        config.model_auto_compact_token_limit = Some(limit);
+    });
+    let codex = builder.build(&server).await.unwrap().codex;
 
     codex
         .submit(Op::UserInput {

@@ -10,12 +10,8 @@
 use super::compact::COMPACT_WARNING_MESSAGE;
 use super::compact::FIRST_REPLY;
 use super::compact::SUMMARY_TEXT;
-use codex_core::CodexAuth;
 use codex_core::CodexThread;
-use codex_core::ModelProviderInfo;
-use codex_core::NewThread;
 use codex_core::ThreadManager;
-use codex_core::built_in_model_providers;
 use codex_core::compact::SUMMARIZATION_PROMPT;
 use codex_core::config::Config;
 use codex_core::protocol::EventMsg;
@@ -23,12 +19,12 @@ use codex_core::protocol::Op;
 use codex_core::protocol::WarningEvent;
 use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
 use codex_protocol::user_input::UserInput;
-use core_test_support::load_default_config_for_test;
 use core_test_support::responses::ResponseMock;
 use core_test_support::responses::ev_assistant_message;
 use core_test_support::responses::ev_completed;
 use core_test_support::responses::mount_sse_once_match;
 use core_test_support::responses::sse;
+use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
 use pretty_assertions::assert_eq;
 use serde_json::Value;
@@ -99,8 +95,7 @@ fn extract_summary_message(request: &Value, summary_text: &str) -> Value {
                 .and_then(|arr| arr.first())
                 .and_then(|entry| entry.get("text"))
                 .and_then(Value::as_str)
-                .map(|text| text.contains(summary_text))
-                .unwrap_or(false)
+                .is_some_and(|text| text.contains(summary_text))
         })
     })
     .cloned()
@@ -117,21 +112,18 @@ fn normalize_compact_prompts(requests: &mut [Value]) {
             {
                 return true;
             }
-            let content = item
-                .get("content")
-                .and_then(Value::as_array)
-                .cloned()
+            let Some(content) = item.get("content").and_then(Value::as_array) else {
+                return false;
+            };
+            let Some(first) = content.first() else {
+                return false;
+            };
+            let text = first
+                .get("text")
+                .and_then(Value::as_str)
+                .unwrap_or_default();
-            if let Some(first) = content.first() {
-                let text = first
-                    .get("text")
-                    .and_then(Value::as_str)
-                    .unwrap_or_default();
-                let normalized_text = normalize_line_endings_str(text);
-                !(text.is_empty() || normalized_text == normalized_summary_prompt)
-            } else {
-                false
-            }
+            let normalized_text = normalize_line_endings_str(text);
+            !(text.is_empty() || normalized_text == normalized_summary_prompt)
         });
     }
 }
@@ -874,9 +866,7 @@ fn gather_request_bodies(request_log: &[ResponseMock]) -> Vec<Value> {
         .flat_map(ResponseMock::requests)
         .map(|request| request.body_json())
         .collect::<Vec<_>>();
-    for body in &mut bodies {
-        normalize_line_endings(body);
-    }
+    bodies.iter_mut().for_each(normalize_line_endings);
     bodies
 }
 
@@ -960,29 +950,19 @@ async fn mount_second_compact_flow(server: &MockServer) -> Vec<ResponseMock> {
 async fn start_test_conversation(
     server: &MockServer,
     model: Option<&str>,
-) -> (TempDir, Config, ThreadManager, Arc<CodexThread>) {
-    let model_provider = ModelProviderInfo {
-        name: "Non-OpenAI Model provider".into(),
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-    let home = TempDir::new().expect("create temp dir");
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider;
-    config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
-    if let Some(model) = model {
-        config.model = Some(model.to_string());
-    }
-    let manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread { thread, .. } = manager
-        .start_thread(config.clone())
-        .await
-        .expect("create conversation");
-
-    (home, config, manager, thread)
+) -> (Arc<TempDir>, Config, Arc<ThreadManager>, Arc<CodexThread>) {
+    let base_url = format!("{}/v1", server.uri());
+    let model = model.map(str::to_string);
+    let mut builder = test_codex().with_config(move |config| {
+        config.model_provider.name = "Non-OpenAI Model provider".to_string();
+        config.model_provider.base_url = Some(base_url);
+        config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
+        if let Some(model) = model {
+            config.model = Some(model);
+        }
+    });
+    let test = builder.build(server).await.expect("create conversation");
+    (test.home, test.config, test.thread_manager, test.codex)
 }
 
 async fn user_turn(conversation: &Arc<CodexThread>, text: &str) {
@@ -1021,13 +1001,14 @@ async fn resume_conversation(
     config: &Config,
     path: std::path::PathBuf,
 ) -> Arc<CodexThread> {
-    let auth_manager =
-        codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("dummy"));
-    let NewThread { thread, .. } = manager
+    let auth_manager = codex_core::AuthManager::from_auth_for_testing(
+        codex_core::CodexAuth::from_api_key("dummy"),
+    );
+    manager
         .resume_thread_from_rollout(config.clone(), path, auth_manager)
         .await
-        .expect("resume conversation");
-    thread
+        .expect("resume conversation")
+        .thread
 }
 
 #[cfg(test)]
@@ -1037,9 +1018,9 @@ async fn fork_thread(
     path: std::path::PathBuf,
     nth_user_message: usize,
 ) -> Arc<CodexThread> {
-    let NewThread { thread, .. } = manager
+    manager
         .fork_thread(nth_user_message, config.clone(), path)
         .await
-        .expect("fork conversation");
-    thread
+        .expect("fork conversation")
+        .thread
 }

@@ -1,8 +1,4 @@
-use codex_core::CodexAuth;
-use codex_core::ModelProviderInfo;
-use codex_core::NewThread;
 use codex_core::ThreadManager;
-use codex_core::built_in_model_providers;
 use codex_core::parse_turn_item;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::Op;
@@ -10,10 +6,9 @@ use codex_core::protocol::RolloutItem;
 use codex_core::protocol::RolloutLine;
 use codex_protocol::items::TurnItem;
 use codex_protocol::user_input::UserInput;
-use core_test_support::load_default_config_for_test;
 use core_test_support::skip_if_no_network;
+use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
-use tempfile::TempDir;
 use wiremock::Mock;
 use wiremock::MockServer;
 use wiremock::ResponseTemplate;
@@ -44,25 +39,11 @@ async fn fork_thread_twice_drops_to_first_message() {
         .mount(&server)
         .await;
 
-    // Configure Codex to use the mock server.
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    let home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&home).await;
-    config.model_provider = model_provider.clone();
-    let config_for_fork = config.clone();
-
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread { thread: codex, .. } = thread_manager
-        .start_thread(config)
-        .await
-        .expect("create conversation");
+    let mut builder = test_codex();
+    let test = builder.build(&server).await.expect("create conversation");
+    let codex = test.codex.clone();
+    let thread_manager = test.thread_manager.clone();
+    let config_for_fork = test.config.clone();
 
     // Send three user messages; wait for three completed turns.
     for text in ["first", "second", "third"] {

@@ -1,36 +1,28 @@
-use codex_core::CodexAuth;
-use codex_core::ThreadManager;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::Op;
 use codex_protocol::openai_models::ReasoningEffort;
-use core_test_support::load_default_config_for_test;
 use core_test_support::responses::start_mock_server;
 use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
 use pretty_assertions::assert_eq;
-use tempfile::TempDir;
 
 const CONFIG_TOML: &str = "config.toml";
 
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn override_turn_context_does_not_persist_when_config_exists() {
-    let codex_home = TempDir::new().unwrap();
-    let config_path = codex_home.path().join(CONFIG_TOML);
     let server = start_mock_server().await;
     let initial_contents = "model = \"gpt-4o\"\n";
-    tokio::fs::write(&config_path, initial_contents)
-        .await
-        .expect("seed config.toml");
-
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model = Some("gpt-4o".to_string());
-
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
-        .await
-        .expect("create conversation")
-        .thread;
+    let mut builder = test_codex()
+        .with_pre_build_hook(move |home| {
+            let config_path = home.join(CONFIG_TOML);
+            std::fs::write(config_path, initial_contents).expect("seed config.toml");
+        })
+        .with_config(|config| {
+            config.model = Some("gpt-4o".to_string());
+        });
+    let test = builder.build(&server).await.expect("create conversation");
+    let codex = test.codex.clone();
+    let config_path = test.home.path().join(CONFIG_TOML);
 
     codex
         .submit(Op::OverrideTurnContext {
@@ -57,25 +49,16 @@
 
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn override_turn_context_does_not_create_config_file() {
-    let codex_home = TempDir::new().unwrap();
-    let config_path = codex_home.path().join(CONFIG_TOML);
     let server = start_mock_server().await;
+    let mut builder = test_codex();
+    let test = builder.build(&server).await.expect("create conversation");
+    let codex = test.codex.clone();
+    let config_path = test.home.path().join(CONFIG_TOML);
     assert!(
         !config_path.exists(),
         "test setup should start without config"
     );
 
-    let config = load_default_config_for_test(&codex_home).await;
-
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-    );
-    let codex = thread_manager
-        .start_thread(config)
-        .await
-        .expect("create conversation")
-        .thread;
-
     codex
         .submit(Op::OverrideTurnContext {
             cwd: None,

@@ -1,11 +1,7 @@
-use codex_core::CodexAuth;
 use codex_core::CodexThread;
 use codex_core::ContentItem;
-use codex_core::ModelProviderInfo;
 use codex_core::REVIEW_PROMPT;
 use codex_core::ResponseItem;
-use codex_core::ThreadManager;
-use codex_core::built_in_model_providers;
 use codex_core::config::Config;
 use codex_core::protocol::ENVIRONMENT_CONTEXT_OPEN_TAG;
 use codex_core::protocol::EventMsg;
@@ -21,11 +17,11 @@ use codex_core::protocol::RolloutItem;
 use codex_core::protocol::RolloutLine;
 use codex_core::review_format::render_review_output_text;
 use codex_protocol::user_input::UserInput;
-use core_test_support::load_default_config_for_test;
 use core_test_support::load_sse_fixture_with_id_from_str;
 use core_test_support::responses::ResponseMock;
 use core_test_support::responses::mount_sse_sequence;
 use core_test_support::skip_if_no_network;
+use core_test_support::test_codex::test_codex;
 use core_test_support::wait_for_event;
 use pretty_assertions::assert_eq;
 use std::path::PathBuf;
@@ -73,8 +69,8 @@ async fn review_op_emits_lifecycle_and_review_output() {
     let review_json_escaped = serde_json::to_string(&review_json).unwrap();
     let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
     let (server, _request_log) = start_responses_server_with_sse(&sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
 
     // Submit review request.
     codex
@@ -174,6 +170,7 @@
         "assistant review output contains user_action markup"
     );
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -194,8 +191,8 @@ async fn review_op_with_plain_text_emits_review_fallback() {
     {"type":"response.completed", "response": {"id": "__ID__"}}
     ]"#;
     let (server, _request_log) = start_responses_server_with_sse(sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
 
     codex
         .submit(Op::Review {
@@ -226,6 +223,7 @@
     assert_eq!(expected, review);
     let _complete = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -254,8 +252,8 @@ async fn review_filters_agent_message_related_events() {
     {"type":"response.completed", "response": {"id": "__ID__"}}
     ]"#;
     let (server, _request_log) = start_responses_server_with_sse(sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
 
     codex
         .submit(Op::Review {
@@ -295,6 +293,7 @@
         .await;
     assert!(saw_entered && saw_exited, "missing review lifecycle events");
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -335,8 +334,8 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
     let review_json_escaped = serde_json::to_string(&review_json).unwrap();
     let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
     let (server, _request_log) = start_responses_server_with_sse(&sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
 
     codex
         .submit(Op::Review {
@@ -375,6 +374,7 @@
     assert_eq!(1, agent_messages, "expected exactly one AgentMessage event");
     assert!(saw_entered && saw_exited, "missing review lifecycle events");
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -389,9 +389,9 @@ async fn review_uses_custom_review_model_from_config() {
     {"type":"response.completed", "response": {"id": "__ID__"}}
     ]"#;
     let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
+    let codex_home = Arc::new(TempDir::new().unwrap());
     // Choose a review model different from the main model; ensure it is used.
-    let codex = new_conversation_for_server(&server, &codex_home, |cfg| {
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |cfg| {
         cfg.model = Some("gpt-4.1".to_string());
         cfg.review_model = Some("gpt-5.1".to_string());
     })
@@ -428,6 +428,7 @@
     let body = request.body_json();
     assert_eq!(body["model"].as_str().unwrap(), "gpt-5.1");
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -442,8 +443,8 @@ async fn review_uses_session_model_when_review_model_unset() {
     {"type":"response.completed", "response": {"id": "__ID__"}}
     ]"#;
     let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |cfg| {
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |cfg| {
         cfg.model = Some("gpt-4.1".to_string());
         cfg.review_model = None;
     })
@@ -478,6 +479,7 @@
     let body = request.body_json();
    assert_eq!(body["model"].as_str().unwrap(), "gpt-4.1");
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -497,12 +499,7 @@ async fn review_input_isolated_from_parent_history() {
     let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
 
     // Seed a parent session history via resume file with both user + assistant items.
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
+    let codex_home = Arc::new(TempDir::new().unwrap());
 
     let session_file = codex_home.path().join("resume.jsonl");
     {
@@ -564,7 +561,8 @@ async fn review_input_isolated_from_parent_history() {
             .unwrap();
     }
     let codex =
-        resume_conversation_for_server(&server, &codex_home, session_file.clone(), |_| {}).await;
+        resume_conversation_for_server(&server, codex_home.clone(), session_file.clone(), |_| {})
+            .await;
 
     // Submit review request; it must start fresh (no parent history in `input`).
     let review_prompt = "Please review only this".to_string();
@@ -657,6 +655,7 @@
         "expected user interruption message in rollout"
     );
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -675,8 +674,8 @@ async fn review_history_surfaces_in_parent_session() {
     {"type":"response.completed", "response": {"id": "__ID__"}}
     ]"#;
     let (server, request_log) = start_responses_server_with_sse(sse_raw, 2).await;
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
 
     // 1) Run a review turn that produces an assistant message (isolated in child).
     codex
@@ -755,6 +754,7 @@
         "review assistant output missing from parent turn input"
     );
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -807,9 +807,10 @@ async fn review_uses_overridden_cwd_for_base_branch_merge_base() {
         .trim()
         .to_string();
 
-    let codex_home = TempDir::new().unwrap();
-    let codex = new_conversation_for_server(&server, &codex_home, |config| {
-        config.cwd = initial_cwd.path().to_path_buf();
+    let codex_home = Arc::new(TempDir::new().unwrap());
+    let initial_cwd_path = initial_cwd.path().to_path_buf();
+    let codex = new_conversation_for_server(&server, codex_home.clone(), move |config| {
+        config.cwd = initial_cwd_path;
     })
     .await;
 
@@ -859,6 +860,7 @@
         "expected review prompt to include merge-base sha {head_sha}"
     );
 
+    let _codex_home_guard = codex_home;
     server.verify().await;
 }
 
@@ -878,57 +880,47 @@ async fn start_responses_server_with_sse(
 #[expect(clippy::expect_used)]
 async fn new_conversation_for_server<F>(
     server: &MockServer,
-    codex_home: &TempDir,
+    codex_home: Arc<TempDir>,
     mutator: F,
 ) -> Arc<CodexThread>
 where
-    F: FnOnce(&mut Config),
+    F: FnOnce(&mut Config) + Send + 'static,
 {
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-    let mut config = load_default_config_for_test(codex_home).await;
-    config.model_provider = model_provider;
-    mutator(&mut config);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-    );
-    thread_manager
-        .start_thread(config)
+    let base_url = format!("{}/v1", server.uri());
+    let mut builder = test_codex()
+        .with_home(codex_home)
+        .with_config(move |config| {
+            config.model_provider.base_url = Some(base_url.clone());
+            mutator(config);
+        });
+    builder
+        .build(server)
         .await
         .expect("create conversation")
-        .thread
+        .codex
 }
 
 /// Create a conversation resuming from a rollout file, configured to talk to the provided mock server.
 #[expect(clippy::expect_used)]
 async fn resume_conversation_for_server<F>(
     server: &MockServer,
-    codex_home: &TempDir,
+    codex_home: Arc<TempDir>,
     resume_path: std::path::PathBuf,
     mutator: F,
 ) -> Arc<CodexThread>
 where
-    F: FnOnce(&mut Config),
+    F: FnOnce(&mut Config) + Send + 'static,
 {
-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-    let mut config = load_default_config_for_test(codex_home).await;
-    config.model_provider = model_provider;
-    mutator(&mut config);
-    let thread_manager = ThreadManager::with_models_provider(
-        CodexAuth::from_api_key("Test API Key"),
-        config.model_provider.clone(),
-    );
-    let auth_manager =
-        codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
-    thread_manager
-        .resume_thread_from_rollout(config, resume_path, auth_manager)
+    let base_url = format!("{}/v1", server.uri());
+    let mut builder = test_codex()
+        .with_home(codex_home.clone())
+        .with_config(move |config| {
+            config.model_provider.base_url = Some(base_url.clone());
+            mutator(config);
+        });
+    builder
+        .resume(server, codex_home, resume_path)
        .await
        .expect("resume conversation")
-        .thread
+        .codex
 }

@@ -1,6 +1,4 @@
 use anyhow::Context;
-use codex_core::NewThread;
-use codex_core::ThreadManager;
 use codex_core::features::Feature;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ExecCommandEndEvent;
@@ -10,7 +8,6 @@ use codex_core::protocol::Op;
 use codex_core::protocol::SandboxPolicy;
 use codex_core::protocol::TurnAbortReason;
 use core_test_support::assert_regex_match;
-use core_test_support::load_default_config_for_test;
 use core_test_support::responses;
 use core_test_support::responses::ev_assistant_message;
 use core_test_support::responses::ev_completed;
@@ -38,19 +35,17 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
         .await
         .expect("write temp file");
 
-    // Load config and pin cwd to the temp dir so ls/cat operate there.
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home).await;
-    config.cwd = cwd.path().to_path_buf();
-
-    let thread_manager = ThreadManager::with_models_provider(
-        codex_core::CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread { thread: codex, .. } = thread_manager
-        .start_thread(config)
+    // Pin cwd to the temp dir so ls/cat operate there.
+    let server = start_mock_server().await;
+    let cwd_path = cwd.path().to_path_buf();
+    let mut builder = test_codex().with_config(move |config| {
+        config.cwd = cwd_path;
+    });
+    let codex = builder
+        .build(&server)
         .await
-        .expect("create new conversation");
+        .expect("create new conversation")
+        .codex;
 
     // 1) shell command should list the file
     let list_cmd = "ls".to_string();
@@ -97,16 +92,13 @@
 #[tokio::test]
 async fn user_shell_cmd_can_be_interrupted() {
-    // Set up isolated config and conversation.
-    let codex_home = TempDir::new().unwrap();
-    let config = load_default_config_for_test(&codex_home).await;
-    let thread_manager = ThreadManager::with_models_provider(
-        codex_core::CodexAuth::from_api_key("dummy"),
-        config.model_provider.clone(),
-    );
-    let NewThread { thread: codex, .. } = thread_manager
-        .start_thread(config)
+    let server = start_mock_server().await;
+    let mut builder = test_codex();
+    let codex = builder
+        .build(&server)
        .await
-        .expect("create new conversation");
+        .expect("create new conversation")
+        .codex;
 
     // Start a long-running command and then interrupt it.
     let sleep_cmd = "sleep 5".to_string();