refactor(core/tests): use test_codex builder to simplify conversation setup in suite tests

- Switch to using test_codex builder utilities for server+config setup in fork_conversation.rs, resume_warning.rs, and review.rs tests.
- Remove manual config, tempdir, and provider wiring; leverage test_codex and TestCodex for cleaner, reusable initialization.
- Update helper functions in review.rs to return TestCodex instead of raw conversation Arcs.
- Remove unnecessary imports and intermediate home/config wiring.
This commit is contained in:
pakrym-oai
2025-11-15 10:01:28 -08:00
parent 1f84e855e6
commit e467f97032
4 changed files with 55 additions and 94 deletions

View File

@@ -97,11 +97,11 @@ You are producing plain text that will later be styled by the CLI. Follow these
- Tone: collaborative, concise, factual; present tense, active voice; self-contained; no "above/below"; parallel wording.
- Don'ts: no nested bullets/hierarchies; no ANSI codes; don't cram unrelated keywords; keep keyword lists short—wrap/reformat if long; avoid naming formatting styles in answers.
- Adaptation: code explanations → precise, structured with code refs; simple tasks → lead with outcome; big changes → logical walkthrough + rationale + next actions; casual one-offs → plain sentences, no headers/bullets.
- File References: When referencing files in your response, make sure to include the relevant start line and always follow the below rules:
- File References: When referencing files in your response follow the below rules:
* Use inline code to make file paths clickable.
* Each reference should have a stand alone path. Even if it's the same file.
    * Accepted: absolute, workspace-relative, a/ or b/ diff prefixes, or bare filename/suffix.
    * Optionally include line/column (1-based): :line[:column] or #Lline[Ccolumn] (column defaults to 1). Do not run extra command to find line numbers.
    * Optionally include line/column (1-based): :line[:column] or #Lline[Ccolumn] (column defaults to 1).
* Do not use URIs like file://, vscode://, or https://.
* Do not provide range of lines
* Examples: src/app.ts, src/app.ts:42, b/server/index.js#L10, C:\repo\project\main.rs:12:5

View File

@@ -1,8 +1,6 @@
use codex_core::CodexAuth;
use codex_core::ConversationManager;
use codex_core::ModelProviderInfo;
use codex_core::NewConversation;
use codex_core::built_in_model_providers;
use codex_core::parse_turn_item;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
@@ -10,10 +8,9 @@ use codex_core::protocol::RolloutItem;
use codex_core::protocol::RolloutLine;
use codex_protocol::items::TurnItem;
use codex_protocol::user_input::UserInput;
use core_test_support::load_default_config_for_test;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use tempfile::TempDir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
@@ -44,25 +41,14 @@ async fn fork_conversation_twice_drops_to_first_message() {
.mount(&server)
.await;
// Configure Codex to use the mock server.
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
config.model_provider = model_provider.clone();
let config_for_fork = config.clone();
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let NewConversation {
conversation: codex,
..
} = conversation_manager
.new_conversation(config)
let mut builder = test_codex();
let test = builder
.build(&server)
.await
.expect("create conversation");
.expect("create conversation via test_codex");
let codex = test.codex.clone();
let config_for_fork = test.config.clone();
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
// Send three user messages; wait for three completed turns.
for text in ["first", "second", "third"] {

View File

@@ -12,9 +12,9 @@ use codex_core::protocol::TurnContextItem;
use codex_core::protocol::WarningEvent;
use codex_protocol::ConversationId;
use core::time::Duration;
use core_test_support::load_default_config_for_test;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use tempfile::TempDir;
use wiremock::MockServer;
fn resume_history(config: &codex_core::config::Config, previous_model: &str, rollout_path: &std::path::Path) -> InitialHistory {
let turn_ctx = TurnContextItem {
@@ -36,13 +36,19 @@ fn resume_history(config: &codex_core::config::Config, previous_model: &str, rol
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn emits_warning_when_resumed_model_differs() {
// Arrange a config with a current model and a prior rollout recorded under a different model.
let home = TempDir::new().expect("tempdir");
let mut config = load_default_config_for_test(&home);
config.model = "current-model".to_string();
let server = MockServer::start().await;
let mut builder = test_codex().with_config(|config| {
config.model = "current-model".to_string();
});
let test = builder
.build(&server)
.await
.expect("create test conversation");
let mut config = test.config.clone();
// Ensure cwd is absolute (the helper sets it to the temp dir already).
assert!(config.cwd.is_absolute());
let rollout_path = home.path().join("rollout.jsonl");
let rollout_path = test.home.path().join("rollout.jsonl");
std::fs::write(&rollout_path, "").expect("create rollout placeholder");
let initial_history = resume_history(&config, "previous-model", &rollout_path);

View File

@@ -1,11 +1,6 @@
use codex_core::CodexAuth;
use codex_core::CodexConversation;
use codex_core::ContentItem;
use codex_core::ConversationManager;
use codex_core::ModelProviderInfo;
use codex_core::REVIEW_PROMPT;
use codex_core::ResponseItem;
use codex_core::built_in_model_providers;
use codex_core::config::Config;
use codex_core::protocol::ENVIRONMENT_CONTEXT_OPEN_TAG;
use codex_core::protocol::EventMsg;
@@ -19,9 +14,10 @@ use codex_core::protocol::ReviewRequest;
use codex_core::protocol::RolloutItem;
use codex_core::protocol::RolloutLine;
use codex_protocol::user_input::UserInput;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id_from_str;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
@@ -73,8 +69,8 @@ async fn review_op_emits_lifecycle_and_review_output() {
let review_json_escaped = serde_json::to_string(&review_json).unwrap();
let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
let server = start_responses_server_with_sse(&sse_raw, 1).await;
let codex_home = TempDir::new().unwrap();
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
let test = new_conversation_for_server(&server, |_| {}).await;
let codex = Arc::clone(&test.codex);
// Submit review request.
codex
@@ -170,8 +166,8 @@ async fn review_op_with_plain_text_emits_review_fallback() {
{"type":"response.completed", "response": {"id": "__ID__"}}
]"#;
let server = start_responses_server_with_sse(sse_raw, 1).await;
let codex_home = TempDir::new().unwrap();
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
let test = new_conversation_for_server(&server, |_| {}).await;
let codex = Arc::clone(&test.codex);
codex
.submit(Op::Review {
@@ -228,8 +224,8 @@ async fn review_filters_agent_message_related_events() {
{"type":"response.completed", "response": {"id": "__ID__"}}
]"#;
let server = start_responses_server_with_sse(sse_raw, 1).await;
let codex_home = TempDir::new().unwrap();
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
let test = new_conversation_for_server(&server, |_| {}).await;
let codex = Arc::clone(&test.codex);
codex
.submit(Op::Review {
@@ -312,8 +308,8 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
let review_json_escaped = serde_json::to_string(&review_json).unwrap();
let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
let server = start_responses_server_with_sse(&sse_raw, 1).await;
let codex_home = TempDir::new().unwrap();
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
let test = new_conversation_for_server(&server, |_| {}).await;
let codex = Arc::clone(&test.codex);
codex
.submit(Op::Review {
@@ -360,13 +356,13 @@ async fn review_uses_custom_review_model_from_config() {
{"type":"response.completed", "response": {"id": "__ID__"}}
]"#;
let server = start_responses_server_with_sse(sse_raw, 1).await;
let codex_home = TempDir::new().unwrap();
// Choose a review model different from the main model; ensure it is used.
let codex = new_conversation_for_server(&server, &codex_home, |cfg| {
let test = new_conversation_for_server(&server, |cfg| {
cfg.model = "gpt-4.1".to_string();
cfg.review_model = "gpt-5".to_string();
})
.await;
let codex = Arc::clone(&test.codex);
codex
.submit(Op::Review {
@@ -415,13 +411,7 @@ async fn review_input_isolated_from_parent_history() {
let server = start_responses_server_with_sse(sse_raw, 1).await;
// Seed a parent session history via resume file with both user + assistant items.
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
config.model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let codex_home = Arc::new(TempDir::new().unwrap());
let session_file = codex_home.path().join("resume.jsonl");
{
let mut f = tokio::fs::File::create(&session_file).await.unwrap();
@@ -480,8 +470,14 @@ async fn review_input_isolated_from_parent_history() {
.await
.unwrap();
}
let codex =
resume_conversation_for_server(&server, &codex_home, session_file.clone(), |_| {}).await;
let test = resume_conversation_for_server(
&server,
Arc::clone(&codex_home),
session_file.clone(),
|_| {},
)
.await;
let codex = Arc::clone(&test.codex);
// Submit review request; it must start fresh (no parent history in `input`).
let review_prompt = "Please review only this".to_string();
@@ -593,8 +589,8 @@ async fn review_history_does_not_leak_into_parent_session() {
{"type":"response.completed", "response": {"id": "__ID__"}}
]"#;
let server = start_responses_server_with_sse(sse_raw, 2).await;
let codex_home = TempDir::new().unwrap();
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
let test = new_conversation_for_server(&server, |_| {}).await;
let codex = Arc::clone(&test.codex);
// 1) Run a review turn that produces an assistant message (isolated in child).
codex
@@ -682,55 +678,28 @@ async fn start_responses_server_with_sse(sse_raw: &str, expected_requests: usize
/// Create a conversation configured to talk to the provided mock server.
#[expect(clippy::expect_used)]
async fn new_conversation_for_server<F>(
server: &MockServer,
codex_home: &TempDir,
mutator: F,
) -> Arc<CodexConversation>
async fn new_conversation_for_server<F>(server: &MockServer, mutator: F) -> TestCodex
where
F: FnOnce(&mut Config),
F: FnOnce(&mut Config) + Send + 'static,
{
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home);
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager =
ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
conversation_manager
.new_conversation(config)
.await
.expect("create conversation")
.conversation
let mut builder = test_codex().with_config(mutator);
builder.build(server).await.expect("create conversation")
}
/// Create a conversation resuming from a rollout file, configured to talk to the provided mock server.
#[expect(clippy::expect_used)]
async fn resume_conversation_for_server<F>(
server: &MockServer,
codex_home: &TempDir,
codex_home: Arc<TempDir>,
resume_path: std::path::PathBuf,
mutator: F,
) -> Arc<CodexConversation>
) -> TestCodex
where
F: FnOnce(&mut Config),
F: FnOnce(&mut Config) + Send + 'static,
{
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home);
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager =
ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
let auth_manager =
codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
conversation_manager
.resume_conversation_from_rollout(config, resume_path, auth_manager)
let mut builder = test_codex().with_config(mutator);
builder
.resume(server, codex_home, resume_path)
.await
.expect("resume conversation")
.conversation
}