Compare commits

...

12 Commits

Author SHA1 Message Date
aibrahim-oai    d465d71955    Merge branch 'main' into codex/implement-cli-tool-invocation-flow-tests    2025-07-16 22:32:32 -07:00
aibrahim-oai    3d1cfe31a2    Merge branch 'main' into codex/implement-cli-tool-invocation-flow-tests    2025-07-14 15:22:46 -07:00
Ahmed Ibrahim    d6e934f7cd    replace stdout with file matching    2025-07-14 11:13:09 -07:00
Ahmed Ibrahim    0b83f2965c    fmt    2025-07-14 11:00:54 -07:00
Ahmed Ibrahim    d4dc3b11bc    fmt    2025-07-14 10:57:47 -07:00
aibrahim-oai    bcbe02ff1d    Merge branch 'main' into codex/implement-cli-tool-invocation-flow-tests    2025-07-12 17:28:25 -07:00
Ahmed Ibrahim    51257e2fd0    Adressing feedback    2025-07-12 17:15:37 -07:00
aibrahim-oai    0ece374c58    Merge branch 'main' into codex/implement-cli-tool-invocation-flow-tests    2025-07-12 17:08:03 -07:00
aibrahim-oai    f532554924    Fix clippy warnings in integration tests    2025-07-11 14:43:58 -07:00
aibrahim-oai    f9609cc9bf    Format integration test imports    2025-07-11 14:29:48 -07:00
aibrahim-oai    781798b4ed    Use sandbox dirs and env var constant    2025-07-11 14:13:55 -07:00
aibrahim-oai    5bafe0dc59    Update Cargo.lock for new dev dependencies    2025-07-11 14:03:15 -07:00
3 changed files with 236 additions and 0 deletions

codex-rs/Cargo.lock (generated)  +5

@@ -617,6 +617,7 @@ name = "codex-cli"
version = "0.0.0"
dependencies = [
"anyhow",
"assert_cmd",
"clap",
"clap_complete",
"codex-chatgpt",
@@ -627,10 +628,14 @@ dependencies = [
"codex-login",
"codex-mcp-server",
"codex-tui",
"indoc",
"predicates",
"serde_json",
"tempfile",
"tokio",
"tracing",
"tracing-subscriber",
"wiremock",
]

[[package]]


@@ -36,3 +36,11 @@ tokio = { version = "1", features = [
] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"

[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"
wiremock = "0.6"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
indoc = "2"


@@ -0,0 +1,223 @@
#![allow(clippy::unwrap_used)]
//! End-to-end integration tests for the `codex` CLI.
//!
//! These spin up a local [`wiremock`][] server to stand in for the model
//! provider's Responses API and then run the real compiled `codex` binary
//! against it. The goal is to verify the high-level request/response flow
//! rather than the details of the individual async functions.
//!
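//! At a high level each test starts a `MockServer`, writes a `config.toml`
//! whose `base_url` points at it, runs `codex exec` with `CODEX_HOME` set to a
//! temporary directory, and asserts on stdout plus the file written via
//! `--output-last-message`.
//!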
//! [`wiremock`]: https://docs.rs/wiremock
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use predicates::prelude::*;
use std::fs;
use std::path::Path;
use tempfile::TempDir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;

// ----- tests -----

/// Sends a single simple prompt and verifies that the streamed response is
/// surfaced to the user. This exercises the most common "ask a question, get a
/// textual answer" flow.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn full_conversation_turn_integration() {
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
println!("Skipping test because network is disabled");
return;
}
let server = MockServer::start().await;
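// Any POST to `/v1/responses` is answered with a canned SSE stream containing a
// single assistant message; `.expect(1)` makes the mock fail the test if the CLI
// issues a different number of requests.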
Mock::given(method("POST"))
.and(path("/v1/responses"))
.respond_with(
ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(sse_message("Hello, world."), "text/event-stream"),
)
.expect(1)
.mount(&server)
.await;
// Disable retries — the mock server will fail hard if we make an unexpected
// request, so retries only slow the test down.
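// (`std::env::set_var` is an `unsafe` fn on newer Rust editions because mutating
// the process environment can race with concurrent reads, hence the block below.)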
unsafe {
std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0");
std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "0");
}
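// Isolated CODEX_HOME for the generated config, plus a scratch directory to run the CLI in.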
let codex_home = TempDir::new().unwrap();
let sandbox = TempDir::new().unwrap();
write_config(codex_home.path(), &server);
// Capture the agent's final message in a file so we can assert on it precisely.
let last_message_file = sandbox.path().join("last_message.txt");
let mut cmd = assert_cmd::Command::cargo_bin("codex").unwrap();
cmd.env("CODEX_HOME", codex_home.path())
.current_dir(sandbox.path())
.arg("exec")
.arg("--skip-git-repo-check")
.arg("--output-last-message")
.arg(&last_message_file)
.arg("Hello");
cmd.assert()
.success()
.stdout(predicate::str::contains("Hello, world."));
// Assert on the captured last message file (more robust than stdout formatting).
let last = fs::read_to_string(&last_message_file).unwrap();
let expected = "Hello, world.";
assert_eq!(last.trim(), expected);
}

/// Simulates a tool invocation (`shell`) followed by a second assistant message
/// once the tool call completes.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn tool_invocation_flow() {
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
println!("Skipping test because network is disabled");
return;
}
let server = MockServer::start().await;
// The first request returns a function-call item; the second returns the
// final assistant message. Use an atomic counter to serve them in order.
struct SeqResponder {
count: std::sync::atomic::AtomicUsize,
}
impl wiremock::Respond for SeqResponder {
fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
use std::sync::atomic::Ordering;
match self.count.fetch_add(1, Ordering::SeqCst) {
0 => ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(sse_function_call(), "text/event-stream"),
_ => ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(sse_final_after_call(), "text/event-stream"),
}
}
}
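// Mount the sequential responder; `.expect(2)` asserts that exactly two requests
// are made: the initial turn and the follow-up sent once the `shell` call completes.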
Mock::given(method("POST"))
.and(path("/v1/responses"))
.respond_with(SeqResponder {
count: std::sync::atomic::AtomicUsize::new(0),
})
.expect(2)
.mount(&server)
.await;
unsafe {
std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0");
std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "0");
}
let codex_home = TempDir::new().unwrap();
let sandbox = TempDir::new().unwrap();
write_config(codex_home.path(), &server);
// Capture final assistant message after tool invocation.
let last_message_file = sandbox.path().join("last_message.txt");
let mut cmd = assert_cmd::Command::cargo_bin("codex").unwrap();
cmd.env("CODEX_HOME", codex_home.path())
.current_dir(sandbox.path())
.arg("exec")
.arg("--skip-git-repo-check")
.arg("--output-last-message")
.arg(&last_message_file)
.arg("Run shell");
cmd.assert()
.success()
.stdout(predicate::str::contains("exec echo hi"))
.stdout(predicate::str::contains("hi"));
// Assert that the final assistant message (second response) was 'done'.
let last = fs::read_to_string(&last_message_file).unwrap();
let expected = "done";
assert_eq!(last.trim(), expected);
}

/// Write a minimal `config.toml` pointing the CLI at the mock server.
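/// `env_key` is pointed at `PATH`, presumably so a non-empty "API key" variable
/// is always present in the environment without requiring a real secret.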
fn write_config(codex_home: &Path, server: &MockServer) {
fs::write(
codex_home.join("config.toml"),
format!(
r#"
model_provider = "mock"
model = "test-model"
[model_providers.mock]
name = "mock"
base_url = "{}/v1"
env_key = "PATH"
wire_api = "responses"
"#,
server.uri()
),
)
.unwrap();
}

/// Small helper to generate an SSE stream with a single assistant message.
fn sse_message(text: &str) -> String {
const TEMPLATE: &str = r#"event: response.output_item.done
data: {"type":"response.output_item.done","item":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"TEXT_PLACEHOLDER"}]}}

event: response.completed
data: {"type":"response.completed","response":{"id":"resp1","output":[]}}

"#;
TEMPLATE.replace("TEXT_PLACEHOLDER", text)
}

/// Helper to craft an SSE stream that returns a `function_call`.
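/// The crafted call asks the CLI to run `echo hi` via the `shell` tool.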
fn sse_function_call() -> String {
let call = serde_json::json!({
"type": "response.output_item.done",
"item": {
"type": "function_call",
"name": "shell",
"arguments": "{\"command\":[\"echo\",\"hi\"]}",
"call_id": "call1"
}
});
let completed = serde_json::json!({
"type": "response.completed",
"response": {"id": "resp1", "output": []}
});
format!(
"event: response.output_item.done\ndata: {call}\n\n\
event: response.completed\ndata: {completed}\n\n\n"
)
}

/// SSE stream for the assistant's final message after the tool call returns.
fn sse_final_after_call() -> String {
let msg = serde_json::json!({
"type": "response.output_item.done",
"item": {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "done"}]}
});
let completed = serde_json::json!({
"type": "response.completed",
"response": {"id": "resp2", "output": []}
});
format!(
"event: response.output_item.done\ndata: {msg}\n\n\
event: response.completed\ndata: {completed}\n\n\n"
)
}
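
Running the new suite (a sketch, assuming the file lives under the CLI crate's tests/ directory): `cargo test -p codex-cli` from codex-rs/ builds the `codex` binary and executes these integration tests, with `assert_cmd::Command::cargo_bin("codex")` resolving the binary Cargo just built. The tests skip themselves when CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR is set, since the local mock server needs loopback networking.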