mirror of
https://github.com/openai/codex.git
synced 2026-04-28 08:34:54 +00:00
17 KiB
17 KiB
PR #1630: chores: refactoring tests
- URL: https://github.com/openai/codex/pull/1630
- Author: aibrahim-oai
- Created: 2025-07-20 05:56:44 UTC
- Updated: 2025-07-24 18:59:50 UTC
- Changes: +75/-46, Files changed: 7, Commits: 35
Description
- Used `Command::cargo_bin` instead of `AssertCommand::new("cargo")`
Full Diff
diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock
index 3e4b84a435..050d497c2c 100644
--- a/codex-rs/Cargo.lock
+++ b/codex-rs/Cargo.lock
@@ -626,6 +626,7 @@ name = "codex-cli"
version = "0.0.0"
dependencies = [
"anyhow",
+ "assert_cmd",
"clap",
"clap_complete",
"codex-chatgpt",
@@ -636,10 +637,15 @@ dependencies = [
"codex-login",
"codex-mcp-server",
"codex-tui",
+ "predicates",
"serde_json",
+ "tempfile",
"tokio",
"tracing",
"tracing-subscriber",
+ "uuid",
+ "walkdir",
+ "wiremock",
]
[[package]]
diff --git a/codex-rs/cli/Cargo.toml b/codex-rs/cli/Cargo.toml
index 943788157b..60d40f3451 100644
--- a/codex-rs/cli/Cargo.toml
+++ b/codex-rs/cli/Cargo.toml
@@ -26,6 +26,7 @@ codex-login = { path = "../login" }
codex-linux-sandbox = { path = "../linux-sandbox" }
codex-mcp-server = { path = "../mcp-server" }
codex-tui = { path = "../tui" }
+predicates = "3.1.3"
serde_json = "1"
tokio = { version = "1", features = [
"io-std",
@@ -36,3 +37,10 @@ tokio = { version = "1", features = [
] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
+
+[dev-dependencies]
+assert_cmd = "2"
+tempfile = "3"
+uuid = { version = "1", features = ["serde", "v4"] }
+walkdir = "2.5.0"
+wiremock = "0.6"
diff --git a/codex-rs/core/tests/cli_responses_fixture.sse b/codex-rs/cli/tests/cli_responses_fixture.sse
similarity index 100%
rename from codex-rs/core/tests/cli_responses_fixture.sse
rename to codex-rs/cli/tests/cli_responses_fixture.sse
diff --git a/codex-rs/core/tests/cli_stream.rs b/codex-rs/cli/tests/cli_stream.rs
similarity index 93%
rename from codex-rs/core/tests/cli_stream.rs
rename to codex-rs/cli/tests/cli_stream.rs
index 567279ebd0..42a8d135f7 100644
--- a/codex-rs/core/tests/cli_stream.rs
+++ b/codex-rs/cli/tests/cli_stream.rs
@@ -1,7 +1,8 @@
#![expect(clippy::unwrap_used)]
-use assert_cmd::Command as AssertCommand;
+use assert_cmd::prelude::*;
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
+use std::process::Command;
use std::time::Duration;
use std::time::Instant;
use tempfile::TempDir;
@@ -50,13 +51,8 @@ async fn chat_mode_stream_cli() {
"model_providers.mock={{ name = \"mock\", base_url = \"{}/v1\", env_key = \"PATH\", wire_api = \"chat\" }}",
server.uri()
);
- let mut cmd = AssertCommand::new("cargo");
- cmd.arg("run")
- .arg("-p")
- .arg("codex-cli")
- .arg("--quiet")
- .arg("--")
- .arg("exec")
+ let mut cmd = Command::cargo_bin("codex").unwrap();
+ cmd.arg("exec")
.arg("--skip-git-repo-check")
.arg("-c")
.arg(&provider_override)
@@ -100,13 +96,8 @@ async fn responses_api_stream_cli() {
std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/cli_responses_fixture.sse");
let home = TempDir::new().unwrap();
- let mut cmd = AssertCommand::new("cargo");
- cmd.arg("run")
- .arg("-p")
- .arg("codex-cli")
- .arg("--quiet")
- .arg("--")
- .arg("exec")
+ let mut cmd = Command::cargo_bin("codex").unwrap();
+ cmd.arg("exec")
.arg("--skip-git-repo-check")
.arg("-C")
.arg(env!("CARGO_MANIFEST_DIR"))
@@ -146,13 +137,8 @@ async fn integration_creates_and_checks_session_file() {
// 4. Run the codex CLI through cargo (ensures the right bin is built) and invoke `exec`,
// which is what records a session.
- let mut cmd = AssertCommand::new("cargo");
- cmd.arg("run")
- .arg("-p")
- .arg("codex-cli")
- .arg("--quiet")
- .arg("--")
- .arg("exec")
+ let mut cmd = Command::cargo_bin("codex").unwrap();
+ cmd.arg("exec")
.arg("--skip-git-repo-check")
.arg("-C")
.arg(env!("CARGO_MANIFEST_DIR"))
@@ -185,7 +171,9 @@ async fn integration_creates_and_checks_session_file() {
for entry in WalkDir::new(&sessions_dir) {
let entry = match entry {
Ok(e) => e,
- Err(_) => continue,
+ Err(_) => {
+ continue;
+ }
};
if !entry.file_type().is_file() {
continue;
@@ -207,7 +195,9 @@ async fn integration_creates_and_checks_session_file() {
}
let item: serde_json::Value = match serde_json::from_str(line) {
Ok(v) => v,
- Err(_) => continue,
+ Err(_) => {
+ continue;
+ }
};
if item.get("type").and_then(|t| t.as_str()) == Some("message") {
if let Some(c) = item.get("content") {
@@ -228,7 +218,6 @@ async fn integration_creates_and_checks_session_file() {
Some(p) => p,
None => panic!("No session file containing the marker was found"),
};
-
// Basic sanity checks on location and metadata.
let rel = match path.strip_prefix(&sessions_dir) {
Ok(r) => r,
@@ -312,13 +301,8 @@ async fn integration_creates_and_checks_session_file() {
// to sidestep the issue.
let resume_path_str = path.to_string_lossy().replace('\\', "/");
let resume_override = format!("experimental_resume=\"{resume_path_str}\"");
- let mut cmd2 = AssertCommand::new("cargo");
- cmd2.arg("run")
- .arg("-p")
- .arg("codex-cli")
- .arg("--quiet")
- .arg("--")
- .arg("exec")
+ let mut cmd2 = Command::cargo_bin("codex").unwrap();
+ cmd2.arg("exec")
.arg("--skip-git-repo-check")
.arg("-c")
.arg(&resume_override)
diff --git a/codex-rs/core/tests/live_cli.rs b/codex-rs/cli/tests/live_cli.rs
similarity index 98%
rename from codex-rs/core/tests/live_cli.rs
rename to codex-rs/cli/tests/live_cli.rs
index d79e242c4d..05f72fb443 100644
--- a/codex-rs/core/tests/live_cli.rs
+++ b/codex-rs/cli/tests/live_cli.rs
@@ -30,7 +30,7 @@ fn run_live(prompt: &str) -> (assert_cmd::assert::Assert, TempDir) {
// implementation). Instead we configure the std `Command` ourselves, then later hand the
// resulting `Output` to `assert_cmd` for the familiar assertions.
- let mut cmd = Command::cargo_bin("codex-rs").unwrap();
+ let mut cmd = Command::cargo_bin("codex-cli").unwrap();
cmd.current_dir(dir.path());
cmd.env("OPENAI_API_KEY", require_api_key());
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index 4cc888b62e..f35348b779 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -594,7 +594,7 @@ async fn submission_loop(
let mut restored_items: Option<Vec<ResponseItem>> = None;
let rollout_recorder: Option<RolloutRecorder> =
if let Some(path) = resume_path.as_ref() {
- match RolloutRecorder::resume(path).await {
+ match RolloutRecorder::resume(path, cwd.clone()).await {
Ok((rec, saved)) => {
session_id = saved.session_id;
if !saved.items.is_empty() {
diff --git a/codex-rs/core/src/rollout.rs b/codex-rs/core/src/rollout.rs
index 7f0f61b9eb..3e6de34d96 100644
--- a/codex-rs/core/src/rollout.rs
+++ b/codex-rs/core/src/rollout.rs
@@ -20,6 +20,8 @@ use tracing::warn;
use uuid::Uuid;
use crate::config::Config;
+use crate::git_info::GitInfo;
+use crate::git_info::collect_git_info;
use crate::models::ResponseItem;
const SESSIONS_SUBDIR: &str = "sessions";
@@ -31,6 +33,14 @@ pub struct SessionMeta {
pub instructions: Option<String>,
}
+#[derive(Serialize)]
+struct SessionMetaWithGit {
+ #[serde(flatten)]
+ meta: SessionMeta,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ git: Option<GitInfo>,
+}
+
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct SessionStateSnapshot {}
@@ -86,15 +96,12 @@ impl RolloutRecorder {
.format(timestamp_format)
.map_err(|e| IoError::other(format!("failed to format timestamp: {e}")))?;
- let meta = SessionMeta {
- timestamp,
- id: session_id,
- instructions,
- };
+ // Clone the cwd for the spawned task to collect git info asynchronously
+ let cwd = config.cwd.clone();
// A reasonably-sized bounded channel. If the buffer fills up the send
// future will yield, which is fine – we only need to ensure we do not
- // perform *blocking* I/O on the caller’s thread.
+ // perform *blocking* I/O on the caller's thread.
let (tx, rx) = mpsc::channel::<RolloutCmd>(256);
// Spawn a Tokio task that owns the file handle and performs async
@@ -103,7 +110,12 @@ impl RolloutRecorder {
tokio::task::spawn(rollout_writer(
tokio::fs::File::from_std(file),
rx,
- Some(meta),
+ Some(SessionMeta {
+ timestamp,
+ id: session_id,
+ instructions,
+ }),
+ cwd,
));
Ok(Self { tx })
@@ -143,7 +155,10 @@ impl RolloutRecorder {
.map_err(|e| IoError::other(format!("failed to queue rollout state: {e}")))
}
- pub async fn resume(path: &Path) -> std::io::Result<(Self, SavedSession)> {
+ pub async fn resume(
+ path: &Path,
+ cwd: std::path::PathBuf,
+ ) -> std::io::Result<(Self, SavedSession)> {
info!("Resuming rollout from {path:?}");
let text = tokio::fs::read_to_string(path).await?;
let mut lines = text.lines();
@@ -201,7 +216,12 @@ impl RolloutRecorder {
.open(path)?;
let (tx, rx) = mpsc::channel::<RolloutCmd>(256);
- tokio::task::spawn(rollout_writer(tokio::fs::File::from_std(file), rx, None));
+ tokio::task::spawn(rollout_writer(
+ tokio::fs::File::from_std(file),
+ rx,
+ None,
+ cwd,
+ ));
info!("Resumed rollout successfully from {path:?}");
Ok((Self { tx }, saved))
}
@@ -270,15 +290,26 @@ fn create_log_file(config: &Config, session_id: Uuid) -> std::io::Result<LogFile
async fn rollout_writer(
mut file: tokio::fs::File,
mut rx: mpsc::Receiver<RolloutCmd>,
- meta: Option<SessionMeta>,
+ mut meta: Option<SessionMeta>,
+ cwd: std::path::PathBuf,
) {
- if let Some(meta) = meta {
- if let Ok(json) = serde_json::to_string(&meta) {
+ // If we have a meta, collect git info asynchronously and write meta first
+ if let Some(session_meta) = meta.take() {
+ let git_info = collect_git_info(&cwd).await;
+ let session_meta_with_git = SessionMetaWithGit {
+ meta: session_meta,
+ git: git_info,
+ };
+
+ // Write the SessionMeta as the first item in the file
+ if let Ok(json) = serde_json::to_string(&session_meta_with_git) {
let _ = file.write_all(json.as_bytes()).await;
let _ = file.write_all(b"\n").await;
let _ = file.flush().await;
}
}
+
+ // Process rollout commands
while let Some(cmd) = rx.recv().await {
match cmd {
RolloutCmd::AddItems(items) => {
Review Comments
codex-rs/cli/Cargo.toml
- Created: 2025-07-23 23:48:31 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226967201
@@ -36,3 +36,11 @@ tokio = { version = "1", features = [
] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
+predicates = "3.1.3"
Is this needed? If so, alpha sort and make sure it shouldn't go in
dev-dependencies?
codex-rs/core/src/rollout.rs
- Created: 2025-07-23 19:24:34 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226450285
@@ -285,15 +340,66 @@ async fn rollout_writer(
#[serde(flatten)]
state: &'a SessionStateSnapshot,
}
- if let Ok(json) = serde_json::to_string(&StateLine {
+ let line = StateLine {
record_type: "state",
state: &state,
- }) {
- let _ = file.write_all(json.as_bytes()).await;
- let _ = file.write_all(b"\n").await;
- let _ = file.flush().await;
+ };
+ if let Err(e) = write_json_line(&mut file, &line).await {
+ warn!("Failed to write state: {e}");
+ }
+ }
+ RolloutCmd::Sync { exit, ack } => {
+ if let Err(e) = file.flush().await {
+ warn!("Failed to flush on sync: {e}");
+ }
+ let _ = ack.send(());
+ if exit {
+ break;
}
}
}
}
}
+
+pub async fn prepare_rollout_recorder(
+ config: &Config,
+ mut session_id: Uuid,
+ instructions: Option<String>,
+ resume_path: Option<&Path>,
+) -> (
agreed
- Created: 2025-07-23 19:25:59 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226453238
@@ -252,13 +250,30 @@ async fn rollout_writer(
mut rx: mpsc::Receiver<RolloutCmd>,
meta: Option<SessionMeta>,
) {
+ // Helper to serialize and write a single line (JSON + newline)
+ async fn write_json_line<T: serde::Serialize>(
+ file: &mut tokio::fs::File,
+ value: &T,
+ ) -> std::io::Result<()> {
+ let mut buf = serde_json::to_vec(value)?;
+ buf.push(b'\n');
+ file.write_all(&buf).await?;
+ // TODO: decide if we want to flush here or TaskComplete is enough.
I think we want to flush here?
- Created: 2025-07-23 23:49:36 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226968364
@@ -235,12 +240,11 @@ struct LogFileInfo {
fn create_log_file(config: &Config, session_id: Uuid) -> std::io::Result<LogFileInfo> {
// Resolve ~/.codex/sessions/YYYY/MM/DD and create it if missing.
- let timestamp = OffsetDateTime::now_local()
- .map_err(|e| IoError::other(format!("failed to get local time: {e}")))?;
+ let timestamp = OffsetDateTime::now_utc();
I think we want the time in the user's local timezone?
- Created: 2025-07-23 23:50:14 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226968966
@@ -272,13 +276,29 @@ async fn rollout_writer(
mut rx: mpsc::Receiver<RolloutCmd>,
meta: Option<SessionMeta>,
) {
+ // Helper to serialize and write a single line (JSON + newline)
+ async fn write_json_line<T: serde::Serialize>(
+ file: &mut tokio::fs::File,
+ value: &T,
+ ) -> std::io::Result<()> {
+ let mut buf = serde_json::to_vec(value)?;
+ buf.push(b'\n');
+ file.write_all(&buf).await?;
+ file.flush().await?;
+ Ok(())
+ }
+
+ // Write meta line if present
if let Some(meta) = meta {
- if let Ok(json) = serde_json::to_string(&meta) {
- let _ = file.write_all(json.as_bytes()).await;
- let _ = file.write_all(b"\n").await;
- let _ = file.flush().await;
+ if let Err(e) = write_json_line(&mut file, &meta).await {
+ warn!("Failed to write session meta: {e}");
+ }
+ if let Err(e) = file.flush().await {
`write_json_line` already does the `flush()`, no?
- Created: 2025-07-23 23:51:09 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2226969822
@@ -321,3 +339,43 @@ async fn rollout_writer(
}
}
}
+
+pub async fn prepare_rollout_recorder(
+ config: &Config,
+ session_id: Uuid,
+ instructions: Option<String>,
+ resume_path: Option<&Path>,
+) -> RolloutSetup {
+ // Try to resume
+ let (mut restored_items, mut recorder_opt) = (None, None);
Can you please make these two separate declarations? I don't feel like this is idiomatic Rust.
- Created: 2025-07-24 00:21:41 UTC | Link: https://github.com/openai/codex/pull/1630#discussion_r2227001129
@@ -235,12 +240,11 @@ struct LogFileInfo {
fn create_log_file(config: &Config, session_id: Uuid) -> std::io::Result<LogFileInfo> {
// Resolve ~/.codex/sessions/YYYY/MM/DD and create it if missing.
- let timestamp = OffsetDateTime::now_local()
- .map_err(|e| IoError::other(format!("failed to get local time: {e}")))?;
+ let timestamp = OffsetDateTime::now_utc();
OK, as discussed on Slack, we'll stick with UTC, ultimately giving the user tools that will convert to local timezone to make this easier to reason about.