Compare commits


3 Commits

Author      SHA1        Message                                           Date
pap-openai  c6b2c5c772  Merge branch 'main' into codex-concurrent-simple  2025-08-03 00:42:39 +01:00
pap         aed712286b  adding best-of-n                                  2025-08-03 00:38:25 +01:00
pap         6fcedb46a9  adding automerge option                           2025-08-02 23:41:04 +01:00
38 changed files with 1302 additions and 3156 deletions

View File

@@ -11,7 +11,7 @@
"@types/bun": "^1.2.19",
"@types/node": "^24.1.0",
"prettier": "^3.6.2",
"typescript": "^5.9.2",
"typescript": "^5.8.3",
},
},
},
@@ -68,7 +68,7 @@
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],

View File

@@ -16,6 +16,6 @@
"@types/bun": "^1.2.19",
"@types/node": "^24.1.0",
"prettier": "^3.6.2",
"typescript": "^5.9.2"
"typescript": "^5.8.3"
}
}

View File

@@ -147,8 +147,4 @@ const READ_ONLY_SEATBELT_POLICY = `
(sysctl-name "kern.version")
(sysctl-name "sysctl.proc_cputype")
(sysctl-name-prefix "hw.perflevel")
)
; Added on top of Chrome profile
; Needed for python multiprocessing on MacOS for the SemLock
(allow ipc-posix-sem)`.trim();
)`.trim();

codex-rs/Cargo.lock (generated, 84 lines changed)
View File

@@ -661,7 +661,7 @@ dependencies = [
"clap",
"codex-core",
"serde",
"toml 0.9.4",
"toml 0.9.2",
]
[[package]]
@@ -699,7 +699,6 @@ dependencies = [
"serde_json",
"sha1",
"shlex",
"similar",
"strum_macros 0.27.2",
"tempfile",
"thiserror 2.0.12",
@@ -707,7 +706,7 @@ dependencies = [
"tokio",
"tokio-test",
"tokio-util",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"tree-sitter",
"tree-sitter-bash",
@@ -831,7 +830,7 @@ dependencies = [
"tempfile",
"tokio",
"tokio-test",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"tracing-subscriber",
"uuid",
@@ -860,7 +859,6 @@ dependencies = [
"mcp-types",
"path-clean",
"pretty_assertions",
"rand 0.8.5",
"ratatui",
"ratatui-image",
"regex-lite",
@@ -870,14 +868,13 @@ dependencies = [
"shlex",
"strum 0.27.2",
"strum_macros 0.27.2",
"supports-color",
"textwrap 0.16.2",
"tokio",
"tracing",
"tracing-appender",
"tracing-subscriber",
"tui-input",
"tui-markdown",
"tui-textarea",
"unicode-segmentation",
"unicode-width 0.1.14",
"uuid",
@@ -2017,7 +2014,7 @@ dependencies = [
"libc",
"percent-encoding",
"pin-project-lite",
"socket2",
"socket2 0.6.0",
"system-configuration",
"tokio",
"tower-service",
@@ -2339,12 +2336,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "is_ci"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@@ -3997,9 +3988,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.142"
version = "1.0.141"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7"
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
dependencies = [
"indexmap 2.10.0",
"itoa",
@@ -4183,10 +4174,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "smawk"
version = "0.3.2"
name = "socket2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "socket2"
@@ -4240,7 +4235,7 @@ dependencies = [
"starlark_syntax",
"static_assertions",
"strsim 0.10.0",
"textwrap 0.11.0",
"textwrap",
"thiserror 1.0.69",
]
@@ -4376,15 +4371,6 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "supports-color"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6"
dependencies = [
"is_ci",
]
[[package]]
name = "syn"
version = "1.0.109"
@@ -4538,17 +4524,6 @@ dependencies = [
"unicode-width 0.1.14",
]
[[package]]
name = "textwrap"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
dependencies = [
"smawk",
"unicode-linebreak",
"unicode-width 0.2.0",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -4663,9 +4638,9 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.47.1"
version = "1.46.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
dependencies = [
"backtrace",
"bytes",
@@ -4676,9 +4651,9 @@ dependencies = [
"pin-project-lite",
"signal-hook-registry",
"slab",
"socket2",
"socket2 0.5.10",
"tokio-macros",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4763,9 +4738,9 @@ dependencies = [
[[package]]
name = "toml"
version = "0.9.4"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ae868b5a0f67631c14589f7e250c1ea2c574ee5ba21c6c8dd4b1485705a5a1"
checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
dependencies = [
"indexmap 2.10.0",
"serde",
@@ -5013,6 +4988,17 @@ dependencies = [
"tracing",
]
[[package]]
name = "tui-textarea"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a5318dd619ed73c52a9417ad19046724effc1287fb75cdcc4eca1d6ac1acbae"
dependencies = [
"crossterm",
"ratatui",
"unicode-width 0.2.0",
]
[[package]]
name = "typenum"
version = "1.18.0"
@@ -5031,12 +5017,6 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"

View File

@@ -0,0 +1,584 @@
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::process::Stdio;
use std::sync::OnceLock;
use tokio::process::Command as TokioCommand;
use tokio::sync::Semaphore;
use anyhow::Context;
use codex_common::CliConfigOverrides;
use codex_exec::Cli as ExecCli;
// Serialize git worktree add operations across tasks to avoid repository lock contention.
static GIT_WORKTREE_ADD_SEMAPHORE: OnceLock<Semaphore> = OnceLock::new();
#[derive(Debug, Clone)]
pub struct ConcurrentRunResult {
pub branch: String,
pub worktree_dir: PathBuf,
pub log_file: Option<PathBuf>,
pub exec_exit_code: Option<i32>,
pub _had_changes: bool,
pub _applied_changes: Option<usize>,
}
fn compute_codex_home() -> PathBuf {
if let Ok(val) = std::env::var("CODEX_HOME") {
if !val.is_empty() {
return PathBuf::from(val);
}
}
// Fallback to default (~/.codex) without requiring it to already exist.
codex_core::config::find_codex_home().unwrap_or_else(|_| {
let mut p = std::env::var_os("HOME")
.map(PathBuf::from)
.unwrap_or_default();
if p.as_os_str().is_empty() {
return PathBuf::from(".codex");
}
p.push(".codex");
p
})
}
fn slugify_prompt(prompt: &str, max_len: usize) -> String {
let mut out = String::with_capacity(prompt.len());
let mut prev_hyphen = false;
for ch in prompt.chars() {
let c = ch.to_ascii_lowercase();
let keep = matches!(c, 'a'..='z' | '0'..='9');
if keep {
out.push(c);
prev_hyphen = false;
} else if c.is_ascii_whitespace() || matches!(c, '-' | '_' | '+') {
if !prev_hyphen && !out.is_empty() {
out.push('-');
prev_hyphen = true;
}
} else {
// skip other punctuation/symbols
}
if out.len() >= max_len {
break;
}
}
// Trim trailing hyphens
while out.ends_with('-') {
out.pop();
}
if out.is_empty() {
"task".to_string()
} else {
out
}
}
fn git_output(repo_dir: &Path, args: &[&str]) -> anyhow::Result<String> {
let out = Command::new("git")
.args(args)
.current_dir(repo_dir)
.output()
.with_context(|| format!("running git {args:?}"))?;
if !out.status.success() {
anyhow::bail!(
"git {:?} failed with status {}: {}",
args,
out.status,
String::from_utf8_lossy(&out.stderr)
);
}
Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
}
fn git_capture_stdout(repo_dir: &Path, args: &[&str]) -> anyhow::Result<Vec<u8>> {
let out = Command::new("git")
.args(args)
.current_dir(repo_dir)
.output()
.with_context(|| format!("running git {args:?}"))?;
if !out.status.success() {
anyhow::bail!(
"git {:?} failed with status {}: {}",
args,
out.status,
String::from_utf8_lossy(&out.stderr)
);
}
Ok(out.stdout)
}
fn count_files_in_patch(diff: &[u8]) -> usize {
// Count occurrences of lines starting with "diff --git ", which mark file boundaries.
// This works for text and binary patches produced by `git diff --binary`.
let mut count = 0usize;
for line in diff.split(|&b| b == b'\n') {
if line.starts_with(b"diff --git ") {
count += 1;
}
}
count
}
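// A minimal illustrative sketch of the expected behaviour of the two helpers
// above, assuming they were exposed to an in-file test module:
#[cfg(test)]
mod helper_behaviour_sketch {
    use super::*;

    #[test]
    fn slugify_prompt_examples() {
        // Lowercases, keeps [a-z0-9], collapses whitespace/-/_/+ into single
        // hyphens, drops other punctuation, and trims trailing hyphens.
        assert_eq!(slugify_prompt("Add dark-mode toggle!", 64), "add-dark-mode-toggle");
        // Falls back to "task" when nothing usable remains.
        assert_eq!(slugify_prompt("!!!", 64), "task");
    }

    #[test]
    fn count_files_in_patch_counts_file_headers() {
        // One "diff --git " header marks each file boundary.
        let patch = b"diff --git a/a.txt b/a.txt\n+hi\ndiff --git a/b.txt b/b.txt\n+ho\n";
        assert_eq!(count_files_in_patch(patch), 2);
    }
}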
pub async fn run_concurrent_flow(
prompt: String,
cli_config_overrides: CliConfigOverrides,
codex_linux_sandbox_exe: Option<PathBuf>,
automerge: bool,
quiet: bool,
) -> anyhow::Result<ConcurrentRunResult> {
let cwd = std::env::current_dir()?;
// Ensure we are in a git repo and find repo root.
let repo_root_str = git_output(&cwd, &["rev-parse", "--show-toplevel"]);
let repo_root = match repo_root_str {
Ok(p) => PathBuf::from(p),
Err(err) => {
eprintln!("Not inside a Git repo: {err}");
std::process::exit(1);
}
};
// Determine current branch and original head commit.
let current_branch = git_output(&repo_root, &["rev-parse", "--abbrev-ref", "HEAD"])
.unwrap_or_else(|_| "HEAD".to_string());
let original_head =
git_output(&repo_root, &["rev-parse", "HEAD"]).context("finding original HEAD commit")?;
// Build worktree target path under $CODEX_HOME/worktrees/<repo>/<branch>
let mut codex_home = compute_codex_home();
codex_home.push("worktrees");
// repo name = last component of repo_root
let repo_name = repo_root
.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_else(|| "repo".to_string());
codex_home.push(repo_name.clone());
// Prepare branch name: codex/<slug>, retrying with a numeric suffix to avoid races.
let slug = slugify_prompt(&prompt, 64);
let mut branch: String;
let worktree_dir: PathBuf;
let mut attempt: u32 = 1;
loop {
branch = if attempt == 1 {
format!("codex/{slug}")
} else {
format!("codex/{slug}-{attempt}")
};
let mut candidate_dir = codex_home.clone();
candidate_dir.push(&branch);
// Create parent directories for candidate path.
if let Some(parent) = candidate_dir.parent() {
std::fs::create_dir_all(parent)?;
}
if !quiet {
println!(
"Creating worktree at {} with branch {}",
candidate_dir.display(),
branch
);
}
// Try to add worktree with new branch from current HEAD
let worktree_path_str = candidate_dir.to_string_lossy().to_string();
let add_status = Command::new("git")
.arg("worktree")
.arg("add")
.arg("-b")
.arg(&branch)
.arg(&worktree_path_str)
.current_dir(&repo_root)
.status()?;
if add_status.success() {
worktree_dir = candidate_dir;
break;
}
attempt += 1;
if attempt > 50 {
anyhow::bail!("Failed to create git worktree after multiple attempts");
}
// Retry with a new branch name.
}
// Either run codex exec inline (verbose) or as a subprocess with logs redirected.
let mut log_file: Option<PathBuf> = None;
let mut exec_exit_code: Option<i32> = None;
if quiet {
let exe = std::env::current_exe()
.map_err(|e| anyhow::anyhow!("failed to locate current executable: {e}"))?;
// Prepare logs directory: $CODEX_HOME/logs/<repo_name>
let mut logs_dir = compute_codex_home();
logs_dir.push("logs");
logs_dir.push(&repo_name);
std::fs::create_dir_all(&logs_dir)?;
let sanitized_branch = branch.replace('/', "_");
let log_path = logs_dir.join(format!("{sanitized_branch}.log"));
let log_f = File::create(&log_path)?;
log_file = Some(log_path.clone());
let mut cmd = Command::new(exe);
cmd.arg("exec")
.arg("--full-auto")
.arg("--cd")
.arg(worktree_dir.as_os_str())
.stdout(Stdio::from(log_f.try_clone()?))
.stderr(Stdio::from(log_f));
// Forward any root-level config overrides.
for ov in cli_config_overrides.raw_overrides.iter() {
cmd.arg("-c").arg(ov);
}
// Append the prompt last (positional argument).
cmd.arg(&prompt);
let status = cmd.status()?;
exec_exit_code = status.code();
if !status.success() && !quiet {
eprintln!("codex exec failed with exit code {exec_exit_code:?}");
}
} else {
// Build an ExecCli to run in full-auto mode at the worktree directory.
let mut exec_cli = ExecCli {
images: vec![],
model: None,
sandbox_mode: None,
config_profile: None,
full_auto: true,
dangerously_bypass_approvals_and_sandbox: false,
cwd: Some(worktree_dir.clone()),
skip_git_repo_check: false,
config_overrides: CliConfigOverrides::default(),
color: Default::default(),
json: false,
last_message_file: None,
prompt: Some(prompt.clone()),
};
// Prepend any root-level config overrides.
super::prepend_config_flags(&mut exec_cli.config_overrides, cli_config_overrides);
// Run codex exec
if let Err(e) = codex_exec::run_main(exec_cli, codex_linux_sandbox_exe).await {
eprintln!("codex exec failed: {e}");
// Do not attempt to bring changes on failure; leave worktree for inspection.
return Err(e);
}
}
// Auto-commit changes in the worktree if any
let status_out = Command::new("git")
.args(["status", "--porcelain"])
.current_dir(&worktree_dir)
.output()?;
let status_text = String::from_utf8_lossy(&status_out.stdout);
let had_changes = !status_text.trim().is_empty();
if had_changes {
// Stage and commit
if !Command::new("git")
.args(["add", "-A"])
.current_dir(&worktree_dir)
.status()?
.success()
{
anyhow::bail!("git add failed in worktree");
}
let commit_message = format!("Codex concurrent: {prompt}");
if !Command::new("git")
.args(["commit", "-m", &commit_message])
.current_dir(&worktree_dir)
.status()?
.success()
{
if !quiet {
eprintln!("No commit created (maybe no changes)");
}
} else if !quiet {
println!("Committed changes in worktree branch {branch}");
}
} else if !quiet {
println!("No changes detected in worktree; skipping commit.");
}
if !automerge {
if !quiet {
println!(
"Auto-merge disabled; leaving changes in worktree {} on branch {}.",
worktree_dir.display(),
branch
);
println!(
"You can review and manually merge from that branch into {current_branch} when ready."
);
println!("Summary: Auto-merge disabled.");
}
return Ok(ConcurrentRunResult {
branch,
worktree_dir,
log_file,
exec_exit_code,
_had_changes: had_changes,
_applied_changes: None,
});
}
// Bring the changes into the main working tree as UNSTAGED modifications.
// We generate a patch from the original HEAD to the worktree branch tip, then apply with 3-way merge.
if !quiet {
println!("Applying changes from {branch} onto {current_branch} as unstaged modifications");
}
let range = format!("{original_head}..{branch}");
let mut diff_bytes =
git_capture_stdout(&repo_root, &["diff", "--binary", "--full-index", &range])?;
// Fallback: if there is nothing in the commit range (e.g., commit didn't happen),
// try to capture uncommitted changes from the worktree working tree.
if diff_bytes.is_empty() && had_changes {
// If we saw changes earlier but no commit diff was produced, fall back to working tree diff.
// This captures unstaged changes relative to HEAD in the worktree.
diff_bytes =
git_capture_stdout(&worktree_dir, &["diff", "--binary", "--full-index", "HEAD"])?;
}
if diff_bytes.is_empty() {
if !quiet {
println!("Summary: 0 changes detected.");
}
return Ok(ConcurrentRunResult {
branch,
worktree_dir,
log_file,
exec_exit_code,
_had_changes: had_changes,
_applied_changes: Some(0),
});
}
let changed_files = count_files_in_patch(&diff_bytes);
let mut child = Command::new("git")
.arg("apply")
.arg("-3")
.stdin(Stdio::piped())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.current_dir(&repo_root)
.spawn()
.context("spawning git apply")?;
if let Some(stdin) = child.stdin.as_mut() {
stdin
.write_all(&diff_bytes)
.context("writing patch to git apply stdin")?;
}
let status = child.wait().context("waiting for git apply")?;
if !status.success() {
if !quiet {
eprintln!(
"Applying changes failed. You can manually inspect {} and apply diffs.",
worktree_dir.display()
);
println!("Summary: Apply failed.");
}
} else {
if !quiet {
println!("Changes applied to working tree (unstaged).");
println!("Summary: Applied {changed_files} files changed.");
}
// Cleanup: remove the worktree and delete the temporary branch.
if !quiet {
println!(
"Cleaning up worktree {} and branch {}",
worktree_dir.display(),
branch
);
}
let worktree_path_str = worktree_dir.to_string_lossy().to_string();
let remove_status = Command::new("git")
.args(["worktree", "remove", &worktree_path_str])
.current_dir(&repo_root)
.status();
match remove_status {
Ok(s) if s.success() => {
// removed
}
_ => {
if !quiet {
eprintln!("git worktree remove failed; retrying with --force");
}
let _ = Command::new("git")
.args(["worktree", "remove", "--force", &worktree_path_str])
.current_dir(&repo_root)
.status();
}
}
let del_status = Command::new("git")
.args(["branch", "-D", &branch])
.current_dir(&repo_root)
.status();
if let Ok(s) = del_status {
if !s.success() && !quiet {
eprintln!("Failed to delete branch {branch}");
}
} else if !quiet {
eprintln!("Error running git branch -D {branch}");
}
}
Ok(ConcurrentRunResult {
branch,
worktree_dir,
log_file,
exec_exit_code,
_had_changes: had_changes,
_applied_changes: Some(changed_files),
})
}
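// In plain git terms, the automerge path above is roughly equivalent to:
//
//   git worktree add -b codex/<slug> $CODEX_HOME/worktrees/<repo>/codex/<slug>
//   codex exec --full-auto --cd <worktree> "<prompt>"
//   git -C <worktree> add -A && git -C <worktree> commit -m "Codex concurrent: <prompt>"
//   git diff --binary --full-index <original HEAD>..codex/<slug> | git apply -3
//   git worktree remove <worktree> && git branch -D codex/<slug>
//
// falling back to `git -C <worktree> diff --binary --full-index HEAD` for the
// patch when the commit range produces no output.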
/// A Send-friendly variant used for best-of-n: run quietly (logs redirected) and do not auto-merge.
/// This intentionally avoids referencing non-Send types from codex-exec.
pub async fn run_concurrent_flow_quiet_no_automerge(
prompt: String,
cli_config_overrides: CliConfigOverrides,
_codex_linux_sandbox_exe: Option<PathBuf>,
) -> anyhow::Result<ConcurrentRunResult> {
let cwd = std::env::current_dir()?;
let repo_root_str = git_output(&cwd, &["rev-parse", "--show-toplevel"]);
let repo_root = match repo_root_str {
Ok(p) => PathBuf::from(p),
Err(err) => {
eprintln!("Not inside a Git repo: {err}");
std::process::exit(1);
}
};
// Capture basic repo info (not used further in quiet/no-automerge flow).
let mut codex_home = compute_codex_home();
codex_home.push("worktrees");
let repo_name = repo_root
.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_else(|| "repo".to_string());
codex_home.push(repo_name.clone());
let slug = slugify_prompt(&prompt, 64);
let mut branch: String;
let worktree_dir: PathBuf;
// Serialize worktree creation to avoid git repo lock contention across tasks.
{
let semaphore = GIT_WORKTREE_ADD_SEMAPHORE.get_or_init(|| Semaphore::new(1));
let _permit = semaphore.acquire().await.expect("semaphore closed");
let mut attempt: u32 = 1;
loop {
branch = if attempt == 1 {
format!("codex/{slug}")
} else {
format!("codex/{slug}-{attempt}")
};
let mut candidate_dir = codex_home.clone();
candidate_dir.push(&branch);
if let Some(parent) = candidate_dir.parent() {
std::fs::create_dir_all(parent)?;
}
let worktree_path_str = candidate_dir.to_string_lossy().to_string();
let add_status = TokioCommand::new("git")
.arg("worktree")
.arg("add")
.arg("-b")
.arg(&branch)
.arg(&worktree_path_str)
.current_dir(&repo_root)
.status()
.await?;
if add_status.success() {
worktree_dir = candidate_dir;
break;
}
attempt += 1;
if attempt > 50 {
anyhow::bail!("Failed to create git worktree after multiple attempts");
}
}
}
// Run the CLI in quiet mode (logs redirected).
let exe = std::env::current_exe()
.map_err(|e| anyhow::anyhow!("failed to locate current executable: {e}"))?;
let mut logs_dir = compute_codex_home();
logs_dir.push("logs");
logs_dir.push(&repo_name);
std::fs::create_dir_all(&logs_dir)?;
let sanitized_branch = branch.replace('/', "_");
let log_path = logs_dir.join(format!("{sanitized_branch}.log"));
let log_f = File::create(&log_path)?;
let log_file = Some(log_path.clone());
let mut cmd = TokioCommand::new(exe);
cmd.arg("exec")
.arg("--full-auto")
.arg("--cd")
.arg(worktree_dir.as_os_str())
.stdout(Stdio::from(log_f.try_clone()?))
.stderr(Stdio::from(log_f));
for ov in cli_config_overrides.raw_overrides.iter() {
cmd.arg("-c").arg(ov);
}
cmd.arg(&prompt);
let status = cmd.status().await?;
let exec_exit_code = status.code();
// Auto-commit changes in the worktree if any
let status_out = TokioCommand::new("git")
.args(["status", "--porcelain"])
.current_dir(&worktree_dir)
.output()
.await?;
let status_text = String::from_utf8_lossy(&status_out.stdout);
let had_changes = !status_text.trim().is_empty();
if had_changes {
if !TokioCommand::new("git")
.args(["add", "-A"])
.current_dir(&worktree_dir)
.status()
.await?
.success()
{
anyhow::bail!("git add failed in worktree");
}
let commit_message = format!("Codex concurrent: {prompt}");
let _ = TokioCommand::new("git")
.args(["commit", "-m", &commit_message])
.current_dir(&worktree_dir)
.status()
.await?;
}
Ok(ConcurrentRunResult {
branch,
worktree_dir,
log_file,
exec_exit_code,
_had_changes: had_changes,
_applied_changes: None,
})
}

View File

@@ -17,6 +17,7 @@ use codex_tui::Cli as TuiCli;
use std::path::PathBuf;
use crate::proto::ProtoCli;
mod concurrent;
/// Codex CLI
///
@@ -32,6 +33,22 @@ struct MultitoolCli {
#[clap(flatten)]
pub config_overrides: CliConfigOverrides,
/// Experimental: Launch a concurrent task in a separate Git worktree using the given prompt.
/// Creates a worktree under $CODEX_HOME/worktrees/<repo>/codex/<slug> and runs `codex exec` in full-auto mode.
#[arg(long = "concurrent", value_name = "PROMPT")]
pub concurrent: Option<String>,
/// When using --concurrent, also attempt to auto-merge the resulting changes
/// back into the current working tree as unstaged modifications via
/// a 3-way git apply. Disable with --automerge=false.
#[arg(long = "automerge", default_value_t = true, action = clap::ArgAction::Set)]
pub automerge: bool,
/// Run the same --concurrent prompt N times in separate worktrees and keep them all.
/// Intended to generate multiple candidate solutions without auto-merging.
#[arg(long = "best-of-n", value_name = "N", default_value_t = 1)]
pub best_of_n: usize,
#[clap(flatten)]
interactive: TuiCli,
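Taken together, the flags above suggest the following sketch of how they would surface through clap's derive parser (assuming `MultitoolCli` derives `clap::Parser`, which the `#[clap(flatten)]`/`#[arg]` attributes imply; the prompt string is just an illustrative value):

use clap::Parser;

#[test]
fn concurrent_flags_parse() {
    let cli = MultitoolCli::try_parse_from([
        "codex",
        "--concurrent", "add retry logic to the uploader",
        "--best-of-n", "3",
        "--automerge=false",
    ])
    .expect("flags should parse");
    assert_eq!(cli.concurrent.as_deref(), Some("add retry logic to the uploader"));
    assert_eq!(cli.best_of_n, 3);
    assert!(!cli.automerge);
}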
@@ -116,6 +133,87 @@ fn main() -> anyhow::Result<()> {
async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
let cli = MultitoolCli::parse();
// Handle --concurrent at the root level.
if let Some(prompt) = cli.concurrent.clone() {
if cli.subcommand.is_some() {
eprintln!("--concurrent cannot be used together with a subcommand");
std::process::exit(2);
}
let runs = if cli.best_of_n == 0 { 1 } else { cli.best_of_n };
if runs > 1 {
println!(
"Running best-of-n with {runs} runs; auto-merge will be disabled and worktrees kept."
);
// Launch all runs concurrently and collect results as they finish.
let mut join_set = tokio::task::JoinSet::new();
for _ in 0..runs {
let prompt = prompt.clone();
let overrides = cli.config_overrides.clone();
let sandbox = codex_linux_sandbox_exe.clone();
join_set.spawn(async move {
concurrent::run_concurrent_flow_quiet_no_automerge(prompt, overrides, sandbox)
.await
});
}
let mut results: Vec<concurrent::ConcurrentRunResult> = Vec::with_capacity(runs);
while let Some(join_result) = join_set.join_next().await {
match join_result {
Ok(Ok(res)) => {
println!(
"task finished for branch: {}\n, directory: {}",
res.branch,
res.worktree_dir.display()
);
results.push(res);
}
Ok(Err(err)) => {
eprintln!("concurrent task failed: {err}");
}
Err(join_err) => {
eprintln!("failed to join concurrent task: {join_err}");
}
}
}
println!("\nBest-of-n summary:");
for r in &results {
let status = match r.exec_exit_code {
Some(0) => "OK",
Some(_code) => "FAIL",
None => "OK",
};
let log = r
.log_file
.as_ref()
.map(|p| p.to_string_lossy().to_string())
.unwrap_or_else(|| "<no log>".to_string());
println!(
"[{status}] branch={} worktree={} log={}",
r.branch,
r.worktree_dir.display(),
log
);
}
} else {
concurrent::run_concurrent_flow(
prompt,
cli.config_overrides,
codex_linux_sandbox_exe,
cli.automerge,
false,
)
.await?;
}
return Ok(());
}
if cli.best_of_n > 1 {
eprintln!("--best-of-n requires --concurrent <PROMPT>");
std::process::exit(2);
}
match cli.subcommand {
None => {
let mut tui_cli = cli.interactive;

View File

@@ -34,7 +34,6 @@ serde_json = "1"
serde_bytes = "0.11"
sha1 = "0.10.6"
shlex = "1.3.0"
similar = "2.7.0"
strum_macros = "0.27.2"
thiserror = "2.0.12"
time = { version = "0.3", features = ["formatting", "local-offset", "macros"] }
@@ -46,7 +45,7 @@ tokio = { version = "1", features = [
"signal",
] }
tokio-util = "0.7.14"
toml = "0.9.4"
toml = "0.9.2"
tracing = { version = "0.1.41", features = ["log"] }
tree-sitter = "0.25.8"
tree-sitter-bash = "0.25.0"

View File

@@ -10,7 +10,7 @@ You MUST adhere to the following criteria when executing the task:
- Showing user code and tool call details is allowed.
- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
- Do not use \`ls -R\`, \`find\`, or \`grep\` - these are slow in large repos. Use \`rg\` and \`rg --files\`.
- Use \`apply_patch\` to edit files: {"command":["apply_patch","*** Begin Patch\\n*** Update File: path/to/file.py\\n@@ def example():\\n- pass\\n+ return 123\\n*** End Patch"]}
- Use \`apply_patch\` to edit files: {"cmd":["apply_patch","*** Begin Patch\\n*** Update File: path/to/file.py\\n@@ def example():\\n- pass\\n+ return 123\\n*** End Patch"]}
- If completing the user's task requires writing or modifying files:
- Your code and final answer should follow these _CODING GUIDELINES_:
- Fix the problem at the root cause rather than applying surface-level patches, when possible.
@@ -40,16 +40,16 @@ You MUST adhere to the following criteria when executing the task:
Your patch language is a stripped-down, file-oriented diff format designed to be easy to parse and safe to apply. You can think of it as a high-level envelope:
*** Begin Patch
**_ Begin Patch
[ one or more file sections ]
*** End Patch
_** End Patch
Within that envelope, you get a sequence of file operations.
You MUST include a header to specify the action you are taking.
Each operation starts with one of three headers:
*** Add File: <path> - create a new file. Every following line is a + line (the initial contents).
*** Delete File: <path> - remove an existing file. Nothing follows.
**_ Add File: <path> - create a new file. Every following line is a + line (the initial contents).
_** Delete File: <path> - remove an existing file. Nothing follows.
\*\*\* Update File: <path> - patch an existing file in place (optionally with a rename).
May be immediately followed by \*\*\* Move to: <new path> if you want to rename the file.
@@ -63,28 +63,28 @@ Within a hunk each line starts with:
At the end of a truncated hunk you can emit \*\*\* End of File.
Patch := Begin { FileOp } End
Begin := "*** Begin Patch" NEWLINE
End := "*** End Patch" NEWLINE
Begin := "**_ Begin Patch" NEWLINE
End := "_** End Patch" NEWLINE
FileOp := AddFile | DeleteFile | UpdateFile
AddFile := "*** Add File: " path NEWLINE { "+" line NEWLINE }
DeleteFile := "*** Delete File: " path NEWLINE
UpdateFile := "*** Update File: " path NEWLINE [ MoveTo ] { Hunk }
MoveTo := "*** Move to: " newPath NEWLINE
AddFile := "**_ Add File: " path NEWLINE { "+" line NEWLINE }
DeleteFile := "_** Delete File: " path NEWLINE
UpdateFile := "**_ Update File: " path NEWLINE [ MoveTo ] { Hunk }
MoveTo := "_** Move to: " newPath NEWLINE
Hunk := "@@" [ header ] NEWLINE { HunkLine } [ "*** End of File" NEWLINE ]
HunkLine := (" " | "-" | "+") text NEWLINE
A full patch can combine several operations:
*** Begin Patch
*** Add File: hello.txt
**_ Begin Patch
_** Add File: hello.txt
+Hello world
*** Update File: src/app.py
*** Move to: src/main.py
**_ Update File: src/app.py
_** Move to: src/main.py
@@ def greet():
-print("Hi")
+print("Hello, world!")
*** Delete File: obsolete.txt
*** End Patch
**_ Delete File: obsolete.txt
_** End Patch
It is important to remember:
@@ -101,7 +101,7 @@ Plan updates
A tool named `update_plan` is available. Use it to keep an up-to-date, step-by-step plan for the task so you can follow your progress. When making your plans, keep in mind that you are a deployed coding agent - `update_plan` calls should not involve doing anything that you aren't capable of doing. For example, `update_plan` calls should NEVER contain tasks to merge your own pull requests. Only stop to ask the user if you genuinely need their feedback on a change.
- At the start of any non-trivial task, call `update_plan` with an initial plan: a short list of 1-sentence steps with a `status` for each step (`pending`, `in_progress`, or `completed`). There should always be exactly one `in_progress` step until everything is done.
- At the start of the task, call `update_plan` with an initial plan: a short list of 1-sentence steps with a `status` for each step (`pending`, `in_progress`, or `completed`). There should always be exactly one `in_progress` step until everything is done.
- Whenever you finish a step, call `update_plan` again, marking the finished step as `completed` and the next step as `in_progress`.
- If your plan needs to change, call `update_plan` with the revised steps and include an `explanation` describing the change.
- When all steps are complete, make a final `update_plan` call with all steps marked `completed`.

View File

@@ -207,7 +207,6 @@ async fn process_chat_sse<S>(
}
let mut fn_call_state = FunctionCallState::default();
let mut assistant_text = String::new();
loop {
let sse = match timeout(idle_timeout, stream.next()).await {
@@ -255,42 +254,21 @@ async fn process_chat_sse<S>(
let choice_opt = chunk.get("choices").and_then(|c| c.get(0));
if let Some(choice) = choice_opt {
// Handle assistant content tokens as streaming deltas.
// Handle assistant content tokens.
if let Some(content) = choice
.get("delta")
.and_then(|d| d.get("content"))
.and_then(|c| c.as_str())
{
if !content.is_empty() {
assistant_text.push_str(content);
let _ = tx_event
.send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
.await;
}
}
let item = ResponseItem::Message {
role: "assistant".to_string(),
content: vec![ContentItem::OutputText {
text: content.to_string(),
}],
id: None,
};
// Forward any reasoning/thinking deltas if present.
if let Some(reasoning) = choice
.get("delta")
.and_then(|d| d.get("reasoning"))
.and_then(|c| c.as_str())
{
let _ = tx_event
.send(Ok(ResponseEvent::ReasoningSummaryDelta(
reasoning.to_string(),
)))
.await;
}
if let Some(reasoning_content) = choice
.get("delta")
.and_then(|d| d.get("reasoning_content"))
.and_then(|c| c.as_str())
{
let _ = tx_event
.send(Ok(ResponseEvent::ReasoningSummaryDelta(
reasoning_content.to_string(),
)))
.await;
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
// Handle streaming function / tool calls.
@@ -339,18 +317,7 @@ async fn process_chat_sse<S>(
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
"stop" => {
// Regular turn without tool-call. Emit the final assistant message
// as a single OutputItemDone so non-delta consumers see the result.
if !assistant_text.is_empty() {
let item = ResponseItem::Message {
role: "assistant".to_string(),
content: vec![ContentItem::OutputText {
text: std::mem::take(&mut assistant_text),
}],
id: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
// Regular turn without tool-call.
}
_ => {}
}
@@ -391,10 +358,7 @@ async fn process_chat_sse<S>(
pub(crate) struct AggregatedChatStream<S> {
inner: S,
cumulative: String,
cumulative_reasoning: String,
pending: std::collections::VecDeque<ResponseEvent>,
// When true, do not emit a cumulative assistant message at Completed.
streaming_mode: bool,
pending_completed: Option<ResponseEvent>,
}
impl<S> Stream for AggregatedChatStream<S>
@@ -406,8 +370,8 @@ where
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
// First, flush any buffered events from the previous call.
if let Some(ev) = this.pending.pop_front() {
// First, flush any buffered Completed event from the previous call.
if let Some(ev) = this.pending_completed.take() {
return Poll::Ready(Some(Ok(ev)));
}
@@ -424,21 +388,16 @@ where
let is_assistant_delta = matches!(&item, crate::models::ResponseItem::Message { role, .. } if role == "assistant");
if is_assistant_delta {
// Only use the final assistant message if we have not
// seen any deltas; otherwise, deltas already built the
// cumulative text and this would duplicate it.
if this.cumulative.is_empty() {
if let crate::models::ResponseItem::Message { content, .. } = &item {
if let Some(text) = content.iter().find_map(|c| match c {
crate::models::ContentItem::OutputText { text } => Some(text),
_ => None,
}) {
this.cumulative.push_str(text);
}
if let crate::models::ResponseItem::Message { content, .. } = &item {
if let Some(text) = content.iter().find_map(|c| match c {
crate::models::ContentItem::OutputText { text } => Some(text),
_ => None,
}) {
this.cumulative.push_str(text);
}
}
// Swallow assistant message here; emit on Completed.
// Swallow partial assistant chunk; keep polling.
continue;
}
@@ -449,48 +408,24 @@ where
response_id,
token_usage,
}))) => {
// Build any aggregated items in the correct order: Reasoning first, then Message.
let mut emitted_any = false;
if !this.cumulative_reasoning.is_empty() {
let aggregated_reasoning = crate::models::ResponseItem::Reasoning {
id: String::new(),
summary: vec![
crate::models::ReasoningItemReasoningSummary::SummaryText {
text: std::mem::take(&mut this.cumulative_reasoning),
},
],
content: None,
encrypted_content: None,
};
this.pending
.push_back(ResponseEvent::OutputItemDone(aggregated_reasoning));
emitted_any = true;
}
if !this.cumulative.is_empty() {
let aggregated_message = crate::models::ResponseItem::Message {
let aggregated_item = crate::models::ResponseItem::Message {
id: None,
role: "assistant".to_string(),
content: vec![crate::models::ContentItem::OutputText {
text: std::mem::take(&mut this.cumulative),
}],
};
this.pending
.push_back(ResponseEvent::OutputItemDone(aggregated_message));
emitted_any = true;
}
// Always emit Completed last when anything was aggregated.
if emitted_any {
this.pending.push_back(ResponseEvent::Completed {
response_id: response_id.clone(),
token_usage: token_usage.clone(),
// Buffer Completed so it is returned *after* the aggregated message.
this.pending_completed = Some(ResponseEvent::Completed {
response_id,
token_usage,
});
// Return the first pending event now.
if let Some(ev) = this.pending.pop_front() {
return Poll::Ready(Some(Ok(ev)));
}
return Poll::Ready(Some(Ok(ResponseEvent::OutputItemDone(
aggregated_item,
))));
}
// Nothing aggregated; forward Completed directly.
@@ -504,25 +439,11 @@ where
// will never appear in a Chat Completions stream.
continue;
}
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(delta)))) => {
// Always accumulate deltas so we can emit a final OutputItemDone at Completed.
this.cumulative.push_str(&delta);
if this.streaming_mode {
// In streaming mode, also forward the delta immediately.
return Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(delta))));
} else {
continue;
}
}
Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(delta)))) => {
// Always accumulate reasoning deltas so we can emit a final Reasoning item at Completed.
this.cumulative_reasoning.push_str(&delta);
if this.streaming_mode {
// In streaming mode, also forward the delta immediately.
return Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(delta))));
} else {
continue;
}
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(_))))
| Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(_)))) => {
// Deltas are ignored here since aggregation waits for the
// final OutputItemDone.
continue;
}
}
}
@@ -554,23 +475,9 @@ pub(crate) trait AggregateStreamExt: Stream<Item = Result<ResponseEvent>> + Size
AggregatedChatStream {
inner: self,
cumulative: String::new(),
cumulative_reasoning: String::new(),
pending: std::collections::VecDeque::new(),
streaming_mode: false,
pending_completed: None,
}
}
}
impl<T> AggregateStreamExt for T where T: Stream<Item = Result<ResponseEvent>> + Sized {}
impl<S> AggregatedChatStream<S> {
pub(crate) fn streaming_mode(inner: S) -> Self {
AggregatedChatStream {
inner,
cumulative: String::new(),
cumulative_reasoning: String::new(),
pending: std::collections::VecDeque::new(),
streaming_mode: true,
}
}
}
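Read together, the new aggregation path accumulates per-chunk assistant messages, ignores deltas, and surfaces a single message per turn with Completed buffered until after it. A rough sketch of the ordering, with illustrative text values:

// Inner Chat Completions stream (as emitted by process_chat_sse):
//   OutputItemDone(Message { text: "Hel" })
//   OutputItemDone(Message { text: "lo" })
//   Completed { response_id, token_usage }
//
// After `.aggregate()`, the caller observes only:
//   OutputItemDone(Message { text: "Hello" })   // cumulative assistant text
//   Completed { response_id, token_usage }      // buffered, returned last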

View File

@@ -93,11 +93,7 @@ impl ModelClient {
// Wrap it with the aggregation adapter so callers see *only*
// the final assistant message per turn (matching the
// behaviour of the Responses API).
let mut aggregated = if !self.config.hide_agent_reasoning {
crate::chat_completions::AggregatedChatStream::streaming_mode(response_stream)
} else {
response_stream.aggregate()
};
let mut aggregated = response_stream.aggregate();
// Bridge the aggregated stream back into a standard
// `ResponseStream` by forwarding events through a channel.
@@ -442,7 +438,7 @@ async fn process_sse<S>(
}
}
}
"response.reasoning_summary_text.delta" | "response.reasoning_text.delta" => {
"response.reasoning_summary_text.delta" => {
if let Some(delta) = event.delta {
let event = ResponseEvent::ReasoningSummaryDelta(delta);
if tx_event.send(Ok(event)).await.is_err() {

View File

@@ -56,7 +56,6 @@ use crate::mcp_tool_call::handle_mcp_tool_call;
use crate::models::ContentItem;
use crate::models::FunctionCallOutputPayload;
use crate::models::LocalShellAction;
use crate::models::ReasoningItemContent;
use crate::models::ReasoningItemReasoningSummary;
use crate::models::ResponseInputItem;
use crate::models::ResponseItem;
@@ -65,7 +64,6 @@ use crate::plan_tool::handle_update_plan;
use crate::project_doc::get_user_instructions;
use crate::protocol::AgentMessageDeltaEvent;
use crate::protocol::AgentMessageEvent;
use crate::protocol::AgentReasoningContentEvent;
use crate::protocol::AgentReasoningDeltaEvent;
use crate::protocol::AgentReasoningEvent;
use crate::protocol::ApplyPatchApprovalRequestEvent;
@@ -87,13 +85,11 @@ use crate::protocol::SandboxPolicy;
use crate::protocol::SessionConfiguredEvent;
use crate::protocol::Submission;
use crate::protocol::TaskCompleteEvent;
use crate::protocol::TurnDiffEvent;
use crate::rollout::RolloutRecorder;
use crate::safety::SafetyCheck;
use crate::safety::assess_command_safety;
use crate::safety::assess_safety_for_untrusted_command;
use crate::shell;
use crate::turn_diff_tracker::TurnDiffTracker;
use crate::user_notification::UserNotification;
use crate::util::backoff;
@@ -229,7 +225,6 @@ pub(crate) struct Session {
state: Mutex<State>,
codex_linux_sandbox_exe: Option<PathBuf>,
user_shell: shell::Shell,
hide_agent_reasoning: bool,
}
impl Session {
@@ -367,11 +362,7 @@ impl Session {
}
}
async fn on_exec_command_begin(
&self,
turn_diff_tracker: &mut TurnDiffTracker,
exec_command_context: ExecCommandContext,
) {
async fn notify_exec_command_begin(&self, exec_command_context: ExecCommandContext) {
let ExecCommandContext {
sub_id,
call_id,
@@ -383,15 +374,11 @@ impl Session {
Some(ApplyPatchCommandContext {
user_explicitly_approved_this_action,
changes,
}) => {
turn_diff_tracker.on_patch_begin(&changes);
EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id,
auto_approved: !user_explicitly_approved_this_action,
changes,
})
}
}) => EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id,
auto_approved: !user_explicitly_approved_this_action,
changes,
}),
None => EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
call_id,
command: command_for_display.clone(),
@@ -405,21 +392,15 @@ impl Session {
let _ = self.tx_event.send(event).await;
}
#[allow(clippy::too_many_arguments)]
async fn on_exec_command_end(
async fn notify_exec_command_end(
&self,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: &str,
call_id: &str,
output: &ExecToolCallOutput,
stdout: &str,
stderr: &str,
exit_code: i32,
is_apply_patch: bool,
) {
let ExecToolCallOutput {
stdout,
stderr,
duration,
exit_code,
} = output;
// Because stdout and stderr could each be up to 100 KiB, we send
// truncated versions.
const MAX_STREAM_OUTPUT: usize = 5 * 1024; // 5KiB
@@ -431,15 +412,14 @@ impl Session {
call_id: call_id.to_string(),
stdout,
stderr,
success: *exit_code == 0,
success: exit_code == 0,
})
} else {
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
call_id: call_id.to_string(),
stdout,
stderr,
duration: *duration,
exit_code: *exit_code,
exit_code,
})
};
@@ -448,20 +428,6 @@ impl Session {
msg,
};
let _ = self.tx_event.send(event).await;
// If this is an apply_patch, after we emit the end patch, emit a second event
// with the full turn diff if there is one.
if is_apply_patch {
let unified_diff = turn_diff_tracker.get_unified_diff();
if let Ok(Some(unified_diff)) = unified_diff {
let msg = EventMsg::TurnDiff(TurnDiffEvent { unified_diff });
let event = Event {
id: sub_id.into(),
msg,
};
let _ = self.tx_event.send(event).await;
}
}
}
/// Helper that emits a BackgroundEvent with the given message. This keeps
@@ -825,7 +791,6 @@ async fn submission_loop(
codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
disable_response_storage,
user_shell: default_shell,
hide_agent_reasoning: config.hide_agent_reasoning,
}));
// Patch restored state into the newly created session.
@@ -1036,10 +1001,6 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
.await;
let last_agent_message: Option<String>;
// Although from the perspective of codex.rs, TurnDiffTracker has the lifecycle of a Task which contains
// many turns, from the perspective of the user, it is a single turn.
let mut turn_diff_tracker = TurnDiffTracker::new();
loop {
// Note that pending_input would be something like a message the user
// submitted through the UI while the model was running. Though the UI
@@ -1071,7 +1032,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
})
})
.collect();
match run_turn(&sess, &mut turn_diff_tracker, sub_id.clone(), turn_input).await {
match run_turn(&sess, sub_id.clone(), turn_input).await {
Ok(turn_output) => {
let mut items_to_record_in_conversation_history = Vec::<ResponseItem>::new();
let mut responses = Vec::<ResponseInputItem>::new();
@@ -1136,7 +1097,6 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
ResponseItem::Reasoning {
id,
summary,
content,
encrypted_content,
},
None,
@@ -1144,7 +1104,6 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
items_to_record_in_conversation_history.push(ResponseItem::Reasoning {
id: id.clone(),
summary: summary.clone(),
content: content.clone(),
encrypted_content: encrypted_content.clone(),
});
}
@@ -1199,7 +1158,6 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
async fn run_turn(
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: String,
input: Vec<ResponseItem>,
) -> CodexResult<Vec<ProcessedResponseItem>> {
@@ -1214,7 +1172,7 @@ async fn run_turn(
let mut retries = 0;
loop {
match try_run_turn(sess, turn_diff_tracker, &sub_id, &prompt).await {
match try_run_turn(sess, &sub_id, &prompt).await {
Ok(output) => return Ok(output),
Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted),
Err(CodexErr::EnvVar(var)) => return Err(CodexErr::EnvVar(var)),
@@ -1260,7 +1218,6 @@ struct ProcessedResponseItem {
async fn try_run_turn(
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: &str,
prompt: &Prompt,
) -> CodexResult<Vec<ProcessedResponseItem>> {
@@ -1348,8 +1305,7 @@ async fn try_run_turn(
match event {
ResponseEvent::Created => {}
ResponseEvent::OutputItemDone(item) => {
let response =
handle_response_item(sess, turn_diff_tracker, sub_id, item.clone()).await?;
let response = handle_response_item(sess, sub_id, item.clone()).await?;
output.push(ProcessedResponseItem { item, response });
}
@@ -1367,16 +1323,6 @@ async fn try_run_turn(
.ok();
}
let unified_diff = turn_diff_tracker.get_unified_diff();
if let Ok(Some(unified_diff)) = unified_diff {
let msg = EventMsg::TurnDiff(TurnDiffEvent { unified_diff });
let event = Event {
id: sub_id.to_string(),
msg,
};
let _ = sess.tx_event.send(event).await;
}
return Ok(output);
}
ResponseEvent::OutputTextDelta(delta) => {
@@ -1387,13 +1333,11 @@ async fn try_run_turn(
sess.tx_event.send(event).await.ok();
}
ResponseEvent::ReasoningSummaryDelta(delta) => {
if !sess.hide_agent_reasoning {
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }),
};
sess.tx_event.send(event).await.ok();
}
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }),
};
sess.tx_event.send(event).await.ok();
}
}
}
@@ -1483,7 +1427,6 @@ async fn run_compact_task(
async fn handle_response_item(
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: &str,
item: ResponseItem,
) -> CodexResult<Option<ResponseInputItem>> {
@@ -1501,36 +1444,16 @@ async fn handle_response_item(
}
None
}
ResponseItem::Reasoning {
id: _,
summary,
content,
encrypted_content: _,
} => {
if !sess.hide_agent_reasoning {
for item in summary {
let text = match item {
ReasoningItemReasoningSummary::SummaryText { text } => text,
};
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoning(AgentReasoningEvent { text }),
};
sess.tx_event.send(event).await.ok();
}
}
if !sess.hide_agent_reasoning && content.is_some() {
let content = content.unwrap();
for item in content {
let text = match item {
ReasoningItemContent::ReasoningText { text } => text,
};
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoningContent(AgentReasoningContentEvent { text }),
};
sess.tx_event.send(event).await.ok();
}
ResponseItem::Reasoning { summary, .. } => {
for item in summary {
let text = match item {
ReasoningItemReasoningSummary::SummaryText { text } => text,
};
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoning(AgentReasoningEvent { text }),
};
sess.tx_event.send(event).await.ok();
}
None
}
@@ -1541,17 +1464,7 @@ async fn handle_response_item(
..
} => {
info!("FunctionCall: {arguments}");
Some(
handle_function_call(
sess,
turn_diff_tracker,
sub_id.to_string(),
name,
arguments,
call_id,
)
.await,
)
Some(handle_function_call(sess, sub_id.to_string(), name, arguments, call_id).await)
}
ResponseItem::LocalShellCall {
id,
@@ -1586,7 +1499,6 @@ async fn handle_response_item(
handle_container_exec_with_params(
exec_params,
sess,
turn_diff_tracker,
sub_id.to_string(),
effective_call_id,
)
@@ -1604,7 +1516,6 @@ async fn handle_response_item(
async fn handle_function_call(
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: String,
name: String,
arguments: String,
@@ -1618,8 +1529,7 @@ async fn handle_function_call(
return *output;
}
};
handle_container_exec_with_params(params, sess, turn_diff_tracker, sub_id, call_id)
.await
handle_container_exec_with_params(params, sess, sub_id, call_id).await
}
"update_plan" => handle_update_plan(sess, arguments, sub_id, call_id).await,
_ => {
@@ -1693,7 +1603,6 @@ fn maybe_run_with_user_profile(params: ExecParams, sess: &Session) -> ExecParams
async fn handle_container_exec_with_params(
params: ExecParams,
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: String,
call_id: String,
) -> ResponseInputItem {
@@ -1841,7 +1750,7 @@ async fn handle_container_exec_with_params(
},
),
};
sess.on_exec_command_begin(turn_diff_tracker, exec_command_context.clone())
sess.notify_exec_command_begin(exec_command_context.clone())
.await;
let params = maybe_run_with_user_profile(params, sess);
@@ -1866,22 +1775,23 @@ async fn handle_container_exec_with_params(
stdout,
stderr,
duration,
} = &output;
} = output;
sess.on_exec_command_end(
turn_diff_tracker,
sess.notify_exec_command_end(
&sub_id,
&call_id,
&output,
&stdout,
&stderr,
exit_code,
exec_command_context.apply_patch.is_some(),
)
.await;
let is_success = *exit_code == 0;
let is_success = exit_code == 0;
let content = format_exec_output(
if is_success { stdout } else { stderr },
*exit_code,
*duration,
if is_success { &stdout } else { &stderr },
exit_code,
duration,
);
ResponseInputItem::FunctionCallOutput {
@@ -1893,15 +1803,7 @@ async fn handle_container_exec_with_params(
}
}
Err(CodexErr::Sandbox(error)) => {
handle_sandbox_error(
turn_diff_tracker,
params,
exec_command_context,
error,
sandbox_type,
sess,
)
.await
handle_sandbox_error(params, exec_command_context, error, sandbox_type, sess).await
}
Err(e) => {
// Handle non-sandbox errors
@@ -1917,7 +1819,6 @@ async fn handle_container_exec_with_params(
}
async fn handle_sandbox_error(
turn_diff_tracker: &mut TurnDiffTracker,
params: ExecParams,
exec_command_context: ExecCommandContext,
error: SandboxErr,
@@ -1974,8 +1875,7 @@ async fn handle_sandbox_error(
sess.notify_background_event(&sub_id, "retrying command without sandbox")
.await;
sess.on_exec_command_begin(turn_diff_tracker, exec_command_context)
.await;
sess.notify_exec_command_begin(exec_command_context).await;
// This is an escalated retry; the policy will not be
// examined and the sandbox has been set to `None`.
@@ -2000,22 +1900,23 @@ async fn handle_sandbox_error(
stdout,
stderr,
duration,
} = &retry_output;
} = retry_output;
sess.on_exec_command_end(
turn_diff_tracker,
sess.notify_exec_command_end(
&sub_id,
&call_id,
&retry_output,
&stdout,
&stderr,
exit_code,
is_apply_patch,
)
.await;
let is_success = *exit_code == 0;
let is_success = exit_code == 0;
let content = format_exec_output(
if is_success { stdout } else { stderr },
*exit_code,
*duration,
if is_success { &stdout } else { &stderr },
exit_code,
duration,
);
ResponseInputItem::FunctionCallOutput {

View File

@@ -480,18 +480,12 @@ impl Config {
// Load base instructions override from a file if specified. If the
// path is relative, resolve it against the effective cwd so the
// behaviour matches other path-like config values.
let experimental_instructions_path = config_profile
.experimental_instructions_file
.as_ref()
.or(cfg.experimental_instructions_file.as_ref());
let file_base_instructions =
Self::get_base_instructions(experimental_instructions_path, &resolved_cwd)?;
let file_base_instructions = Self::get_base_instructions(
cfg.experimental_instructions_file.as_ref(),
&resolved_cwd,
)?;
let base_instructions = base_instructions.or(file_base_instructions);
// Resolve hide/show reasoning flags with consistent precedence:
// if hide is true, force show_reasoning_content to false.
let hide_agent_reasoning_val = cfg.hide_agent_reasoning.unwrap_or(false);
let config = Self {
model,
model_context_window,
@@ -521,7 +515,7 @@ impl Config {
tui: cfg.tui.unwrap_or_default(),
codex_linux_sandbox_exe,
hide_agent_reasoning: hide_agent_reasoning_val,
hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false),
model_reasoning_effort: config_profile
.model_reasoning_effort
.or(cfg.model_reasoning_effort)

View File

@@ -1,5 +1,4 @@
use serde::Deserialize;
use std::path::PathBuf;
use crate::config_types::ReasoningEffort;
use crate::config_types::ReasoningSummary;
@@ -18,5 +17,4 @@ pub struct ConfigProfile {
pub model_reasoning_effort: Option<ReasoningEffort>,
pub model_reasoning_summary: Option<ReasoningSummary>,
pub chatgpt_base_url: Option<String>,
pub experimental_instructions_file: Option<PathBuf>,
}

View File

@@ -140,7 +140,11 @@ pub async fn process_exec_tool_call(
let exit_code = raw_output.exit_status.code().unwrap_or(-1);
if exit_code != 0 && is_likely_sandbox_denied(sandbox_type, exit_code) {
// NOTE(ragona): This is much less restrictive than the previous check. If we exec
// a command, and it returns anything other than success, we assume that it may have
// been a sandboxing error and allow the user to retry. (The user of course may choose
// not to retry, or in a non-interactive mode, would automatically reject the approval.)
if exit_code != 0 && sandbox_type != SandboxType::None {
return Err(CodexErr::Sandbox(SandboxErr::Denied(
exit_code, stdout, stderr,
)));
@@ -219,26 +223,6 @@ fn create_linux_sandbox_command_args(
linux_cmd
}
/// We don't have a fully deterministic way to tell if our command failed
/// because of the sandbox - a command in the user's zshrc file might hit an
/// error, but the command itself might fail or succeed for other reasons.
/// For now, we conservatively check for 'command not found' (exit code 127),
/// and can add additional cases as necessary.
fn is_likely_sandbox_denied(sandbox_type: SandboxType, exit_code: i32) -> bool {
if sandbox_type == SandboxType::None {
return false;
}
// Quick rejects: well-known non-sandbox shell exit codes
// 127: command not found, 2: misuse of shell builtins
if exit_code == 127 {
return false;
}
// For all other cases, we assume the sandbox is the cause
true
}
#[derive(Debug)]
pub struct RawExecToolCallOutput {
pub exit_status: ExitStatus,

View File

@@ -38,14 +38,12 @@ pub mod plan_tool;
mod project_doc;
pub mod protocol;
mod rollout;
pub(crate) mod safety;
mod safety;
pub mod seatbelt;
pub mod shell;
pub mod spawn;
pub mod turn_diff_tracker;
mod user_notification;
pub mod util;
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
pub use client_common::model_supports_reasoning_summaries;
pub use safety::get_platform_sandbox;

View File

@@ -45,8 +45,6 @@ pub enum ResponseItem {
Reasoning {
id: String,
summary: Vec<ReasoningItemReasoningSummary>,
#[serde(default, skip_serializing_if = "Option::is_none")]
content: Option<Vec<ReasoningItemContent>>,
encrypted_content: Option<String>,
},
LocalShellCall {
@@ -138,12 +136,6 @@ pub enum ReasoningItemReasoningSummary {
SummaryText { text: String },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ReasoningItemContent {
ReasoningText { text: String },
}
impl From<Vec<InputItem>> for ResponseInputItem {
fn from(items: Vec<InputItem>) -> Self {
Self::Message {

View File

@@ -359,9 +359,6 @@ pub enum EventMsg {
/// Agent reasoning delta event from agent.
AgentReasoningDelta(AgentReasoningDeltaEvent),
/// Raw chain-of-thought from agent.
AgentReasoningContent(AgentReasoningContentEvent),
/// Ack the client's configure message.
SessionConfigured(SessionConfiguredEvent),
@@ -390,8 +387,6 @@ pub enum EventMsg {
/// Notification that a patch application has finished.
PatchApplyEnd(PatchApplyEndEvent),
TurnDiff(TurnDiffEvent),
/// Response to GetHistoryEntryRequest.
GetHistoryEntryResponse(GetHistoryEntryResponseEvent),
@@ -467,11 +462,6 @@ pub struct AgentReasoningEvent {
pub text: String,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AgentReasoningContentEvent {
pub text: String,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AgentReasoningDeltaEvent {
pub delta: String,
@@ -533,8 +523,6 @@ pub struct ExecCommandEndEvent {
pub stderr: String,
/// The command's exit code.
pub exit_code: i32,
/// The duration of the command execution.
pub duration: Duration,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -608,11 +596,6 @@ pub struct PatchApplyEndEvent {
pub success: bool,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TurnDiffEvent {
pub unified_diff: String,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct GetHistoryEntryResponseEvent {
pub offset: usize,

View File

@@ -65,7 +65,3 @@
(sysctl-name "sysctl.proc_cputype")
(sysctl-name-prefix "hw.perflevel")
)
; Added on top of Chrome profile
; Needed for python multiprocessing on MacOS for the SemLock
(allow ipc-posix-sem)

View File

@@ -1,887 +0,0 @@
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use sha1::digest::Output;
use uuid::Uuid;
use crate::protocol::FileChange;
const ZERO_OID: &str = "0000000000000000000000000000000000000000";
const DEV_NULL: &str = "/dev/null";
struct BaselineFileInfo {
path: PathBuf,
content: Vec<u8>,
mode: FileMode,
oid: String,
}
/// Tracks sets of changes to files and exposes the overall unified diff.
/// Internally, the way this works is now:
/// 1. Maintain an in-memory baseline snapshot of files when they are first seen.
/// For new additions, do not create a baseline so that diffs are shown as proper additions (using /dev/null).
/// 2. Keep a stable internal filename (uuid) per external path for rename tracking.
/// 3. To compute the aggregated unified diff, compare each baseline snapshot to the current file on disk entirely in-memory
/// using the `similar` crate and emit unified diffs with rewritten external paths.
#[derive(Default)]
pub struct TurnDiffTracker {
/// Map external path -> internal filename (uuid).
external_to_temp_name: HashMap<PathBuf, String>,
/// Internal filename -> baseline file info.
baseline_file_info: HashMap<String, BaselineFileInfo>,
/// Internal filename -> external path as of current accumulated state (after applying all changes).
/// This is where renames are tracked.
temp_name_to_current_path: HashMap<String, PathBuf>,
/// Cache of known git worktree roots to avoid repeated filesystem walks.
git_root_cache: Vec<PathBuf>,
}
impl TurnDiffTracker {
pub fn new() -> Self {
Self::default()
}
/// Front-run apply patch calls to track the starting contents of any modified files.
/// - Creates an in-memory baseline snapshot for files that already exist on disk when first seen.
/// - For additions, we intentionally do not create a baseline snapshot so that diffs are proper additions.
/// - Also updates internal mappings for move/rename events.
pub fn on_patch_begin(&mut self, changes: &HashMap<PathBuf, FileChange>) {
for (path, change) in changes.iter() {
// Ensure a stable internal filename exists for this external path.
if !self.external_to_temp_name.contains_key(path) {
let internal = Uuid::new_v4().to_string();
self.external_to_temp_name
.insert(path.clone(), internal.clone());
self.temp_name_to_current_path
.insert(internal.clone(), path.clone());
// If the file exists on disk now, snapshot as baseline; else leave missing to represent /dev/null.
let baseline_file_info = if path.exists() {
let mode = file_mode_for_path(path);
let mode_val = mode.unwrap_or(FileMode::Regular);
let content = blob_bytes(path, &mode_val).unwrap_or_default();
let oid = if mode == Some(FileMode::Symlink) {
format!("{:x}", git_blob_sha1_hex_bytes(&content))
} else {
self.git_blob_oid_for_path(path)
.unwrap_or_else(|| format!("{:x}", git_blob_sha1_hex_bytes(&content)))
};
Some(BaselineFileInfo {
path: path.clone(),
content,
mode: mode_val,
oid,
})
} else {
Some(BaselineFileInfo {
path: path.clone(),
content: vec![],
mode: FileMode::Regular,
oid: ZERO_OID.to_string(),
})
};
if let Some(baseline_file_info) = baseline_file_info {
self.baseline_file_info
.insert(internal.clone(), baseline_file_info);
}
}
// Track rename/move in current mapping if provided in an Update.
if let FileChange::Update {
move_path: Some(dest),
..
} = change
{
let uuid_filename = match self.external_to_temp_name.get(path) {
Some(i) => i.clone(),
None => {
// This should be rare, but if we haven't mapped the source, create it with no baseline.
let i = Uuid::new_v4().to_string();
self.baseline_file_info.insert(
i.clone(),
BaselineFileInfo {
path: path.clone(),
content: vec![],
mode: FileMode::Regular,
oid: ZERO_OID.to_string(),
},
);
i
}
};
// Update current external mapping for temp file name.
self.temp_name_to_current_path
.insert(uuid_filename.clone(), dest.clone());
// Update forward file_mapping: external current -> internal name.
self.external_to_temp_name.remove(path);
self.external_to_temp_name
.insert(dest.clone(), uuid_filename);
};
}
}
fn get_path_for_internal(&self, internal: &str) -> Option<PathBuf> {
self.temp_name_to_current_path
.get(internal)
.cloned()
.or_else(|| {
self.baseline_file_info
.get(internal)
.map(|info| info.path.clone())
})
}
/// Find the git worktree root for a file/directory by walking up to the first ancestor containing a `.git` entry.
/// Uses a simple cache of known roots and avoids negative-result caching for simplicity.
fn find_git_root_cached(&mut self, start: &Path) -> Option<PathBuf> {
let dir = if start.is_dir() {
start
} else {
start.parent()?
};
// Fast path: if any cached root is an ancestor of this path, use it.
if let Some(root) = self
.git_root_cache
.iter()
.find(|r| dir.starts_with(r))
.cloned()
{
return Some(root);
}
// Walk up to find a `.git` marker.
let mut cur = dir.to_path_buf();
loop {
let git_marker = cur.join(".git");
if git_marker.is_dir() || git_marker.is_file() {
if !self.git_root_cache.iter().any(|r| r == &cur) {
self.git_root_cache.push(cur.clone());
}
return Some(cur);
}
// On Windows, avoid walking above the drive or UNC share root.
#[cfg(windows)]
{
if is_windows_drive_or_unc_root(&cur) {
return None;
}
}
if let Some(parent) = cur.parent() {
cur = parent.to_path_buf();
} else {
return None;
}
}
}
/// Return a display string for `path` relative to its git root if found, else absolute.
fn relative_to_git_root_str(&mut self, path: &Path) -> String {
let s = if let Some(root) = self.find_git_root_cached(path) {
if let Ok(rel) = path.strip_prefix(&root) {
rel.display().to_string()
} else {
path.display().to_string()
}
} else {
path.display().to_string()
};
s.replace('\\', "/")
}
/// Ask git to compute the blob SHA-1 for the file at `path` within its repository.
/// Returns None if no repository is found or git invocation fails.
fn git_blob_oid_for_path(&mut self, path: &Path) -> Option<String> {
let root = self.find_git_root_cached(path)?;
// Compute a path relative to the repo root for better portability across platforms.
let rel = path.strip_prefix(&root).unwrap_or(path);
let output = Command::new("git")
.arg("-C")
.arg(&root)
.arg("hash-object")
.arg("--")
.arg(rel)
.output()
.ok()?;
if !output.status.success() {
return None;
}
let s = String::from_utf8_lossy(&output.stdout).trim().to_string();
if s.len() == 40 { Some(s) } else { None }
}
/// Recompute the aggregated unified diff by comparing the in-memory snapshots taken when each
/// file was first touched by apply_patch during this turn against the current state on disk.
pub fn get_unified_diff(&mut self) -> Result<Option<String>> {
let mut aggregated = String::new();
// Compute diffs per tracked internal file in a stable order by external path.
let mut baseline_file_names: Vec<String> =
self.baseline_file_info.keys().cloned().collect();
// Sort lexicographically by full repo-relative path to match git behavior.
baseline_file_names.sort_by_key(|internal| {
self.get_path_for_internal(internal)
.map(|p| self.relative_to_git_root_str(&p))
.unwrap_or_default()
});
for internal in baseline_file_names {
aggregated.push_str(self.get_file_diff(&internal).as_str());
if !aggregated.ends_with('\n') {
aggregated.push('\n');
}
}
if aggregated.trim().is_empty() {
Ok(None)
} else {
Ok(Some(aggregated))
}
}
fn get_file_diff(&mut self, internal_file_name: &str) -> String {
let mut aggregated = String::new();
// Snapshot lightweight fields only.
let (baseline_external_path, baseline_mode, left_oid) = {
if let Some(info) = self.baseline_file_info.get(internal_file_name) {
(info.path.clone(), info.mode, info.oid.clone())
} else {
(PathBuf::new(), FileMode::Regular, ZERO_OID.to_string())
}
};
let current_external_path = match self.get_path_for_internal(internal_file_name) {
Some(p) => p,
None => return aggregated,
};
let current_mode = file_mode_for_path(&current_external_path).unwrap_or(FileMode::Regular);
let right_bytes = blob_bytes(&current_external_path, &current_mode);
// Compute displays with &mut self before borrowing any baseline content.
let left_display = self.relative_to_git_root_str(&baseline_external_path);
let right_display = self.relative_to_git_root_str(&current_external_path);
// Compute right oid before borrowing baseline content.
let right_oid = if let Some(b) = right_bytes.as_ref() {
if current_mode == FileMode::Symlink {
format!("{:x}", git_blob_sha1_hex_bytes(b))
} else {
self.git_blob_oid_for_path(&current_external_path)
.unwrap_or_else(|| format!("{:x}", git_blob_sha1_hex_bytes(b)))
}
} else {
ZERO_OID.to_string()
};
// Borrow baseline content only after all &mut self uses are done.
let left_present = left_oid.as_str() != ZERO_OID;
let left_bytes: Option<&[u8]> = if left_present {
self.baseline_file_info
.get(internal_file_name)
.map(|i| i.content.as_slice())
} else {
None
};
// Fast path: identical bytes or both missing.
if left_bytes == right_bytes.as_deref() {
return aggregated;
}
aggregated.push_str(&format!("diff --git a/{left_display} b/{right_display}\n"));
let is_add = !left_present && right_bytes.is_some();
let is_delete = left_present && right_bytes.is_none();
if is_add {
aggregated.push_str(&format!("new file mode {current_mode}\n"));
} else if is_delete {
aggregated.push_str(&format!("deleted file mode {baseline_mode}\n"));
} else if baseline_mode != current_mode {
aggregated.push_str(&format!("old mode {baseline_mode}\n"));
aggregated.push_str(&format!("new mode {current_mode}\n"));
}
let left_text = left_bytes.and_then(|b| std::str::from_utf8(b).ok());
let right_text = right_bytes
.as_deref()
.and_then(|b| std::str::from_utf8(b).ok());
let can_text_diff = matches!(
(left_text, right_text, is_add, is_delete),
(Some(_), Some(_), _, _) | (_, Some(_), true, _) | (Some(_), _, _, true)
);
if can_text_diff {
let l = left_text.unwrap_or("");
let r = right_text.unwrap_or("");
aggregated.push_str(&format!("index {left_oid}..{right_oid}\n"));
let old_header = if left_present {
format!("a/{left_display}")
} else {
DEV_NULL.to_string()
};
let new_header = if right_bytes.is_some() {
format!("b/{right_display}")
} else {
DEV_NULL.to_string()
};
let diff = similar::TextDiff::from_lines(l, r);
let unified = diff
.unified_diff()
.context_radius(3)
.header(&old_header, &new_header)
.to_string();
aggregated.push_str(&unified);
} else {
aggregated.push_str(&format!("index {left_oid}..{right_oid}\n"));
let old_header = if left_present {
format!("a/{left_display}")
} else {
DEV_NULL.to_string()
};
let new_header = if right_bytes.is_some() {
format!("b/{right_display}")
} else {
DEV_NULL.to_string()
};
aggregated.push_str(&format!("--- {old_header}\n"));
aggregated.push_str(&format!("+++ {new_header}\n"));
aggregated.push_str("Binary files differ\n");
}
aggregated
}
}
/// Compute the Git SHA-1 blob object ID for the given content (bytes).
fn git_blob_sha1_hex_bytes(data: &[u8]) -> Output<sha1::Sha1> {
// Git blob hash is sha1 of: "blob <len>\0<data>"
let header = format!("blob {}\0", data.len());
use sha1::Digest;
let mut hasher = sha1::Sha1::new();
hasher.update(header.as_bytes());
hasher.update(data);
hasher.finalize()
}
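// Illustrative check (not part of the file above): because both sides hash the
// bytes `blob 4\0foo\n`, the OID this helper computes for "foo\n" should match
// what `git hash-object` reports for a file containing "foo\n", i.e.
// 257cc5642cb1a054f08cc83f2d943e56fd3ebe99.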
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FileMode {
Regular,
#[cfg(unix)]
Executable,
Symlink,
}
impl FileMode {
fn as_str(&self) -> &'static str {
match self {
FileMode::Regular => "100644",
#[cfg(unix)]
FileMode::Executable => "100755",
FileMode::Symlink => "120000",
}
}
}
impl std::fmt::Display for FileMode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.as_str())
}
}
#[cfg(unix)]
fn file_mode_for_path(path: &Path) -> Option<FileMode> {
use std::os::unix::fs::PermissionsExt;
let meta = fs::symlink_metadata(path).ok()?;
let ft = meta.file_type();
if ft.is_symlink() {
return Some(FileMode::Symlink);
}
let mode = meta.permissions().mode();
let is_exec = (mode & 0o111) != 0;
Some(if is_exec {
FileMode::Executable
} else {
FileMode::Regular
})
}
#[cfg(not(unix))]
fn file_mode_for_path(_path: &Path) -> Option<FileMode> {
// Default to non-executable on non-unix.
Some(FileMode::Regular)
}
fn blob_bytes(path: &Path, mode: &FileMode) -> Option<Vec<u8>> {
if path.exists() {
let contents = if *mode == FileMode::Symlink {
symlink_blob_bytes(path)
.ok_or_else(|| anyhow!("failed to read symlink target for {}", path.display()))
} else {
fs::read(path)
.with_context(|| format!("failed to read current file for diff {}", path.display()))
};
contents.ok()
} else {
None
}
}
#[cfg(unix)]
fn symlink_blob_bytes(path: &Path) -> Option<Vec<u8>> {
use std::os::unix::ffi::OsStrExt;
let target = std::fs::read_link(path).ok()?;
Some(target.as_os_str().as_bytes().to_vec())
}
#[cfg(not(unix))]
fn symlink_blob_bytes(_path: &Path) -> Option<Vec<u8>> {
None
}
#[cfg(windows)]
fn is_windows_drive_or_unc_root(p: &std::path::Path) -> bool {
use std::path::Component;
let mut comps = p.components();
matches!(
(comps.next(), comps.next(), comps.next()),
(Some(Component::Prefix(_)), Some(Component::RootDir), None)
)
}
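// Illustrative (not in the source): `C:\` and `\\server\share\` match
// (Prefix, RootDir, None) and are treated as roots, while `C:\repo` has a
// third component and is not, so the `.git` walk stops at the drive or share.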
#[cfg(test)]
mod tests {
#![allow(clippy::unwrap_used)]
use super::*;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
/// Compute the Git SHA-1 blob object ID for the given content (string).
/// This delegates to the bytes version to avoid UTF-8 lossy conversions here.
fn git_blob_sha1_hex(data: &str) -> String {
format!("{:x}", git_blob_sha1_hex_bytes(data.as_bytes()))
}
fn normalize_diff_for_test(input: &str, root: &Path) -> String {
let root_str = root.display().to_string().replace('\\', "/");
let replaced = input.replace(&root_str, "<TMP>");
// Split into blocks on lines starting with "diff --git ", sort blocks for determinism, and rejoin
let mut blocks: Vec<String> = Vec::new();
let mut current = String::new();
for line in replaced.lines() {
if line.starts_with("diff --git ") && !current.is_empty() {
blocks.push(current);
current = String::new();
}
if !current.is_empty() {
current.push('\n');
}
current.push_str(line);
}
if !current.is_empty() {
blocks.push(current);
}
blocks.sort();
let mut out = blocks.join("\n");
if !out.ends_with('\n') {
out.push('\n');
}
out
}
#[test]
fn accumulates_add_and_update() {
let mut acc = TurnDiffTracker::new();
let dir = tempdir().unwrap();
let file = dir.path().join("a.txt");
// First patch: add file (baseline should be /dev/null).
let add_changes = HashMap::from([(
file.clone(),
FileChange::Add {
content: "foo\n".to_string(),
},
)]);
acc.on_patch_begin(&add_changes);
// Simulate apply: create the file on disk.
fs::write(&file, "foo\n").unwrap();
let first = acc.get_unified_diff().unwrap().unwrap();
let first = normalize_diff_for_test(&first, dir.path());
let expected_first = {
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
let right_oid = git_blob_sha1_hex("foo\n");
format!(
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
new file mode {mode}
index {ZERO_OID}..{right_oid}
--- {DEV_NULL}
+++ b/<TMP>/a.txt
@@ -0,0 +1 @@
+foo
"#,
)
};
assert_eq!(first, expected_first);
// Second patch: update the file on disk.
let update_changes = HashMap::from([(
file.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: None,
},
)]);
acc.on_patch_begin(&update_changes);
// Simulate apply: append a new line.
fs::write(&file, "foo\nbar\n").unwrap();
let combined = acc.get_unified_diff().unwrap().unwrap();
let combined = normalize_diff_for_test(&combined, dir.path());
let expected_combined = {
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
let right_oid = git_blob_sha1_hex("foo\nbar\n");
format!(
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
new file mode {mode}
index {ZERO_OID}..{right_oid}
--- {DEV_NULL}
+++ b/<TMP>/a.txt
@@ -0,0 +1,2 @@
+foo
+bar
"#,
)
};
assert_eq!(combined, expected_combined);
}
#[test]
fn accumulates_delete() {
let dir = tempdir().unwrap();
let file = dir.path().join("b.txt");
fs::write(&file, "x\n").unwrap();
let mut acc = TurnDiffTracker::new();
let del_changes = HashMap::from([(file.clone(), FileChange::Delete)]);
acc.on_patch_begin(&del_changes);
// Simulate apply: delete the file from disk.
let baseline_mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
fs::remove_file(&file).unwrap();
let diff = acc.get_unified_diff().unwrap().unwrap();
let diff = normalize_diff_for_test(&diff, dir.path());
let expected = {
let left_oid = git_blob_sha1_hex("x\n");
format!(
r#"diff --git a/<TMP>/b.txt b/<TMP>/b.txt
deleted file mode {baseline_mode}
index {left_oid}..{ZERO_OID}
--- a/<TMP>/b.txt
+++ {DEV_NULL}
@@ -1 +0,0 @@
-x
"#,
)
};
assert_eq!(diff, expected);
}
#[test]
fn accumulates_move_and_update() {
let dir = tempdir().unwrap();
let src = dir.path().join("src.txt");
let dest = dir.path().join("dst.txt");
fs::write(&src, "line\n").unwrap();
let mut acc = TurnDiffTracker::new();
let mv_changes = HashMap::from([(
src.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: Some(dest.clone()),
},
)]);
acc.on_patch_begin(&mv_changes);
// Simulate apply: move and update content.
fs::rename(&src, &dest).unwrap();
fs::write(&dest, "line2\n").unwrap();
let out = acc.get_unified_diff().unwrap().unwrap();
let out = normalize_diff_for_test(&out, dir.path());
let expected = {
let left_oid = git_blob_sha1_hex("line\n");
let right_oid = git_blob_sha1_hex("line2\n");
format!(
r#"diff --git a/<TMP>/src.txt b/<TMP>/dst.txt
index {left_oid}..{right_oid}
--- a/<TMP>/src.txt
+++ b/<TMP>/dst.txt
@@ -1 +1 @@
-line
+line2
"#
)
};
assert_eq!(out, expected);
}
#[test]
fn move_without_change_yields_no_diff() {
let dir = tempdir().unwrap();
let src = dir.path().join("moved.txt");
let dest = dir.path().join("renamed.txt");
fs::write(&src, "same\n").unwrap();
let mut acc = TurnDiffTracker::new();
let mv_changes = HashMap::from([(
src.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: Some(dest.clone()),
},
)]);
acc.on_patch_begin(&mv_changes);
// Simulate apply: move only, no content change.
fs::rename(&src, &dest).unwrap();
let diff = acc.get_unified_diff().unwrap();
assert_eq!(diff, None);
}
#[test]
fn move_declared_but_file_only_appears_at_dest_is_add() {
let dir = tempdir().unwrap();
let src = dir.path().join("src.txt");
let dest = dir.path().join("dest.txt");
let mut acc = TurnDiffTracker::new();
let mv = HashMap::from([(
src.clone(),
FileChange::Update {
unified_diff: "".into(),
move_path: Some(dest.clone()),
},
)]);
acc.on_patch_begin(&mv);
// No file existed initially; create only dest
fs::write(&dest, "hello\n").unwrap();
let diff = acc.get_unified_diff().unwrap().unwrap();
let diff = normalize_diff_for_test(&diff, dir.path());
let expected = {
let mode = file_mode_for_path(&dest).unwrap_or(FileMode::Regular);
let right_oid = git_blob_sha1_hex("hello\n");
format!(
r#"diff --git a/<TMP>/src.txt b/<TMP>/dest.txt
new file mode {mode}
index {ZERO_OID}..{right_oid}
--- {DEV_NULL}
+++ b/<TMP>/dest.txt
@@ -0,0 +1 @@
+hello
"#,
)
};
assert_eq!(diff, expected);
}
#[test]
fn update_persists_across_new_baseline_for_new_file() {
let dir = tempdir().unwrap();
let a = dir.path().join("a.txt");
let b = dir.path().join("b.txt");
fs::write(&a, "foo\n").unwrap();
fs::write(&b, "z\n").unwrap();
let mut acc = TurnDiffTracker::new();
// First: update existing a.txt (baseline snapshot is created for a).
let update_a = HashMap::from([(
a.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: None,
},
)]);
acc.on_patch_begin(&update_a);
// Simulate apply: modify a.txt on disk.
fs::write(&a, "foo\nbar\n").unwrap();
let first = acc.get_unified_diff().unwrap().unwrap();
let first = normalize_diff_for_test(&first, dir.path());
let expected_first = {
let left_oid = git_blob_sha1_hex("foo\n");
let right_oid = git_blob_sha1_hex("foo\nbar\n");
format!(
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
index {left_oid}..{right_oid}
--- a/<TMP>/a.txt
+++ b/<TMP>/a.txt
@@ -1 +1,2 @@
foo
+bar
"#
)
};
assert_eq!(first, expected_first);
// Next: introduce a brand-new path b.txt into baseline snapshots via a delete change.
let del_b = HashMap::from([(b.clone(), FileChange::Delete)]);
acc.on_patch_begin(&del_b);
// Simulate apply: delete b.txt.
let baseline_mode = file_mode_for_path(&b).unwrap_or(FileMode::Regular);
fs::remove_file(&b).unwrap();
let combined = acc.get_unified_diff().unwrap().unwrap();
let combined = normalize_diff_for_test(&combined, dir.path());
let expected = {
let left_oid_a = git_blob_sha1_hex("foo\n");
let right_oid_a = git_blob_sha1_hex("foo\nbar\n");
let left_oid_b = git_blob_sha1_hex("z\n");
format!(
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
index {left_oid_a}..{right_oid_a}
--- a/<TMP>/a.txt
+++ b/<TMP>/a.txt
@@ -1 +1,2 @@
foo
+bar
diff --git a/<TMP>/b.txt b/<TMP>/b.txt
deleted file mode {baseline_mode}
index {left_oid_b}..{ZERO_OID}
--- a/<TMP>/b.txt
+++ {DEV_NULL}
@@ -1 +0,0 @@
-z
"#,
)
};
assert_eq!(combined, expected);
}
#[test]
fn binary_files_differ_update() {
let dir = tempdir().unwrap();
let file = dir.path().join("bin.dat");
// Initial non-UTF8 bytes
let left_bytes: Vec<u8> = vec![0xff, 0xfe, 0xfd, 0x00];
// Updated non-UTF8 bytes
let right_bytes: Vec<u8> = vec![0x01, 0x02, 0x03, 0x00];
fs::write(&file, &left_bytes).unwrap();
let mut acc = TurnDiffTracker::new();
let update_changes = HashMap::from([(
file.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: None,
},
)]);
acc.on_patch_begin(&update_changes);
// Apply update on disk
fs::write(&file, &right_bytes).unwrap();
let diff = acc.get_unified_diff().unwrap().unwrap();
let diff = normalize_diff_for_test(&diff, dir.path());
let expected = {
let left_oid = format!("{:x}", git_blob_sha1_hex_bytes(&left_bytes));
let right_oid = format!("{:x}", git_blob_sha1_hex_bytes(&right_bytes));
format!(
r#"diff --git a/<TMP>/bin.dat b/<TMP>/bin.dat
index {left_oid}..{right_oid}
--- a/<TMP>/bin.dat
+++ b/<TMP>/bin.dat
Binary files differ
"#
)
};
assert_eq!(diff, expected);
}
#[test]
fn filenames_with_spaces_add_and_update() {
let mut acc = TurnDiffTracker::new();
let dir = tempdir().unwrap();
let file = dir.path().join("name with spaces.txt");
// First patch: add file (baseline should be /dev/null).
let add_changes = HashMap::from([(
file.clone(),
FileChange::Add {
content: "foo\n".to_string(),
},
)]);
acc.on_patch_begin(&add_changes);
// Simulate apply: create the file on disk.
fs::write(&file, "foo\n").unwrap();
let first = acc.get_unified_diff().unwrap().unwrap();
let first = normalize_diff_for_test(&first, dir.path());
let expected_first = {
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
let right_oid = git_blob_sha1_hex("foo\n");
format!(
r#"diff --git a/<TMP>/name with spaces.txt b/<TMP>/name with spaces.txt
new file mode {mode}
index {ZERO_OID}..{right_oid}
--- {DEV_NULL}
+++ b/<TMP>/name with spaces.txt
@@ -0,0 +1 @@
+foo
"#,
)
};
assert_eq!(first, expected_first);
// Second patch: update the file on disk.
let update_changes = HashMap::from([(
file.clone(),
FileChange::Update {
unified_diff: "".to_owned(),
move_path: None,
},
)]);
acc.on_patch_begin(&update_changes);
// Simulate apply: append a new line with a space.
fs::write(&file, "foo\nbar baz\n").unwrap();
let combined = acc.get_unified_diff().unwrap().unwrap();
let combined = normalize_diff_for_test(&combined, dir.path());
let expected_combined = {
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
let right_oid = git_blob_sha1_hex("foo\nbar baz\n");
format!(
r#"diff --git a/<TMP>/name with spaces.txt b/<TMP>/name with spaces.txt
new file mode {mode}
index {ZERO_OID}..{right_oid}
--- {DEV_NULL}
+++ b/<TMP>/name with spaces.txt
@@ -0,0 +1,2 @@
+foo
+bar baz
"#,
)
};
assert_eq!(combined, expected_combined);
}
}
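For orientation, a minimal sketch (not part of the diff) of how the tracker defined above is driven, mirroring its tests: `TurnDiffTracker`, `FileChange::Add`, `on_patch_begin`, and `get_unified_diff` come from the file itself, while the `main` wrapper, the `codex_core::turn_diff_tracker` import path, the `a.txt` file, and the use of `anyhow` in the caller are illustrative assumptions.
use std::collections::HashMap;
use std::path::PathBuf;
use codex_core::protocol::FileChange;
use codex_core::turn_diff_tracker::TurnDiffTracker; // assumed module path
fn main() -> anyhow::Result<()> {
    let mut tracker = TurnDiffTracker::new();
    let file = PathBuf::from("a.txt");
    // 1. Before a patch is applied, record a baseline for each touched path.
    let changes = HashMap::from([(
        file.clone(),
        FileChange::Add { content: "foo\n".to_string() },
    )]);
    tracker.on_patch_begin(&changes);
    // 2. Apply the patch (simulated here by writing the file directly).
    std::fs::write(&file, "foo\n")?;
    // 3. Aggregate the turn's changes into a single unified diff.
    if let Some(diff) = tracker.get_unified_diff()? {
        println!("{diff}");
    }
    Ok(())
}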

View File

@@ -1,69 +0,0 @@
#![cfg(target_os = "macos")]
#![expect(clippy::expect_used)]
use std::collections::HashMap;
use std::sync::Arc;
use codex_core::exec::ExecParams;
use codex_core::exec::SandboxType;
use codex_core::exec::process_exec_tool_call;
use codex_core::protocol::SandboxPolicy;
use codex_core::spawn::CODEX_SANDBOX_ENV_VAR;
use tempfile::TempDir;
use tokio::sync::Notify;
use codex_core::get_platform_sandbox;
async fn run_test_cmd(tmp: TempDir, cmd: Vec<&str>, should_be_ok: bool) {
if std::env::var(CODEX_SANDBOX_ENV_VAR) == Ok("seatbelt".to_string()) {
eprintln!("{CODEX_SANDBOX_ENV_VAR} is set to 'seatbelt', skipping test.");
return;
}
let sandbox_type = get_platform_sandbox().expect("should be able to get sandbox type");
assert_eq!(sandbox_type, SandboxType::MacosSeatbelt);
let params = ExecParams {
command: cmd.iter().map(|s| s.to_string()).collect(),
cwd: tmp.path().to_path_buf(),
timeout_ms: Some(1000),
env: HashMap::new(),
};
let ctrl_c = Arc::new(Notify::new());
let policy = SandboxPolicy::new_read_only_policy();
let result = process_exec_tool_call(params, sandbox_type, ctrl_c, &policy, &None, None).await;
assert!(result.is_ok() == should_be_ok);
}
/// Command succeeds with exit code 0 normally
#[tokio::test]
async fn exit_code_0_succeeds() {
let tmp = TempDir::new().expect("should be able to create temp dir");
let cmd = vec!["echo", "hello"];
run_test_cmd(tmp, cmd, true).await
}
/// Command not found returns exit code 127, this is not considered a sandbox error
#[tokio::test]
async fn exit_command_not_found_is_ok() {
let tmp = TempDir::new().expect("should be able to create temp dir");
let cmd = vec!["/bin/bash", "-c", "nonexistent_command_12345"];
run_test_cmd(tmp, cmd, true).await
}
/// Writing a file fails and should be considered a sandbox error
#[tokio::test]
async fn write_file_fails_as_sandbox_error() {
let tmp = TempDir::new().expect("should be able to create temp dir");
let path = tmp.path().join("test.txt");
let cmd = vec![
"/user/bin/touch",
path.to_str().expect("should be able to get path"),
];
run_test_cmd(tmp, cmd, false).await;
}

View File

@@ -177,7 +177,8 @@ async fn live_shell_function_call() {
match ev.msg {
EventMsg::ExecCommandBegin(codex_core::protocol::ExecCommandBeginEvent {
command,
..
call_id: _,
cwd: _,
}) => {
assert_eq!(command, vec!["echo", MARKER]);
saw_begin = true;
@@ -185,7 +186,8 @@ async fn live_shell_function_call() {
EventMsg::ExecCommandEnd(codex_core::protocol::ExecCommandEndEvent {
stdout,
exit_code,
..
call_id: _,
stderr: _,
}) => {
assert_eq!(exit_code, 0, "echo returned nonzero exit code");
assert!(stdout.contains(MARKER));

View File

@@ -44,14 +44,20 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
entries
}
pub(crate) fn handle_last_message(last_agent_message: Option<&str>, output_file: &Path) {
let message = last_agent_message.unwrap_or_default();
write_last_message_file(message, Some(output_file));
if last_agent_message.is_none() {
eprintln!(
"Warning: no last agent message; wrote empty content to {}",
output_file.display()
);
pub(crate) fn handle_last_message(
last_agent_message: Option<&str>,
last_message_path: Option<&Path>,
) {
match (last_message_path, last_agent_message) {
(Some(path), Some(msg)) => write_last_message_file(msg, Some(path)),
(Some(path), None) => {
write_last_message_file("", Some(path));
eprintln!(
"Warning: no last agent message; wrote empty content to {}",
path.display()
);
}
(None, _) => eprintln!("Warning: no file to write last message to."),
}
}

View File

@@ -4,7 +4,6 @@ use codex_core::config::Config;
use codex_core::plan_tool::UpdatePlanArgs;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningContentEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
use codex_core::protocol::BackgroundEventEvent;
use codex_core::protocol::ErrorEvent;
@@ -21,7 +20,6 @@ use codex_core::protocol::PatchApplyEndEvent;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::TaskCompleteEvent;
use codex_core::protocol::TokenUsage;
use codex_core::protocol::TurnDiffEvent;
use owo_colors::OwoColorize;
use owo_colors::Style;
use shlex::try_join;
@@ -108,6 +106,7 @@ impl EventProcessorWithHumanOutput {
struct ExecCommandBegin {
command: Vec<String>,
start_time: Instant,
}
struct PatchApplyBegin {
@@ -171,9 +170,10 @@ impl EventProcessor for EventProcessorWithHumanOutput {
// Ignore.
}
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
if let Some(output_file) = self.last_message_path.as_deref() {
handle_last_message(last_agent_message.as_deref(), output_file);
}
handle_last_message(
last_agent_message.as_deref(),
self.last_message_path.as_deref(),
);
return CodexStatus::InitiateShutdown;
}
EventMsg::TokenCount(TokenUsage { total_tokens, .. }) => {
@@ -204,14 +204,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
#[allow(clippy::expect_used)]
std::io::stdout().flush().expect("could not flush stdout");
}
EventMsg::AgentReasoningContent(AgentReasoningContentEvent { text }) => {
if !self.show_agent_reasoning {
return CodexStatus::Running;
}
print!("{text}");
#[allow(clippy::expect_used)]
std::io::stdout().flush().expect("could not flush stdout");
}
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
// if answer_started is false, this means we haven't received any
// delta. Thus, we need to print the message as a new answer.
@@ -236,6 +228,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
call_id.clone(),
ExecCommandBegin {
command: command.clone(),
start_time: Instant::now(),
},
);
ts_println!(
@@ -251,14 +244,16 @@ impl EventProcessor for EventProcessorWithHumanOutput {
call_id,
stdout,
stderr,
duration,
exit_code,
}) => {
let exec_command = self.call_id_to_command.remove(&call_id);
let (duration, call) = if let Some(ExecCommandBegin { command, .. }) = exec_command
let (duration, call) = if let Some(ExecCommandBegin {
command,
start_time,
}) = exec_command
{
(
format!(" in {}", format_duration(duration)),
format!(" in {}", format_elapsed(start_time)),
format!("{}", escape_command(&command).style(self.bold)),
)
} else {
@@ -408,7 +403,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
stdout,
stderr,
success,
..
}) => {
let patch_begin = self.call_id_to_patch.remove(&call_id);
@@ -438,10 +432,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
println!("{}", line.style(self.dimmed));
}
}
EventMsg::TurnDiff(TurnDiffEvent { unified_diff }) => {
ts_println!(self, "{}", "turn diff:".style(self.magenta));
println!("{unified_diff}");
}
EventMsg::ExecApprovalRequest(_) => {
// Should we exit?
}

View File

@@ -46,9 +46,10 @@ impl EventProcessor for EventProcessorWithJsonOutput {
CodexStatus::Running
}
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
if let Some(output_file) = self.last_message_path.as_deref() {
handle_last_message(last_agent_message.as_deref(), output_file);
}
handle_last_message(
last_agent_message.as_deref(),
self.last_message_path.as_deref(),
);
CodexStatus::InitiateShutdown
}
EventMsg::ShutdownComplete => CodexStatus::Shutdown,

View File

@@ -26,7 +26,7 @@ multimap = "0.10.0"
path-absolutize = "3.1.1"
regex-lite = "0.1"
serde = { version = "1.0.194", features = ["derive"] }
serde_json = "1.0.142"
serde_json = "1.0.110"
serde_with = { version = "3", features = ["macros"] }
[dev-dependencies]

View File

@@ -17,5 +17,5 @@ clap = { version = "4", features = ["derive"] }
ignore = "0.4.23"
nucleo-matcher = "0.3.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.142"
serde_json = "1.0.110"
tokio = { version = "1", features = ["full"] }

View File

@@ -252,8 +252,7 @@ async fn run_codex_tool_session_inner(
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {
// TODO: think how we want to support this in the MCP
}
EventMsg::AgentReasoningContent(_)
| EventMsg::TaskStarted
EventMsg::TaskStarted
| EventMsg::TokenCount(_)
| EventMsg::AgentReasoning(_)
| EventMsg::McpToolCallBegin(_)
@@ -264,7 +263,6 @@ async fn run_codex_tool_session_inner(
| EventMsg::BackgroundEvent(_)
| EventMsg::PatchApplyBegin(_)
| EventMsg::PatchApplyEnd(_)
| EventMsg::TurnDiff(_)
| EventMsg::GetHistoryEntryResponse(_)
| EventMsg::PlanUpdate(_)
| EventMsg::ShutdownComplete => {

View File

@@ -90,15 +90,13 @@ pub async fn run_conversation_loop(
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {
// TODO: think how we want to support this in the MCP
}
EventMsg::AgentReasoningContent(_)
| EventMsg::TaskStarted
EventMsg::TaskStarted
| EventMsg::TokenCount(_)
| EventMsg::AgentReasoning(_)
| EventMsg::McpToolCallBegin(_)
| EventMsg::McpToolCallEnd(_)
| EventMsg::ExecCommandBegin(_)
| EventMsg::ExecCommandEnd(_)
| EventMsg::TurnDiff(_)
| EventMsg::BackgroundEvent(_)
| EventMsg::ExecCommandOutputDelta(_)
| EventMsg::PatchApplyBegin(_)

View File

@@ -18,7 +18,7 @@ use crate::codex_tool_runner::INVALID_PARAMS_ERROR_CODE;
/// Conforms to [`mcp_types::ElicitRequestParams`] so that it can be used as the
/// `params` field of an [`ElicitRequest`].
#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Serialize)]
pub struct ExecApprovalElicitRequestParams {
// These fields are required so that `params`
// conforms to ElicitRequestParams.

View File

@@ -89,18 +89,14 @@ async fn shell_command_approval_triggers_elicitation() -> anyhow::Result<()> {
// This is the first request from the server, so the id should be 0 given
// how things are currently implemented.
let elicitation_request_id = RequestId::Integer(0);
let params = serde_json::from_value::<ExecApprovalElicitRequestParams>(
elicitation_request
.params
.clone()
.ok_or_else(|| anyhow::anyhow!("elicitation_request.params must be set"))?,
)?;
let expected_elicitation_request = create_expected_elicitation_request(
elicitation_request_id.clone(),
shell_command.clone(),
workdir_for_shell_function_call.path(),
codex_request_id.to_string(),
params.codex_event_id.clone(),
// Internal Codex id: empirically it is 1, but this is
// admittedly an internal detail that could change.
"1".to_string(),
)?;
assert_eq!(expected_elicitation_request, elicitation_request);

View File

@@ -48,8 +48,6 @@ serde_json = { version = "1", features = ["preserve_order"] }
shlex = "1.3.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
textwrap = "0.16.2"
tokio = { version = "1", features = [
"io-std",
"macros",
@@ -62,6 +60,7 @@ tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
tui-input = "0.14.0"
tui-markdown = "0.3.3"
tui-textarea = "0.7.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.1"
uuid = "1"
@@ -71,5 +70,3 @@ uuid = "1"
[dev-dependencies]
insta = "1.43.1"
pretty_assertions = "1"
rand = "0.8"
chrono = { version = "0.4", features = ["serde"] }

View File

@@ -438,15 +438,14 @@ impl App<'_> {
);
self.pending_history_lines.clear();
}
terminal.draw(|frame| match &mut self.app_state {
match &mut self.app_state {
AppState::Chat { widget } => {
if let Some((x, y)) = widget.cursor_pos(frame.area()) {
frame.set_cursor_position((x, y));
}
frame.render_widget_ref(&**widget, frame.area())
terminal.draw(|frame| frame.render_widget_ref(&**widget, frame.area()))?;
}
AppState::GitWarning { screen } => frame.render_widget_ref(&*screen, frame.area()),
})?;
AppState::GitWarning { screen } => {
terminal.draw(|frame| frame.render_widget_ref(&*screen, frame.area()))?;
}
}
Ok(())
}

View File

@@ -1,11 +1,6 @@
use codex_core::protocol::TokenUsage;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
use ratatui::buffer::Buffer;
use ratatui::layout::Constraint;
use ratatui::layout::Layout;
use ratatui::layout::Margin;
use ratatui::layout::Rect;
use ratatui::style::Color;
use ratatui::style::Style;
@@ -13,11 +8,13 @@ use ratatui::style::Styled;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::widgets::Block;
use ratatui::widgets::BorderType;
use ratatui::widgets::Borders;
use ratatui::widgets::StatefulWidgetRef;
use ratatui::widgets::Widget;
use ratatui::widgets::WidgetRef;
use tui_textarea::Input;
use tui_textarea::Key;
use tui_textarea::TextArea;
use super::chat_composer_history::ChatComposerHistory;
use super::command_popup::CommandPopup;
@@ -25,10 +22,7 @@ use super::file_search_popup::FileSearchPopup;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::bottom_pane::textarea::TextArea;
use crate::bottom_pane::textarea::TextAreaState;
use codex_file_search::FileMatch;
use std::cell::RefCell;
const BASE_PLACEHOLDER_TEXT: &str = "...";
/// If the pasted content exceeds this number of characters, replace it with a
@@ -41,14 +35,8 @@ pub enum InputResult {
None,
}
struct TokenUsageInfo {
token_usage: TokenUsage,
model_context_window: Option<u64>,
}
pub(crate) struct ChatComposer {
textarea: TextArea,
textarea_state: RefCell<TextAreaState>,
pub(crate) struct ChatComposer<'a> {
textarea: TextArea<'a>,
active_popup: ActivePopup,
app_event_tx: AppEventSender,
history: ChatComposerHistory,
@@ -57,8 +45,6 @@ pub(crate) struct ChatComposer {
dismissed_file_popup_token: Option<String>,
current_file_query: Option<String>,
pending_pastes: Vec<(String, String)>,
token_usage_info: Option<TokenUsageInfo>,
has_focus: bool,
}
/// Popup state at most one can be visible at any time.
@@ -68,17 +54,20 @@ enum ActivePopup {
File(FileSearchPopup),
}
impl ChatComposer {
impl ChatComposer<'_> {
pub fn new(
has_input_focus: bool,
app_event_tx: AppEventSender,
enhanced_keys_supported: bool,
) -> Self {
let mut textarea = TextArea::default();
textarea.set_placeholder_text(BASE_PLACEHOLDER_TEXT);
textarea.set_cursor_line_style(ratatui::style::Style::default());
let use_shift_enter_hint = enhanced_keys_supported;
Self {
textarea: TextArea::new(),
textarea_state: RefCell::new(TextAreaState::default()),
let mut this = Self {
textarea,
active_popup: ActivePopup::None,
app_event_tx,
history: ChatComposerHistory::new(),
@@ -87,13 +76,13 @@ impl ChatComposer {
dismissed_file_popup_token: None,
current_file_query: None,
pending_pastes: Vec::new(),
token_usage_info: None,
has_focus: has_input_focus,
}
};
this.update_border(has_input_focus);
this
}
pub fn desired_height(&self, width: u16) -> u16 {
self.textarea.desired_height(width - 1)
pub fn desired_height(&self) -> u16 {
self.textarea.lines().len().max(1) as u16
+ match &self.active_popup {
ActivePopup::None => 1u16,
ActivePopup::Command(c) => c.calculate_required_height(),
@@ -101,21 +90,6 @@ impl ChatComposer {
}
}
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
let popup_height = match &self.active_popup {
ActivePopup::Command(popup) => popup.calculate_required_height(),
ActivePopup::File(popup) => popup.calculate_required_height(),
ActivePopup::None => 1,
};
let [textarea_rect, _] =
Layout::vertical([Constraint::Min(0), Constraint::Max(popup_height)]).areas(area);
let mut textarea_rect = textarea_rect;
textarea_rect.width = textarea_rect.width.saturating_sub(1);
textarea_rect.x += 1;
let state = self.textarea_state.borrow();
self.textarea.cursor_pos_with_state(textarea_rect, &state)
}
/// Returns true if the composer currently contains no user input.
pub(crate) fn is_empty(&self) -> bool {
self.textarea.is_empty()
@@ -129,10 +103,28 @@ impl ChatComposer {
token_usage: TokenUsage,
model_context_window: Option<u64>,
) {
self.token_usage_info = Some(TokenUsageInfo {
token_usage,
model_context_window,
});
let placeholder = match (token_usage.total_tokens, model_context_window) {
(total_tokens, Some(context_window)) => {
let percent_remaining: u8 = if context_window > 0 {
// Calculate the percentage of context left.
let percent = 100.0 - (total_tokens as f32 / context_window as f32 * 100.0);
percent.clamp(0.0, 100.0) as u8
} else {
// If we don't have a context window, we cannot compute the
// percentage.
100
};
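// Worked example (illustrative, not in the source): total_tokens = 30_000
// against a context_window of 120_000 gives 100 - 25 = 75, shown as
// "75% context left".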
// When https://github.com/openai/codex/issues/1257 is resolved,
// check if `percent_remaining < 25`, and if so, recommend
// /compact.
format!("{BASE_PLACEHOLDER_TEXT}{percent_remaining}% context left")
}
(total_tokens, None) => {
format!("{BASE_PLACEHOLDER_TEXT}{total_tokens} tokens used")
}
};
self.textarea.set_placeholder_text(placeholder);
}
/// Record the history metadata advertised by `SessionConfiguredEvent` so
@@ -150,12 +142,8 @@ impl ChatComposer {
offset: usize,
entry: Option<String>,
) -> bool {
let Some(text) = self.history.on_entry_response(log_id, offset, entry) else {
return false;
};
self.textarea.set_text(&text);
self.textarea.set_cursor(0);
true
self.history
.on_entry_response(log_id, offset, entry, &mut self.textarea)
}
pub fn handle_paste(&mut self, pasted: String) -> bool {
@@ -191,7 +179,7 @@ impl ChatComposer {
pub fn set_ctrl_c_quit_hint(&mut self, show: bool, has_focus: bool) {
self.ctrl_c_quit_hint = show;
self.set_has_focus(has_focus);
self.update_border(has_focus);
}
/// Handle a key event coming from the main UI.
@@ -219,47 +207,49 @@ impl ChatComposer {
unreachable!();
};
match key_event {
KeyEvent {
code: KeyCode::Up, ..
} => {
match key_event.into() {
Input { key: Key::Up, .. } => {
popup.move_up();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Down,
..
} => {
Input { key: Key::Down, .. } => {
popup.move_down();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Tab, ..
} => {
Input { key: Key::Tab, .. } => {
if let Some(cmd) = popup.selected_command() {
let first_line = self.textarea.text().lines().next().unwrap_or("");
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
let starts_with_cmd = first_line
.trim_start()
.starts_with(&format!("/{}", cmd.command()));
if !starts_with_cmd {
self.textarea.set_text(&format!("/{} ", cmd.command()));
self.textarea.select_all();
self.textarea.cut();
let _ = self.textarea.insert_str(format!("/{} ", cmd.command()));
}
}
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
..
Input {
key: Key::Enter,
shift: false,
alt: false,
ctrl: false,
} => {
if let Some(cmd) = popup.selected_command() {
// Send command to the app layer.
self.app_event_tx.send(AppEvent::DispatchCommand(*cmd));
// Clear textarea so no residual text remains.
self.textarea.set_text("");
self.textarea.select_all();
self.textarea.cut();
// Hide popup since the command has been dispatched.
self.active_popup = ActivePopup::None;
@@ -278,23 +268,16 @@ impl ChatComposer {
unreachable!();
};
match key_event {
KeyEvent {
code: KeyCode::Up, ..
} => {
match key_event.into() {
Input { key: Key::Up, .. } => {
popup.move_up();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Down,
..
} => {
Input { key: Key::Down, .. } => {
popup.move_down();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Esc, ..
} => {
Input { key: Key::Esc, .. } => {
// Hide popup without modifying text, remember token to avoid immediate reopen.
if let Some(tok) = Self::current_at_token(&self.textarea) {
self.dismissed_file_popup_token = Some(tok.to_string());
@@ -302,13 +285,12 @@ impl ChatComposer {
self.active_popup = ActivePopup::None;
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Tab, ..
}
| KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
..
Input { key: Key::Tab, .. }
| Input {
key: Key::Enter,
ctrl: false,
alt: false,
shift: false,
} => {
if let Some(sel) = popup.selected_match() {
let sel_path = sel.to_string();
@@ -333,89 +315,46 @@ impl ChatComposer {
/// - A token is delimited by ASCII whitespace (space, tab, newline).
/// - If the token under the cursor starts with `@` and contains at least
/// one additional character, that token (without `@`) is returned.
fn current_at_token(textarea: &TextArea) -> Option<String> {
let cursor_offset = textarea.cursor();
let text = textarea.text();
fn current_at_token(textarea: &tui_textarea::TextArea) -> Option<String> {
let (row, col) = textarea.cursor();
// Adjust the provided byte offset to the nearest valid char boundary at or before it.
let mut safe_cursor = cursor_offset.min(text.len());
// If we're not on a char boundary, move back to the start of the current char.
if safe_cursor < text.len() && !text.is_char_boundary(safe_cursor) {
// Find the last valid boundary <= cursor_offset.
safe_cursor = text
.char_indices()
.map(|(i, _)| i)
.take_while(|&i| i <= cursor_offset)
.last()
.unwrap_or(0);
}
// Guard against out-of-bounds rows.
let line = textarea.lines().get(row)?.as_str();
// Split the line around the (now safe) cursor position.
let before_cursor = &text[..safe_cursor];
let after_cursor = &text[safe_cursor..];
// Calculate byte offset for cursor position
let cursor_byte_offset = line.chars().take(col).map(|c| c.len_utf8()).sum::<usize>();
// Detect whether we're on whitespace at the cursor boundary.
let at_whitespace = if safe_cursor < text.len() {
text[safe_cursor..]
.chars()
.next()
.map(|c| c.is_whitespace())
.unwrap_or(false)
} else {
false
};
// Split the line at the cursor position so we can search for word
// boundaries on both sides.
let before_cursor = &line[..cursor_byte_offset];
let after_cursor = &line[cursor_byte_offset..];
// Left candidate: token containing the cursor position.
let start_left = before_cursor
// Find start index (first character **after** the previous multi-byte whitespace).
let start_idx = before_cursor
.char_indices()
.rfind(|(_, c)| c.is_whitespace())
.map(|(idx, c)| idx + c.len_utf8())
.unwrap_or(0);
let end_left_rel = after_cursor
// Find end index (first multi-byte whitespace **after** the cursor position).
let end_rel_idx = after_cursor
.char_indices()
.find(|(_, c)| c.is_whitespace())
.map(|(idx, _)| idx)
.unwrap_or(after_cursor.len());
let end_left = safe_cursor + end_left_rel;
let token_left = if start_left < end_left {
Some(&text[start_left..end_left])
let end_idx = cursor_byte_offset + end_rel_idx;
if start_idx >= end_idx {
return None;
}
let token = &line[start_idx..end_idx];
if token.starts_with('@') && token.len() > 1 {
Some(token[1..].to_string())
} else {
None
};
// Right candidate: token immediately after any whitespace from the cursor.
let ws_len_right: usize = after_cursor
.chars()
.take_while(|c| c.is_whitespace())
.map(|c| c.len_utf8())
.sum();
let start_right = safe_cursor + ws_len_right;
let end_right_rel = text[start_right..]
.char_indices()
.find(|(_, c)| c.is_whitespace())
.map(|(idx, _)| idx)
.unwrap_or(text.len() - start_right);
let end_right = start_right + end_right_rel;
let token_right = if start_right < end_right {
Some(&text[start_right..end_right])
} else {
None
};
let left_at = token_left
.filter(|t| t.starts_with('@') && t.len() > 1)
.map(|t| t[1..].to_string());
let right_at = token_right
.filter(|t| t.starts_with('@') && t.len() > 1)
.map(|t| t[1..].to_string());
if at_whitespace {
return right_at.or(left_at);
}
if after_cursor.starts_with('@') {
return right_at.or(left_at);
}
left_at.or(right_at)
}
/// Replace the active `@token` (the one under the cursor) with `path`.
@@ -424,73 +363,94 @@ impl ChatComposer {
/// where the cursor is within the token and regardless of how many
/// `@tokens` exist in the line.
fn insert_selected_path(&mut self, path: &str) {
let cursor_offset = self.textarea.cursor();
let text = self.textarea.text();
let (row, col) = self.textarea.cursor();
let before_cursor = &text[..cursor_offset];
let after_cursor = &text[cursor_offset..];
// Materialize the textarea lines so we can mutate them easily.
let mut lines: Vec<String> = self.textarea.lines().to_vec();
// Determine token boundaries.
let start_idx = before_cursor
.char_indices()
.rfind(|(_, c)| c.is_whitespace())
.map(|(idx, c)| idx + c.len_utf8())
.unwrap_or(0);
if let Some(line) = lines.get_mut(row) {
// Calculate byte offset for cursor position
let cursor_byte_offset = line.chars().take(col).map(|c| c.len_utf8()).sum::<usize>();
let end_rel_idx = after_cursor
.char_indices()
.find(|(_, c)| c.is_whitespace())
.map(|(idx, _)| idx)
.unwrap_or(after_cursor.len());
let end_idx = cursor_offset + end_rel_idx;
let before_cursor = &line[..cursor_byte_offset];
let after_cursor = &line[cursor_byte_offset..];
// Replace the slice `[start_idx, end_idx)` with the chosen path and a trailing space.
let mut new_text =
String::with_capacity(text.len() - (end_idx - start_idx) + path.len() + 1);
new_text.push_str(&text[..start_idx]);
new_text.push_str(path);
new_text.push(' ');
new_text.push_str(&text[end_idx..]);
// Determine token boundaries.
let start_idx = before_cursor
.char_indices()
.rfind(|(_, c)| c.is_whitespace())
.map(|(idx, c)| idx + c.len_utf8())
.unwrap_or(0);
self.textarea.set_text(&new_text);
let end_rel_idx = after_cursor
.char_indices()
.find(|(_, c)| c.is_whitespace())
.map(|(idx, _)| idx)
.unwrap_or(after_cursor.len());
let end_idx = cursor_byte_offset + end_rel_idx;
// Replace the slice `[start_idx, end_idx)` with the chosen path and a trailing space.
let mut new_line =
String::with_capacity(line.len() - (end_idx - start_idx) + path.len() + 1);
new_line.push_str(&line[..start_idx]);
new_line.push_str(path);
new_line.push(' ');
new_line.push_str(&line[end_idx..]);
*line = new_line;
// Re-populate the textarea.
let new_text = lines.join("\n");
self.textarea.select_all();
self.textarea.cut();
let _ = self.textarea.insert_str(new_text);
// Note: tui-textarea currently exposes only relative cursor
// movements. Leaving the cursor position unchanged is acceptable
// as subsequent typing will move the cursor naturally.
}
}
/// Handle key event when no popup is visible.
fn handle_key_event_without_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
match key_event {
let input: Input = key_event.into();
match input {
// -------------------------------------------------------------
// History navigation (Up / Down) only when the composer is not
// empty or when the cursor is at the correct position, to avoid
// interfering with normal cursor movement.
// -------------------------------------------------------------
KeyEvent {
code: KeyCode::Up | KeyCode::Down,
..
} => {
if self
.history
.should_handle_navigation(self.textarea.text(), self.textarea.cursor())
{
let replace_text = match key_event.code {
KeyCode::Up => self.history.navigate_up(&self.app_event_tx),
KeyCode::Down => self.history.navigate_down(&self.app_event_tx),
_ => unreachable!(),
};
if let Some(text) = replace_text {
self.textarea.set_text(&text);
self.textarea.set_cursor(0);
Input { key: Key::Up, .. } => {
if self.history.should_handle_navigation(&self.textarea) {
let consumed = self
.history
.navigate_up(&mut self.textarea, &self.app_event_tx);
if consumed {
return (InputResult::None, true);
}
}
self.handle_input_basic(key_event)
self.handle_input_basic(input)
}
KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
..
Input { key: Key::Down, .. } => {
if self.history.should_handle_navigation(&self.textarea) {
let consumed = self
.history
.navigate_down(&mut self.textarea, &self.app_event_tx);
if consumed {
return (InputResult::None, true);
}
}
self.handle_input_basic(input)
}
Input {
key: Key::Enter,
shift: false,
alt: false,
ctrl: false,
} => {
let mut text = self.textarea.text().to_string();
self.textarea.set_text("");
let mut text = self.textarea.lines().join("\n");
self.textarea.select_all();
self.textarea.cut();
// Replace all pending pastes in the text
for (placeholder, actual) in &self.pending_pastes {
@@ -507,15 +467,41 @@ impl ChatComposer {
(InputResult::Submitted(text), true)
}
}
Input {
key: Key::Enter, ..
}
| Input {
key: Key::Char('j'),
ctrl: true,
alt: false,
shift: false,
} => {
self.textarea.insert_newline();
(InputResult::None, true)
}
Input {
key: Key::Char('d'),
ctrl: true,
alt: false,
shift: false,
} => {
self.textarea.input(Input {
key: Key::Delete,
ctrl: false,
alt: false,
shift: false,
});
(InputResult::None, true)
}
input => self.handle_input_basic(input),
}
}
/// Handle generic Input events that modify the textarea content.
fn handle_input_basic(&mut self, input: KeyEvent) -> (InputResult, bool) {
fn handle_input_basic(&mut self, input: Input) -> (InputResult, bool) {
// Special handling for backspace on placeholders
if let KeyEvent {
code: KeyCode::Backspace,
if let Input {
key: Key::Backspace,
..
} = input
{
@@ -524,9 +510,20 @@ impl ChatComposer {
}
}
if let Input {
key: Key::Char('u'),
ctrl: true,
alt: false,
..
} = input
{
self.textarea.delete_line_by_head();
return (InputResult::None, true);
}
// Normal input handling
self.textarea.input(input);
let text_after = self.textarea.text();
let text_after = self.textarea.lines().join("\n");
// Check if any placeholders were removed and remove their corresponding pending pastes
self.pending_pastes
@@ -538,16 +535,21 @@ impl ChatComposer {
/// Attempts to remove a placeholder if the cursor is at the end of one.
/// Returns true if a placeholder was removed.
fn try_remove_placeholder_at_cursor(&mut self) -> bool {
let p = self.textarea.cursor();
let text = self.textarea.text();
let (row, col) = self.textarea.cursor();
let line = self
.textarea
.lines()
.get(row)
.map(|s| s.as_str())
.unwrap_or("");
// Find any placeholder that ends at the cursor position
let placeholder_to_remove = self.pending_pastes.iter().find_map(|(ph, _)| {
if p < ph.len() {
if col < ph.len() {
return None;
}
let potential_ph_start = p - ph.len();
if text[potential_ph_start..p] == *ph {
let potential_ph_start = col - ph.len();
if line[potential_ph_start..col] == *ph {
Some(ph.clone())
} else {
None
@@ -555,7 +557,17 @@ impl ChatComposer {
});
if let Some(placeholder) = placeholder_to_remove {
self.textarea.replace_range(p - placeholder.len()..p, "");
// Remove the entire placeholder from the text
let placeholder_len = placeholder.len();
for _ in 0..placeholder_len {
self.textarea.input(Input {
key: Key::Backspace,
ctrl: false,
alt: false,
shift: false,
});
}
// Remove from pending pastes
self.pending_pastes.retain(|(ph, _)| ph != &placeholder);
true
} else {
@@ -567,7 +579,16 @@ impl ChatComposer {
/// textarea. This must be called after every modification that can change
/// the text so the popup is shown/updated/hidden as appropriate.
fn sync_command_popup(&mut self) {
let first_line = self.textarea.text().lines().next().unwrap_or("");
// Inspect only the first line to decide whether to show the popup. In
// the common case (no leading slash) we avoid copying the entire
// textarea contents.
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
let input_starts_with_slash = first_line.starts_with('/');
match &mut self.active_popup {
ActivePopup::Command(popup) => {
@@ -623,29 +644,74 @@ impl ChatComposer {
self.dismissed_file_popup_token = None;
}
fn set_has_focus(&mut self, has_focus: bool) {
self.has_focus = has_focus;
fn update_border(&mut self, has_focus: bool) {
let border_style = if has_focus {
Style::default().fg(Color::Cyan)
} else {
Style::default().dim()
};
self.textarea.set_block(
ratatui::widgets::Block::default()
.borders(Borders::LEFT)
.border_type(BorderType::QuadrantOutside)
.border_style(border_style),
);
}
}
impl WidgetRef for &ChatComposer {
impl WidgetRef for &ChatComposer<'_> {
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
let popup_height = match &self.active_popup {
ActivePopup::Command(popup) => popup.calculate_required_height(),
ActivePopup::File(popup) => popup.calculate_required_height(),
ActivePopup::None => 1,
};
let [textarea_rect, popup_rect] =
Layout::vertical([Constraint::Min(0), Constraint::Max(popup_height)]).areas(area);
match &self.active_popup {
ActivePopup::Command(popup) => {
popup.render_ref(popup_rect, buf);
let popup_height = popup.calculate_required_height();
// Split the provided rect so that the popup is rendered at the
// **bottom** and the textarea occupies the remaining space above.
let popup_height = popup_height.min(area.height);
let textarea_rect = Rect {
x: area.x,
y: area.y,
width: area.width,
height: area.height.saturating_sub(popup_height),
};
let popup_rect = Rect {
x: area.x,
y: area.y + textarea_rect.height,
width: area.width,
height: popup_height,
};
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::File(popup) => {
popup.render_ref(popup_rect, buf);
let popup_height = popup.calculate_required_height();
let popup_height = popup_height.min(area.height);
let textarea_rect = Rect {
x: area.x,
y: area.y,
width: area.width,
height: area.height.saturating_sub(popup_height),
};
let popup_rect = Rect {
x: area.x,
y: area.y + textarea_rect.height,
width: area.width,
height: popup_height,
};
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::None => {
let bottom_line_rect = popup_rect;
let mut textarea_rect = area;
textarea_rect.height = textarea_rect.height.saturating_sub(1);
self.textarea.render(textarea_rect, buf);
let mut bottom_line_rect = area;
bottom_line_rect.y += textarea_rect.height;
bottom_line_rect.height = 1;
let key_hint_style = Style::default().fg(Color::Cyan);
let hint = if self.ctrl_c_quit_hint {
vec![
@@ -674,56 +740,6 @@ impl WidgetRef for &ChatComposer {
.render_ref(bottom_line_rect, buf);
}
}
Block::default()
.border_style(Style::default().dim())
.borders(Borders::LEFT)
.border_type(BorderType::QuadrantOutside)
.border_style(Style::default().fg(if self.has_focus {
Color::Cyan
} else {
Color::Gray
}))
.render_ref(
Rect::new(textarea_rect.x, textarea_rect.y, 1, textarea_rect.height),
buf,
);
let mut textarea_rect = textarea_rect;
textarea_rect.width = textarea_rect.width.saturating_sub(1);
textarea_rect.x += 1;
let mut state = self.textarea_state.borrow_mut();
StatefulWidgetRef::render_ref(&(&self.textarea), textarea_rect, buf, &mut state);
if self.textarea.text().is_empty() {
let placeholder = if let Some(token_usage_info) = &self.token_usage_info {
let token_usage = &token_usage_info.token_usage;
let model_context_window = token_usage_info.model_context_window;
match (token_usage.total_tokens, model_context_window) {
(total_tokens, Some(context_window)) => {
let percent_remaining: u8 = if context_window > 0 {
// Calculate the percentage of context left.
let percent =
100.0 - (total_tokens as f32 / context_window as f32 * 100.0);
percent.clamp(0.0, 100.0) as u8
} else {
// If we don't have a context window, we cannot compute the
// percentage.
100
};
// When https://github.com/openai/codex/issues/1257 is resolved,
// check if `percent_remaining < 25`, and if so, recommend
// /compact.
format!("{BASE_PLACEHOLDER_TEXT}{percent_remaining}% context left")
}
(total_tokens, None) => {
format!("{BASE_PLACEHOLDER_TEXT}{total_tokens} tokens used")
}
}
} else {
BASE_PLACEHOLDER_TEXT.to_string()
};
Line::from(placeholder)
.style(Style::default().dim())
.render_ref(textarea_rect.inner(Margin::new(1, 0)), buf);
}
}
}
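For reference, a minimal standalone sketch of the context-left placeholder math in the hunk above; the `TokenUsage` struct and the `base` prefix here are stand-ins for illustration, and only the percentage arithmetic mirrors the diff.

```rust
/// Stand-in for the token usage info carried by the composer; only the
/// arithmetic below is taken from the diff.
struct TokenUsage {
    total_tokens: u64,
}

fn placeholder(base: &str, usage: &TokenUsage, model_context_window: Option<u64>) -> String {
    match (usage.total_tokens, model_context_window) {
        (total_tokens, Some(context_window)) => {
            let percent_remaining: u8 = if context_window > 0 {
                // Percentage of the context window still available.
                let percent = 100.0 - (total_tokens as f32 / context_window as f32 * 100.0);
                percent.clamp(0.0, 100.0) as u8
            } else {
                // Without a window size the percentage cannot be computed.
                100
            };
            format!("{base}{percent_remaining}% context left")
        }
        (total_tokens, None) => format!("{base}{total_tokens} tokens used"),
    }
}

fn main() {
    let usage = TokenUsage { total_tokens: 32_000 };
    assert_eq!(
        placeholder("send a message - ", &usage, Some(128_000)),
        "send a message - 75% context left"
    );
}
```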
@@ -733,7 +749,7 @@ mod tests {
use crate::bottom_pane::ChatComposer;
use crate::bottom_pane::InputResult;
use crate::bottom_pane::chat_composer::LARGE_PASTE_CHAR_THRESHOLD;
use crate::bottom_pane::textarea::TextArea;
use tui_textarea::TextArea;
#[test]
fn test_current_at_token_basic_cases() {
@@ -776,9 +792,9 @@ mod tests {
];
for (input, cursor_pos, expected, description) in test_cases {
let mut textarea = TextArea::new();
let mut textarea = TextArea::default();
textarea.insert_str(input);
textarea.set_cursor(cursor_pos);
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
let result = ChatComposer::current_at_token(&textarea);
assert_eq!(
@@ -810,9 +826,9 @@ mod tests {
];
for (input, cursor_pos, expected, description) in test_cases {
let mut textarea = TextArea::new();
let mut textarea = TextArea::default();
textarea.insert_str(input);
textarea.set_cursor(cursor_pos);
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
let result = ChatComposer::current_at_token(&textarea);
assert_eq!(
@@ -847,13 +863,13 @@ mod tests {
// Full-width space boundaries
(
"test @İstanbul",
8,
6,
Some("İstanbul".to_string()),
"@ token after full-width space",
),
(
"@ЙЦУ @诶",
10,
6,
Some("".to_string()),
"Full-width space between Unicode tokens",
),
@@ -867,9 +883,9 @@ mod tests {
];
for (input, cursor_pos, expected, description) in test_cases {
let mut textarea = TextArea::new();
let mut textarea = TextArea::default();
textarea.insert_str(input);
textarea.set_cursor(cursor_pos);
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
let result = ChatComposer::current_at_token(&textarea);
assert_eq!(
@@ -891,7 +907,7 @@ mod tests {
let needs_redraw = composer.handle_paste("hello".to_string());
assert!(needs_redraw);
assert_eq!(composer.textarea.text(), "hello");
assert_eq!(composer.textarea.lines(), ["hello"]);
assert!(composer.pending_pastes.is_empty());
let (result, _) =
@@ -916,7 +932,7 @@ mod tests {
let needs_redraw = composer.handle_paste(large.clone());
assert!(needs_redraw);
let placeholder = format!("[Pasted Content {} chars]", large.chars().count());
assert_eq!(composer.textarea.text(), placeholder);
assert_eq!(composer.textarea.lines(), [placeholder.as_str()]);
assert_eq!(composer.pending_pastes.len(), 1);
assert_eq!(composer.pending_pastes[0].0, placeholder);
assert_eq!(composer.pending_pastes[0].1, large);
@@ -992,7 +1008,7 @@ mod tests {
composer.handle_paste("b".repeat(LARGE_PASTE_CHAR_THRESHOLD + 4));
composer.handle_paste("c".repeat(LARGE_PASTE_CHAR_THRESHOLD + 6));
// Move cursor to end and press backspace
composer.textarea.set_cursor(composer.textarea.text().len());
composer.textarea.move_cursor(tui_textarea::CursorMove::End);
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
}
@@ -1107,7 +1123,7 @@ mod tests {
current_pos += content.len();
}
(
composer.textarea.text().to_string(),
composer.textarea.lines().join("\n"),
composer.pending_pastes.len(),
current_pos,
)
@@ -1118,18 +1134,25 @@ mod tests {
let mut deletion_states = vec![];
// First deletion
composer.textarea.set_cursor(states[0].2);
composer
.textarea
.move_cursor(tui_textarea::CursorMove::Jump(0, states[0].2 as u16));
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
deletion_states.push((
composer.textarea.text().to_string(),
composer.textarea.lines().join("\n"),
composer.pending_pastes.len(),
));
// Second deletion
composer.textarea.set_cursor(composer.textarea.text().len());
composer
.textarea
.move_cursor(tui_textarea::CursorMove::Jump(
0,
composer.textarea.lines().join("\n").len() as u16,
));
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
deletion_states.push((
composer.textarea.text().to_string(),
composer.textarea.lines().join("\n"),
composer.pending_pastes.len(),
));
@@ -1168,13 +1191,17 @@ mod tests {
composer.handle_paste(paste.clone());
composer
.textarea
.set_cursor((placeholder.len() - pos_from_end) as usize);
.move_cursor(tui_textarea::CursorMove::Jump(
0,
(placeholder.len() - pos_from_end) as u16,
));
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
let result = (
composer.textarea.text().contains(&placeholder),
composer.textarea.lines().join("\n").contains(&placeholder),
composer.pending_pastes.len(),
);
composer.textarea.set_text("");
composer.textarea.select_all();
composer.textarea.cut();
result
})
.collect();
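A small sketch of the large-paste behaviour the tests above exercise, assuming a simplified buffer type: the threshold value is a placeholder, while the placeholder format string matches the one asserted in the tests. In the real widget `handle_paste` returns a redraw flag and writes into the textarea; here it returns the text that would be inserted.

```rust
/// Simplified composer state for illustration only.
struct PasteBuffer {
    pending_pastes: Vec<(String, String)>, // (placeholder, original text)
}

const LARGE_PASTE_CHAR_THRESHOLD: usize = 1_000; // assumed value

impl PasteBuffer {
    fn handle_paste(&mut self, pasted: String) -> String {
        let chars = pasted.chars().count();
        if chars > LARGE_PASTE_CHAR_THRESHOLD {
            // Large pastes are shown as a short placeholder; the original
            // text is kept so it can be substituted back later.
            let placeholder = format!("[Pasted Content {chars} chars]");
            self.pending_pastes.push((placeholder.clone(), pasted));
            placeholder
        } else {
            pasted
        }
    }
}

fn main() {
    let mut buf = PasteBuffer { pending_pastes: Vec::new() };
    let small = buf.handle_paste("hello".to_string());
    let large = buf.handle_paste("a".repeat(LARGE_PASTE_CHAR_THRESHOLD + 5));
    assert_eq!(small, "hello");
    assert_eq!(large, "[Pasted Content 1005 chars]");
    assert_eq!(buf.pending_pastes.len(), 1);
}
```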

View File

@@ -1,5 +1,8 @@
use std::collections::HashMap;
use tui_textarea::CursorMove;
use tui_textarea::TextArea;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use codex_core::protocol::Op;
@@ -64,52 +67,59 @@ impl ChatComposerHistory {
/// Should Up/Down key presses be interpreted as history navigation given
/// the current content and cursor position of `textarea`?
pub fn should_handle_navigation(&self, text: &str, cursor: usize) -> bool {
pub fn should_handle_navigation(&self, textarea: &TextArea) -> bool {
if self.history_entry_count == 0 && self.local_history.is_empty() {
return false;
}
if text.is_empty() {
if textarea.is_empty() {
return true;
}
// Textarea is not empty: only navigate when the cursor is at the start and
// the text matches the last recalled history entry, so regular editing is
// not hijacked.
if cursor != 0 {
let (row, col) = textarea.cursor();
if row != 0 || col != 0 {
return false;
}
matches!(&self.last_history_text, Some(prev) if prev == text)
let lines = textarea.lines();
matches!(&self.last_history_text, Some(prev) if prev == &lines.join("\n"))
}
/// Handle <Up>. Returns true when the key was consumed and the caller
/// should request a redraw.
pub fn navigate_up(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
pub fn navigate_up(&mut self, textarea: &mut TextArea, app_event_tx: &AppEventSender) -> bool {
let total_entries = self.history_entry_count + self.local_history.len();
if total_entries == 0 {
return None;
return false;
}
let next_idx = match self.history_cursor {
None => (total_entries as isize) - 1,
Some(0) => return None, // already at oldest
Some(0) => return true, // already at oldest
Some(idx) => idx - 1,
};
self.history_cursor = Some(next_idx);
self.populate_history_at_index(next_idx as usize, app_event_tx)
self.populate_history_at_index(next_idx as usize, textarea, app_event_tx);
true
}
/// Handle <Down>.
pub fn navigate_down(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
pub fn navigate_down(
&mut self,
textarea: &mut TextArea,
app_event_tx: &AppEventSender,
) -> bool {
let total_entries = self.history_entry_count + self.local_history.len();
if total_entries == 0 {
return None;
return false;
}
let next_idx_opt = match self.history_cursor {
None => return None, // not browsing
None => return false, // not browsing
Some(idx) if (idx as usize) + 1 >= total_entries => None,
Some(idx) => Some(idx + 1),
};
@@ -117,15 +127,16 @@ impl ChatComposerHistory {
match next_idx_opt {
Some(idx) => {
self.history_cursor = Some(idx);
self.populate_history_at_index(idx as usize, app_event_tx)
self.populate_history_at_index(idx as usize, textarea, app_event_tx);
}
None => {
// Past newest: clear and exit browsing mode.
self.history_cursor = None;
self.last_history_text = None;
Some(String::new())
self.replace_textarea_content(textarea, "");
}
}
true
}
/// Integrate a GetHistoryEntryResponse event.
@@ -134,18 +145,19 @@ impl ChatComposerHistory {
log_id: u64,
offset: usize,
entry: Option<String>,
) -> Option<String> {
textarea: &mut TextArea,
) -> bool {
if self.history_log_id != Some(log_id) {
return None;
return false;
}
let text = entry?;
let Some(text) = entry else { return false };
self.fetched_history.insert(offset, text.clone());
if self.history_cursor == Some(offset as isize) {
self.last_history_text = Some(text.clone());
return Some(text);
self.replace_textarea_content(textarea, &text);
return true;
}
None
false
}
// ---------------------------------------------------------------------
@@ -155,20 +167,21 @@ impl ChatComposerHistory {
fn populate_history_at_index(
&mut self,
global_idx: usize,
textarea: &mut TextArea,
app_event_tx: &AppEventSender,
) -> Option<String> {
) {
if global_idx >= self.history_entry_count {
// Local entry.
if let Some(text) = self
.local_history
.get(global_idx - self.history_entry_count)
{
self.last_history_text = Some(text.clone());
return Some(text.clone());
let t = text.clone();
self.replace_textarea_content(textarea, &t);
}
} else if let Some(text) = self.fetched_history.get(&global_idx) {
self.last_history_text = Some(text.clone());
return Some(text.clone());
let t = text.clone();
self.replace_textarea_content(textarea, &t);
} else if let Some(log_id) = self.history_log_id {
let op = Op::GetHistoryEntryRequest {
offset: global_idx,
@@ -176,7 +189,14 @@ impl ChatComposerHistory {
};
app_event_tx.send(AppEvent::CodexOp(op));
}
None
}
fn replace_textarea_content(&mut self, textarea: &mut TextArea, text: &str) {
textarea.select_all();
textarea.cut();
let _ = textarea.insert_str(text);
textarea.move_cursor(CursorMove::Jump(0, 0));
self.last_history_text = Some(text.to_string());
}
}
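A standalone usage sketch of the select/cut/insert replacement pattern used by `replace_textarea_content` above, built only from `tui_textarea` calls that appear in this diff; the helper name and the example strings are illustrative.

```rust
use tui_textarea::{CursorMove, TextArea};

/// Replace the whole buffer: select everything, cut it, insert the new
/// text, and park the cursor at the origin.
fn replace_content(textarea: &mut TextArea, text: &str) {
    textarea.select_all();
    textarea.cut();
    let _ = textarea.insert_str(text);
    textarea.move_cursor(CursorMove::Jump(0, 0));
}

fn main() {
    let mut textarea = TextArea::default();
    let _ = textarea.insert_str("draft message");
    replace_content(&mut textarea, "recalled history entry");
    assert_eq!(textarea.lines().join("\n"), "recalled history entry");
    assert_eq!(textarea.cursor(), (0, 0));
}
```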
@@ -197,9 +217,11 @@ mod tests {
// Pretend there are 3 persistent entries.
history.set_metadata(1, 3);
let mut textarea = TextArea::default();
// First Up should request offset 2 (latest) and await async data.
assert!(history.should_handle_navigation("", 0));
assert!(history.navigate_up(&tx).is_none()); // don't replace the text yet
assert!(history.should_handle_navigation(&textarea));
assert!(history.navigate_up(&mut textarea, &tx));
// Verify that an AppEvent::CodexOp with the correct GetHistoryEntryRequest was sent.
let event = rx.try_recv().expect("expected AppEvent to be sent");
@@ -213,15 +235,14 @@ mod tests {
},
history_request1
);
assert_eq!(textarea.lines().join("\n"), ""); // still empty
// Inject the async response.
assert_eq!(
Some("latest".into()),
history.on_entry_response(1, 2, Some("latest".into()))
);
assert!(history.on_entry_response(1, 2, Some("latest".into()), &mut textarea));
assert_eq!(textarea.lines().join("\n"), "latest");
// Next Up should move to offset 1.
assert!(history.navigate_up(&tx).is_none()); // don't replace the text yet
assert!(history.navigate_up(&mut textarea, &tx));
// Verify second CodexOp event for offset 1.
let event2 = rx.try_recv().expect("expected second event");
@@ -236,9 +257,7 @@ mod tests {
history_request_2
);
assert_eq!(
Some("older".into()),
history.on_entry_response(1, 1, Some("older".into()))
);
history.on_entry_response(1, 1, Some("older".into()), &mut textarea);
assert_eq!(textarea.lines().join("\n"), "older");
}
}

View File

@@ -19,7 +19,6 @@ mod chat_composer_history;
mod command_popup;
mod file_search_popup;
mod status_indicator_view;
mod textarea;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum CancellationEvent {
@@ -37,7 +36,7 @@ use status_indicator_view::StatusIndicatorView;
pub(crate) struct BottomPane<'a> {
/// Composer is retained even when a BottomPaneView is displayed so the
/// input state is retained when the view is closed.
composer: ChatComposer,
composer: ChatComposer<'a>,
/// If present, this is displayed instead of the `composer`.
active_view: Option<Box<dyn BottomPaneView<'a> + 'a>>,
@@ -75,19 +74,7 @@ impl BottomPane<'_> {
self.active_view
.as_ref()
.map(|v| v.desired_height(width))
.unwrap_or(self.composer.desired_height(width))
}
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
// Hide the cursor whenever an overlay view is active (e.g. the
// status indicator shown while a task is running, or approval modal).
// In these states the textarea is not interactable, so we should not
// show its caret.
if self.active_view.is_some() {
None
} else {
self.composer.cursor_pos(area)
}
.unwrap_or(self.composer.desired_height())
}
/// Forward a key event to the active view or the composer.

File diff suppressed because it is too large

View File

@@ -1,6 +1,7 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use codex_core::codex_wrapper::CodexConversation;
use codex_core::codex_wrapper::init_codex;
@@ -389,7 +390,6 @@ impl ChatWidget<'_> {
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
call_id,
exit_code,
duration,
stdout,
stderr,
}) => {
@@ -400,7 +400,7 @@ impl ChatWidget<'_> {
exit_code,
stdout,
stderr,
duration,
duration: Duration::from_secs(0),
},
));
}
@@ -509,10 +509,6 @@ impl ChatWidget<'_> {
self.bottom_pane
.set_token_usage(self.token_usage.clone(), self.config.model_context_window);
}
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
self.bottom_pane.cursor_pos(area)
}
}
impl WidgetRef for &ChatWidget<'_> {

View File

@@ -57,7 +57,7 @@ impl StatusIndicatorWidget {
thread::spawn(move || {
let mut counter = 0usize;
while running_clone.load(Ordering::Relaxed) {
std::thread::sleep(Duration::from_millis(100));
std::thread::sleep(Duration::from_millis(200));
counter = counter.wrapping_add(1);
frame_idx_clone.store(counter, Ordering::Relaxed);
app_event_tx_clone.send(AppEvent::RequestRedraw);
@@ -98,51 +98,46 @@ impl WidgetRef for StatusIndicatorWidget {
.borders(Borders::LEFT)
.border_type(BorderType::QuadrantOutside)
.border_style(widget_style.dim());
// Animated 3-dot pattern inside brackets. The *active* dot is bold
// white, the others are dim.
const DOT_COUNT: usize = 3;
let idx = self.frame_idx.load(std::sync::atomic::Ordering::Relaxed);
let header_text = "Working";
let header_chars: Vec<char> = header_text.chars().collect();
let padding = 4usize; // virtual padding around the word for smoother loop
let period = header_chars.len() + padding * 2;
let pos = idx % period;
let has_true_color = supports_color::on_cached(supports_color::Stream::Stdout)
.map(|level| level.has_16m)
.unwrap_or(false);
// Width of the bright band (in characters).
let band_half_width = 2.0;
let phase = idx % (DOT_COUNT * 2 - 2);
let active = if phase < DOT_COUNT {
phase
} else {
(DOT_COUNT * 2 - 2) - phase
};
let mut header_spans: Vec<Span<'static>> = Vec::new();
for (i, ch) in header_chars.iter().enumerate() {
let i_pos = i as isize + padding as isize;
let pos = pos as isize;
let dist = (i_pos - pos).abs() as f32;
let t = if dist <= band_half_width {
let x = std::f32::consts::PI * (dist / band_half_width);
0.5 * (1.0 + x.cos())
} else {
0.0
};
header_spans.push(Span::styled(
"Working ",
Style::default()
.fg(Color::White)
.add_modifier(Modifier::BOLD),
));
let brightness = 0.4 + 0.6 * t;
let level = (brightness * 255.0).clamp(0.0, 255.0) as u8;
let style = if has_true_color {
header_spans.push(Span::styled(
"[",
Style::default()
.fg(Color::White)
.add_modifier(Modifier::BOLD),
));
for i in 0..DOT_COUNT {
let style = if i == active {
Style::default()
.fg(Color::Rgb(level, level, level))
.fg(Color::White)
.add_modifier(Modifier::BOLD)
} else {
// Bold makes dark gray and gray look the same, so don't use it
// when true color is not supported.
Style::default().fg(color_for_level(level))
Style::default().dim()
};
header_spans.push(Span::styled(ch.to_string(), style));
header_spans.push(Span::styled(".", style));
}
header_spans.push(Span::styled(
" ",
"] ",
Style::default()
.fg(Color::White)
.add_modifier(Modifier::BOLD),
@@ -194,13 +189,3 @@ impl WidgetRef for StatusIndicatorWidget {
paragraph.render_ref(area, buf);
}
}
fn color_for_level(level: u8) -> Color {
if level < 128 {
Color::DarkGray
} else if level < 192 {
Color::Gray
} else {
Color::White
}
}
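A self-contained sketch of the raised-cosine shimmer math from the status indicator hunk above; the constants mirror the diff, but the function is illustrative rather than the widget's actual API.

```rust
// A bright band sweeps across the header text; virtual padding on both
// sides of the word makes the loop wrap smoothly.
fn shimmer_levels(header: &str, frame_idx: usize) -> Vec<u8> {
    let chars: Vec<char> = header.chars().collect();
    let padding = 4usize;
    let period = chars.len() + padding * 2;
    let pos = (frame_idx % period) as isize;
    let band_half_width = 2.0_f32;

    chars
        .iter()
        .enumerate()
        .map(|(i, _)| {
            let dist = ((i + padding) as isize - pos).abs() as f32;
            // Raised-cosine falloff inside the band, zero outside it.
            let t = if dist <= band_half_width {
                let x = std::f32::consts::PI * (dist / band_half_width);
                0.5 * (1.0 + x.cos())
            } else {
                0.0
            };
            let brightness = 0.4 + 0.6 * t;
            (brightness * 255.0).clamp(0.0, 255.0) as u8
        })
        .collect()
}

fn main() {
    // Frame 6 centres the bright band over the third character of "Working".
    println!("{:?}", shimmer_levels("Working", 6));
}
```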