Mirror of https://github.com/openai/codex.git
Synced 2026-02-02 23:13:37 +00:00

Compare commits
2 Commits: fix_compac ... patch-guar
| Author | SHA1 | Date |
|---|---|---|
| | 27018edc50 | |
| | 0144fb4fab | |
1 .gitignore (vendored)
@@ -30,7 +30,6 @@ result
# cli tools
CLAUDE.md
.claude/
AGENTS.override.md

# caches
.cache/
2 codex-rs/Cargo.lock (generated)
@@ -1051,6 +1051,7 @@ dependencies = [
"escargot",
"eventsource-stream",
"futures",
"ignore",
"indexmap 2.10.0",
"landlock",
"libc",
@@ -1069,6 +1070,7 @@ dependencies = [
"serde_json",
"serial_test",
"sha1",
"sha2",
"shlex",
"similar",
"strum_macros 0.27.2",
@@ -9,7 +9,6 @@ use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
@@ -698,7 +697,6 @@ pub struct ExecCommandApprovalParams {
|
||||
pub cwd: PathBuf,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reason: Option<String>,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
||||
@@ -906,9 +904,6 @@ mod tests {
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
@@ -925,12 +920,6 @@ mod tests {
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
|
||||
@@ -1284,7 +1284,6 @@ async fn apply_bespoke_event_handling(
|
||||
command,
|
||||
cwd,
|
||||
reason,
|
||||
parsed_cmd,
|
||||
}) => {
|
||||
let params = ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
@@ -1292,7 +1291,6 @@ async fn apply_bespoke_event_handling(
|
||||
command,
|
||||
cwd,
|
||||
reason,
|
||||
parsed_cmd,
|
||||
};
|
||||
let rx = outgoing
|
||||
.send_request(ServerRequestPayload::ExecCommandApproval(params))
|
||||
|
||||
@@ -27,7 +27,6 @@ use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::InputMessageKind;
|
||||
@@ -312,9 +311,6 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
],
|
||||
cwd: working_directory.clone(),
|
||||
reason: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "python3 -c 'print(42)'".to_string()
|
||||
}],
|
||||
},
|
||||
params
|
||||
);
|
||||
|
||||
@@ -33,6 +33,7 @@ env-flags = { workspace = true }
eventsource-stream = { workspace = true }
futures = { workspace = true }
indexmap = { workspace = true }
ignore = { workspace = true }
libc = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
@@ -43,6 +44,7 @@ reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
similar = { workspace = true }
strum_macros = { workspace = true }
168 codex-rs/core/src/codebase_change_notice.rs (Normal file)
@@ -0,0 +1,168 @@
|
||||
use std::fmt::Write;
|
||||
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
use crate::codebase_snapshot::SnapshotDiff;
|
||||
|
||||
pub(crate) const CODEBASE_CHANGE_NOTICE_MAX_PATHS: usize = 40;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct CodebaseChangeNotice {
|
||||
added: Vec<String>,
|
||||
removed: Vec<String>,
|
||||
modified: Vec<String>,
|
||||
truncated: bool,
|
||||
}
|
||||
|
||||
impl CodebaseChangeNotice {
|
||||
pub(crate) fn new(diff: SnapshotDiff, limit: usize) -> Self {
|
||||
let mut remaining = limit;
|
||||
let mut truncated = false;
|
||||
|
||||
let added = take_paths(diff.added, &mut remaining, &mut truncated);
|
||||
let removed = take_paths(diff.removed, &mut remaining, &mut truncated);
|
||||
let modified = take_paths(diff.modified, &mut remaining, &mut truncated);
|
||||
|
||||
Self {
|
||||
added,
|
||||
removed,
|
||||
modified,
|
||||
truncated,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
self.added.is_empty() && self.removed.is_empty() && self.modified.is_empty()
|
||||
}
|
||||
|
||||
pub(crate) fn serialize_to_xml(&self) -> String {
|
||||
let mut output = String::new();
|
||||
if self.truncated {
|
||||
let _ = writeln!(output, "<codebase_changes truncated=\"true\">");
|
||||
} else {
|
||||
let _ = writeln!(output, "<codebase_changes>");
|
||||
}
|
||||
|
||||
let mut summary_parts = Vec::new();
|
||||
if !self.added.is_empty() {
|
||||
summary_parts.push(format!("added {}", self.added.len()));
|
||||
}
|
||||
if !self.removed.is_empty() {
|
||||
summary_parts.push(format!("removed {}", self.removed.len()));
|
||||
}
|
||||
if !self.modified.is_empty() {
|
||||
summary_parts.push(format!("modified {}", self.modified.len()));
|
||||
}
|
||||
|
||||
if summary_parts.is_empty() {
|
||||
let _ = writeln!(output, " <summary>no changes</summary>");
|
||||
} else {
|
||||
let summary = summary_parts.join(", ");
|
||||
let _ = writeln!(output, " <summary>{summary}</summary>");
|
||||
}
|
||||
|
||||
serialize_section(&mut output, "added", &self.added);
|
||||
serialize_section(&mut output, "removed", &self.removed);
|
||||
serialize_section(&mut output, "modified", &self.modified);
|
||||
if self.truncated {
|
||||
let _ = writeln!(output, " <note>additional paths omitted</note>");
|
||||
}
|
||||
|
||||
let _ = writeln!(output, "</codebase_changes>");
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
fn take_paths(mut paths: Vec<String>, remaining: &mut usize, truncated: &mut bool) -> Vec<String> {
|
||||
if *remaining == 0 {
|
||||
if !paths.is_empty() {
|
||||
*truncated = true;
|
||||
}
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
if paths.len() > *remaining {
|
||||
paths.truncate(*remaining);
|
||||
*truncated = true;
|
||||
}
|
||||
|
||||
*remaining -= paths.len();
|
||||
paths
|
||||
}
|
||||
|
||||
fn serialize_section(output: &mut String, tag: &str, paths: &[String]) {
|
||||
if paths.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let _ = writeln!(output, " <{tag}>");
|
||||
for path in paths {
|
||||
let _ = writeln!(output, " <path>{}</path>", escape_xml(path));
|
||||
}
|
||||
let _ = writeln!(output, " </{tag}>");
|
||||
}
|
||||
|
||||
fn escape_xml(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&apos;"),
            other => escaped.push(other),
        }
    }
    escaped
}
|
||||
|
||||
impl From<CodebaseChangeNotice> for ResponseItem {
|
||||
fn from(notice: CodebaseChangeNotice) -> Self {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: notice.serialize_to_xml(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn constructs_notice_with_limit() {
|
||||
let diff = SnapshotDiff {
|
||||
added: vec!["a.rs".to_string(), "b.rs".to_string()],
|
||||
removed: vec!["c.rs".to_string()],
|
||||
modified: vec!["d.rs".to_string(), "e.rs".to_string()],
|
||||
};
|
||||
|
||||
let notice = CodebaseChangeNotice::new(diff, 3);
|
||||
assert!(notice.truncated);
|
||||
assert_eq!(
|
||||
notice.added.len() + notice.removed.len() + notice.modified.len(),
|
||||
3
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serializes_notice() {
|
||||
let diff = SnapshotDiff {
|
||||
added: vec!["src/lib.rs".to_string()],
|
||||
removed: Vec::new(),
|
||||
modified: vec!["src/main.rs".to_string()],
|
||||
};
|
||||
let notice = CodebaseChangeNotice::new(diff, CODEBASE_CHANGE_NOTICE_MAX_PATHS);
|
||||
let xml = notice.serialize_to_xml();
|
||||
assert!(xml.contains("<added>"));
|
||||
assert!(xml.contains("<modified>"));
|
||||
assert!(xml.contains("src/lib.rs"));
|
||||
assert!(xml.contains("src/main.rs"));
|
||||
}
|
||||
}
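
For reference, a rough sketch of what serialize_to_xml emits for the serializes_notice case above (illustrative, derived from the code in this file; exact whitespace may differ):

let diff = SnapshotDiff {
    added: vec!["src/lib.rs".to_string()],
    removed: Vec::new(),
    modified: vec!["src/main.rs".to_string()],
};
let xml = CodebaseChangeNotice::new(diff, CODEBASE_CHANGE_NOTICE_MAX_PATHS).serialize_to_xml();
// xml is roughly:
// <codebase_changes>
//  <summary>added 1, modified 1</summary>
//  <added>
//   <path>src/lib.rs</path>
//  </added>
//  <modified>
//   <path>src/main.rs</path>
//  </modified>
// </codebase_changes>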
|
||||
278 codex-rs/core/src/codebase_snapshot.rs (Normal file)
@@ -0,0 +1,278 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use ignore::WalkBuilder;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
use tokio::task;
|
||||
use tracing::warn;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct CodebaseSnapshot {
|
||||
root: PathBuf,
|
||||
entries: BTreeMap<String, EntryFingerprint>,
|
||||
root_digest: DigestBytes,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct EntryFingerprint {
|
||||
pub kind: EntryKind,
|
||||
pub digest: DigestBytes,
|
||||
pub size: u64,
|
||||
pub modified_millis: Option<u128>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub(crate) enum EntryKind {
|
||||
File,
|
||||
Symlink,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Default)]
|
||||
pub(crate) struct SnapshotDiff {
|
||||
pub added: Vec<String>,
|
||||
pub removed: Vec<String>,
|
||||
pub modified: Vec<String>,
|
||||
}
|
||||
|
||||
impl SnapshotDiff {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.added.is_empty() && self.removed.is_empty() && self.modified.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type DigestBytes = [u8; 32];
|
||||
|
||||
impl CodebaseSnapshot {
|
||||
pub(crate) async fn capture(root: PathBuf) -> Result<Self> {
|
||||
task::spawn_blocking(move || Self::from_disk(&root))
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("codebase snapshot task failed: {e}"))?
|
||||
}
|
||||
|
||||
pub(crate) fn from_disk(root: &Path) -> Result<Self> {
|
||||
if !root.exists() {
|
||||
return Ok(Self::empty(root));
|
||||
}
|
||||
|
||||
let mut entries: BTreeMap<String, EntryFingerprint> = BTreeMap::new();
|
||||
|
||||
let mut walker = WalkBuilder::new(root);
|
||||
walker
|
||||
.hidden(false)
|
||||
.git_ignore(true)
|
||||
.git_exclude(true)
|
||||
.parents(true)
|
||||
.ignore(true)
|
||||
.follow_links(false);
|
||||
|
||||
for result in walker.build() {
|
||||
let entry = match result {
|
||||
Ok(entry) => entry,
|
||||
Err(err) => {
|
||||
warn!("codebase snapshot failed to read entry: {err}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let path = entry.path();
|
||||
if entry.depth() == 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let relative = match path.strip_prefix(root) {
|
||||
Ok(rel) => rel,
|
||||
Err(_) => continue,
|
||||
};
|
||||
if relative.as_os_str().is_empty() {
|
||||
continue;
|
||||
}
|
||||
let rel_string = normalize_rel_path(relative);
|
||||
|
||||
let file_type = match entry.file_type() {
|
||||
Some(file_type) => file_type,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
if file_type.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if file_type.is_file() {
|
||||
match fingerprint_file(path) {
|
||||
Ok(fp) => {
|
||||
entries.insert(rel_string, fp);
|
||||
}
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"codebase snapshot failed to hash file {}: {err}",
|
||||
path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if file_type.is_symlink() {
|
||||
match fingerprint_symlink(path) {
|
||||
Ok(fp) => {
|
||||
entries.insert(rel_string, fp);
|
||||
}
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"codebase snapshot failed to hash symlink {}: {err}",
|
||||
path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let root_digest = compute_root_digest(&entries);
|
||||
|
||||
Ok(Self {
|
||||
root: root.to_path_buf(),
|
||||
entries,
|
||||
root_digest,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn diff(&self, newer: &CodebaseSnapshot) -> SnapshotDiff {
|
||||
let mut diff = SnapshotDiff::default();
|
||||
|
||||
for (path, fingerprint) in &newer.entries {
|
||||
match self.entries.get(path) {
|
||||
None => diff.added.push(path.clone()),
|
||||
Some(existing) if existing != fingerprint => diff.modified.push(path.clone()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
for path in self.entries.keys() {
|
||||
if !newer.entries.contains_key(path) {
|
||||
diff.removed.push(path.clone());
|
||||
}
|
||||
}
|
||||
|
||||
diff
|
||||
}
|
||||
|
||||
pub(crate) fn root(&self) -> &Path {
|
||||
&self.root
|
||||
}
|
||||
|
||||
fn empty(root: &Path) -> Self {
|
||||
Self {
|
||||
root: root.to_path_buf(),
|
||||
entries: BTreeMap::new(),
|
||||
root_digest: Sha256::digest(b"").into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fingerprint_file(path: &Path) -> Result<EntryFingerprint> {
|
||||
let metadata = path
|
||||
.metadata()
|
||||
.with_context(|| format!("metadata {}", path.display()))?;
|
||||
let mut file = File::open(path).with_context(|| format!("open {}", path.display()))?;
|
||||
|
||||
let mut hasher = Sha256::new();
|
||||
let mut buf = [0u8; 64 * 1024];
|
||||
loop {
|
||||
let read = file.read(&mut buf)?;
|
||||
if read == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buf[..read]);
|
||||
}
|
||||
|
||||
Ok(EntryFingerprint {
|
||||
kind: EntryKind::File,
|
||||
digest: hasher.finalize().into(),
|
||||
size: metadata.len(),
|
||||
modified_millis: metadata.modified().ok().and_then(system_time_to_millis),
|
||||
})
|
||||
}
|
||||
|
||||
fn fingerprint_symlink(path: &Path) -> Result<EntryFingerprint> {
|
||||
let target =
|
||||
std::fs::read_link(path).with_context(|| format!("read_link {}", path.display()))?;
|
||||
let mut hasher = Sha256::new();
|
||||
let target_str = normalize_rel_path(&target);
|
||||
hasher.update(target_str.as_bytes());
|
||||
Ok(EntryFingerprint {
|
||||
kind: EntryKind::Symlink,
|
||||
digest: hasher.finalize().into(),
|
||||
size: 0,
|
||||
modified_millis: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn compute_root_digest(entries: &BTreeMap<String, EntryFingerprint>) -> DigestBytes {
|
||||
let mut hasher = Sha256::new();
|
||||
for (path, fingerprint) in entries {
|
||||
hasher.update(path.as_bytes());
|
||||
hasher.update(fingerprint.digest);
|
||||
hasher.update([fingerprint.kind as u8]);
|
||||
hasher.update(fingerprint.size.to_le_bytes());
|
||||
if let Some(modified) = fingerprint.modified_millis {
|
||||
hasher.update(modified.to_le_bytes());
|
||||
}
|
||||
}
|
||||
hasher.finalize().into()
|
||||
}
|
||||
|
||||
fn normalize_rel_path(path: &Path) -> String {
|
||||
let s = path_to_cow(path);
|
||||
if s.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
s.replace('\\', "/")
|
||||
}
|
||||
}
|
||||
|
||||
fn path_to_cow(path: &Path) -> Cow<'_, str> {
|
||||
path.to_string_lossy()
|
||||
}
|
||||
|
||||
fn system_time_to_millis(ts: SystemTime) -> Option<u128> {
|
||||
ts.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.map(|duration| duration.as_millis())
|
||||
.ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[test]
|
||||
fn diff_tracks_added_modified_removed() {
|
||||
let dir = tempdir().unwrap();
|
||||
let root = dir.path();
|
||||
|
||||
std::fs::write(root.join("file_a.txt"), "alpha").unwrap();
|
||||
std::fs::write(root.join("file_b.txt"), "bravo").unwrap();
|
||||
let snapshot_one = CodebaseSnapshot::from_disk(root).unwrap();
|
||||
|
||||
std::fs::write(root.join("file_a.txt"), "alpha-updated").unwrap();
|
||||
std::fs::remove_file(root.join("file_b.txt")).unwrap();
|
||||
std::fs::write(root.join("file_c.txt"), "charlie").unwrap();
|
||||
let snapshot_two = CodebaseSnapshot::from_disk(root).unwrap();
|
||||
|
||||
let diff = snapshot_one.diff(&snapshot_two);
|
||||
assert_eq!(diff.added, vec!["file_c.txt".to_string()]);
|
||||
assert_eq!(diff.modified, vec!["file_a.txt".to_string()]);
|
||||
assert_eq!(diff.removed, vec!["file_b.txt".to_string()]);
|
||||
}
|
||||
}
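
A minimal sketch of how these crate-internal pieces compose (illustrative only; it mirrors emit_codebase_delta_if_changed later in this compare, and the /repo path is hypothetical):

use std::path::Path;

fn report_changes() -> anyhow::Result<()> {
    // Fingerprint the tree before and after some work happens on disk.
    let before = CodebaseSnapshot::from_disk(Path::new("/repo"))?;
    // ... files change on disk ...
    let after = CodebaseSnapshot::from_disk(Path::new("/repo"))?;

    // Diff the two snapshots and, if anything changed, render the XML notice.
    let delta = before.diff(&after);
    if !delta.is_empty() {
        let notice = CodebaseChangeNotice::new(delta, CODEBASE_CHANGE_NOTICE_MAX_PATHS);
        println!("{}", notice.serialize_to_xml());
    }
    Ok(())
}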
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::Debug;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicU64;
|
||||
@@ -43,6 +44,9 @@ use crate::apply_patch::convert_apply_patch_to_protocol;
|
||||
use crate::client::ModelClient;
|
||||
use crate::client_common::Prompt;
|
||||
use crate::client_common::ResponseEvent;
|
||||
use crate::codebase_change_notice::CODEBASE_CHANGE_NOTICE_MAX_PATHS;
|
||||
use crate::codebase_change_notice::CodebaseChangeNotice;
|
||||
use crate::codebase_snapshot::CodebaseSnapshot;
|
||||
use crate::config::Config;
|
||||
use crate::config_types::ShellEnvironmentPolicy;
|
||||
use crate::conversation_history::ConversationHistory;
|
||||
@@ -620,7 +624,6 @@ impl Session {
|
||||
warn!("Overwriting existing pending approval for sub_id: {event_id}");
|
||||
}
|
||||
|
||||
let parsed_cmd = parse_command(&command);
|
||||
let event = Event {
|
||||
id: event_id,
|
||||
msg: EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
@@ -628,7 +631,6 @@ impl Session {
|
||||
command,
|
||||
cwd,
|
||||
reason,
|
||||
parsed_cmd,
|
||||
}),
|
||||
};
|
||||
self.send_event(event).await;
|
||||
@@ -748,6 +750,73 @@ impl Session {
|
||||
self.persist_rollout_items(&rollout_items).await;
|
||||
}
|
||||
|
||||
async fn stored_snapshot_for_root(&self, root: &Path) -> Option<CodebaseSnapshot> {
|
||||
let state = self.state.lock().await;
|
||||
state
|
||||
.codebase_snapshot
|
||||
.as_ref()
|
||||
.filter(|snapshot| snapshot.root() == root)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
async fn set_codebase_snapshot(&self, snapshot: CodebaseSnapshot) {
|
||||
let mut state = self.state.lock().await;
|
||||
state.codebase_snapshot = Some(snapshot);
|
||||
}
|
||||
|
||||
pub(crate) async fn emit_codebase_delta_if_changed(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
sub_id: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let cwd = turn_context.cwd.clone();
|
||||
let previous = self.stored_snapshot_for_root(&cwd).await;
|
||||
let latest = CodebaseSnapshot::capture(cwd.clone()).await?;
|
||||
|
||||
if let Some(previous_snapshot) = previous {
|
||||
let diff = previous_snapshot.diff(&latest);
|
||||
if diff.is_empty() {
|
||||
self.set_codebase_snapshot(latest).await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let notice = CodebaseChangeNotice::new(diff, CODEBASE_CHANGE_NOTICE_MAX_PATHS);
|
||||
if notice.is_empty() {
|
||||
self.set_codebase_snapshot(latest).await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let response_item: ResponseItem = notice.into();
|
||||
self.record_conversation_items(std::slice::from_ref(&response_item))
|
||||
.await;
|
||||
|
||||
for msg in
|
||||
map_response_item_to_event_messages(&response_item, self.show_raw_agent_reasoning())
|
||||
{
|
||||
let event = Event {
|
||||
id: sub_id.to_string(),
|
||||
msg,
|
||||
};
|
||||
self.send_event(event).await;
|
||||
}
|
||||
|
||||
self.set_codebase_snapshot(latest).await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.set_codebase_snapshot(latest).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn refresh_codebase_snapshot(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
) -> anyhow::Result<()> {
|
||||
let snapshot = CodebaseSnapshot::capture(turn_context.cwd.clone()).await?;
|
||||
self.set_codebase_snapshot(snapshot).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn build_initial_context(&self, turn_context: &TurnContext) -> Vec<ResponseItem> {
|
||||
let mut items = Vec::<ResponseItem>::with_capacity(2);
|
||||
if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
|
||||
@@ -884,7 +953,10 @@ impl Session {
|
||||
call_id,
|
||||
command: command_for_display.clone(),
|
||||
cwd,
|
||||
parsed_cmd: parse_command(&command_for_display),
|
||||
parsed_cmd: parse_command(&command_for_display)
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
}),
|
||||
};
|
||||
let event = Event {
|
||||
@@ -1677,6 +1749,14 @@ pub(crate) async fn run_task(
|
||||
.await;
|
||||
}
|
||||
|
||||
if !is_review_mode
|
||||
&& let Err(err) = sess
|
||||
.emit_codebase_delta_if_changed(turn_context.as_ref(), &sub_id)
|
||||
.await
|
||||
{
|
||||
warn!(error = ?err, "failed to compute codebase changes");
|
||||
}
|
||||
|
||||
let mut last_agent_message: Option<String> = None;
|
||||
// Although from the perspective of codex.rs, TurnDiffTracker has the lifecycle of a Task which contains
|
||||
// many turns, from the perspective of the user, it is a single turn.
|
||||
@@ -1910,6 +1990,11 @@ pub(crate) async fn run_task(
|
||||
}
|
||||
}
|
||||
|
||||
if !is_review_mode && let Err(err) = sess.refresh_codebase_snapshot(turn_context.as_ref()).await
|
||||
{
|
||||
warn!(error = ?err, "failed to refresh codebase snapshot");
|
||||
}
|
||||
|
||||
// If this was a review thread and we have a final assistant message,
|
||||
// try to parse it as a ReviewOutput.
|
||||
//
|
||||
|
||||
@@ -71,15 +71,13 @@ async fn run_compact_task_inner(
|
||||
input: Vec<InputItem>,
|
||||
) {
|
||||
let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
|
||||
// Track the items we append for this compact prompt so trimming does not drop them.
|
||||
let extra_items: Vec<ResponseItem> = vec![initial_input_for_turn.clone().into()];
|
||||
let mut turn_input = sess.turn_input_with_history(extra_items.clone()).await;
|
||||
let mut turn_input = sess
|
||||
.turn_input_with_history(vec![initial_input_for_turn.clone().into()])
|
||||
.await;
|
||||
let mut truncated_count = 0usize;
|
||||
let mut trimmed_tails: Vec<Vec<ResponseItem>> = Vec::new();
|
||||
|
||||
let max_retries = turn_context.client.get_provider().stream_max_retries();
|
||||
let mut context_retries = 0;
|
||||
let mut stream_retries = 0;
|
||||
let mut retries = 0;
|
||||
|
||||
let rollout_item = RolloutItem::TurnContext(TurnContextItem {
|
||||
cwd: turn_context.cwd.clone(),
|
||||
@@ -116,32 +114,11 @@ async fn run_compact_task_inner(
|
||||
return;
|
||||
}
|
||||
Err(e @ CodexErr::ContextWindowExceeded) => {
|
||||
// Drop the most recent user turn (its message plus ensuing traffic) and retry.
|
||||
if turn_input.len() > extra_items.len() {
|
||||
let history_len = turn_input.len() - extra_items.len();
|
||||
let mut prompt_items = turn_input.split_off(history_len);
|
||||
let trimmed = trim_recent_history_to_previous_user_message(&mut turn_input);
|
||||
turn_input.append(&mut prompt_items);
|
||||
if !trimmed.is_empty() {
|
||||
truncated_count += trimmed.len();
|
||||
trimmed_tails.push(trimmed);
|
||||
if context_retries >= max_retries {
|
||||
sess.set_total_tokens_full(&sub_id, turn_context.as_ref())
|
||||
.await;
|
||||
let event = Event {
|
||||
id: sub_id.clone(),
|
||||
msg: EventMsg::Error(ErrorEvent {
|
||||
message: e.to_string(),
|
||||
}),
|
||||
};
|
||||
sess.send_event(event).await;
|
||||
return;
|
||||
}
|
||||
context_retries += 1;
|
||||
stream_retries = 0;
|
||||
// Keep stream retry budget untouched; we trimmed context successfully.
|
||||
continue;
|
||||
}
|
||||
if turn_input.len() > 1 {
|
||||
turn_input.remove(0);
|
||||
truncated_count += 1;
|
||||
retries = 0;
|
||||
continue;
|
||||
}
|
||||
sess.set_total_tokens_full(&sub_id, turn_context.as_ref())
|
||||
.await;
|
||||
@@ -155,12 +132,12 @@ async fn run_compact_task_inner(
|
||||
return;
|
||||
}
|
||||
Err(e) => {
|
||||
if stream_retries < max_retries {
|
||||
stream_retries += 1;
|
||||
let delay = backoff(stream_retries);
|
||||
if retries < max_retries {
|
||||
retries += 1;
|
||||
let delay = backoff(retries);
|
||||
sess.notify_stream_error(
|
||||
&sub_id,
|
||||
format!("Re-connecting... {stream_retries}/{max_retries}"),
|
||||
format!("Re-connecting... {retries}/{max_retries}"),
|
||||
)
|
||||
.await;
|
||||
tokio::time::sleep(delay).await;
|
||||
@@ -183,10 +160,7 @@ async fn run_compact_task_inner(
|
||||
let summary_text = get_last_assistant_message_from_turn(&history_snapshot).unwrap_or_default();
|
||||
let user_messages = collect_user_messages(&history_snapshot);
|
||||
let initial_context = sess.build_initial_context(turn_context.as_ref());
|
||||
let mut new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
|
||||
for mut trimmed in trimmed_tails.into_iter().rev() {
|
||||
new_history.append(&mut trimmed);
|
||||
}
|
||||
let new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
|
||||
sess.replace_history(new_history).await;
|
||||
|
||||
let rollout_item = RolloutItem::Compacted(CompactedItem {
|
||||
@@ -203,27 +177,6 @@ async fn run_compact_task_inner(
|
||||
sess.send_event(event).await;
|
||||
}
|
||||
|
||||
/// Trim conversation history back to the previous user message boundary, removing that user turn.
|
||||
///
|
||||
/// Returns the removed items in their original order so they can be restored later.
|
||||
fn trim_recent_history_to_previous_user_message(
|
||||
turn_input: &mut Vec<ResponseItem>,
|
||||
) -> Vec<ResponseItem> {
|
||||
if turn_input.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
if let Some(last_user_index) = turn_input.iter().rposition(|item| {
|
||||
matches!(
|
||||
item,
|
||||
ResponseItem::Message { role, .. } if role == "user"
|
||||
)
|
||||
}) {
|
||||
turn_input.split_off(last_user_index)
|
||||
} else {
|
||||
std::mem::take(turn_input)
|
||||
}
|
||||
}
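
A small sketch of trim_recent_history_to_previous_user_message's contract (illustrative only; user_msg and assistant_msg are hypothetical constructors for ResponseItem::Message values):

let mut history = vec![
    user_msg("first turn"),
    assistant_msg("first reply"),
    user_msg("second turn"),
    assistant_msg("second reply"),
];
let trimmed = trim_recent_history_to_previous_user_message(&mut history);
// history now holds [user "first turn", assistant "first reply"];
// trimmed holds [user "second turn", assistant "second reply"] in their original order.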
|
||||
|
||||
pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
|
||||
let mut pieces = Vec::new();
|
||||
for item in content {
|
||||
|
||||
@@ -28,8 +28,6 @@ use crate::model_family::find_family_for_model;
|
||||
use crate::model_provider_info::ModelProviderInfo;
|
||||
use crate::model_provider_info::built_in_model_providers;
|
||||
use crate::openai_model_info::get_model_info;
|
||||
use crate::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
|
||||
use crate::project_doc::LOCAL_PROJECT_DOC_FILENAME;
|
||||
use crate::protocol::AskForApproval;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use anyhow::Context;
|
||||
@@ -1125,15 +1123,6 @@ impl Config {
|
||||
.or(cfg.review_model)
|
||||
.unwrap_or_else(default_review_model);
|
||||
|
||||
let mut approval_policy = approval_policy
|
||||
.or(config_profile.approval_policy)
|
||||
.or(cfg.approval_policy)
|
||||
.unwrap_or_else(AskForApproval::default);
|
||||
|
||||
if features.enabled(Feature::ApproveAll) {
|
||||
approval_policy = AskForApproval::OnRequest;
|
||||
}
|
||||
|
||||
let config = Self {
|
||||
model,
|
||||
review_model,
|
||||
@@ -1144,7 +1133,10 @@ impl Config {
|
||||
model_provider_id,
|
||||
model_provider,
|
||||
cwd: resolved_cwd,
|
||||
approval_policy,
|
||||
approval_policy: approval_policy
|
||||
.or(config_profile.approval_policy)
|
||||
.or(cfg.approval_policy)
|
||||
.unwrap_or_else(AskForApproval::default),
|
||||
sandbox_policy,
|
||||
shell_environment_policy,
|
||||
notify: cfg.notify,
|
||||
@@ -1225,18 +1217,20 @@ impl Config {
|
||||
}
|
||||
|
||||
fn load_instructions(codex_dir: Option<&Path>) -> Option<String> {
|
||||
let base = codex_dir?;
|
||||
for candidate in [LOCAL_PROJECT_DOC_FILENAME, DEFAULT_PROJECT_DOC_FILENAME] {
|
||||
let mut path = base.to_path_buf();
|
||||
path.push(candidate);
|
||||
if let Ok(contents) = std::fs::read_to_string(&path) {
|
||||
let trimmed = contents.trim();
|
||||
if !trimmed.is_empty() {
|
||||
return Some(trimmed.to_string());
|
||||
}
|
||||
let mut p = match codex_dir {
|
||||
Some(p) => p.to_path_buf(),
|
||||
None => return None,
|
||||
};
|
||||
|
||||
p.push("AGENTS.md");
|
||||
std::fs::read_to_string(&p).ok().and_then(|s| {
|
||||
let s = s.trim();
|
||||
if s.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(s.to_string())
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
fn get_base_instructions(
|
||||
@@ -1438,26 +1432,6 @@ exclude_slash_tmp = true
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn approve_all_feature_forces_on_request_policy() -> std::io::Result<()> {
|
||||
let cfg = r#"
|
||||
[features]
|
||||
approve_all = true
|
||||
"#;
|
||||
let parsed = toml::from_str::<ConfigToml>(cfg)
|
||||
.expect("TOML deserialization should succeed for approve_all feature");
|
||||
let temp_dir = TempDir::new()?;
|
||||
let config = Config::load_from_base_config_with_overrides(
|
||||
parsed,
|
||||
ConfigOverrides::default(),
|
||||
temp_dir.path().to_path_buf(),
|
||||
)?;
|
||||
|
||||
assert!(config.features.enabled(Feature::ApproveAll));
|
||||
assert_eq!(config.approval_policy, AskForApproval::OnRequest);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_defaults_to_auto_oauth_store_mode() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
@@ -41,8 +41,6 @@ pub enum Feature {
|
||||
ViewImageTool,
|
||||
/// Allow the model to request web searches.
|
||||
WebSearchRequest,
|
||||
/// Automatically approve all approval requests from the harness.
|
||||
ApproveAll,
|
||||
}
|
||||
|
||||
impl Feature {
|
||||
@@ -249,10 +247,4 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::Stable,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ApproveAll,
|
||||
key: "approve_all",
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -11,6 +11,8 @@ pub mod bash;
mod chat_completions;
mod client;
mod client_common;
mod codebase_change_notice;
mod codebase_snapshot;
pub mod codex;
mod codex_conversation;
pub mod token_data;
@@ -1,9 +1,44 @@
|
||||
use crate::bash::try_parse_bash;
|
||||
use crate::bash::try_parse_word_only_commands_sequence;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use shlex::split as shlex_split;
|
||||
use shlex::try_join as shlex_try_join;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
|
||||
pub enum ParsedCommand {
|
||||
Read {
|
||||
cmd: String,
|
||||
name: String,
|
||||
},
|
||||
ListFiles {
|
||||
cmd: String,
|
||||
path: Option<String>,
|
||||
},
|
||||
Search {
|
||||
cmd: String,
|
||||
query: Option<String>,
|
||||
path: Option<String>,
|
||||
},
|
||||
Unknown {
|
||||
cmd: String,
|
||||
},
|
||||
}
|
||||
|
||||
// Convert core's parsed command enum into the protocol's simplified type so
|
||||
// events can carry the canonical representation across process boundaries.
|
||||
impl From<ParsedCommand> for codex_protocol::parse_command::ParsedCommand {
|
||||
fn from(v: ParsedCommand) -> Self {
|
||||
use codex_protocol::parse_command::ParsedCommand as P;
|
||||
match v {
|
||||
ParsedCommand::Read { cmd, name } => P::Read { cmd, name },
|
||||
ParsedCommand::ListFiles { cmd, path } => P::ListFiles { cmd, path },
|
||||
ParsedCommand::Search { cmd, query, path } => P::Search { cmd, query, path },
|
||||
ParsedCommand::Unknown { cmd } => P::Unknown { cmd },
|
||||
}
|
||||
}
|
||||
}
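
A small illustrative usage of this conversion (not part of the diff):

let core_cmd = ParsedCommand::Unknown { cmd: "echo hello".to_string() };
let proto_cmd: codex_protocol::parse_command::ParsedCommand = core_cmd.into();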
|
||||
|
||||
fn shlex_join(tokens: &[String]) -> String {
|
||||
shlex_try_join(tokens.iter().map(String::as_str))
|
||||
.unwrap_or_else(|_| "<command included NUL byte>".to_string())
|
||||
|
||||
@@ -21,8 +21,6 @@ use tracing::error;
|
||||
|
||||
/// Default filename scanned for project-level docs.
|
||||
pub const DEFAULT_PROJECT_DOC_FILENAME: &str = "AGENTS.md";
|
||||
/// Preferred local override for project-level docs.
|
||||
pub const LOCAL_PROJECT_DOC_FILENAME: &str = "AGENTS.override.md";
|
||||
|
||||
/// When both `Config::instructions` and the project doc are present, they will
|
||||
/// be concatenated with the following separator.
|
||||
@@ -180,8 +178,7 @@ pub fn discover_project_doc_paths(config: &Config) -> std::io::Result<Vec<PathBu
|
||||
|
||||
fn candidate_filenames<'a>(config: &'a Config) -> Vec<&'a str> {
|
||||
let mut names: Vec<&'a str> =
|
||||
Vec::with_capacity(2 + config.project_doc_fallback_filenames.len());
|
||||
names.push(LOCAL_PROJECT_DOC_FILENAME);
|
||||
Vec::with_capacity(1 + config.project_doc_fallback_filenames.len());
|
||||
names.push(DEFAULT_PROJECT_DOC_FILENAME);
|
||||
for candidate in &config.project_doc_fallback_filenames {
|
||||
let candidate = candidate.as_str();
|
||||
@@ -384,29 +381,6 @@ mod tests {
|
||||
assert_eq!(res, "root doc\n\ncrate doc");
|
||||
}
|
||||
|
||||
/// AGENTS.override.md is preferred over AGENTS.md when both are present.
|
||||
#[tokio::test]
|
||||
async fn agents_local_md_preferred() {
|
||||
let tmp = tempfile::tempdir().expect("tempdir");
|
||||
fs::write(tmp.path().join(DEFAULT_PROJECT_DOC_FILENAME), "versioned").unwrap();
|
||||
fs::write(tmp.path().join(LOCAL_PROJECT_DOC_FILENAME), "local").unwrap();
|
||||
|
||||
let cfg = make_config(&tmp, 4096, None);
|
||||
|
||||
let res = get_user_instructions(&cfg)
|
||||
.await
|
||||
.expect("local doc expected");
|
||||
|
||||
assert_eq!(res, "local");
|
||||
|
||||
let discovery = discover_project_doc_paths(&cfg).expect("discover paths");
|
||||
assert_eq!(discovery.len(), 1);
|
||||
assert_eq!(
|
||||
discovery[0].file_name().unwrap().to_string_lossy(),
|
||||
LOCAL_PROJECT_DOC_FILENAME
|
||||
);
|
||||
}
|
||||
|
||||
/// When AGENTS.md is absent but a configured fallback exists, the fallback is used.
|
||||
#[tokio::test]
|
||||
async fn uses_configured_fallback_when_agents_missing() {
|
||||
|
||||
@@ -2,6 +2,7 @@
use codex_protocol::models::ResponseItem;

use crate::codebase_snapshot::CodebaseSnapshot;
use crate::conversation_history::ConversationHistory;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::TokenUsage;
@@ -13,6 +14,7 @@ pub(crate) struct SessionState {
pub(crate) history: ConversationHistory,
pub(crate) token_info: Option<TokenUsageInfo>,
pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
pub(crate) codebase_snapshot: Option<CodebaseSnapshot>,
}

impl SessionState {
|
||||
|
||||
@@ -239,20 +239,6 @@ pub fn ev_apply_patch_function_call(call_id: &str, patch: &str) -> Value {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn ev_function_call_output(call_id: &str, content: &str) -> Value {
|
||||
serde_json::json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "function_call_output",
|
||||
"call_id": call_id,
|
||||
"output": {
|
||||
"content": content,
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn sse_failed(id: &str, code: &str, message: &str) -> String {
|
||||
sse(vec![serde_json::json!({
|
||||
"type": "response.failed",
|
||||
|
||||
@@ -19,20 +19,17 @@ use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
use core_test_support::responses::ev_completed_with_tokens;
|
||||
use core_test_support::responses::ev_function_call;
|
||||
use core_test_support::responses::ev_function_call_output;
|
||||
use core_test_support::responses::mount_sse_once_match;
|
||||
use core_test_support::responses::mount_sse_sequence;
|
||||
use core_test_support::responses::sse;
|
||||
use core_test_support::responses::sse_failed;
|
||||
use core_test_support::responses::start_mock_server;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::Value;
|
||||
// --- Test helpers -----------------------------------------------------------
|
||||
|
||||
pub(super) const FIRST_REPLY: &str = "FIRST_REPLY";
|
||||
pub(super) const SUMMARY_TEXT: &str = "SUMMARY_ONLY_CONTEXT";
|
||||
const THIRD_USER_MSG: &str = "next turn";
|
||||
const THIRD_ASSISTANT_MSG: &str = "post compact assistant";
|
||||
const AUTO_SUMMARY_TEXT: &str = "AUTO_SUMMARY";
|
||||
const FIRST_AUTO_MSG: &str = "token limit start";
|
||||
const SECOND_AUTO_MSG: &str = "token limit push";
|
||||
@@ -647,10 +644,6 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
ev_assistant_message("m2", SUMMARY_TEXT),
|
||||
ev_completed("r2"),
|
||||
]);
|
||||
let third_turn = sse(vec![
|
||||
ev_assistant_message("m3", THIRD_ASSISTANT_MSG),
|
||||
ev_completed("r3"),
|
||||
]);
|
||||
|
||||
let request_log = mount_sse_sequence(
|
||||
&server,
|
||||
@@ -658,7 +651,6 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
user_turn.clone(),
|
||||
compact_failed.clone(),
|
||||
compact_succeeds.clone(),
|
||||
third_turn,
|
||||
],
|
||||
)
|
||||
.await;
|
||||
@@ -696,29 +688,17 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
panic!("expected background event after compact retry");
|
||||
};
|
||||
assert!(
|
||||
event
|
||||
.message
|
||||
.contains("Trimmed 2 older conversation item(s)"),
|
||||
event.message.contains("Trimmed 1 older conversation item"),
|
||||
"background event should mention trimmed item count: {}",
|
||||
event.message
|
||||
);
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: THIRD_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = request_log.requests();
|
||||
assert_eq!(
|
||||
requests.len(),
|
||||
4,
|
||||
"expected user turn, two compact attempts, and one follow-up turn"
|
||||
3,
|
||||
"expected user turn and two compact attempts"
|
||||
);
|
||||
|
||||
let compact_attempt = requests[1].body_json();
|
||||
@@ -730,415 +710,42 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
let retry_input = retry_attempt["input"]
|
||||
.as_array()
|
||||
.unwrap_or_else(|| panic!("retry attempt missing input array: {retry_attempt}"));
|
||||
|
||||
fn extract_text(item: &Value) -> Option<String> {
|
||||
item.get("content")
|
||||
.and_then(Value::as_array)
|
||||
assert_eq!(
|
||||
compact_input
|
||||
.last()
|
||||
.and_then(|item| item.get("content"))
|
||||
.and_then(|v| v.as_array())
|
||||
.and_then(|items| items.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
extract_text(compact_input.last().expect("compact input empty")).as_deref(),
|
||||
.and_then(|text| text.as_str()),
|
||||
Some(SUMMARIZATION_PROMPT),
|
||||
"compact attempt should include summarization prompt",
|
||||
"compact attempt should include summarization prompt"
|
||||
);
|
||||
assert_eq!(
|
||||
extract_text(retry_input.last().expect("retry input empty")).as_deref(),
|
||||
retry_input
|
||||
.last()
|
||||
.and_then(|item| item.get("content"))
|
||||
.and_then(|v| v.as_array())
|
||||
.and_then(|items| items.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(|text| text.as_str()),
|
||||
Some(SUMMARIZATION_PROMPT),
|
||||
"retry attempt should include summarization prompt",
|
||||
"retry attempt should include summarization prompt"
|
||||
);
|
||||
|
||||
let contains_text = |items: &[Value], needle: &str| {
|
||||
items
|
||||
.iter()
|
||||
.any(|item| extract_text(item).is_some_and(|text| text == needle))
|
||||
};
|
||||
|
||||
assert!(
|
||||
contains_text(compact_input, "first turn"),
|
||||
"compact attempt should include original user message",
|
||||
);
|
||||
assert!(
|
||||
contains_text(compact_input, FIRST_REPLY),
|
||||
"compact attempt should include original assistant reply",
|
||||
);
|
||||
assert!(
|
||||
!contains_text(retry_input, "first turn"),
|
||||
"retry should drop original user message",
|
||||
);
|
||||
assert!(
|
||||
!contains_text(retry_input, FIRST_REPLY),
|
||||
"retry should drop assistant reply tied to original user message",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
compact_input.len().saturating_sub(retry_input.len()),
|
||||
2,
|
||||
"retry should drop the most recent user turn (before {} vs after {})",
|
||||
retry_input.len(),
|
||||
compact_input.len().saturating_sub(1),
|
||||
"retry should drop exactly one history item (before {} vs after {})",
|
||||
compact_input.len(),
|
||||
retry_input.len()
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn manual_compact_trims_last_user_turn_with_function_calls_on_context_error() {
|
||||
skip_if_no_network!();
|
||||
|
||||
// Scenario 1: ensure the retry trims the most recent turn when function calls are involved.
|
||||
const FIRST_USER_MSG: &str = "first user turn";
|
||||
const SECOND_USER_MSG: &str = "second user turn";
|
||||
const FIRST_CALL_A: &str = "call-first-a";
|
||||
const FIRST_CALL_B: &str = "call-first-b";
|
||||
const SECOND_CALL_A: &str = "call-second-a";
|
||||
const SECOND_CALL_B: &str = "call-second-b";
|
||||
|
||||
{
|
||||
let server = start_mock_server().await;
|
||||
|
||||
let first_turn_initial = sse(vec![ev_function_call(FIRST_CALL_A, "tool.first.a", "{}")]);
|
||||
let first_turn_second_call = sse(vec![
|
||||
ev_function_call_output(FIRST_CALL_A, "first-call-a output"),
|
||||
ev_function_call(FIRST_CALL_B, "tool.first.b", "{}"),
|
||||
]);
|
||||
let first_turn_complete = sse(vec![
|
||||
ev_function_call_output(FIRST_CALL_B, "first-call-b output"),
|
||||
ev_assistant_message("assistant-first", "first turn complete"),
|
||||
ev_completed("resp-first"),
|
||||
]);
|
||||
let second_turn_initial = sse(vec![ev_function_call(SECOND_CALL_A, "tool.second.a", "{}")]);
|
||||
let second_turn_second_call = sse(vec![
|
||||
ev_function_call_output(SECOND_CALL_A, "second-call-a output"),
|
||||
ev_function_call(SECOND_CALL_B, "tool.second.b", "{}"),
|
||||
]);
|
||||
let second_turn_complete = sse(vec![
|
||||
ev_function_call_output(SECOND_CALL_B, "second-call-b output"),
|
||||
ev_assistant_message("assistant-second", "second turn complete"),
|
||||
ev_completed("resp-second"),
|
||||
]);
|
||||
let compact_failed = sse_failed(
|
||||
"resp-fail",
|
||||
"context_length_exceeded",
|
||||
CONTEXT_LIMIT_MESSAGE,
|
||||
);
|
||||
let compact_retry = sse(vec![
|
||||
ev_assistant_message("assistant-summary", SUMMARY_TEXT),
|
||||
ev_completed("resp-summary"),
|
||||
]);
|
||||
|
||||
let request_log = mount_sse_sequence(
|
||||
&server,
|
||||
vec![
|
||||
first_turn_initial,
|
||||
first_turn_second_call,
|
||||
first_turn_complete,
|
||||
second_turn_initial,
|
||||
second_turn_second_call,
|
||||
second_turn_complete,
|
||||
compact_failed,
|
||||
compact_retry,
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home);
|
||||
config.model_provider = model_provider;
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
|
||||
.new_conversation(config)
|
||||
.await
|
||||
.unwrap()
|
||||
.conversation;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: FIRST_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: SECOND_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex.submit(Op::Compact).await.unwrap();
|
||||
let EventMsg::BackgroundEvent(event) =
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::BackgroundEvent(_))).await
|
||||
else {
|
||||
panic!("expected background event after compact retry");
|
||||
};
|
||||
assert!(
|
||||
event
|
||||
.message
|
||||
.contains("Trimmed 2 older conversation item(s)"),
|
||||
"background event should report trimming chunked user turn: {}",
|
||||
event.message
|
||||
);
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = request_log.requests();
|
||||
assert_eq!(
|
||||
requests.len(),
|
||||
8,
|
||||
"expected two user turns (with tool call round-trips) followed by compact attempt + retry"
|
||||
);
|
||||
|
||||
let compact_attempt = requests[6].body_json();
|
||||
let retry_attempt = requests[7].body_json();
|
||||
|
||||
fn extract_text(item: &Value) -> Option<String> {
|
||||
item.get("content")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|items| items.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
let contains_text = |items: &[Value], needle: &str| {
|
||||
items
|
||||
.iter()
|
||||
.any(|item| extract_text(item).is_some_and(|text| text == needle))
|
||||
};
|
||||
|
||||
assert!(
|
||||
contains_text(
|
||||
compact_attempt["input"].as_array().unwrap(),
|
||||
SECOND_USER_MSG
|
||||
),
|
||||
"initial compact attempt should include most recent user message",
|
||||
);
|
||||
assert!(
|
||||
!contains_text(retry_attempt["input"].as_array().unwrap(), SECOND_USER_MSG),
|
||||
"retry should drop the most recent user message",
|
||||
);
|
||||
assert!(
|
||||
contains_text(
|
||||
compact_attempt["input"].as_array().unwrap(),
|
||||
"second turn complete"
|
||||
),
|
||||
"initial compact attempt should include assistant reply for most recent turn",
|
||||
);
|
||||
assert!(
|
||||
!contains_text(
|
||||
retry_attempt["input"].as_array().unwrap(),
|
||||
"second turn complete"
|
||||
),
|
||||
"retry should drop assistant reply for most recent turn",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
compact_attempt["input"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.len()
|
||||
.saturating_sub(retry_attempt["input"].as_array().unwrap().len()),
|
||||
2,
|
||||
"retry should drop the most recent user turn from the prompt",
|
||||
);
|
||||
|
||||
let retry_call_ids: std::collections::HashSet<_> = retry_attempt["input"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.filter_map(|item| item.get("call_id").and_then(|v| v.as_str()))
|
||||
.collect();
|
||||
assert!(
|
||||
!retry_call_ids.contains(SECOND_CALL_A),
|
||||
"retry should remove function call {SECOND_CALL_A}"
|
||||
);
|
||||
assert!(
|
||||
!retry_call_ids.contains(SECOND_CALL_B),
|
||||
"retry should remove function call {SECOND_CALL_B}"
|
||||
);
|
||||
}
|
||||
|
||||
// Scenario 2: after a retry succeeds, the trimmed turn is restored to history for the next user input.
|
||||
{
|
||||
const SIMPLE_FIRST_USER_MSG: &str = "first user turn";
|
||||
const SIMPLE_FIRST_ASSISTANT_MSG: &str = "first assistant reply";
|
||||
const SIMPLE_SECOND_USER_MSG: &str = "second user turn";
|
||||
const SIMPLE_SECOND_ASSISTANT_MSG: &str = "second assistant reply";
|
||||
const SIMPLE_THIRD_USER_MSG: &str = "post compact user";
|
||||
const SIMPLE_THIRD_ASSISTANT_MSG: &str = "post compact assistant";
|
||||
|
||||
let server = start_mock_server().await;
|
||||
|
||||
let first_turn = sse(vec![
|
||||
ev_assistant_message("assistant-first", SIMPLE_FIRST_ASSISTANT_MSG),
|
||||
ev_completed("resp-first"),
|
||||
]);
|
||||
let second_turn = sse(vec![
|
||||
ev_assistant_message("assistant-second", SIMPLE_SECOND_ASSISTANT_MSG),
|
||||
ev_completed("resp-second"),
|
||||
]);
|
||||
let compact_failed = sse_failed(
|
||||
"resp-fail",
|
||||
"context_length_exceeded",
|
||||
CONTEXT_LIMIT_MESSAGE,
|
||||
);
|
||||
let compact_retry = sse(vec![
|
||||
ev_assistant_message("assistant-summary", SUMMARY_TEXT),
|
||||
ev_completed("resp-summary"),
|
||||
]);
|
||||
let third_turn = sse(vec![
|
||||
ev_assistant_message("assistant-third", SIMPLE_THIRD_ASSISTANT_MSG),
|
||||
ev_completed("resp-third"),
|
||||
]);
|
||||
|
||||
let request_log = mount_sse_sequence(
|
||||
&server,
|
||||
vec![
|
||||
first_turn,
|
||||
second_turn,
|
||||
compact_failed,
|
||||
compact_retry,
|
||||
third_turn,
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home);
|
||||
config.model_provider = model_provider;
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
|
||||
.new_conversation(config)
|
||||
.await
|
||||
.unwrap()
|
||||
.conversation;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: SIMPLE_FIRST_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: SIMPLE_SECOND_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex.submit(Op::Compact).await.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: SIMPLE_THIRD_USER_MSG.into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = request_log.requests();
|
||||
assert_eq!(
|
||||
requests.len(),
|
||||
5,
|
||||
"expected two user turns, two compact attempts, and a post-compact turn",
|
||||
);
|
||||
|
||||
let retry_request = &requests[3];
|
||||
let retry_body = retry_request.body_json();
|
||||
let retry_input = retry_body
|
||||
.get("input")
|
||||
.and_then(Value::as_array)
|
||||
.expect("retry request missing input array");
|
||||
assert!(
|
||||
retry_input.iter().all(|item| {
|
||||
item.get("content")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|entries| entries.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.map(|text| {
|
||||
text != SIMPLE_SECOND_USER_MSG && text != SIMPLE_SECOND_ASSISTANT_MSG
|
||||
})
|
||||
.unwrap_or(true)
|
||||
}),
|
||||
"retry compact input should omit trimmed second turn",
|
||||
);
|
||||
|
||||
let final_request = &requests[4];
|
||||
let body = final_request.body_json();
|
||||
let input_items = body
|
||||
.get("input")
|
||||
.and_then(Value::as_array)
|
||||
.expect("final request missing input array");
|
||||
|
||||
fn message_index(items: &[Value], needle: &str) -> Option<usize> {
|
||||
items.iter().position(|item| {
|
||||
item.get("type").and_then(Value::as_str) == Some("message")
|
||||
&& item
|
||||
.get("content")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|entries| entries.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.is_some_and(|text| text == needle)
|
||||
})
|
||||
}
|
||||
|
||||
let summary_index = input_items
|
||||
.iter()
|
||||
.position(|item| {
|
||||
item.get("content")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|entries| entries.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.map(|text| text.contains(SUMMARY_TEXT))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.expect("final request should include summary bridge");
|
||||
let second_user_index = message_index(input_items, SIMPLE_SECOND_USER_MSG)
|
||||
.expect("trimmed second user message should remain in history");
|
||||
let second_assistant_index = message_index(input_items, SIMPLE_SECOND_ASSISTANT_MSG)
|
||||
.expect("trimmed assistant reply should remain in history");
|
||||
let third_user_index = message_index(input_items, SIMPLE_THIRD_USER_MSG)
|
||||
.expect("post-compact user turn should be present");
|
||||
assert!(
|
||||
summary_index < second_user_index,
|
||||
"summary bridge should precede restored user message"
|
||||
);
|
||||
assert!(
|
||||
second_user_index < second_assistant_index,
|
||||
"restored user message should precede assistant reply"
|
||||
);
|
||||
assert!(
|
||||
second_assistant_index < third_user_index,
|
||||
"restored assistant reply should precede new user turn"
|
||||
if let (Some(first_before), Some(first_after)) = (compact_input.first(), retry_input.first()) {
|
||||
assert_ne!(
|
||||
first_before, first_after,
|
||||
"retry should drop the oldest conversation item"
|
||||
);
|
||||
} else {
|
||||
panic!("expected non-empty compact inputs");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@ use codex_core::ConversationManager;
|
||||
use codex_core::NewConversation;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::git_info::get_git_repo_root;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::Event;
|
||||
@@ -169,7 +168,8 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
model,
|
||||
review_model: None,
|
||||
config_profile,
|
||||
// Default to never ask for approvals in headless mode. Feature flags can override.
|
||||
// This CLI is intended to be headless and has no affordances for asking
|
||||
// the user for approval.
|
||||
approval_policy: Some(AskForApproval::Never),
|
||||
sandbox_mode,
|
||||
cwd: cwd.map(|p| p.canonicalize().unwrap_or(p)),
|
||||
@@ -192,7 +192,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
};
|
||||
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides).await?;
|
||||
let approve_all_enabled = config.features.enabled(Feature::ApproveAll);
|
||||
|
||||
let otel = codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION"));
|
||||
|
||||
@@ -361,34 +360,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
if matches!(event.msg, EventMsg::Error(_)) {
|
||||
error_seen = true;
|
||||
}
|
||||
// Auto-approve requests when the approve_all feature is enabled.
|
||||
if approve_all_enabled {
|
||||
match &event.msg {
|
||||
EventMsg::ExecApprovalRequest(_) => {
|
||||
if let Err(e) = conversation
|
||||
.submit(Op::ExecApproval {
|
||||
id: event.id.clone(),
|
||||
decision: codex_core::protocol::ReviewDecision::Approved,
|
||||
})
|
||||
.await
|
||||
{
|
||||
error!("failed to auto-approve exec: {e}");
|
||||
}
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(_) => {
|
||||
if let Err(e) = conversation
|
||||
.submit(Op::PatchApproval {
|
||||
id: event.id.clone(),
|
||||
decision: codex_core::protocol::ReviewDecision::Approved,
|
||||
})
|
||||
.await
|
||||
{
|
||||
error!("failed to auto-approve patch: {e}");
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
let shutdown: CodexStatus = event_processor.process_event(event);
|
||||
match shutdown {
|
||||
CodexStatus::Running => continue,
|
||||
|
||||
@@ -1,81 +0,0 @@
#![cfg(not(target_os = "windows"))]
#![allow(clippy::expect_used, clippy::unwrap_used)]

use anyhow::Result;
use core_test_support::responses;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex_exec::test_codex_exec;
use serde_json::Value;
use serde_json::json;

async fn run_exec_with_args(args: &[&str]) -> Result<String> {
let test = test_codex_exec();

let call_id = "exec-approve";
let exec_args = json!({
"command": [
if cfg!(windows) { "cmd.exe" } else { "/bin/sh" },
if cfg!(windows) { "/C" } else { "-lc" },
"echo approve-all-ok",
],
"timeout_ms": 1500,
"with_escalated_permissions": true
});

let response_streams = vec![
sse(vec![
ev_response_created("resp-1"),
ev_function_call(call_id, "shell", &serde_json::to_string(&exec_args)?),
ev_completed("resp-1"),
]),
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];

let server = responses::start_mock_server().await;
let mock = mount_sse_sequence(&server, response_streams).await;

test.cmd_with_server(&server).args(args).assert().success();

let requests = mock.requests();
assert!(requests.len() >= 2, "expected at least two responses POSTs");
let item = requests[1].function_call_output(call_id);
let output_str = item
.get("output")
.and_then(Value::as_str)
.expect("function_call_output.output should be a string");

Ok(output_str.to_string())
}

/// Setting `features.approve_all=true` should switch to auto-approvals.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn approve_all_auto_accepts_exec() -> Result<()> {
skip_if_no_network!(Ok(()));

let output = run_exec_with_args(&[
"--skip-git-repo-check",
"-c",
"features.approve_all=true",
"train",
])
.await?;
assert!(
output.contains("Exit code: 0"),
"expected Exit code: 0 in output: {output}"
);
assert!(
output.contains("approve-all-ok"),
"expected command output in response: {output}"
);

Ok(())
}
@@ -1,6 +1,5 @@
// Aggregates all former standalone integration tests as modules.
mod apply_patch;
mod approve_all;
mod auth_env;
mod originator;
mod output_schema;

@@ -178,7 +178,6 @@ async fn run_codex_tool_session_inner(
cwd,
call_id,
reason: _,
parsed_cmd,
}) => {
handle_exec_approval_request(
command,
@@ -189,7 +188,6 @@ async fn run_codex_tool_session_inner(
request_id_str.clone(),
event.id.clone(),
call_id,
parsed_cmd,
)
.await;
continue;

@@ -4,7 +4,6 @@ use std::sync::Arc;
use codex_core::CodexConversation;
use codex_core::protocol::Op;
use codex_core::protocol::ReviewDecision;
use codex_protocol::parse_command::ParsedCommand;
use mcp_types::ElicitRequest;
use mcp_types::ElicitRequestParamsRequestedSchema;
use mcp_types::JSONRPCErrorError;
@@ -36,7 +35,6 @@ pub struct ExecApprovalElicitRequestParams {
pub codex_call_id: String,
pub codex_command: Vec<String>,
pub codex_cwd: PathBuf,
pub codex_parsed_cmd: Vec<ParsedCommand>,
}

// TODO(mbolin): ExecApprovalResponse does not conform to ElicitResult. See:
@@ -58,7 +56,6 @@ pub(crate) async fn handle_exec_approval_request(
tool_call_id: String,
event_id: String,
call_id: String,
codex_parsed_cmd: Vec<ParsedCommand>,
) {
let escaped_command =
shlex::try_join(command.iter().map(String::as_str)).unwrap_or_else(|_| command.join(" "));
@@ -80,7 +77,6 @@ pub(crate) async fn handle_exec_approval_request(
codex_call_id: call_id,
codex_command: command,
codex_cwd: cwd,
codex_parsed_cmd,
};
let params_json = match serde_json::to_value(&params) {
Ok(value) => value,

@@ -3,7 +3,6 @@ use std::env;
use std::path::Path;
use std::path::PathBuf;

use codex_core::parse_command;
use codex_core::protocol::FileChange;
use codex_core::protocol::ReviewDecision;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
@@ -177,7 +176,6 @@ fn create_expected_elicitation_request(
shlex::try_join(command.iter().map(std::convert::AsRef::as_ref))?,
workdir.to_string_lossy()
);
let codex_parsed_cmd = parse_command::parse_command(&command);
Ok(JSONRPCRequest {
jsonrpc: JSONRPC_VERSION.into(),
id: elicitation_request_id,
@@ -195,7 +193,6 @@ fn create_expected_elicitation_request(
codex_command: command,
codex_cwd: workdir.to_path_buf(),
codex_call_id: "call1234".to_string(),
codex_parsed_cmd,
})?),
})
}

@@ -1178,7 +1178,6 @@ pub struct ExecApprovalRequestEvent {
/// Optional human-readable reason for the approval (e.g. retry without sandbox).
#[serde(skip_serializing_if = "Option::is_none")]
pub reason: Option<String>,
pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Debug, Clone, Deserialize, Serialize, TS)]

@@ -427,9 +427,8 @@ impl App {
tui.frame_requester().schedule_frame();
}
// Esc primes/advances backtracking only in normal (not working) mode
// with the composer focused and empty. In any other state, forward
// Esc so the active UI (e.g. status indicator, modals, popups)
// handles it.
// with an empty composer. In any other state, forward Esc so the
// active UI (e.g. status indicator, modals, popups) handles it.
KeyEvent {
code: KeyCode::Esc,
kind: KeyEventKind::Press | KeyEventKind::Repeat,

@@ -82,16 +82,15 @@ impl App {

/// Handle global Esc presses for backtracking when no overlay is present.
pub(crate) fn handle_backtrack_esc_key(&mut self, tui: &mut tui::Tui) {
if !self.chat_widget.composer_is_empty() {
return;
}

if !self.backtrack.primed {
self.prime_backtrack();
} else if self.overlay.is_none() {
self.open_backtrack_preview(tui);
} else if self.backtrack.overlay_preview_active {
self.step_backtrack_and_highlight(tui);
// Only handle backtracking when composer is empty to avoid clobbering edits.
if self.chat_widget.composer_is_empty() {
if !self.backtrack.primed {
self.prime_backtrack();
} else if self.overlay.is_none() {
self.open_backtrack_preview(tui);
} else if self.backtrack.overlay_preview_active {
self.step_backtrack_and_highlight(tui);
}
}
}


@@ -316,11 +316,6 @@ impl ChatComposer {
self.sync_file_search_popup();
}

pub(crate) fn clear_for_ctrl_c(&mut self) {
self.set_text_content(String::new());
self.history.reset_navigation();
}

/// Get the current composer text.
pub(crate) fn current_text(&self) -> String {
self.textarea.text().to_string()
@@ -857,12 +852,10 @@ impl ChatComposer {
return (InputResult::None, true);
}
if key_event.code == KeyCode::Esc {
if self.is_empty() {
let next_mode = esc_hint_mode(self.footer_mode, self.is_task_running);
if next_mode != self.footer_mode {
self.footer_mode = next_mode;
return (InputResult::None, true);
}
let next_mode = esc_hint_mode(self.footer_mode, self.is_task_running);
if next_mode != self.footer_mode {
self.footer_mode = next_mode;
return (InputResult::None, true);
}
} else {
self.footer_mode = reset_mode_after_activity(self.footer_mode);
@@ -1799,35 +1792,6 @@ mod tests {
});
}

#[test]
fn esc_hint_stays_hidden_with_draft_content() {
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;

let (tx, _rx) = unbounded_channel::<AppEvent>();
let sender = AppEventSender::new(tx);
let mut composer = ChatComposer::new(
true,
sender,
true,
"Ask Codex to do anything".to_string(),
false,
);

type_chars_humanlike(&mut composer, &['d']);

assert!(!composer.is_empty());
assert_eq!(composer.current_text(), "d");
assert_eq!(composer.footer_mode, FooterMode::ShortcutSummary);
assert!(matches!(composer.active_popup, ActivePopup::None));

let _ = composer.handle_key_event(KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE));

assert_eq!(composer.footer_mode, FooterMode::ShortcutSummary);
assert!(!composer.esc_backtrack_hint);
}

#[test]
fn question_mark_only_toggles_on_first_char() {
use crossterm::event::KeyCode;

@@ -70,12 +70,6 @@ impl ChatComposerHistory {
self.local_history.push(text.to_string());
}

/// Reset navigation tracking so the next Up key resumes from the latest entry.
pub fn reset_navigation(&mut self) {
self.history_cursor = None;
self.last_history_text = None;
}

/// Should Up/Down key presses be interpreted as history navigation given
/// the current content and cursor position of `textarea`?
pub fn should_handle_navigation(&self, text: &str, cursor: usize) -> bool {
@@ -277,24 +271,4 @@ mod tests {
history.on_entry_response(1, 1, Some("older".into()))
);
}

#[test]
fn reset_navigation_resets_cursor() {
let (tx, _rx) = unbounded_channel::<AppEvent>();
let tx = AppEventSender::new(tx);

let mut history = ChatComposerHistory::new();
history.set_metadata(1, 3);
history.fetched_history.insert(1, "command2".into());
history.fetched_history.insert(2, "command3".into());

assert_eq!(Some("command3".into()), history.navigate_up(&tx));
assert_eq!(Some("command2".into()), history.navigate_up(&tx));

history.reset_navigation();
assert!(history.history_cursor.is_none());
assert!(history.last_history_text.is_none());

assert_eq!(Some("command3".into()), history.navigate_up(&tx));
}
}

@@ -37,7 +37,6 @@ pub(crate) struct SelectionItem {
pub name: String,
pub display_shortcut: Option<KeyBinding>,
pub description: Option<String>,
pub selected_description: Option<String>,
pub is_current: bool,
pub actions: Vec<SelectionAction>,
pub dismiss_on_select: bool,
@@ -194,16 +193,12 @@ impl ListSelectionView {
} else {
format!("{prefix} {n}. {name_with_marker}")
};
let description = is_selected
.then(|| item.selected_description.clone())
.flatten()
.or_else(|| item.description.clone());
GenericDisplayRow {
name: display_name,
display_shortcut: item.display_shortcut,
match_indices: None,
is_current: item.is_current,
description,
description: item.description.clone(),
}
})
})

@@ -236,7 +236,7 @@ impl BottomPane {
CancellationEvent::NotHandled
} else {
self.view_stack.pop();
self.clear_composer_for_ctrl_c();
self.set_composer_text(String::new());
self.show_ctrl_c_quit_hint();
CancellationEvent::Handled
}
@@ -270,11 +270,6 @@ impl BottomPane {
self.request_redraw();
}

pub(crate) fn clear_composer_for_ctrl_c(&mut self) {
self.composer.clear_for_ctrl_c();
self.request_redraw();
}

/// Get the current composer text (for tests and programmatic checks).
pub(crate) fn composer_text(&self) -> String {
self.composer.current_text()

@@ -26,7 +26,6 @@ pub(crate) struct TextArea {
wrap_cache: RefCell<Option<WrapCache>>,
preferred_col: Option<usize>,
elements: Vec<TextElement>,
kill_buffer: String,
}

#[derive(Debug, Clone)]
@@ -49,7 +48,6 @@ impl TextArea {
wrap_cache: RefCell::new(None),
preferred_col: None,
elements: Vec::new(),
kill_buffer: String::new(),
}
}

@@ -59,7 +57,6 @@ impl TextArea {
self.wrap_cache.replace(None);
self.preferred_col = None;
self.elements.clear();
self.kill_buffer.clear();
}

pub fn text(&self) -> &str {
@@ -308,13 +305,6 @@ impl TextArea {
} => {
self.kill_to_end_of_line();
}
KeyEvent {
code: KeyCode::Char('y'),
modifiers: KeyModifiers::CONTROL,
..
} => {
self.yank();
}

// Cursor movement
KeyEvent {
@@ -447,7 +437,7 @@ impl TextArea {

pub fn delete_backward_word(&mut self) {
let start = self.beginning_of_previous_word();
self.kill_range(start..self.cursor_pos);
self.replace_range(start..self.cursor_pos, "");
}

/// Delete text to the right of the cursor using "word" semantics.
@@ -458,63 +448,32 @@ impl TextArea {
pub fn delete_forward_word(&mut self) {
let end = self.end_of_next_word();
if end > self.cursor_pos {
self.kill_range(self.cursor_pos..end);
self.replace_range(self.cursor_pos..end, "");
}
}

pub fn kill_to_end_of_line(&mut self) {
let eol = self.end_of_current_line();
let range = if self.cursor_pos == eol {
if self.cursor_pos == eol {
if eol < self.text.len() {
Some(self.cursor_pos..eol + 1)
} else {
None
self.replace_range(self.cursor_pos..eol + 1, "");
}
} else {
Some(self.cursor_pos..eol)
};

if let Some(range) = range {
self.kill_range(range);
self.replace_range(self.cursor_pos..eol, "");
}
}

pub fn kill_to_beginning_of_line(&mut self) {
let bol = self.beginning_of_current_line();
let range = if self.cursor_pos == bol {
if bol > 0 { Some(bol - 1..bol) } else { None }
if self.cursor_pos == bol {
if bol > 0 {
self.replace_range(bol - 1..bol, "");
}
} else {
Some(bol..self.cursor_pos)
};

if let Some(range) = range {
self.kill_range(range);
self.replace_range(bol..self.cursor_pos, "");
}
}

pub fn yank(&mut self) {
if self.kill_buffer.is_empty() {
return;
}
let text = self.kill_buffer.clone();
self.insert_str(&text);
}

fn kill_range(&mut self, range: Range<usize>) {
let range = self.expand_range_to_element_boundaries(range);
if range.start >= range.end {
return;
}

let removed = self.text[range.clone()].to_string();
if removed.is_empty() {
return;
}

self.kill_buffer = removed;
self.replace_range_raw(range, "");
}

/// Move the cursor left by a single grapheme cluster.
pub fn move_cursor_left(&mut self) {
self.cursor_pos = self.prev_atomic_boundary(self.cursor_pos);
@@ -1239,39 +1198,6 @@ mod tests {
assert_eq!(t.cursor(), elem_range.start);
}

#[test]
fn yank_restores_last_kill() {
let mut t = ta_with("hello");
t.set_cursor(0);
t.kill_to_end_of_line();
assert_eq!(t.text(), "");
assert_eq!(t.cursor(), 0);

t.yank();
assert_eq!(t.text(), "hello");
assert_eq!(t.cursor(), 5);

let mut t = ta_with("hello world");
t.set_cursor(t.text().len());
t.delete_backward_word();
assert_eq!(t.text(), "hello ");
assert_eq!(t.cursor(), 6);

t.yank();
assert_eq!(t.text(), "hello world");
assert_eq!(t.cursor(), 11);

let mut t = ta_with("hello");
t.set_cursor(5);
t.kill_to_beginning_of_line();
assert_eq!(t.text(), "");
assert_eq!(t.cursor(), 0);

t.yank();
assert_eq!(t.text(), "hello");
assert_eq!(t.cursor(), 5);
}

#[test]
fn cursor_left_and_right_handle_graphemes() {
let mut t = ta_with("a👍b");

@@ -53,8 +53,6 @@ use ratatui::buffer::Buffer;
use ratatui::layout::Constraint;
use ratatui::layout::Layout;
use ratatui::layout::Rect;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Widget;
use ratatui::widgets::WidgetRef;
use tokio::sync::mpsc::UnboundedSender;
@@ -83,7 +81,6 @@ use crate::history_cell::AgentMessageCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::McpToolCallCell;
use crate::markdown::append_markdown;
use crate::render::renderable::ColumnRenderable;
use crate::slash_command::SlashCommand;
use crate::status::RateLimitSnapshotDisplay;
use crate::text_formatting::truncate_text;
@@ -1721,6 +1718,7 @@ impl ChatWidget {
} else {
default_choice
};

let mut items: Vec<SelectionItem> = Vec::new();
for choice in choices.iter() {
let effort = choice.display;
@@ -1743,14 +1741,6 @@ impl ChatWidget {
.map(|preset| preset.description.to_string())
});

let warning = "⚠ High reasoning effort can quickly consume Plus plan rate limits.";
let show_warning = model_slug == "gpt-5-codex" && effort == ReasoningEffortConfig::High;
let selected_description = show_warning.then(|| {
description
.as_ref()
.map_or(warning.to_string(), |d| format!("{d}\n{warning}"))
});

let model_for_action = model_slug.clone();
let effort_for_action = choice.stored;
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
@@ -1780,7 +1770,6 @@ impl ChatWidget {
items.push(SelectionItem {
name: effort_label,
description,
selected_description,
is_current: is_current_model && choice.stored == highlight_choice,
actions,
dismiss_on_select: true,
@@ -1788,13 +1777,9 @@ impl ChatWidget {
});
}

let mut header = ColumnRenderable::new();
header.push(Line::from(
format!("Select Reasoning Level for {model_slug}").bold(),
));

self.bottom_pane.show_selection_view(SelectionViewParams {
header: Box::new(header),
title: Some("Select Reasoning Level".to_string()),
subtitle: Some(format!("Reasoning for model {model_slug}")),
footer_hint: Some(standard_popup_hint_line()),
items,
..Default::default()

@@ -2,13 +2,12 @@
source: tui/src/chatwidget/tests.rs
expression: popup
---
Select Reasoning Level for gpt-5-codex
Select Reasoning Level
Reasoning for model gpt-5-codex

1. Low Fastest responses with limited reasoning
2. Medium (default) Dynamically adjusts reasoning based on the task
› 3. High (current) Maximizes reasoning depth for complex or ambiguous
problems
⚠ High reasoning effort can quickly consume Plus plan
rate limits.

Press enter to confirm or esc to go back

@@ -392,7 +392,6 @@ fn exec_approval_emits_proposed_command_and_decision_history() {
reason: Some(
"this is a test reason such as one that would be produced by the model".into(),
),
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-short".into(),
@@ -434,7 +433,6 @@ fn exec_approval_decision_truncates_multiline_and_long_commands() {
reason: Some(
"this is a test reason such as one that would be produced by the model".into(),
),
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-multi".into(),
@@ -482,7 +480,6 @@ fn exec_approval_decision_truncates_multiline_and_long_commands() {
command: vec!["bash".into(), "-lc".into(), long],
cwd: std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
reason: None,
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-long".into(),
@@ -508,7 +505,10 @@ fn begin_exec(chat: &mut ChatWidget, call_id: &str, raw_cmd: &str) {
// Build the full command vec and parse it using core's parser,
// then convert to protocol variants for the event payload.
let command = vec!["bash".to_string(), "-lc".to_string(), raw_cmd.to_string()];
let parsed_cmd: Vec<ParsedCommand> = codex_core::parse_command::parse_command(&command);
let parsed_cmd: Vec<ParsedCommand> = codex_core::parse_command::parse_command(&command)
.into_iter()
.map(Into::into)
.collect();
chat.handle_codex_event(Event {
id: call_id.to_string(),
msg: EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
@@ -1205,7 +1205,10 @@ async fn binary_size_transcript_snapshot() {
call_id: e.call_id.clone(),
command: e.command,
cwd: e.cwd,
parsed_cmd,
parsed_cmd: parsed_cmd
.into_iter()
.map(std::convert::Into::into)
.collect(),
}),
}
}
@@ -1320,7 +1323,6 @@ fn approval_modal_exec_snapshot() {
reason: Some(
"this is a test reason such as one that would be produced by the model".into(),
),
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-approve".into(),
@@ -1364,7 +1366,6 @@ fn approval_modal_exec_without_reason_snapshot() {
command: vec!["bash".into(), "-lc".into(), "echo hello world".into()],
cwd: std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
reason: None,
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-approve-noreason".into(),
@@ -1574,7 +1575,6 @@ fn status_widget_and_approval_modal_snapshot() {
reason: Some(
"this is a test reason such as one that would be produced by the model".into(),
),
parsed_cmd: vec![],
};
chat.handle_codex_event(Event {
id: "sub-approve-exec".into(),
@@ -2241,15 +2241,17 @@ fn chatwidget_exec_and_status_layout_vt100_snapshot() {
command: vec!["bash".into(), "-lc".into(), "rg \"Change Approved\"".into()],
cwd: std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
parsed_cmd: vec![
ParsedCommand::Search {
codex_core::parse_command::ParsedCommand::Search {
query: Some("Change Approved".into()),
path: None,
cmd: "rg \"Change Approved\"".into(),
},
ParsedCommand::Read {
}
.into(),
codex_core::parse_command::ParsedCommand::Read {
name: "diff_render.rs".into(),
cmd: "cat diff_render.rs".into(),
},
}
.into(),
],
}),
});

@@ -2492,7 +2492,7 @@
{"ts":"2025-08-09T15:51:59.856Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:51:59.858Z","dir":"to_tui","kind":"app_event","variant":"Redraw"}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"log_line","line":"[INFO codex_core::codex] FunctionCall: {\"command\":[\"bash\",\"-lc\",\"just fix\"],\"with_escalated_permissions\":true,\"justifica"}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_KOxVodT3X5ci7LJmudvcovhW","command":["bash","-lc","just fix"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run clippy with network and system permissions to apply lint fixes across workspace.","parsed_cmd":[]}}}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_KOxVodT3X5ci7LJmudvcovhW","command":["bash","-lc","just fix"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run clippy with network and system permissions to apply lint fixes across workspace."}}}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"insert_history","lines":5}
{"ts":"2025-08-09T15:51:59.939Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
@@ -4172,7 +4172,7 @@
{"ts":"2025-08-09T15:53:09.375Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:53:09.376Z","dir":"to_tui","kind":"app_event","variant":"Redraw"}
{"ts":"2025-08-09T15:53:09.448Z","dir":"to_tui","kind":"log_line","line":"[INFO codex_core::codex] FunctionCall: {\"command\":[\"bash\",\"-lc\",\"just fix\"],\"with_escalated_permissions\":true,\"justifica"}
{"ts":"2025-08-09T15:53:09.448Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_POl3hxI2xeszBLv9IOM7L2ir","command":["bash","-lc","just fix"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Clippy needs broader permissions; allow to run and apply lint fixes.","parsed_cmd":[]}}}
{"ts":"2025-08-09T15:53:09.448Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_POl3hxI2xeszBLv9IOM7L2ir","command":["bash","-lc","just fix"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Clippy needs broader permissions; allow to run and apply lint fixes."}}}
{"ts":"2025-08-09T15:53:09.448Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:53:09.449Z","dir":"to_tui","kind":"insert_history","lines":5}
{"ts":"2025-08-09T15:53:09.449Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
@@ -7776,7 +7776,7 @@
{"ts":"2025-08-09T15:58:28.583Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:58:28.590Z","dir":"to_tui","kind":"app_event","variant":"Redraw"}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"log_line","line":"[INFO codex_core::codex] FunctionCall: {\"command\":[\"bash\",\"-lc\",\"cargo test -p codex-core shell::tests::test_current_she"}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_iMa8Qnw0dYLba4rVysxebmkV","command":["bash","-lc","cargo test -p codex-core shell::tests::test_current_shell_detects_zsh -- --nocapture"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run the macOS shell detection test without sandbox limits so dscl can read user shell.","parsed_cmd":[]}}}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_iMa8Qnw0dYLba4rVysxebmkV","command":["bash","-lc","cargo test -p codex-core shell::tests::test_current_shell_detects_zsh -- --nocapture"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run the macOS shell detection test without sandbox limits so dscl can read user shell."}}}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"insert_history","lines":5}
{"ts":"2025-08-09T15:58:28.594Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
@@ -8730,7 +8730,7 @@
{"ts":"2025-08-09T15:59:01.983Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:59:01.985Z","dir":"to_tui","kind":"app_event","variant":"Redraw"}
{"ts":"2025-08-09T15:59:02.005Z","dir":"to_tui","kind":"log_line","line":"[INFO codex_core::codex] FunctionCall: {\"command\":[\"bash\",\"-lc\",\"cargo test --all-features\"],\"with_escalated_permissions"}
{"ts":"2025-08-09T15:59:02.005Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_JDFGIuFhYCIiQO1Aq2L9lBO1","command":["bash","-lc","cargo test --all-features"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run full test suite without sandbox constraints to validate the merge.","parsed_cmd":[]}}}
{"ts":"2025-08-09T15:59:02.005Z","dir":"to_tui","kind":"codex_event","payload":{"id":"1","msg":{"type":"exec_approval_request","call_id":"call_JDFGIuFhYCIiQO1Aq2L9lBO1","command":["bash","-lc","cargo test --all-features"],"cwd":"/Users/easong/code/codex/codex-rs","reason":"Run full test suite without sandbox constraints to validate the merge."}}}
{"ts":"2025-08-09T15:59:02.006Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}
{"ts":"2025-08-09T15:59:02.006Z","dir":"to_tui","kind":"insert_history","lines":5}
{"ts":"2025-08-09T15:59:02.006Z","dir":"to_tui","kind":"app_event","variant":"RequestRedraw"}