Mirror of https://github.com/openai/codex.git (synced 2026-03-25 16:13:56 +00:00)

Compare commits: stack/util...jif/remove (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2b914de0a8 | |
| | 178c3b15b4 | |
| | 782a99cde5 | |
@@ -1,176 +0,0 @@
use std::path::Path;
use std::time::Duration;

use async_trait::async_trait;
use serde::Deserialize;
use tokio::process::Command;
use tokio::time::timeout;

use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;

pub struct GrepFilesHandler;

const DEFAULT_LIMIT: usize = 100;
const MAX_LIMIT: usize = 2000;
const COMMAND_TIMEOUT: Duration = Duration::from_secs(30);

fn default_limit() -> usize {
    DEFAULT_LIMIT
}

#[derive(Deserialize)]
struct GrepFilesArgs {
    pattern: String,
    #[serde(default)]
    include: Option<String>,
    #[serde(default)]
    path: Option<String>,
    #[serde(default = "default_limit")]
    limit: usize,
}

#[async_trait]
impl ToolHandler for GrepFilesHandler {
    type Output = FunctionToolOutput;

    fn kind(&self) -> ToolKind {
        ToolKind::Function
    }

    async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
        let ToolInvocation { payload, turn, .. } = invocation;

        let arguments = match payload {
            ToolPayload::Function { arguments } => arguments,
            _ => {
                return Err(FunctionCallError::RespondToModel(
                    "grep_files handler received unsupported payload".to_string(),
                ));
            }
        };

        let args: GrepFilesArgs = parse_arguments(&arguments)?;

        let pattern = args.pattern.trim();
        if pattern.is_empty() {
            return Err(FunctionCallError::RespondToModel(
                "pattern must not be empty".to_string(),
            ));
        }

        if args.limit == 0 {
            return Err(FunctionCallError::RespondToModel(
                "limit must be greater than zero".to_string(),
            ));
        }

        let limit = args.limit.min(MAX_LIMIT);
        let search_path = turn.resolve_path(args.path.clone());

        verify_path_exists(&search_path).await?;

        let include = args.include.as_deref().map(str::trim).and_then(|val| {
            if val.is_empty() {
                None
            } else {
                Some(val.to_string())
            }
        });

        let search_results =
            run_rg_search(pattern, include.as_deref(), &search_path, limit, &turn.cwd).await?;

        if search_results.is_empty() {
            Ok(FunctionToolOutput::from_text(
                "No matches found.".to_string(),
                Some(false),
            ))
        } else {
            Ok(FunctionToolOutput::from_text(
                search_results.join("\n"),
                Some(true),
            ))
        }
    }
}

async fn verify_path_exists(path: &Path) -> Result<(), FunctionCallError> {
    tokio::fs::metadata(path).await.map_err(|err| {
        FunctionCallError::RespondToModel(format!("unable to access `{}`: {err}", path.display()))
    })?;
    Ok(())
}

async fn run_rg_search(
    pattern: &str,
    include: Option<&str>,
    search_path: &Path,
    limit: usize,
    cwd: &Path,
) -> Result<Vec<String>, FunctionCallError> {
    let mut command = Command::new("rg");
    command
        .current_dir(cwd)
        .arg("--files-with-matches")
        .arg("--sortr=modified")
        .arg("--regexp")
        .arg(pattern)
        .arg("--no-messages");

    if let Some(glob) = include {
        command.arg("--glob").arg(glob);
    }

    command.arg("--").arg(search_path);

    let output = timeout(COMMAND_TIMEOUT, command.output())
        .await
        .map_err(|_| {
            FunctionCallError::RespondToModel("rg timed out after 30 seconds".to_string())
        })?
        .map_err(|err| {
            FunctionCallError::RespondToModel(format!(
                "failed to launch rg: {err}. Ensure ripgrep is installed and on PATH."
            ))
        })?;

    match output.status.code() {
        Some(0) => Ok(parse_results(&output.stdout, limit)),
        Some(1) => Ok(Vec::new()),
        _ => {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Err(FunctionCallError::RespondToModel(format!(
                "rg failed: {stderr}"
            )))
        }
    }
}

fn parse_results(stdout: &[u8], limit: usize) -> Vec<String> {
    let mut results = Vec::new();
    for line in stdout.split(|byte| *byte == b'\n') {
        if line.is_empty() {
            continue;
        }
        if let Ok(text) = std::str::from_utf8(line) {
            if text.is_empty() {
                continue;
            }
            results.push(text.to_string());
            if results.len() == limit {
                break;
            }
        }
    }
    results
}

#[cfg(test)]
#[path = "grep_files_tests.rs"]
mod tests;
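The handler above leans on serde field defaults for its argument parsing. The following standalone sketch shows the same default-and-clamp behavior; `GrepArgs` is a local stand-in for the deleted `GrepFilesArgs`, not a type exported by the codex crates, and it only assumes the `serde` (with `derive`) and `serde_json` crates.

```rust
use serde::Deserialize;

const DEFAULT_LIMIT: usize = 100;
const MAX_LIMIT: usize = 2000;

fn default_limit() -> usize {
    DEFAULT_LIMIT
}

// Local stand-in mirroring the deleted GrepFilesArgs.
#[derive(Deserialize)]
struct GrepArgs {
    pattern: String,
    #[serde(default)]
    include: Option<String>,
    #[serde(default)]
    path: Option<String>,
    #[serde(default = "default_limit")]
    limit: usize,
}

fn main() {
    // `limit` is absent, so serde fills in default_limit() == 100.
    let args: GrepArgs = serde_json::from_str(r#"{"pattern": "needle"}"#).unwrap();
    assert_eq!(args.limit, 100);
    assert!(args.include.is_none() && args.path.is_none());

    // An oversized limit is clamped the same way the handler clamps it.
    let args: GrepArgs =
        serde_json::from_str(r#"{"pattern": "needle", "limit": 9999}"#).unwrap();
    assert_eq!(args.limit.min(MAX_LIMIT), 2000);
    println!("pattern = {}", args.pattern);
}
```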
@@ -1,271 +0,0 @@
use std::collections::VecDeque;
use std::ffi::OsStr;
use std::fs::FileType;
use std::path::Path;
use std::path::PathBuf;

use async_trait::async_trait;
use codex_utils_string::take_bytes_at_char_boundary;
use serde::Deserialize;
use tokio::fs;

use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;

pub struct ListDirHandler;

const MAX_ENTRY_LENGTH: usize = 500;
const INDENTATION_SPACES: usize = 2;

fn default_offset() -> usize {
    1
}

fn default_limit() -> usize {
    25
}

fn default_depth() -> usize {
    2
}

#[derive(Deserialize)]
struct ListDirArgs {
    dir_path: String,
    #[serde(default = "default_offset")]
    offset: usize,
    #[serde(default = "default_limit")]
    limit: usize,
    #[serde(default = "default_depth")]
    depth: usize,
}

#[async_trait]
impl ToolHandler for ListDirHandler {
    type Output = FunctionToolOutput;

    fn kind(&self) -> ToolKind {
        ToolKind::Function
    }

    async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
        let ToolInvocation { payload, .. } = invocation;

        let arguments = match payload {
            ToolPayload::Function { arguments } => arguments,
            _ => {
                return Err(FunctionCallError::RespondToModel(
                    "list_dir handler received unsupported payload".to_string(),
                ));
            }
        };

        let args: ListDirArgs = parse_arguments(&arguments)?;

        let ListDirArgs {
            dir_path,
            offset,
            limit,
            depth,
        } = args;

        if offset == 0 {
            return Err(FunctionCallError::RespondToModel(
                "offset must be a 1-indexed entry number".to_string(),
            ));
        }

        if limit == 0 {
            return Err(FunctionCallError::RespondToModel(
                "limit must be greater than zero".to_string(),
            ));
        }

        if depth == 0 {
            return Err(FunctionCallError::RespondToModel(
                "depth must be greater than zero".to_string(),
            ));
        }

        let path = PathBuf::from(&dir_path);
        if !path.is_absolute() {
            return Err(FunctionCallError::RespondToModel(
                "dir_path must be an absolute path".to_string(),
            ));
        }

        let entries = list_dir_slice(&path, offset, limit, depth).await?;
        let mut output = Vec::with_capacity(entries.len() + 1);
        output.push(format!("Absolute path: {}", path.display()));
        output.extend(entries);
        Ok(FunctionToolOutput::from_text(output.join("\n"), Some(true)))
    }
}

async fn list_dir_slice(
    path: &Path,
    offset: usize,
    limit: usize,
    depth: usize,
) -> Result<Vec<String>, FunctionCallError> {
    let mut entries = Vec::new();
    collect_entries(path, Path::new(""), depth, &mut entries).await?;

    if entries.is_empty() {
        return Ok(Vec::new());
    }

    entries.sort_unstable_by(|a, b| a.name.cmp(&b.name));

    let start_index = offset - 1;
    if start_index >= entries.len() {
        return Err(FunctionCallError::RespondToModel(
            "offset exceeds directory entry count".to_string(),
        ));
    }

    let remaining_entries = entries.len() - start_index;
    let capped_limit = limit.min(remaining_entries);
    let end_index = start_index + capped_limit;
    let selected_entries = &entries[start_index..end_index];
    let mut formatted = Vec::with_capacity(selected_entries.len());

    for entry in selected_entries {
        formatted.push(format_entry_line(entry));
    }

    if end_index < entries.len() {
        formatted.push(format!("More than {capped_limit} entries found"));
    }

    Ok(formatted)
}

async fn collect_entries(
    dir_path: &Path,
    relative_prefix: &Path,
    depth: usize,
    entries: &mut Vec<DirEntry>,
) -> Result<(), FunctionCallError> {
    let mut queue = VecDeque::new();
    queue.push_back((dir_path.to_path_buf(), relative_prefix.to_path_buf(), depth));

    while let Some((current_dir, prefix, remaining_depth)) = queue.pop_front() {
        let mut read_dir = fs::read_dir(&current_dir).await.map_err(|err| {
            FunctionCallError::RespondToModel(format!("failed to read directory: {err}"))
        })?;

        let mut dir_entries = Vec::new();

        while let Some(entry) = read_dir.next_entry().await.map_err(|err| {
            FunctionCallError::RespondToModel(format!("failed to read directory: {err}"))
        })? {
            let file_type = entry.file_type().await.map_err(|err| {
                FunctionCallError::RespondToModel(format!("failed to inspect entry: {err}"))
            })?;

            let file_name = entry.file_name();
            let relative_path = if prefix.as_os_str().is_empty() {
                PathBuf::from(&file_name)
            } else {
                prefix.join(&file_name)
            };

            let display_name = format_entry_component(&file_name);
            let display_depth = prefix.components().count();
            let sort_key = format_entry_name(&relative_path);
            let kind = DirEntryKind::from(&file_type);
            dir_entries.push((
                entry.path(),
                relative_path,
                kind,
                DirEntry {
                    name: sort_key,
                    display_name,
                    depth: display_depth,
                    kind,
                },
            ));
        }

        dir_entries.sort_unstable_by(|a, b| a.3.name.cmp(&b.3.name));

        for (entry_path, relative_path, kind, dir_entry) in dir_entries {
            if kind == DirEntryKind::Directory && remaining_depth > 1 {
                queue.push_back((entry_path, relative_path, remaining_depth - 1));
            }
            entries.push(dir_entry);
        }
    }

    Ok(())
}

fn format_entry_name(path: &Path) -> String {
    let normalized = path.to_string_lossy().replace("\\", "/");
    if normalized.len() > MAX_ENTRY_LENGTH {
        take_bytes_at_char_boundary(&normalized, MAX_ENTRY_LENGTH).to_string()
    } else {
        normalized
    }
}

fn format_entry_component(name: &OsStr) -> String {
    let normalized = name.to_string_lossy();
    if normalized.len() > MAX_ENTRY_LENGTH {
        take_bytes_at_char_boundary(&normalized, MAX_ENTRY_LENGTH).to_string()
    } else {
        normalized.to_string()
    }
}

fn format_entry_line(entry: &DirEntry) -> String {
    let indent = " ".repeat(entry.depth * INDENTATION_SPACES);
    let mut name = entry.display_name.clone();
    match entry.kind {
        DirEntryKind::Directory => name.push('/'),
        DirEntryKind::Symlink => name.push('@'),
        DirEntryKind::Other => name.push('?'),
        DirEntryKind::File => {}
    }
    format!("{indent}{name}")
}

#[derive(Clone)]
struct DirEntry {
    name: String,
    display_name: String,
    depth: usize,
    kind: DirEntryKind,
}

#[derive(Clone, Copy, PartialEq, Eq)]
enum DirEntryKind {
    Directory,
    File,
    Symlink,
    Other,
}

impl From<&FileType> for DirEntryKind {
    fn from(file_type: &FileType) -> Self {
        if file_type.is_symlink() {
            DirEntryKind::Symlink
        } else if file_type.is_dir() {
            DirEntryKind::Directory
        } else if file_type.is_file() {
            DirEntryKind::File
        } else {
            DirEntryKind::Other
        }
    }
}

#[cfg(test)]
#[path = "list_dir_tests.rs"]
mod tests;
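The output shape produced by `format_entry_line` is easiest to see in isolation: two spaces of indentation per directory level plus a one-character type suffix. A minimal standalone sketch with local stand-in types (std only, not the crate's actual `DirEntry`):

```rust
// Local stand-in for the handler's DirEntryKind.
#[derive(Clone, Copy)]
enum Kind {
    Directory,
    File,
    Symlink,
    Other,
}

// Mirrors format_entry_line: indent by depth, then append a type marker.
fn format_line(name: &str, depth: usize, kind: Kind) -> String {
    let indent = " ".repeat(depth * 2); // INDENTATION_SPACES == 2
    let suffix = match kind {
        Kind::Directory => "/",
        Kind::Symlink => "@",
        Kind::Other => "?",
        Kind::File => "",
    };
    format!("{indent}{name}{suffix}")
}

fn main() {
    assert_eq!(format_line("src", 0, Kind::Directory), "src/");
    assert_eq!(format_line("main.rs", 1, Kind::File), "  main.rs");
    assert_eq!(format_line("link", 1, Kind::Symlink), "  link@");
}
```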
@@ -2,9 +2,7 @@ pub(crate) mod agent_jobs;
pub mod apply_patch;
mod artifacts;
mod dynamic;
mod grep_files;
mod js_repl;
mod list_dir;
mod mcp;
mod mcp_resource;
pub(crate) mod multi_agents;
@@ -41,10 +39,8 @@ pub use artifacts::ArtifactsHandler;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::AskForApproval;
pub use dynamic::DynamicToolHandler;
pub use grep_files::GrepFilesHandler;
pub use js_repl::JsReplHandler;
pub use js_repl::JsReplResetHandler;
pub use list_dir::ListDirHandler;
pub use mcp::McpHandler;
pub use mcp_resource::McpResourceHandler;
pub use plan::PlanHandler;
@@ -1839,59 +1839,6 @@ fn create_test_sync_tool() -> ToolSpec {
    })
}

fn create_grep_files_tool() -> ToolSpec {
    let properties = BTreeMap::from([
        (
            "pattern".to_string(),
            JsonSchema::String {
                description: Some("Regular expression pattern to search for.".to_string()),
            },
        ),
        (
            "include".to_string(),
            JsonSchema::String {
                description: Some(
                    "Optional glob that limits which files are searched (e.g. \"*.rs\" or \
                     \"*.{ts,tsx}\")."
                        .to_string(),
                ),
            },
        ),
        (
            "path".to_string(),
            JsonSchema::String {
                description: Some(
                    "Directory or file path to search. Defaults to the session's working directory."
                        .to_string(),
                ),
            },
        ),
        (
            "limit".to_string(),
            JsonSchema::Number {
                description: Some(
                    "Maximum number of file paths to return (defaults to 100).".to_string(),
                ),
            },
        ),
    ]);

    ToolSpec::Function(ResponsesApiTool {
        name: "grep_files".to_string(),
        description: "Finds files whose contents match the pattern and lists them by modification \
                      time."
            .to_string(),
        strict: false,
        defer_loading: None,
        parameters: JsonSchema::Object {
            properties,
            required: Some(vec!["pattern".to_string()]),
            additional_properties: Some(false.into()),
        },
        output_schema: None,
    })
}

fn create_tool_search_tool(app_tools: &HashMap<String, ToolInfo>) -> ToolSpec {
    let properties = BTreeMap::from([
        (
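For context, the removed `create_grep_files_tool` declares a JSON-schema parameter object. Serialized, those parameters would come out roughly as below; this is a sketch assuming `serde_json` and conventional JSON Schema field spellings, not the crate's exact wire format.

```rust
use serde_json::json;

fn main() {
    // Approximate serialized form of the JsonSchema::Object above (assumption:
    // string/number kinds map to the usual "type" fields).
    let parameters = json!({
        "type": "object",
        "properties": {
            "pattern": {
                "type": "string",
                "description": "Regular expression pattern to search for."
            },
            "include": {
                "type": "string",
                "description": "Optional glob that limits which files are searched (e.g. \"*.rs\" or \"*.{ts,tsx}\")."
            },
            "path": {
                "type": "string",
                "description": "Directory or file path to search. Defaults to the session's working directory."
            },
            "limit": {
                "type": "number",
                "description": "Maximum number of file paths to return (defaults to 100)."
            }
        },
        "required": ["pattern"],
        "additionalProperties": false
    });
    println!("{}", serde_json::to_string_pretty(&parameters).unwrap());
}
```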
@@ -2204,54 +2151,6 @@ fn create_read_file_tool() -> ToolSpec {
    })
}

fn create_list_dir_tool() -> ToolSpec {
    let properties = BTreeMap::from([
        (
            "dir_path".to_string(),
            JsonSchema::String {
                description: Some("Absolute path to the directory to list.".to_string()),
            },
        ),
        (
            "offset".to_string(),
            JsonSchema::Number {
                description: Some(
                    "The entry number to start listing from. Must be 1 or greater.".to_string(),
                ),
            },
        ),
        (
            "limit".to_string(),
            JsonSchema::Number {
                description: Some("The maximum number of entries to return.".to_string()),
            },
        ),
        (
            "depth".to_string(),
            JsonSchema::Number {
                description: Some(
                    "The maximum directory depth to traverse. Must be 1 or greater.".to_string(),
                ),
            },
        ),
    ]);

    ToolSpec::Function(ResponsesApiTool {
        name: "list_dir".to_string(),
        description:
            "Lists entries in a local directory with 1-indexed entry numbers and simple type labels."
                .to_string(),
        strict: false,
        defer_loading: None,
        parameters: JsonSchema::Object {
            properties,
            required: Some(vec!["dir_path".to_string()]),
            additional_properties: Some(false.into()),
        },
        output_schema: None,
    })
}

fn create_js_repl_tool() -> ToolSpec {
    // Keep JS input freeform, but block the most common malformed payload shapes
    // (JSON wrappers, quoted strings, and markdown fences) before they reach the
@@ -2751,10 +2650,8 @@ pub(crate) fn build_specs_with_discoverable_tools(
    use crate::tools::handlers::CodeModeExecuteHandler;
    use crate::tools::handlers::CodeModeWaitHandler;
    use crate::tools::handlers::DynamicToolHandler;
    use crate::tools::handlers::GrepFilesHandler;
    use crate::tools::handlers::JsReplHandler;
    use crate::tools::handlers::JsReplResetHandler;
    use crate::tools::handlers::ListDirHandler;
    use crate::tools::handlers::McpHandler;
    use crate::tools::handlers::McpResourceHandler;
    use crate::tools::handlers::PlanHandler;
@@ -3029,20 +2926,6 @@ pub(crate) fn build_specs_with_discoverable_tools(
        builder.register_handler("apply_patch", apply_patch_handler);
    }

    if config
        .experimental_supported_tools
        .contains(&"grep_files".to_string())
    {
        let grep_files_handler = Arc::new(GrepFilesHandler);
        push_tool_spec(
            &mut builder,
            create_grep_files_tool(),
            /*supports_parallel_tool_calls*/ true,
            config.code_mode_enabled,
        );
        builder.register_handler("grep_files", grep_files_handler);
    }

    if config
        .experimental_supported_tools
        .contains(&"read_file".to_string())
@@ -3057,21 +2940,6 @@ pub(crate) fn build_specs_with_discoverable_tools(
        builder.register_handler("read_file", read_file_handler);
    }

    if config
        .experimental_supported_tools
        .iter()
        .any(|tool| tool == "list_dir")
    {
        let list_dir_handler = Arc::new(ListDirHandler);
        push_tool_spec(
            &mut builder,
            create_list_dir_tool(),
            /*supports_parallel_tool_calls*/ true,
            config.code_mode_enabled,
        );
        builder.register_handler("list_dir", list_dir_handler);
    }

    if config
        .experimental_supported_tools
        .contains(&"test_sync_tool".to_string())
@@ -1665,21 +1665,13 @@ fn test_parallel_support_flags() {

    assert!(find_tool(&tools, "exec_command").supports_parallel_tool_calls);
    assert!(!find_tool(&tools, "write_stdin").supports_parallel_tool_calls);
    assert!(find_tool(&tools, "grep_files").supports_parallel_tool_calls);
    assert!(find_tool(&tools, "list_dir").supports_parallel_tool_calls);
    assert!(find_tool(&tools, "read_file").supports_parallel_tool_calls);
}

#[test]
fn test_test_model_info_includes_sync_tool() {
    let _config = test_config();
    let mut model_info = model_info_from_models_json("gpt-5-codex");
    model_info.experimental_supported_tools = vec![
        "test_sync_tool".to_string(),
        "read_file".to_string(),
        "grep_files".to_string(),
        "list_dir".to_string(),
    ];
    model_info.experimental_supported_tools = vec!["test_sync_tool".to_string()];
    let features = Features::with_defaults();
    let available_models = Vec::new();
    let tools_config = ToolsConfig::new(&ToolsConfigParams {
@@ -1698,17 +1690,6 @@ fn test_test_model_info_includes_sync_tool() {
            .iter()
            .any(|tool| tool_name(&tool.spec) == "test_sync_tool")
    );
    assert!(
        tools
            .iter()
            .any(|tool| tool_name(&tool.spec) == "read_file")
    );
    assert!(
        tools
            .iter()
            .any(|tool| tool_name(&tool.spec) == "grep_files")
    );
    assert!(tools.iter().any(|tool| tool_name(&tool.spec) == "list_dir"));
}

#[test]
@@ -616,12 +616,7 @@ fn ensure_test_model_catalog(config: &mut Config) -> Result<()> {
        .unwrap_or_else(|| panic!("missing bundled model gpt-5.1-codex"));
    model.slug = TEST_MODEL_WITH_EXPERIMENTAL_TOOLS.to_string();
    model.display_name = TEST_MODEL_WITH_EXPERIMENTAL_TOOLS.to_string();
    model.experimental_supported_tools = vec![
        "test_sync_tool".to_string(),
        "read_file".to_string(),
        "grep_files".to_string(),
        "list_dir".to_string(),
    ];
    model.experimental_supported_tools = vec!["test_sync_tool".to_string()];
    config.model_catalog = Some(ModelsResponse {
        models: vec![model],
    });
@@ -1,145 +0,0 @@
#![cfg(not(target_os = "windows"))]

use anyhow::Result;
use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use std::collections::HashSet;
use std::path::Path;
use std::process::Command as StdCommand;

const MODEL_WITH_TOOL: &str = "test-gpt-5.1-codex";

fn ripgrep_available() -> bool {
    StdCommand::new("rg")
        .arg("--version")
        .output()
        .map(|output| output.status.success())
        .unwrap_or(false)
}

macro_rules! skip_if_ripgrep_missing {
    ($ret:expr $(,)?) => {{
        if !ripgrep_available() {
            eprintln!("rg not available in PATH; skipping test");
            return $ret;
        }
    }};
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_collects_matches() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));

    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;

    let search_dir = test.cwd.path().join("src");
    std::fs::create_dir_all(&search_dir)?;
    let alpha = search_dir.join("alpha.rs");
    let beta = search_dir.join("beta.rs");
    let gamma = search_dir.join("gamma.txt");
    std::fs::write(&alpha, "alpha needle\n")?;
    std::fs::write(&beta, "beta needle\n")?;
    std::fs::write(&gamma, "needle in text but excluded\n")?;

    let call_id = "grep-files-collect";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "include": "*.rs",
    })
    .to_string();

    let mocks =
        mount_function_call_agent_response(&server, call_id, &arguments, "grep_files").await;
    test.submit_turn("please find uses of needle").await?;

    let req = mocks.completion.single_request();
    let (content_opt, success_opt) = req
        .function_call_output_content_and_success(call_id)
        .expect("tool output present");
    let content = content_opt.expect("content present");
    let success = success_opt.unwrap_or(true);
    assert!(
        success,
        "expected success for matches, got content={content}"
    );

    let entries = collect_file_names(&content);
    assert_eq!(entries.len(), 2, "content: {content}");
    assert!(
        entries.contains("alpha.rs"),
        "missing alpha.rs in {entries:?}"
    );
    assert!(
        entries.contains("beta.rs"),
        "missing beta.rs in {entries:?}"
    );
    assert!(
        !entries.contains("gamma.txt"),
        "txt file should be filtered out: {entries:?}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_reports_empty_results() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));

    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;

    let search_dir = test.cwd.path().join("logs");
    std::fs::create_dir_all(&search_dir)?;
    std::fs::write(search_dir.join("output.txt"), "no hits here")?;

    let call_id = "grep-files-empty";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "limit": 5,
    })
    .to_string();

    let mocks =
        mount_function_call_agent_response(&server, call_id, &arguments, "grep_files").await;
    test.submit_turn("search again").await?;

    let req = mocks.completion.single_request();
    let (content_opt, success_opt) = req
        .function_call_output_content_and_success(call_id)
        .expect("tool output present");
    let content = content_opt.expect("content present");
    if let Some(success) = success_opt {
        assert!(!success, "expected success=false content={content}");
    }
    assert_eq!(content, "No matches found.");

    Ok(())
}

#[allow(clippy::expect_used)]
async fn build_test_codex(server: &wiremock::MockServer) -> Result<TestCodex> {
    let mut builder = test_codex().with_model(MODEL_WITH_TOOL);
    builder.build(server).await
}

fn collect_file_names(content: &str) -> HashSet<String> {
    content
        .lines()
        .filter_map(|line| {
            if line.trim().is_empty() {
                return None;
            }
            Path::new(line)
                .file_name()
                .map(|name| name.to_string_lossy().into_owned())
        })
        .collect()
}
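These tests exercise the handler end to end, and the handler in turn shells out to ripgrep. For reference, a minimal synchronous sketch of the same `rg` invocation the deleted `run_rg_search` builds (flags copied from the source above; std only, no tokio, with placeholder pattern and paths):

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    let output = Command::new("rg")
        .arg("--files-with-matches") // print matching file paths, not matching lines
        .arg("--sortr=modified")     // newest files first
        .arg("--regexp")
        .arg("needle")               // placeholder pattern
        .arg("--no-messages")        // suppress per-file error chatter
        .arg("--glob")
        .arg("*.rs")                 // optional include glob
        .arg("--")
        .arg(".")                    // placeholder search path
        .output()?;

    match output.status.code() {
        Some(0) => print!("{}", String::from_utf8_lossy(&output.stdout)),
        Some(1) => println!("No matches found."), // rg exits 1 on zero matches
        _ => eprintln!("{}", String::from_utf8_lossy(&output.stderr)),
    }
    Ok(())
}
```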
@@ -1,167 +0,0 @@
#![cfg(not(target_os = "windows"))]

use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use pretty_assertions::assert_eq;
use serde_json::json;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_returns_entries() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;

    let dir_path = test.cwd.path().join("sample_dir");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "first file")?;
    std::fs::create_dir(dir_path.join("nested"))?;
    let dir_path = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-call";
    let arguments = json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 2,
    })
    .to_string();

    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents").await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(output, "E1: [file] alpha.txt\nE2: [dir] nested");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_one_omits_children() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;

    let dir_path = test.cwd.path().join("depth_one");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    std::fs::create_dir(dir_path.join("nested"))?;
    std::fs::write(dir_path.join("nested").join("beta.txt"), "beta")?;
    let dir_path = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-depth1";
    let arguments = json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 10,
        "depth": 1,
    })
    .to_string();

    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth one")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(output, "E1: [file] alpha.txt\nE2: [dir] nested");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_two_includes_children_only() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;

    let dir_path = test.cwd.path().join("depth_two");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    let nested = dir_path.join("nested");
    std::fs::create_dir(&nested)?;
    std::fs::write(nested.join("beta.txt"), "beta")?;
    let deeper = nested.join("grand");
    std::fs::create_dir(&deeper)?;
    std::fs::write(deeper.join("gamma.txt"), "gamma")?;
    let dir_path_string = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-depth2";
    let arguments = json!({
        "dir_path": dir_path_string,
        "offset": 1,
        "limit": 10,
        "depth": 2,
    })
    .to_string();

    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth two")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(
        output,
        "E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_three_includes_grandchildren() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;

    let dir_path = test.cwd.path().join("depth_three");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    let nested = dir_path.join("nested");
    std::fs::create_dir(&nested)?;
    std::fs::write(nested.join("beta.txt"), "beta")?;
    let deeper = nested.join("grand");
    std::fs::create_dir(&deeper)?;
    std::fs::write(deeper.join("gamma.txt"), "gamma")?;
    let dir_path_string = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-depth3";
    let arguments = json!({
        "dir_path": dir_path_string,
        "offset": 1,
        "limit": 10,
        "depth": 3,
    })
    .to_string();

    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth three")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(
        output,
        "E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand\nE5: [file] nested/grand/gamma.txt"
    );

    Ok(())
}
@@ -75,7 +75,6 @@ mod deprecation_notice;
mod exec;
mod exec_policy;
mod fork_thread;
mod grep_files;
mod hierarchical_agents;
#[cfg(not(target_os = "windows"))]
mod hooks;
@@ -83,7 +82,6 @@ mod image_rollout;
mod items;
mod js_repl;
mod json_result;
mod list_dir;
mod live_cli;
mod live_reload;
mod memories;
@@ -101,7 +99,6 @@ mod personality_migration;
mod plugins;
mod prompt_caching;
mod quota_exceeded;
mod read_file;
mod realtime_conversation;
mod remote_env;
mod remote_models;
@@ -1,42 +0,0 @@
#![cfg(not(target_os = "windows"))]

use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use pretty_assertions::assert_eq;
use serde_json::json;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable read_file tool"]
async fn read_file_tool_returns_requested_lines() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;

    let file_path = test.cwd.path().join("sample.txt");
    std::fs::write(&file_path, "first\nsecond\nthird\nfourth\n")?;
    let file_path = file_path.to_string_lossy().to_string();

    let call_id = "read-file-call";
    let arguments = json!({
        "file_path": file_path,
        "offset": 2,
        "limit": 2,
    })
    .to_string();

    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "read_file").await;

    test.submit_turn("please inspect sample.txt").await?;

    let req = mocks.completion.single_request();
    let (output_text_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("output present");
    let output_text = output_text_opt.expect("output text present");
    assert_eq!(output_text, "L2: second\nL3: third");

    Ok(())
}