chore: remove grep_files handler (#15775)

# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the dedicated
"Contributing" markdown file or your PR may be closed:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text
with a detailed, high-quality description of your changes.

Include a link to a bug report or enhancement request.

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
jif-oai
2026-03-25 16:01:45 +00:00
committed by Roy Han
parent 1dc26021c8
commit 1c8bd07c9c
9 changed files with 2 additions and 629 deletions

View File

@@ -1,176 +0,0 @@
use std::path::Path;
use std::time::Duration;
use async_trait::async_trait;
use serde::Deserialize;
use tokio::process::Command;
use tokio::time::timeout;
use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
/// Handler for the `grep_files` tool: shells out to ripgrep to list files
/// whose contents match a regex pattern.
pub struct GrepFilesHandler;
// Number of file paths returned when the caller omits `limit`.
const DEFAULT_LIMIT: usize = 100;
// Hard upper bound applied to any requested `limit`.
const MAX_LIMIT: usize = 2000;
// Wall-clock budget for a single `rg` invocation.
const COMMAND_TIMEOUT: Duration = Duration::from_secs(30);
// Serde default for `GrepFilesArgs::limit`.
fn default_limit() -> usize {
    DEFAULT_LIMIT
}
/// Arguments for the `grep_files` tool, deserialized from the
/// model-provided JSON payload.
#[derive(Deserialize)]
struct GrepFilesArgs {
    /// Regular expression handed to ripgrep via `--regexp`.
    pattern: String,
    /// Optional glob restricting which files are searched (e.g. "*.rs").
    #[serde(default)]
    include: Option<String>,
    /// Directory or file to search; resolved against the turn when absent.
    #[serde(default)]
    path: Option<String>,
    /// Maximum number of matching file paths to return (capped at MAX_LIMIT).
    #[serde(default = "default_limit")]
    limit: usize,
}
#[async_trait]
impl ToolHandler for GrepFilesHandler {
    type Output = FunctionToolOutput;

    fn kind(&self) -> ToolKind {
        ToolKind::Function
    }

    /// Validates the tool arguments, runs the ripgrep search, and formats the
    /// result for the model: a newline-joined path list on success, or a
    /// "No matches found." message when nothing matched.
    async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
        let ToolInvocation { payload, turn, .. } = invocation;
        let ToolPayload::Function { arguments } = payload else {
            return Err(FunctionCallError::RespondToModel(
                "grep_files handler received unsupported payload".to_string(),
            ));
        };

        let args: GrepFilesArgs = parse_arguments(&arguments)?;
        let pattern = args.pattern.trim();
        if pattern.is_empty() {
            return Err(FunctionCallError::RespondToModel(
                "pattern must not be empty".to_string(),
            ));
        }
        if args.limit == 0 {
            return Err(FunctionCallError::RespondToModel(
                "limit must be greater than zero".to_string(),
            ));
        }
        // Clamp the requested limit to keep output bounded.
        let capped_limit = args.limit.min(MAX_LIMIT);

        let search_path = turn.resolve_path(args.path.clone());
        verify_path_exists(&search_path).await?;

        // Normalize the include glob: whitespace-only values count as absent.
        let glob = args
            .include
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(str::to_string);

        let matches =
            run_rg_search(pattern, glob.as_deref(), &search_path, capped_limit, &turn.cwd).await?;
        if matches.is_empty() {
            return Ok(FunctionToolOutput::from_text(
                "No matches found.".to_string(),
                Some(false),
            ));
        }
        Ok(FunctionToolOutput::from_text(matches.join("\n"), Some(true)))
    }
}
/// Ensures `path` is accessible on disk, converting any I/O error into a
/// model-facing message that includes the offending path.
async fn verify_path_exists(path: &Path) -> Result<(), FunctionCallError> {
    match tokio::fs::metadata(path).await {
        Ok(_) => Ok(()),
        Err(err) => Err(FunctionCallError::RespondToModel(format!(
            "unable to access `{}`: {err}",
            path.display()
        ))),
    }
}
/// Invokes `rg --files-with-matches` for `pattern` under `search_path`,
/// returning up to `limit` matching file paths, newest-modified first.
///
/// Returns an empty vec when ripgrep reports no matches, and a model-facing
/// error when `rg` cannot be launched, times out, or exits abnormally.
async fn run_rg_search(
    pattern: &str,
    include: Option<&str>,
    search_path: &Path,
    limit: usize,
    cwd: &Path,
) -> Result<Vec<String>, FunctionCallError> {
    let mut command = Command::new("rg");
    command
        .current_dir(cwd)
        .arg("--files-with-matches")
        // Newest files first so the most relevant paths survive the limit cut.
        .arg("--sortr=modified")
        .arg("--regexp")
        .arg(pattern)
        .arg("--no-messages");
    if let Some(glob) = include {
        command.arg("--glob").arg(glob);
    }
    // `--` stops flag parsing so a dash-prefixed path is not read as an option.
    command.arg("--").arg(search_path);
    let output = timeout(COMMAND_TIMEOUT, command.output())
        .await
        .map_err(|_| {
            // Derive the message from COMMAND_TIMEOUT so the text can never
            // drift out of sync with the actual timeout value.
            FunctionCallError::RespondToModel(format!(
                "rg timed out after {} seconds",
                COMMAND_TIMEOUT.as_secs()
            ))
        })?
        .map_err(|err| {
            FunctionCallError::RespondToModel(format!(
                "failed to launch rg: {err}. Ensure ripgrep is installed and on PATH."
            ))
        })?;
    // ripgrep exit codes: 0 = matches found, 1 = no matches, other = error.
    match output.status.code() {
        Some(0) => Ok(parse_results(&output.stdout, limit)),
        Some(1) => Ok(Vec::new()),
        _ => {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Err(FunctionCallError::RespondToModel(format!(
                "rg failed: {stderr}"
            )))
        }
    }
}
/// Splits raw `rg` stdout (one path per line) into at most `limit` strings.
///
/// Empty lines and lines that are not valid UTF-8 are skipped and do not
/// count toward `limit`. The original version also re-checked emptiness on
/// the decoded &str, which was dead code (the byte slice was already known
/// to be non-empty); the iterator form drops that redundant branch.
fn parse_results(stdout: &[u8], limit: usize) -> Vec<String> {
    stdout
        .split(|byte| *byte == b'\n')
        .filter(|line| !line.is_empty())
        .filter_map(|line| std::str::from_utf8(line).ok())
        .map(str::to_string)
        .take(limit)
        .collect()
}
#[cfg(test)]
#[path = "grep_files_tests.rs"]
mod tests;

View File

@@ -2,7 +2,6 @@ pub(crate) mod agent_jobs;
pub mod apply_patch;
mod artifacts;
mod dynamic;
mod grep_files;
mod js_repl;
mod list_dir;
mod mcp;
@@ -41,7 +40,6 @@ pub use artifacts::ArtifactsHandler;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::AskForApproval;
pub use dynamic::DynamicToolHandler;
pub use grep_files::GrepFilesHandler;
pub use js_repl::JsReplHandler;
pub use js_repl::JsReplResetHandler;
pub use list_dir::ListDirHandler;

View File

@@ -1839,59 +1839,6 @@ fn create_test_sync_tool() -> ToolSpec {
})
}
/// Builds the `grep_files` function-tool spec advertised to the model.
fn create_grep_files_tool() -> ToolSpec {
    let mut properties = BTreeMap::new();
    properties.insert(
        "pattern".to_string(),
        JsonSchema::String {
            description: Some("Regular expression pattern to search for.".to_string()),
        },
    );
    properties.insert(
        "include".to_string(),
        JsonSchema::String {
            description: Some(
                "Optional glob that limits which files are searched (e.g. \"*.rs\" or \"*.{ts,tsx}\")."
                    .to_string(),
            ),
        },
    );
    properties.insert(
        "path".to_string(),
        JsonSchema::String {
            description: Some(
                "Directory or file path to search. Defaults to the session's working directory."
                    .to_string(),
            ),
        },
    );
    properties.insert(
        "limit".to_string(),
        JsonSchema::Number {
            description: Some(
                "Maximum number of file paths to return (defaults to 100).".to_string(),
            ),
        },
    );
    // Only `pattern` is required; the rest fall back to server-side defaults.
    ToolSpec::Function(ResponsesApiTool {
        name: "grep_files".to_string(),
        description: "Finds files whose contents match the pattern and lists them by modification time."
            .to_string(),
        strict: false,
        defer_loading: None,
        parameters: JsonSchema::Object {
            properties,
            required: Some(vec!["pattern".to_string()]),
            additional_properties: Some(false.into()),
        },
        output_schema: None,
    })
}
fn create_tool_search_tool(app_tools: &HashMap<String, ToolInfo>) -> ToolSpec {
let properties = BTreeMap::from([
(
@@ -2751,7 +2698,6 @@ pub(crate) fn build_specs_with_discoverable_tools(
use crate::tools::handlers::CodeModeExecuteHandler;
use crate::tools::handlers::CodeModeWaitHandler;
use crate::tools::handlers::DynamicToolHandler;
use crate::tools::handlers::GrepFilesHandler;
use crate::tools::handlers::JsReplHandler;
use crate::tools::handlers::JsReplResetHandler;
use crate::tools::handlers::ListDirHandler;
@@ -3029,20 +2975,6 @@ pub(crate) fn build_specs_with_discoverable_tools(
builder.register_handler("apply_patch", apply_patch_handler);
}
if config
.experimental_supported_tools
.contains(&"grep_files".to_string())
{
let grep_files_handler = Arc::new(GrepFilesHandler);
push_tool_spec(
&mut builder,
create_grep_files_tool(),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
builder.register_handler("grep_files", grep_files_handler);
}
if config
.experimental_supported_tools
.contains(&"read_file".to_string())

View File

@@ -1665,21 +1665,13 @@ fn test_parallel_support_flags() {
assert!(find_tool(&tools, "exec_command").supports_parallel_tool_calls);
assert!(!find_tool(&tools, "write_stdin").supports_parallel_tool_calls);
assert!(find_tool(&tools, "grep_files").supports_parallel_tool_calls);
assert!(find_tool(&tools, "list_dir").supports_parallel_tool_calls);
assert!(find_tool(&tools, "read_file").supports_parallel_tool_calls);
}
#[test]
fn test_test_model_info_includes_sync_tool() {
let _config = test_config();
let mut model_info = model_info_from_models_json("gpt-5-codex");
model_info.experimental_supported_tools = vec![
"test_sync_tool".to_string(),
"read_file".to_string(),
"grep_files".to_string(),
"list_dir".to_string(),
];
model_info.experimental_supported_tools = vec!["test_sync_tool".to_string()];
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@@ -1698,17 +1690,6 @@ fn test_test_model_info_includes_sync_tool() {
.iter()
.any(|tool| tool_name(&tool.spec) == "test_sync_tool")
);
assert!(
tools
.iter()
.any(|tool| tool_name(&tool.spec) == "read_file")
);
assert!(
tools
.iter()
.any(|tool| tool_name(&tool.spec) == "grep_files")
);
assert!(tools.iter().any(|tool| tool_name(&tool.spec) == "list_dir"));
}
#[test]

View File

@@ -616,12 +616,7 @@ fn ensure_test_model_catalog(config: &mut Config) -> Result<()> {
.unwrap_or_else(|| panic!("missing bundled model gpt-5.1-codex"));
model.slug = TEST_MODEL_WITH_EXPERIMENTAL_TOOLS.to_string();
model.display_name = TEST_MODEL_WITH_EXPERIMENTAL_TOOLS.to_string();
model.experimental_supported_tools = vec![
"test_sync_tool".to_string(),
"read_file".to_string(),
"grep_files".to_string(),
"list_dir".to_string(),
];
model.experimental_supported_tools = vec!["test_sync_tool".to_string()];
config.model_catalog = Some(ModelsResponse {
models: vec![model],
});

View File

@@ -1,145 +0,0 @@
#![cfg(not(target_os = "windows"))]
use anyhow::Result;
use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use std::collections::HashSet;
use std::path::Path;
use std::process::Command as StdCommand;
// Model slug used by these tests; presumably its catalog entry enables the
// experimental grep_files tool — verify against the test model catalog.
const MODEL_WITH_TOOL: &str = "test-gpt-5.1-codex";
/// Returns true when an `rg` binary is on PATH and exits successfully for
/// `--version`; any spawn failure is treated as "not available".
fn ripgrep_available() -> bool {
    matches!(
        StdCommand::new("rg").arg("--version").output(),
        Ok(output) if output.status.success()
    )
}
/// Early-returns `$ret` from the enclosing test when ripgrep is not
/// installed, printing a skip notice to stderr instead of failing the test.
macro_rules! skip_if_ripgrep_missing {
    ($ret:expr $(,)?) => {{
        if !ripgrep_available() {
            eprintln!("rg not available in PATH; skipping test");
            return $ret;
        }
    }};
}
/// End-to-end check: a mocked model turn invokes `grep_files` with an
/// `include` glob, and the tool output lists only the matching `*.rs` files.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_collects_matches() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));
    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;
    // Two .rs files contain the pattern; the .txt match must be excluded by
    // the "*.rs" glob below.
    let search_dir = test.cwd.path().join("src");
    std::fs::create_dir_all(&search_dir)?;
    let alpha = search_dir.join("alpha.rs");
    let beta = search_dir.join("beta.rs");
    let gamma = search_dir.join("gamma.txt");
    std::fs::write(&alpha, "alpha needle\n")?;
    std::fs::write(&beta, "beta needle\n")?;
    std::fs::write(&gamma, "needle in text but excluded\n")?;
    let call_id = "grep-files-collect";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "include": "*.rs",
    })
    .to_string();
    // Script a model response that issues the grep_files function call.
    let mocks =
        mount_function_call_agent_response(&server, call_id, &arguments, "grep_files").await;
    test.submit_turn("please find uses of needle").await?;
    // The follow-up request the client sends carries the tool output payload.
    let req = mocks.completion.single_request();
    let (content_opt, success_opt) = req
        .function_call_output_content_and_success(call_id)
        .expect("tool output present");
    let content = content_opt.expect("content present");
    // An absent success flag is treated as success.
    let success = success_opt.unwrap_or(true);
    assert!(
        success,
        "expected success for matches, got content={content}"
    );
    let entries = collect_file_names(&content);
    assert_eq!(entries.len(), 2, "content: {content}");
    assert!(
        entries.contains("alpha.rs"),
        "missing alpha.rs in {entries:?}"
    );
    assert!(
        entries.contains("beta.rs"),
        "missing beta.rs in {entries:?}"
    );
    assert!(
        !entries.contains("gamma.txt"),
        "txt file should be filtered out: {entries:?}"
    );
    Ok(())
}
/// When the searched directory contains no matches, the tool output must be
/// exactly "No matches found." with success=false (when the flag is present).
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_reports_empty_results() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));
    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;
    // A file that deliberately does not contain the searched pattern.
    let search_dir = test.cwd.path().join("logs");
    std::fs::create_dir_all(&search_dir)?;
    std::fs::write(search_dir.join("output.txt"), "no hits here")?;
    let call_id = "grep-files-empty";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "limit": 5,
    })
    .to_string();
    let mocks =
        mount_function_call_agent_response(&server, call_id, &arguments, "grep_files").await;
    test.submit_turn("search again").await?;
    let req = mocks.completion.single_request();
    let (content_opt, success_opt) = req
        .function_call_output_content_and_success(call_id)
        .expect("tool output present");
    let content = content_opt.expect("content present");
    // The success flag is optional in the payload; only assert when present.
    if let Some(success) = success_opt {
        assert!(!success, "expected success=false content={content}");
    }
    assert_eq!(content, "No matches found.");
    Ok(())
}
/// Builds a TestCodex wired to the mock server, selecting the model slug
/// whose catalog entry enables the grep_files tool.
#[allow(clippy::expect_used)]
async fn build_test_codex(server: &wiremock::MockServer) -> Result<TestCodex> {
    let mut builder = test_codex().with_model(MODEL_WITH_TOOL);
    builder.build(server).await
}
/// Extracts the base file name from every non-blank line of tool output,
/// deduplicating via the returned set.
fn collect_file_names(content: &str) -> HashSet<String> {
    let mut names = HashSet::new();
    for line in content.lines() {
        if line.trim().is_empty() {
            continue;
        }
        if let Some(name) = Path::new(line).file_name() {
            names.insert(name.to_string_lossy().into_owned());
        }
    }
    names
}

View File

@@ -1,167 +0,0 @@
#![cfg(not(target_os = "windows"))]
use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use pretty_assertions::assert_eq;
use serde_json::json;
/// Checks list_dir's output format for a flat directory: entries appear as
/// "E<n>: [file|dir] <name>" — verify exact format against the handler.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_returns_entries() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    // One file and one subdirectory inside the listed directory.
    let dir_path = test.cwd.path().join("sample_dir");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "first file")?;
    std::fs::create_dir(dir_path.join("nested"))?;
    let dir_path = dir_path.to_string_lossy().to_string();
    let call_id = "list-dir-call";
    let arguments = json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 2,
    })
    .to_string();
    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents").await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(output, "E1: [file] alpha.txt\nE2: [dir] nested");
    Ok(())
}
/// With depth=1 the listing must show only direct entries; the file inside
/// `nested/` must not appear in the output.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_one_omits_children() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    let dir_path = test.cwd.path().join("depth_one");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    // Nested file exists on disk but should be hidden at depth 1.
    std::fs::create_dir(dir_path.join("nested"))?;
    std::fs::write(dir_path.join("nested").join("beta.txt"), "beta")?;
    let dir_path = dir_path.to_string_lossy().to_string();
    let call_id = "list-dir-depth1";
    let arguments = json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 10,
        "depth": 1,
    })
    .to_string();
    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth one")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(output, "E1: [file] alpha.txt\nE2: [dir] nested");
    Ok(())
}
/// With depth=2 the listing must include children of subdirectories but stop
/// there: `nested/grand` appears as a dir, its contents do not.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_two_includes_children_only() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    // Three-level layout: depth_two/{alpha.txt,nested/{beta.txt,grand/gamma.txt}}.
    let dir_path = test.cwd.path().join("depth_two");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    let nested = dir_path.join("nested");
    std::fs::create_dir(&nested)?;
    std::fs::write(nested.join("beta.txt"), "beta")?;
    let deeper = nested.join("grand");
    std::fs::create_dir(&deeper)?;
    std::fs::write(deeper.join("gamma.txt"), "gamma")?;
    let dir_path_string = dir_path.to_string_lossy().to_string();
    let call_id = "list-dir-depth2";
    let arguments = json!({
        "dir_path": dir_path_string,
        "offset": 1,
        "limit": 10,
        "depth": 2,
    })
    .to_string();
    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth two")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    // gamma.txt (grandchild) must be absent at this depth.
    assert_eq!(
        output,
        "E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand"
    );
    Ok(())
}
/// With depth=3 the listing must also reach grandchildren: the file inside
/// `nested/grand/` now appears at the end of the output.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_three_includes_grandchildren() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    // Same three-level layout as the depth-two test.
    let dir_path = test.cwd.path().join("depth_three");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    let nested = dir_path.join("nested");
    std::fs::create_dir(&nested)?;
    std::fs::write(nested.join("beta.txt"), "beta")?;
    let deeper = nested.join("grand");
    std::fs::create_dir(&deeper)?;
    std::fs::write(deeper.join("gamma.txt"), "gamma")?;
    let dir_path_string = dir_path.to_string_lossy().to_string();
    let call_id = "list-dir-depth3";
    let arguments = json!({
        "dir_path": dir_path_string,
        "offset": 1,
        "limit": 10,
        "depth": 3,
    })
    .to_string();
    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "list_dir").await;
    test.submit_turn("list directory contents depth three")
        .await?;
    let req = mocks.completion.single_request();
    let (content_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("function_call_output present");
    let output = content_opt.expect("output content present in tool output");
    assert_eq!(
        output,
        "E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand\nE5: [file] nested/grand/gamma.txt"
    );
    Ok(())
}

View File

@@ -75,7 +75,6 @@ mod deprecation_notice;
mod exec;
mod exec_policy;
mod fork_thread;
mod grep_files;
mod hierarchical_agents;
#[cfg(not(target_os = "windows"))]
mod hooks;
@@ -83,7 +82,6 @@ mod image_rollout;
mod items;
mod js_repl;
mod json_result;
mod list_dir;
mod live_cli;
mod live_reload;
mod memories;
@@ -101,7 +99,6 @@ mod personality_migration;
mod plugins;
mod prompt_caching;
mod quota_exceeded;
mod read_file;
mod realtime_conversation;
mod remote_env;
mod remote_models;

View File

@@ -1,42 +0,0 @@
#![cfg(not(target_os = "windows"))]
use core_test_support::responses::mount_function_call_agent_response;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use pretty_assertions::assert_eq;
use serde_json::json;
/// read_file must return only the requested window of lines, each prefixed
/// with its 1-based line number ("L2: ...").
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable read_file tool"]
async fn read_file_tool_returns_requested_lines() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    // Four lines; the call below requests lines 2-3 (offset=2, limit=2).
    let file_path = test.cwd.path().join("sample.txt");
    std::fs::write(&file_path, "first\nsecond\nthird\nfourth\n")?;
    let file_path = file_path.to_string_lossy().to_string();
    let call_id = "read-file-call";
    let arguments = json!({
        "file_path": file_path,
        "offset": 2,
        "limit": 2,
    })
    .to_string();
    let mocks = mount_function_call_agent_response(&server, call_id, &arguments, "read_file").await;
    test.submit_turn("please inspect sample.txt").await?;
    let req = mocks.completion.single_request();
    let (output_text_opt, _) = req
        .function_call_output_content_and_success(call_id)
        .expect("output present");
    let output_text = output_text_opt.expect("output text present");
    assert_eq!(output_text, "L2: second\nL3: third");
    Ok(())
}