Compare commits

..

1 Commits

Author SHA1 Message Date
Michael Bolin
2810b26ffd core: type-erase ToolHandler outputs 2026-04-02 15:16:56 -07:00
57 changed files with 2507 additions and 3083 deletions

View File

@@ -234,18 +234,14 @@ crate.annotation(
inject_repo(crate, "zlib")
bazel_dep(name = "xz", version = "5.4.5.bcr.8")
# TODO(zbarsky): Enable annotation after fixing windows arm64 builds.
crate.annotation(
crate = "lzma-sys",
gen_build_script = "off",
deps = ["@xz//:lzma"],
gen_build_script = "on",
)
bazel_dep(name = "openssl", version = "3.5.4.bcr.0")
inject_repo(crate, "xz")
crate.annotation(
build_script_data = [
"@openssl//:gen_dir",

2
MODULE.bazel.lock generated
View File

@@ -228,8 +228,6 @@
"https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43",
"https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/MODULE.bazel": "b573395fe63aef4299ba095173e2f62ccfee5ad9bbf7acaa95dba73af9fc2b38",
"https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/source.json": "3f3fbaeafecaf629877ad152a2c9def21f8d330d91aa94c5dc75bbb98c10b8b8",
"https://bcr.bazel.build/modules/xz/5.4.5.bcr.8/MODULE.bazel": "e48a69bd54053c2ec5fffc2a29fb70122afd3e83ab6c07068f63bc6553fa57cc",
"https://bcr.bazel.build/modules/xz/5.4.5.bcr.8/source.json": "bd7e928ccd63505b44f4784f7bbf12cc11f9ff23bf3ca12ff2c91cd74846099e",
"https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0",
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.5/MODULE.bazel": "eec517b5bbe5492629466e11dae908d043364302283de25581e3eb944326c4ca",
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.8/MODULE.bazel": "772c674bb78a0342b8caf32ab5c25085c493ca4ff08398208dcbe4375fe9f776",

View File

@@ -132,7 +132,7 @@ Example with notification opt-out:
## API Overview
- `thread/start` — create a new thread; emits `thread/started` (including the current `thread.status`) and auto-subscribes you to turn/item events for that thread. When the request includes a `cwd` and the resolved sandbox is `workspace-write` or full access, app-server also marks that project as trusted in the user `config.toml`.
- `thread/start` — create a new thread; emits `thread/started` (including the current `thread.status`) and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; if the source thread is currently mid-turn, the fork records the same interruption marker as `turn/interrupt` instead of inheriting an unmarked partial turn suffix. The returned `thread.forkedFromId` points at the source thread when known. Accepts `ephemeral: true` for an in-memory temporary fork, emits `thread/started` (including the current `thread.status`), and auto-subscribes you to turn/item events for the new thread.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders`, `sourceKinds`, `archived`, `cwd`, and `searchTerm` filters. Each returned `thread` includes `status` (`ThreadStatus`), defaulting to `notLoaded` when the thread is not currently loaded.

View File

@@ -235,7 +235,6 @@ use codex_features::Feature;
use codex_features::Stage;
use codex_feedback::CodexFeedback;
use codex_git_utils::git_diff_to_remote;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_login::AuthManager;
use codex_login::AuthMode as CoreAuthMode;
use codex_login::CLIENT_ID;
@@ -256,7 +255,6 @@ use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::dynamic_tools::DynamicToolSpec as CoreDynamicToolSpec;
use codex_protocol::items::TurnItem;
@@ -2192,11 +2190,10 @@ impl CodexMessageProcessor {
experimental_raw_events: bool,
request_trace: Option<W3cTraceContext>,
) {
let requested_cwd = typesafe_overrides.cwd.clone();
let mut config = match derive_config_from_params(
let config = match derive_config_from_params(
&cli_overrides,
config_overrides.clone(),
typesafe_overrides.clone(),
config_overrides,
typesafe_overrides,
&cloud_requirements,
&listener_task_context.codex_home,
&runtime_feature_enablement,
@@ -2214,56 +2211,6 @@ impl CodexMessageProcessor {
}
};
if requested_cwd.is_some()
&& !config.active_project.is_trusted()
&& matches!(
config.permissions.sandbox_policy.get(),
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite { .. }
| codex_protocol::protocol::SandboxPolicy::DangerFullAccess
| codex_protocol::protocol::SandboxPolicy::ExternalSandbox { .. }
)
{
let trust_target = resolve_root_git_project_for_trust(config.cwd.as_path())
.unwrap_or_else(|| config.cwd.to_path_buf());
if let Err(err) = codex_core::config::set_project_trust_level(
&listener_task_context.codex_home,
trust_target.as_path(),
TrustLevel::Trusted,
) {
let error = JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to persist trusted project state: {err}"),
data: None,
};
listener_task_context
.outgoing
.send_error(request_id, error)
.await;
return;
}
config = match derive_config_from_params(
&cli_overrides,
config_overrides,
typesafe_overrides,
&cloud_requirements,
&listener_task_context.codex_home,
&runtime_feature_enablement,
)
.await
{
Ok(config) => config,
Err(err) => {
let error = config_load_error(&err);
listener_task_context
.outgoing
.send_error(request_id, error)
.await;
return;
}
};
}
let dynamic_tools = dynamic_tools.unwrap_or_default();
let core_dynamic_tools = if dynamic_tools.is_empty() {
Vec::new()

View File

@@ -26,7 +26,6 @@ use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::shell::default_user_shell;
use codex_features::FEATURES;
use codex_features::Feature;
use pretty_assertions::assert_eq;
@@ -68,12 +67,11 @@ async fn thread_shell_command_runs_as_standalone_turn_and_persists_history() ->
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
let (shell_command, expected_output) = current_shell_output_command("hello from bang")?;
let shell_id = mcp
.send_thread_shell_command_request(ThreadShellCommandParams {
thread_id: thread.id.clone(),
command: shell_command,
command: "printf 'hello from bang\\n'".to_string(),
})
.await?;
let shell_resp: JSONRPCResponse = timeout(
@@ -95,7 +93,7 @@ async fn thread_shell_command_runs_as_standalone_turn_and_persists_history() ->
assert_eq!(status, &CommandExecutionStatus::InProgress);
let delta = wait_for_command_execution_output_delta(&mut mcp, &command_id).await?;
assert_eq!(delta.delta, expected_output);
assert_eq!(delta.delta, "hello from bang\n");
let completed = wait_for_command_execution_completed(&mut mcp, Some(&command_id)).await?;
let ThreadItem::CommandExecution {
@@ -112,7 +110,7 @@ async fn thread_shell_command_runs_as_standalone_turn_and_persists_history() ->
assert_eq!(id, &command_id);
assert_eq!(source, &CommandExecutionSource::UserShell);
assert_eq!(status, &CommandExecutionStatus::Completed);
assert_eq!(aggregated_output.as_deref(), Some(expected_output.as_str()));
assert_eq!(aggregated_output.as_deref(), Some("hello from bang\n"));
assert_eq!(*exit_code, Some(0));
timeout(
@@ -149,7 +147,7 @@ async fn thread_shell_command_runs_as_standalone_turn_and_persists_history() ->
};
assert_eq!(source, &CommandExecutionSource::UserShell);
assert_eq!(status, &CommandExecutionStatus::Completed);
assert_eq!(aggregated_output.as_deref(), Some(expected_output.as_str()));
assert_eq!(aggregated_output.as_deref(), Some("hello from bang\n"));
Ok(())
}
@@ -198,7 +196,6 @@ async fn thread_shell_command_uses_existing_active_turn() -> Result<()> {
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
let (shell_command, expected_output) = current_shell_output_command("active turn bang")?;
let turn_id = mcp
.send_turn_start_request(TurnStartParams {
@@ -243,7 +240,7 @@ async fn thread_shell_command_uses_existing_active_turn() -> Result<()> {
let shell_id = mcp
.send_thread_shell_command_request(ThreadShellCommandParams {
thread_id: thread.id.clone(),
command: shell_command,
command: "printf 'active turn bang\\n'".to_string(),
})
.await?;
let shell_resp: JSONRPCResponse = timeout(
@@ -272,7 +269,7 @@ async fn thread_shell_command_uses_existing_active_turn() -> Result<()> {
unreachable!("helper returns command execution item");
};
assert_eq!(source, &CommandExecutionSource::UserShell);
assert_eq!(aggregated_output.as_deref(), Some(expected_output.as_str()));
assert_eq!(aggregated_output.as_deref(), Some("active turn bang\n"));
mcp.send_response(
request_id,
@@ -312,7 +309,7 @@ async fn thread_shell_command_uses_existing_active_turn() -> Result<()> {
source: CommandExecutionSource::UserShell,
aggregated_output,
..
} if aggregated_output.as_deref() == Some(expected_output.as_str())
} if aggregated_output.as_deref() == Some("active turn bang\n")
)
}),
"expected active-turn shell command to be persisted on the existing turn"
@@ -321,24 +318,6 @@ async fn thread_shell_command_uses_existing_active_turn() -> Result<()> {
Ok(())
}
fn current_shell_output_command(text: &str) -> Result<(String, String)> {
let command_and_output = match default_user_shell().name() {
"powershell" => {
let escaped_text = text.replace('\'', "''");
(
format!("Write-Output '{escaped_text}'"),
format!("{text}\r\n"),
)
}
"cmd" => (format!("echo {text}"), format!("{text}\r\n")),
_ => {
let quoted_text = shlex::try_quote(text)?;
(format!("printf '%s\\n' {quoted_text}"), format!("{text}\n"))
}
};
Ok(command_and_output)
}
async fn wait_for_command_execution_started(
mcp: &mut McpProcess,
expected_id: Option<&str>,

View File

@@ -4,14 +4,12 @@ use app_test_support::McpProcess;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::McpServerStartupState;
use codex_app_server_protocol::McpServerStatusUpdatedNotification;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SandboxMode;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
@@ -19,7 +17,6 @@ use codex_app_server_protocol::ThreadStartedNotification;
use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadStatusChangedNotification;
use codex_core::config::set_project_trust_level;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_login::AuthCredentialsStoreMode;
use codex_login::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use codex_protocol::config_types::ServiceTier;
@@ -51,7 +48,7 @@ async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
create_config_toml(codex_home.path(), &server.uri())?;
// Start server and initialize.
let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -234,7 +231,7 @@ async fn thread_start_respects_project_config_from_cwd() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
create_config_toml(codex_home.path(), &server.uri())?;
let workspace = TempDir::new()?;
let project_config_dir = workspace.path().join(".codex");
@@ -275,7 +272,7 @@ async fn thread_start_accepts_flex_service_tier() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
create_config_toml(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -303,7 +300,7 @@ async fn thread_start_accepts_metrics_service_name() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
create_config_toml(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -330,7 +327,7 @@ async fn thread_start_accepts_metrics_service_name() -> Result<()> {
async fn thread_start_ephemeral_remains_pathless() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
create_config_toml(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -587,210 +584,16 @@ async fn thread_start_surfaces_cloud_requirements_load_errors() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn thread_start_with_elevated_sandbox_trusts_project_and_followup_loads_project_config()
-> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
let workspace = TempDir::new()?;
let project_config_dir = workspace.path().join(".codex");
std::fs::create_dir_all(&project_config_dir)?;
std::fs::write(
project_config_dir.join("config.toml"),
r#"
model_reasoning_effort = "high"
"#,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let first_request = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(workspace.path().display().to_string()),
sandbox: Some(SandboxMode::WorkspaceWrite),
..Default::default()
})
.await?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(first_request)),
)
.await??;
let second_request = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(workspace.path().display().to_string()),
..Default::default()
})
.await?;
let second_response: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(second_request)),
)
.await??;
let ThreadStartResponse {
approval_policy,
reasoning_effort,
..
} = to_response::<ThreadStartResponse>(second_response)?;
assert_eq!(approval_policy, AskForApproval::OnRequest);
assert_eq!(reasoning_effort, Some(ReasoningEffort::High));
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
let trusted_root = resolve_root_git_project_for_trust(workspace.path())
.unwrap_or_else(|| workspace.path().to_path_buf());
assert!(config_toml.contains(&trusted_root.display().to_string()));
assert!(config_toml.contains("trust_level = \"trusted\""));
Ok(())
}
#[tokio::test]
async fn thread_start_with_nested_git_cwd_trusts_repo_root() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
let repo_root = TempDir::new()?;
std::fs::create_dir(repo_root.path().join(".git"))?;
let nested = repo_root.path().join("nested/project");
std::fs::create_dir_all(&nested)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(nested.display().to_string()),
sandbox: Some(SandboxMode::WorkspaceWrite),
..Default::default()
})
.await?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
let trusted_root =
resolve_root_git_project_for_trust(&nested).expect("git root should resolve");
assert!(config_toml.contains(&trusted_root.display().to_string()));
assert!(!config_toml.contains(&nested.display().to_string()));
Ok(())
}
#[tokio::test]
async fn thread_start_with_read_only_sandbox_does_not_persist_project_trust() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
let workspace = TempDir::new()?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(workspace.path().display().to_string()),
..Default::default()
})
.await?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
assert!(!config_toml.contains("trust_level = \"trusted\""));
assert!(!config_toml.contains(&workspace.path().display().to_string()));
Ok(())
}
#[tokio::test]
async fn thread_start_skips_trust_write_when_project_is_already_trusted() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_without_approval_policy(codex_home.path(), &server.uri())?;
let workspace = TempDir::new()?;
let project_config_dir = workspace.path().join(".codex");
std::fs::create_dir_all(&project_config_dir)?;
std::fs::write(
project_config_dir.join("config.toml"),
r#"
model_reasoning_effort = "high"
"#,
)?;
set_project_trust_level(codex_home.path(), workspace.path(), TrustLevel::Trusted)?;
let config_before = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(workspace.path().display().to_string()),
sandbox: Some(SandboxMode::WorkspaceWrite),
..Default::default()
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let ThreadStartResponse {
approval_policy,
reasoning_effort,
..
} = to_response::<ThreadStartResponse>(response)?;
assert_eq!(approval_policy, AskForApproval::OnRequest);
assert_eq!(reasoning_effort, Some(ReasoningEffort::High));
let config_after = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
assert_eq!(config_after, config_before);
Ok(())
}
fn create_config_toml_without_approval_policy(
codex_home: &Path,
server_uri: &str,
) -> std::io::Result<()> {
create_config_toml_with_optional_approval_policy(
codex_home, server_uri, /*approval_policy*/ None,
)
}
fn create_config_toml_with_optional_approval_policy(
codex_home: &Path,
server_uri: &str,
approval_policy: Option<&str>,
) -> std::io::Result<()> {
// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
let approval_policy = approval_policy
.map(|policy| format!("approval_policy = \"{policy}\"\n"))
.unwrap_or_default();
std::fs::write(
config_toml,
format!(
r#"
model = "mock-model"
{approval_policy}sandbox_mode = "read-only"
approval_policy = "never"
sandbox_mode = "read-only"
model_provider = "mock_provider"

View File

@@ -2523,67 +2523,6 @@ async fn command_execution_notifications_include_process_id() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn turn_start_with_elevated_override_does_not_persist_project_trust() -> Result<()> {
let responses = vec![create_final_assistant_message_sse_response("Done")?];
let server = create_mock_responses_server_sequence_unchecked(responses).await;
let codex_home = TempDir::new()?;
create_config_toml(
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::from([(Feature::Personality, true)]),
)?;
let workspace = TempDir::new()?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let thread_request = mcp
.send_thread_start_request(ThreadStartParams {
cwd: Some(workspace.path().display().to_string()),
..Default::default()
})
.await?;
let thread_response: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(thread_request)),
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_response)?;
let turn_request = mcp
.send_turn_start_request(TurnStartParams {
thread_id: thread.id,
cwd: Some(workspace.path().to_path_buf()),
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
input: vec![V2UserInput::Text {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
..Default::default()
})
.await?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(turn_request)),
)
.await??;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
assert!(!config_toml.contains("trust_level = \"trusted\""));
assert!(!config_toml.contains(&workspace.path().display().to_string()));
Ok(())
}
// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
codex_home: &Path,

View File

@@ -41,11 +41,11 @@ use crate::state::TaskKind;
use crate::tasks::SessionTask;
use crate::tasks::SessionTaskContext;
use crate::tools::ToolRouter;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::ShellHandler;
use crate::tools::handlers::UnifiedExecHandler;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::router::ToolCallSource;
use crate::turn_diff_tracker::TurnDiffTracker;
@@ -119,10 +119,14 @@ use std::time::Duration as StdDuration;
#[path = "codex_tests_guardian.rs"]
mod guardian_tests;
use codex_protocol::models::function_call_output_content_items_to_text;
fn expect_text_tool_output(output: &FunctionToolOutput) -> String {
function_call_output_content_items_to_text(&output.body).unwrap_or_default()
fn expect_text_tool_output(output: &AnyToolResult) -> String {
let ResponseInputItem::FunctionCallOutput { output, .. } = output
.result
.to_response_item(&output.call_id, &output.payload)
else {
panic!("expected function call output");
};
output.body.to_text().unwrap_or_default()
}
struct InstructionsTestCase {
@@ -3119,6 +3123,7 @@ async fn spawn_task_turn_span_inherits_dispatch_trace_context() {
captured_trace: Arc<std::sync::Mutex<Option<W3cTraceContext>>>,
}
#[async_trait::async_trait]
impl SessionTask for TraceCaptureTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular
@@ -4374,6 +4379,7 @@ struct NeverEndingTask {
listen_to_cancellation_token: bool,
}
#[async_trait::async_trait]
impl SessionTask for NeverEndingTask {
fn kind(&self) -> TaskKind {
self.kind
@@ -5113,8 +5119,7 @@ async fn fatal_tool_error_stops_turn_and_reports_error() {
ToolCallSource::Direct,
)
.await
.err()
.expect("expected fatal error");
.expect_err("expected fatal error");
match err {
FunctionCallError::Fatal(message) => {

View File

@@ -8,7 +8,7 @@ use crate::exec::ExecParams;
use crate::exec_policy::ExecPolicyManager;
use crate::guardian::GUARDIAN_REVIEWER_NAME;
use crate::sandboxing::SandboxPermissions;
use crate::tools::context::FunctionToolOutput;
use crate::tools::registry::AnyToolResult;
use crate::turn_diff_tracker::TurnDiffTracker;
use codex_app_server_protocol::ConfigLayerSource;
use codex_exec_server::EnvironmentManager;
@@ -20,7 +20,6 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::NetworkPermissions;
use codex_protocol::models::PermissionProfile;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::function_call_output_content_items_to_text;
use codex_protocol::permissions::FileSystemSandboxPolicy;
use codex_protocol::permissions::NetworkSandboxPolicy;
use codex_protocol::protocol::AskForApproval;
@@ -40,8 +39,14 @@ use std::fs;
use std::sync::Arc;
use tempfile::tempdir;
fn expect_text_output(output: &FunctionToolOutput) -> String {
function_call_output_content_items_to_text(&output.body).unwrap_or_default()
fn expect_text_output(output: &AnyToolResult) -> String {
let ResponseInputItem::FunctionCallOutput { output, .. } = output
.result
.to_response_item(&output.call_id, &output.payload)
else {
panic!("expected function call output");
};
output.body.to_text().unwrap_or_default()
}
#[tokio::test]

View File

@@ -112,12 +112,10 @@ impl ProviderAuthScript {
fn new(tokens: &[&str]) -> std::io::Result<Self> {
let tempdir = tempfile::tempdir()?;
let tokens_file = tempdir.path().join("tokens.txt");
// `cmd.exe`'s `set /p` treats LF-only input as one line, so use CRLF on Windows.
let token_line_ending = if cfg!(windows) { "\r\n" } else { "\n" };
let mut token_file_contents = String::new();
for token in tokens {
token_file_contents.push_str(token);
token_file_contents.push_str(token_line_ending);
token_file_contents.push('\n');
}
std::fs::write(&tokens_file, token_file_contents)?;
@@ -144,28 +142,23 @@ mv tokens.next tokens.txt
#[cfg(windows)]
let (command, args) = {
let script_path = tempdir.path().join("print-token.cmd");
let script_path = tempdir.path().join("print-token.ps1");
std::fs::write(
&script_path,
r#"@echo off
setlocal EnableExtensions DisableDelayedExpansion
set "first_line="
<tokens.txt set /p "first_line="
if not defined first_line exit /b 1
setlocal EnableDelayedExpansion
echo(!first_line!
endlocal
more +1 tokens.txt > tokens.next
move /y tokens.next tokens.txt >nul
r#"$lines = @(Get-Content -Path tokens.txt)
if ($lines.Count -eq 0) { exit 1 }
Write-Output $lines[0]
$lines | Select-Object -Skip 1 | Set-Content -Path tokens.txt
"#,
)?;
(
"cmd.exe".to_string(),
"powershell".to_string(),
vec![
"/d".to_string(),
"/s".to_string(),
"/c".to_string(),
".\\print-token.cmd".to_string(),
"-NoProfile".to_string(),
"-ExecutionPolicy".to_string(),
"Bypass".to_string(),
"-File".to_string(),
".\\print-token.ps1".to_string(),
],
)
};
@@ -179,7 +172,7 @@ move /y tokens.next tokens.txt >nul
fn auth_config(&self) -> ModelProviderAuthInfo {
let timeout_ms = if cfg!(windows) {
// Process startup can be slow on loaded Windows CI workers.
// `powershell.exe` startup can be slow on loaded Windows CI workers
10_000
} else {
2_000

View File

@@ -18,7 +18,7 @@ use rmcp::model::RequestId;
use tokio::sync::oneshot;
use crate::codex::TurnContext;
use crate::tasks::AnySessionTask;
use crate::tasks::SessionTask;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::TokenUsage;
@@ -69,7 +69,7 @@ pub(crate) enum TaskKind {
pub(crate) struct RunningTask {
pub(crate) done: Arc<Notify>,
pub(crate) kind: TaskKind,
pub(crate) task: Arc<dyn AnySessionTask>,
pub(crate) task: Arc<dyn SessionTask>,
pub(crate) cancellation_token: CancellationToken,
pub(crate) handle: Arc<AbortOnDropHandle<()>>,
pub(crate) turn_context: Arc<TurnContext>,

View File

@@ -4,12 +4,14 @@ use super::SessionTask;
use super::SessionTaskContext;
use crate::codex::TurnContext;
use crate::state::TaskKind;
use async_trait::async_trait;
use codex_protocol::user_input::UserInput;
use tokio_util::sync::CancellationToken;
#[derive(Clone, Copy, Default)]
pub(crate) struct CompactTask;
#[async_trait]
impl SessionTask for CompactTask {
fn kind(&self) -> TaskKind {
TaskKind::Compact
@@ -28,14 +30,14 @@ impl SessionTask for CompactTask {
) -> Option<String> {
let session = session.clone_session();
let _ = if crate::compact::should_use_remote_compact_task(&ctx.provider) {
session.services.session_telemetry.counter(
let _ = session.services.session_telemetry.counter(
"codex.task.compact",
/*inc*/ 1,
&[("type", "remote")],
);
crate::compact_remote::run_remote_compact_task(session.clone(), ctx).await
} else {
session.services.session_telemetry.counter(
let _ = session.services.session_telemetry.counter(
"codex.task.compact",
/*inc*/ 1,
&[("type", "local")],

View File

@@ -2,6 +2,7 @@ use crate::codex::TurnContext;
use crate::state::TaskKind;
use crate::tasks::SessionTask;
use crate::tasks::SessionTaskContext;
use async_trait::async_trait;
use codex_git_utils::CreateGhostCommitOptions;
use codex_git_utils::GhostSnapshotReport;
use codex_git_utils::GitToolingError;
@@ -25,6 +26,7 @@ pub(crate) struct GhostSnapshotTask {
const SNAPSHOT_WARNING_THRESHOLD: Duration = Duration::from_secs(240);
#[async_trait]
impl SessionTask for GhostSnapshotTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular

View File

@@ -9,7 +9,7 @@ use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use futures::future::BoxFuture;
use async_trait::async_trait;
use tokio::select;
use tokio::sync::Notify;
use tokio_util::sync::CancellationToken;
@@ -126,6 +126,7 @@ impl SessionTaskContext {
/// intentionally small: implementers identify themselves via
/// [`SessionTask::kind`], perform their work in [`SessionTask::run`], and may
/// release resources in [`SessionTask::abort`].
#[async_trait]
pub(crate) trait SessionTask: Send + Sync + 'static {
/// Describes the type of work the task performs so the session can
/// surface it in telemetry and UI.
@@ -142,84 +143,21 @@ pub(crate) trait SessionTask: Send + Sync + 'static {
/// abort; implementers should watch for it and terminate quickly once it
/// fires. Returning [`Some`] yields a final message that
/// [`Session::on_task_finished`] will emit to the client.
fn run(
async fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
input: Vec<UserInput>,
cancellation_token: CancellationToken,
) -> impl std::future::Future<Output = Option<String>> + Send;
) -> Option<String>;
/// Gives the task a chance to perform cleanup after an abort.
///
/// The default implementation is a no-op; override this if additional
/// teardown or notifications are required once
/// [`Session::abort_all_tasks`] cancels the task.
fn abort(
&self,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
) -> impl std::future::Future<Output = ()> + Send {
async move {
let _ = (session, ctx);
}
}
}
pub(crate) trait AnySessionTask: Send + Sync + 'static {
fn kind(&self) -> TaskKind;
fn span_name(&self) -> &'static str;
fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
input: Vec<UserInput>,
cancellation_token: CancellationToken,
) -> BoxFuture<'static, Option<String>>;
fn abort<'a>(
&'a self,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
) -> BoxFuture<'a, ()>;
}
impl<T> AnySessionTask for T
where
T: SessionTask,
{
fn kind(&self) -> TaskKind {
SessionTask::kind(self)
}
fn span_name(&self) -> &'static str {
SessionTask::span_name(self)
}
fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
input: Vec<UserInput>,
cancellation_token: CancellationToken,
) -> BoxFuture<'static, Option<String>> {
Box::pin(SessionTask::run(
self,
session,
ctx,
input,
cancellation_token,
))
}
fn abort<'a>(
&'a self,
session: Arc<SessionTaskContext>,
ctx: Arc<TurnContext>,
) -> BoxFuture<'a, ()> {
Box::pin(SessionTask::abort(self, session, ctx))
async fn abort(&self, session: Arc<SessionTaskContext>, ctx: Arc<TurnContext>) {
let _ = (session, ctx);
}
}
@@ -241,7 +179,7 @@ impl Session {
input: Vec<UserInput>,
task: T,
) {
let task: Arc<dyn AnySessionTask> = Arc::new(task);
let task: Arc<dyn SessionTask> = Arc::new(task);
let task_kind = task.kind();
let span_name = task.span_name();
let started_at = Instant::now();

View File

@@ -1,5 +1,6 @@
use std::sync::Arc;
use async_trait::async_trait;
use tokio_util::sync::CancellationToken;
use crate::codex::TurnContext;
@@ -24,6 +25,7 @@ impl RegularTask {
}
}
#[async_trait]
impl SessionTask for RegularTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular

View File

@@ -1,6 +1,7 @@
use std::borrow::Cow;
use std::sync::Arc;
use async_trait::async_trait;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
@@ -47,6 +48,7 @@ impl ReviewTask {
}
}
#[async_trait]
impl SessionTask for ReviewTask {
fn kind(&self) -> TaskKind {
TaskKind::Review
@@ -63,7 +65,7 @@ impl SessionTask for ReviewTask {
input: Vec<UserInput>,
cancellation_token: CancellationToken,
) -> Option<String> {
session.session.services.session_telemetry.counter(
let _ = session.session.services.session_telemetry.counter(
"codex.task.review",
/*inc*/ 1,
&[],

View File

@@ -4,6 +4,7 @@ use crate::codex::TurnContext;
use crate::state::TaskKind;
use crate::tasks::SessionTask;
use crate::tasks::SessionTaskContext;
use async_trait::async_trait;
use codex_git_utils::RestoreGhostCommitOptions;
use codex_git_utils::restore_ghost_commit_with_options;
use codex_protocol::models::ResponseItem;
@@ -24,6 +25,7 @@ impl UndoTask {
}
}
#[async_trait]
impl SessionTask for UndoTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular
@@ -40,11 +42,11 @@ impl SessionTask for UndoTask {
_input: Vec<UserInput>,
cancellation_token: CancellationToken,
) -> Option<String> {
session
.session
.services
.session_telemetry
.counter("codex.task.undo", /*inc*/ 1, &[]);
let _ = session.session.services.session_telemetry.counter(
"codex.task.undo",
/*inc*/ 1,
&[],
);
let sess = session.clone_session();
sess.send_event(
ctx.as_ref(),

View File

@@ -1,6 +1,7 @@
use std::sync::Arc;
use std::time::Duration;
use async_trait::async_trait;
use codex_async_utils::CancelErr;
use codex_async_utils::OrCancelExt;
use codex_protocol::user_input::UserInput;
@@ -61,6 +62,7 @@ impl UserShellCommandTask {
}
}
#[async_trait]
impl SessionTask for UserShellCommandTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular

View File

@@ -2,8 +2,10 @@ use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use futures::future::BoxFuture;
use super::ExecContext;
use super::PUBLIC_TOOL_NAME;
@@ -52,8 +54,6 @@ impl CodeModeExecuteHandler {
}
impl ToolHandler for CodeModeExecuteHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -62,23 +62,35 @@ impl ToolHandler for CodeModeExecuteHandler {
matches!(payload, ToolPayload::Custom { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
match payload {
ToolPayload::Custom { input } if tool_name == PUBLIC_TOOL_NAME => {
self.execute(session, turn, call_id, input).await
}
_ => Err(FunctionCallError::RespondToModel(format!(
"{PUBLIC_TOOL_NAME} expects raw JavaScript source text"
))),
}
let result = match payload {
ToolPayload::Custom { input } if tool_name == PUBLIC_TOOL_NAME => {
self.execute(session, turn, call_id.clone(), input).await
}
_ => Err(FunctionCallError::RespondToModel(format!(
"{PUBLIC_TOOL_NAME} expects raw JavaScript source text"
))),
}?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -1,9 +1,10 @@
use futures::future::BoxFuture;
use serde::Deserialize;
use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
@@ -39,44 +40,55 @@ where
}
impl ToolHandler for CodeModeWaitHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
match payload {
ToolPayload::Function { arguments } if tool_name == WAIT_TOOL_NAME => {
let args: ExecWaitArgs = parse_arguments(&arguments)?;
let exec = ExecContext { session, turn };
let started_at = std::time::Instant::now();
let response = exec
.session
.services
.code_mode_service
.wait(codex_code_mode::WaitRequest {
cell_id: args.cell_id,
yield_time_ms: args.yield_time_ms,
terminate: args.terminate,
})
.await
.map_err(FunctionCallError::RespondToModel)?;
handle_runtime_response(&exec, response, args.max_tokens, started_at)
.await
.map_err(FunctionCallError::RespondToModel)
}
_ => Err(FunctionCallError::RespondToModel(format!(
"{WAIT_TOOL_NAME} expects JSON arguments"
))),
}
let result = match payload {
ToolPayload::Function { arguments } if tool_name == WAIT_TOOL_NAME => {
let args: ExecWaitArgs = parse_arguments(&arguments)?;
let exec = ExecContext { session, turn };
let started_at = std::time::Instant::now();
let response = exec
.session
.services
.code_mode_service
.wait(codex_code_mode::WaitRequest {
cell_id: args.cell_id,
yield_time_ms: args.yield_time_ms,
terminate: args.terminate,
})
.await
.map_err(FunctionCallError::RespondToModel)?;
handle_runtime_response(&exec, response, args.max_tokens, started_at)
.await
.map_err(FunctionCallError::RespondToModel)
}
_ => Err(FunctionCallError::RespondToModel(format!(
"{WAIT_TOOL_NAME} expects JSON arguments"
))),
}?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -11,6 +11,7 @@ use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::multi_agents::build_agent_spawn_config;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::ThreadId;
@@ -19,6 +20,7 @@ use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use codex_protocol::user_input::UserInput;
use futures::StreamExt;
use futures::future::BoxFuture;
use futures::stream::FuturesUnordered;
use serde::Deserialize;
use serde::Serialize;
@@ -178,8 +180,6 @@ impl JobProgressEmitter {
}
impl ToolHandler for BatchJobHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -188,31 +188,48 @@ impl ToolHandler for BatchJobHandler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"agent jobs handler received unsupported payload".to_string(),
));
}
};
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"agent jobs handler received unsupported payload".to_string(),
));
}
};
match tool_name.as_str() {
"spawn_agents_on_csv" => spawn_agents_on_csv::handle(session, turn, arguments).await,
"report_agent_job_result" => report_agent_job_result::handle(session, arguments).await,
other => Err(FunctionCallError::RespondToModel(format!(
"unsupported agent job tool {other}"
))),
}
let result = match tool_name.as_str() {
"spawn_agents_on_csv" => {
spawn_agents_on_csv::handle(session, turn, arguments).await
}
"report_agent_job_result" => {
report_agent_job_result::handle(session, arguments).await
}
other => Err(FunctionCallError::RespondToModel(format!(
"unsupported agent job tool {other}"
))),
}?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -16,6 +16,7 @@ use crate::tools::events::ToolEventCtx;
use crate::tools::handlers::apply_granted_turn_permissions;
use crate::tools::handlers::parse_arguments;
use crate::tools::orchestrator::ToolOrchestrator;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use crate::tools::runtimes::apply_patch::ApplyPatchRequest;
@@ -30,6 +31,7 @@ use codex_sandboxing::policy_transforms::merge_permission_profiles;
use codex_sandboxing::policy_transforms::normalize_additional_permissions;
use codex_tools::ApplyPatchToolArgs;
use codex_utils_absolute_path::AbsolutePathBuf;
use futures::future::BoxFuture;
use std::collections::BTreeSet;
use std::sync::Arc;
@@ -122,8 +124,6 @@ async fn effective_patch_permissions(
}
impl ToolHandler for ApplyPatchHandler {
type Output = ApplyPatchToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -135,119 +135,137 @@ impl ToolHandler for ApplyPatchHandler {
)
}
async fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
true
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
let patch_input = match payload {
ToolPayload::Function { arguments } => {
let args: ApplyPatchToolArgs = parse_arguments(&arguments)?;
args.input
}
ToolPayload::Custom { input } => input,
_ => {
return Err(FunctionCallError::RespondToModel(
"apply_patch handler received unsupported payload".to_string(),
));
}
};
let patch_input = match payload {
ToolPayload::Function { arguments } => {
let args: ApplyPatchToolArgs = parse_arguments(&arguments)?;
args.input
}
ToolPayload::Custom { input } => input,
_ => {
return Err(FunctionCallError::RespondToModel(
"apply_patch handler received unsupported payload".to_string(),
));
}
};
// Re-parse and verify the patch so we can compute changes and approval.
// Avoid building temporary ExecParams/command vectors; derive directly from inputs.
let cwd = turn.cwd.clone();
let command = vec!["apply_patch".to_string(), patch_input.clone()];
match codex_apply_patch::maybe_parse_apply_patch_verified(&command, &cwd) {
codex_apply_patch::MaybeApplyPatchVerified::Body(changes) => {
let (file_paths, effective_additional_permissions, file_system_sandbox_policy) =
effective_patch_permissions(session.as_ref(), turn.as_ref(), &changes).await;
match apply_patch::apply_patch(turn.as_ref(), &file_system_sandbox_policy, changes)
// Re-parse and verify the patch so we can compute changes and approval.
// Avoid building temporary ExecParams/command vectors; derive directly from inputs.
let cwd = turn.cwd.clone();
let command = vec!["apply_patch".to_string(), patch_input.clone()];
match codex_apply_patch::maybe_parse_apply_patch_verified(&command, &cwd) {
codex_apply_patch::MaybeApplyPatchVerified::Body(changes) => {
let (file_paths, effective_additional_permissions, file_system_sandbox_policy) =
effective_patch_permissions(session.as_ref(), turn.as_ref(), &changes).await;
match apply_patch::apply_patch(
turn.as_ref(),
&file_system_sandbox_policy,
changes,
)
.await
{
InternalApplyPatchInvocation::Output(item) => {
let content = item?;
Ok(ApplyPatchToolOutput::from_text(content))
}
InternalApplyPatchInvocation::DelegateToExec(apply) => {
let changes = convert_apply_patch_to_protocol(&apply.action);
let emitter =
ToolEmitter::apply_patch(changes.clone(), apply.auto_approved);
let event_ctx = ToolEventCtx::new(
session.as_ref(),
turn.as_ref(),
&call_id,
Some(&tracker),
);
emitter.begin(event_ctx).await;
let req = ApplyPatchRequest {
action: apply.action,
file_paths,
changes,
exec_approval_requirement: apply.exec_approval_requirement,
additional_permissions: effective_additional_permissions
.additional_permissions,
permissions_preapproved: effective_additional_permissions
.permissions_preapproved,
timeout_ms: None,
};
let mut orchestrator = ToolOrchestrator::new();
let mut runtime = ApplyPatchRuntime::new();
let tool_ctx = ToolCtx {
session: session.clone(),
turn: turn.clone(),
call_id: call_id.clone(),
tool_name: tool_name.to_string(),
};
let out = orchestrator
.run(
&mut runtime,
&req,
&tool_ctx,
{
InternalApplyPatchInvocation::Output(item) => {
let content = item?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ApplyPatchToolOutput::from_text(content)),
})
}
InternalApplyPatchInvocation::DelegateToExec(apply) => {
let changes = convert_apply_patch_to_protocol(&apply.action);
let emitter =
ToolEmitter::apply_patch(changes.clone(), apply.auto_approved);
let event_ctx = ToolEventCtx::new(
session.as_ref(),
turn.as_ref(),
turn.approval_policy.value(),
)
.await
.map(|result| result.output);
let event_ctx = ToolEventCtx::new(
session.as_ref(),
turn.as_ref(),
&call_id,
Some(&tracker),
);
let content = emitter.finish(event_ctx, out).await?;
Ok(ApplyPatchToolOutput::from_text(content))
&call_id,
Some(&tracker),
);
emitter.begin(event_ctx).await;
let req = ApplyPatchRequest {
action: apply.action,
file_paths,
changes,
exec_approval_requirement: apply.exec_approval_requirement,
additional_permissions: effective_additional_permissions
.additional_permissions,
permissions_preapproved: effective_additional_permissions
.permissions_preapproved,
timeout_ms: None,
};
let mut orchestrator = ToolOrchestrator::new();
let mut runtime = ApplyPatchRuntime::new();
let tool_ctx = ToolCtx {
session: session.clone(),
turn: turn.clone(),
call_id: call_id.clone(),
tool_name: tool_name.to_string(),
};
let out = orchestrator
.run(
&mut runtime,
&req,
&tool_ctx,
turn.as_ref(),
turn.approval_policy.value(),
)
.await
.map(|result| result.output);
let event_ctx = ToolEventCtx::new(
session.as_ref(),
turn.as_ref(),
&call_id,
Some(&tracker),
);
let content = emitter.finish(event_ctx, out).await?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ApplyPatchToolOutput::from_text(content)),
})
}
}
}
codex_apply_patch::MaybeApplyPatchVerified::CorrectnessError(parse_error) => {
Err(FunctionCallError::RespondToModel(format!(
"apply_patch verification failed: {parse_error}"
)))
}
codex_apply_patch::MaybeApplyPatchVerified::ShellParseError(error) => {
tracing::trace!("Failed to parse apply_patch input, {error:?}");
Err(FunctionCallError::RespondToModel(
"apply_patch handler received invalid patch input".to_string(),
))
}
codex_apply_patch::MaybeApplyPatchVerified::NotApplyPatch => {
Err(FunctionCallError::RespondToModel(
"apply_patch handler received non-apply_patch input".to_string(),
))
}
}
codex_apply_patch::MaybeApplyPatchVerified::CorrectnessError(parse_error) => {
Err(FunctionCallError::RespondToModel(format!(
"apply_patch verification failed: {parse_error}"
)))
}
codex_apply_patch::MaybeApplyPatchVerified::ShellParseError(error) => {
tracing::trace!("Failed to parse apply_patch input, {error:?}");
Err(FunctionCallError::RespondToModel(
"apply_patch handler received invalid patch input".to_string(),
))
}
codex_apply_patch::MaybeApplyPatchVerified::NotApplyPatch => {
Err(FunctionCallError::RespondToModel(
"apply_patch handler received non-apply_patch input".to_string(),
))
}
}
})
}
}

View File

@@ -5,6 +5,7 @@ use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::dynamic_tools::DynamicToolCallRequest;
@@ -12,6 +13,7 @@ use codex_protocol::dynamic_tools::DynamicToolResponse;
use codex_protocol::models::FunctionCallOutputContentItem;
use codex_protocol::protocol::DynamicToolCallResponseEvent;
use codex_protocol::protocol::EventMsg;
use futures::future::BoxFuture;
use serde_json::Value;
use std::time::Instant;
use tokio::sync::oneshot;
@@ -20,53 +22,63 @@ use tracing::warn;
pub struct DynamicToolHandler;
impl ToolHandler for DynamicToolHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
true
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"dynamic tool handler received unsupported payload".to_string(),
));
}
};
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"dynamic tool handler received unsupported payload".to_string(),
));
}
};
let args: Value = parse_arguments(&arguments)?;
let response = request_dynamic_tool(&session, turn.as_ref(), call_id, tool_name, args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(
"dynamic tool call was cancelled before receiving a response".to_string(),
)
})?;
let args: Value = parse_arguments(&arguments)?;
let response =
request_dynamic_tool(&session, turn.as_ref(), call_id.clone(), tool_name, args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(
"dynamic tool call was cancelled before receiving a response"
.to_string(),
)
})?;
let DynamicToolResponse {
content_items,
success,
} = response;
let body = content_items
.into_iter()
.map(FunctionCallOutputContentItem::from)
.collect::<Vec<_>>();
Ok(FunctionToolOutput::from_content(body, Some(success)))
let DynamicToolResponse {
content_items,
success,
} = response;
let body = content_items
.into_iter()
.map(FunctionCallOutputContentItem::from)
.collect::<Vec<_>>();
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_content(body, Some(success))),
})
})
}
}

View File

@@ -1,3 +1,4 @@
use futures::future::BoxFuture;
use serde_json::Value as JsonValue;
use std::sync::Arc;
use std::time::Duration;
@@ -16,6 +17,7 @@ use crate::tools::events::ToolEventStage;
use crate::tools::handlers::parse_arguments;
use crate::tools::js_repl::JS_REPL_PRAGMA_PREFIX;
use crate::tools::js_repl::JsReplArgs;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_features::Feature;
@@ -92,8 +94,6 @@ async fn emit_js_repl_exec_end(
emitter.emit(ctx, stage).await;
}
impl ToolHandler for JsReplHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -105,100 +105,114 @@ impl ToolHandler for JsReplHandler {
)
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tracker,
payload,
call_id,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
tracker,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
if !session.features().enabled(Feature::JsRepl) {
return Err(FunctionCallError::RespondToModel(
"js_repl is disabled by feature flag".to_string(),
));
}
let args = match payload {
ToolPayload::Function { arguments } => parse_arguments(&arguments)?,
ToolPayload::Custom { input } => parse_freeform_args(&input)?,
_ => {
if !session.features().enabled(Feature::JsRepl) {
return Err(FunctionCallError::RespondToModel(
"js_repl expects custom or function payload".to_string(),
"js_repl is disabled by feature flag".to_string(),
));
}
};
let manager = turn.js_repl.manager().await?;
let started_at = Instant::now();
emit_js_repl_exec_begin(session.as_ref(), turn.as_ref(), &call_id).await;
let result = manager
.execute(Arc::clone(&session), Arc::clone(&turn), tracker, args)
.await;
let result = match result {
Ok(result) => result,
Err(err) => {
let message = err.to_string();
emit_js_repl_exec_end(
session.as_ref(),
turn.as_ref(),
&call_id,
"",
Some(&message),
started_at.elapsed(),
)
let args = match payload {
ToolPayload::Function { arguments } => parse_arguments(&arguments)?,
ToolPayload::Custom { input } => parse_freeform_args(&input)?,
_ => {
return Err(FunctionCallError::RespondToModel(
"js_repl expects custom or function payload".to_string(),
));
}
};
let manager = turn.js_repl.manager().await?;
let started_at = Instant::now();
emit_js_repl_exec_begin(session.as_ref(), turn.as_ref(), &call_id).await;
let result = manager
.execute(Arc::clone(&session), Arc::clone(&turn), tracker, args)
.await;
return Err(err);
let result = match result {
Ok(result) => result,
Err(err) => {
let message = err.to_string();
emit_js_repl_exec_end(
session.as_ref(),
turn.as_ref(),
&call_id,
"",
Some(&message),
started_at.elapsed(),
)
.await;
return Err(err);
}
};
let content = result.output;
let mut items = Vec::with_capacity(result.content_items.len() + 1);
if !content.is_empty() {
items.push(FunctionCallOutputContentItem::InputText {
text: content.clone(),
});
}
};
items.extend(result.content_items);
let content = result.output;
let mut items = Vec::with_capacity(result.content_items.len() + 1);
if !content.is_empty() {
items.push(FunctionCallOutputContentItem::InputText {
text: content.clone(),
});
}
items.extend(result.content_items);
emit_js_repl_exec_end(
session.as_ref(),
turn.as_ref(),
&call_id,
&content,
/*error*/ None,
started_at.elapsed(),
)
.await;
emit_js_repl_exec_end(
session.as_ref(),
turn.as_ref(),
&call_id,
&content,
/*error*/ None,
started_at.elapsed(),
)
.await;
if items.is_empty() {
Ok(FunctionToolOutput::from_text(content, Some(true)))
} else {
Ok(FunctionToolOutput::from_content(items, Some(true)))
}
let output = if items.is_empty() {
FunctionToolOutput::from_text(content, Some(true))
} else {
FunctionToolOutput::from_content(items, Some(true))
};
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(output),
})
})
}
}
impl ToolHandler for JsReplResetHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
if !invocation.session.features().enabled(Feature::JsRepl) {
return Err(FunctionCallError::RespondToModel(
"js_repl is disabled by feature flag".to_string(),
));
}
let manager = invocation.turn.js_repl.manager().await?;
manager.reset().await?;
Ok(FunctionToolOutput::from_text(
"js_repl kernel reset".to_string(),
Some(true),
))
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
if !invocation.session.features().enabled(Feature::JsRepl) {
return Err(FunctionCallError::RespondToModel(
"js_repl is disabled by feature flag".to_string(),
));
}
let manager = invocation.turn.js_repl.manager().await?;
manager.reset().await?;
Ok(AnyToolResult::new(
&invocation,
FunctionToolOutput::from_text("js_repl kernel reset".to_string(), Some(true)),
))
})
}
}

View File

@@ -5,6 +5,7 @@ use std::path::Path;
use std::path::PathBuf;
use codex_utils_string::take_bytes_at_char_boundary;
use futures::future::BoxFuture;
use serde::Deserialize;
use tokio::fs;
@@ -13,6 +14,7 @@ use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
@@ -45,63 +47,73 @@ struct ListDirArgs {
}
impl ToolHandler for ListDirHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation { payload, .. } = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
call_id, payload, ..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"list_dir handler received unsupported payload".to_string(),
));
}
};
let args: ListDirArgs = parse_arguments(&arguments)?;
let ListDirArgs {
dir_path,
offset,
limit,
depth,
} = args;
if offset == 0 {
return Err(FunctionCallError::RespondToModel(
"list_dir handler received unsupported payload".to_string(),
"offset must be a 1-indexed entry number".to_string(),
));
}
};
let args: ListDirArgs = parse_arguments(&arguments)?;
if limit == 0 {
return Err(FunctionCallError::RespondToModel(
"limit must be greater than zero".to_string(),
));
}
let ListDirArgs {
dir_path,
offset,
limit,
depth,
} = args;
if depth == 0 {
return Err(FunctionCallError::RespondToModel(
"depth must be greater than zero".to_string(),
));
}
if offset == 0 {
return Err(FunctionCallError::RespondToModel(
"offset must be a 1-indexed entry number".to_string(),
));
}
let path = PathBuf::from(&dir_path);
if !path.is_absolute() {
return Err(FunctionCallError::RespondToModel(
"dir_path must be an absolute path".to_string(),
));
}
if limit == 0 {
return Err(FunctionCallError::RespondToModel(
"limit must be greater than zero".to_string(),
));
}
if depth == 0 {
return Err(FunctionCallError::RespondToModel(
"depth must be greater than zero".to_string(),
));
}
let path = PathBuf::from(&dir_path);
if !path.is_absolute() {
return Err(FunctionCallError::RespondToModel(
"dir_path must be an absolute path".to_string(),
));
}
let entries = list_dir_slice(&path, offset, limit, depth).await?;
let mut output = Vec::with_capacity(entries.len() + 1);
output.push(format!("Absolute path: {}", path.display()));
output.extend(entries);
Ok(FunctionToolOutput::from_text(output.join("\n"), Some(true)))
let entries = list_dir_slice(&path, offset, limit, depth).await?;
let mut output = Vec::with_capacity(entries.len() + 1);
output.push(format!("Absolute path: {}", path.display()));
output.extend(entries);
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_text(output.join("\n"), Some(true))),
})
})
}
}

View File

@@ -1,56 +1,66 @@
use futures::future::BoxFuture;
use std::sync::Arc;
use crate::function_tool::FunctionCallError;
use crate::mcp_tool_call::handle_mcp_tool_call;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::mcp::CallToolResult;
pub struct McpHandler;
impl ToolHandler for McpHandler {
type Output = CallToolResult;
impl ToolHandler for McpHandler {
fn kind(&self) -> ToolKind {
ToolKind::Mcp
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
let payload = match payload {
ToolPayload::Mcp {
let payload_for_result = payload.clone();
let payload = match payload {
ToolPayload::Mcp {
server,
tool,
raw_arguments,
} => (server, tool, raw_arguments),
_ => {
return Err(FunctionCallError::RespondToModel(
"mcp handler received unsupported payload".to_string(),
));
}
};
let (server, tool, raw_arguments) = payload;
let arguments_str = raw_arguments;
let output = handle_mcp_tool_call(
Arc::clone(&session),
&turn,
call_id.clone(),
server,
tool,
raw_arguments,
} => (server, tool, raw_arguments),
_ => {
return Err(FunctionCallError::RespondToModel(
"mcp handler received unsupported payload".to_string(),
));
}
};
arguments_str,
)
.await;
let (server, tool, raw_arguments) = payload;
let arguments_str = raw_arguments;
let output = handle_mcp_tool_call(
Arc::clone(&session),
&turn,
call_id.clone(),
server,
tool,
arguments_str,
)
.await;
Ok(output)
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(output),
})
})
}
}

View File

@@ -23,12 +23,14 @@ use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::McpInvocation;
use codex_protocol::protocol::McpToolCallBeginEvent;
use codex_protocol::protocol::McpToolCallEndEvent;
use futures::future::BoxFuture;
pub struct McpResourceHandler;
@@ -178,65 +180,75 @@ struct ReadResourcePayload {
}
impl ToolHandler for McpResourceHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"mcp_resource handler received unsupported payload".to_string(),
));
}
};
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"mcp_resource handler received unsupported payload".to_string(),
));
}
};
let arguments_value = parse_arguments(arguments.as_str())?;
let arguments_value = parse_arguments(arguments.as_str())?;
match tool_name.as_str() {
"list_mcp_resources" => {
handle_list_resources(
Arc::clone(&session),
Arc::clone(&turn),
call_id.clone(),
arguments_value.clone(),
)
.await
}
"list_mcp_resource_templates" => {
handle_list_resource_templates(
Arc::clone(&session),
Arc::clone(&turn),
call_id.clone(),
arguments_value.clone(),
)
.await
}
"read_mcp_resource" => {
handle_read_resource(
Arc::clone(&session),
Arc::clone(&turn),
call_id,
arguments_value,
)
.await
}
other => Err(FunctionCallError::RespondToModel(format!(
"unsupported MCP resource tool: {other}"
))),
}
let result = match tool_name.as_str() {
"list_mcp_resources" => {
handle_list_resources(
Arc::clone(&session),
Arc::clone(&turn),
call_id.clone(),
arguments_value.clone(),
)
.await
}
"list_mcp_resource_templates" => {
handle_list_resource_templates(
Arc::clone(&session),
Arc::clone(&turn),
call_id.clone(),
arguments_value.clone(),
)
.await
}
"read_mcp_resource" => {
handle_read_resource(
Arc::clone(&session),
Arc::clone(&turn),
call_id.clone(),
arguments_value,
)
.await
}
other => Err(FunctionCallError::RespondToModel(format!(
"unsupported MCP resource tool: {other}"
))),
}?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -15,6 +15,7 @@ use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
pub(crate) use crate::tools::handlers::multi_agents_common::*;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::ThreadId;
@@ -32,6 +33,7 @@ use codex_protocol::protocol::CollabResumeEndEvent;
use codex_protocol::protocol::CollabWaitingBeginEvent;
use codex_protocol::protocol::CollabWaitingEndEvent;
use codex_protocol::user_input::UserInput;
use futures::future::BoxFuture;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;

View File

@@ -3,8 +3,6 @@ use super::*;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = CloseAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -13,84 +11,94 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: CloseAgentArgs = parse_arguments(&arguments)?;
let agent_id = parse_agent_id_target(&args.target)?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(agent_id)
.unwrap_or_default();
session
.send_event(
&turn,
CollabCloseBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: CloseAgentArgs = parse_arguments(&arguments)?;
let agent_id = parse_agent_id_target(&args.target)?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(agent_id)
.unwrap_or_default();
session
.send_event(
&turn,
CollabCloseBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
}
.into(),
)
.await;
let status = match session
.services
.agent_control
.subscribe_status(agent_id)
.await
{
Ok(mut status_rx) => status_rx.borrow_and_update().clone(),
Err(err) => {
let status = session.services.agent_control.get_status(agent_id).await;
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
status,
}
.into(),
)
.await;
return Err(collab_agent_error(agent_id, err));
}
.into(),
)
.await;
let status = match session
.services
.agent_control
.subscribe_status(agent_id)
.await
{
Ok(mut status_rx) => status_rx.borrow_and_update().clone(),
Err(err) => {
let status = session.services.agent_control.get_status(agent_id).await;
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
status,
}
.into(),
)
.await;
return Err(collab_agent_error(agent_id, err));
}
};
let result = session
.services
.agent_control
.close_agent(agent_id)
.await
.map_err(|err| collab_agent_error(agent_id, err))
.map(|_| ());
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id,
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
}
.into(),
)
.await;
result?;
};
let result = session
.services
.agent_control
.close_agent(agent_id)
.await
.map_err(|err| collab_agent_error(agent_id, err))
.map(|_| ());
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
}
.into(),
)
.await;
result?;
Ok(CloseAgentResult {
previous_status: status,
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(CloseAgentResult {
previous_status: status,
}),
})
})
}
}

View File

@@ -5,8 +5,6 @@ use std::sync::Arc;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = ResumeAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -15,102 +13,114 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: ResumeAgentArgs = parse_arguments(&arguments)?;
let receiver_thread_id = ThreadId::from_string(&args.id).map_err(|err| {
FunctionCallError::RespondToModel(format!("invalid agent id {}: {err:?}", args.id))
})?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(receiver_thread_id)
.unwrap_or_default();
let child_depth = next_thread_spawn_depth(&turn.session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: ResumeAgentArgs = parse_arguments(&arguments)?;
let receiver_thread_id = ThreadId::from_string(&args.id).map_err(|err| {
FunctionCallError::RespondToModel(format!("invalid agent id {}: {err:?}", args.id))
})?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(receiver_thread_id)
.unwrap_or_default();
let child_depth = next_thread_spawn_depth(&turn.session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
session
.send_event(
&turn,
CollabResumeBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
}
.into(),
)
.await;
session
.send_event(
&turn,
CollabResumeBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
}
.into(),
)
.await;
let mut status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
let (receiver_agent, error) = if matches!(status, AgentStatus::NotFound) {
match try_resume_closed_agent(&session, &turn, receiver_thread_id, child_depth).await {
Ok(()) => {
status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
(
session
let mut status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
let (receiver_agent, error) = if matches!(status, AgentStatus::NotFound) {
match try_resume_closed_agent(&session, &turn, receiver_thread_id, child_depth)
.await
{
Ok(()) => {
status = session
.services
.agent_control
.get_agent_metadata(receiver_thread_id)
.unwrap_or(receiver_agent),
None,
)
}
Err(err) => {
status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
(receiver_agent, Some(err))
.get_status(receiver_thread_id)
.await;
(
session
.services
.agent_control
.get_agent_metadata(receiver_thread_id)
.unwrap_or(receiver_agent),
None,
)
}
Err(err) => {
status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
(receiver_agent, Some(err))
}
}
} else {
(receiver_agent, None)
};
session
.send_event(
&turn,
CollabResumeEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
}
.into(),
)
.await;
if let Some(err) = error {
return Err(err);
}
} else {
(receiver_agent, None)
};
session
.send_event(
&turn,
CollabResumeEndEvent {
call_id,
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
}
.into(),
)
.await;
turn.session_telemetry
.counter("codex.multi_agent.resume", /*inc*/ 1, &[]);
if let Some(err) = error {
return Err(err);
}
turn.session_telemetry
.counter("codex.multi_agent.resume", /*inc*/ 1, &[]);
Ok(ResumeAgentResult { status })
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ResumeAgentResult { status }),
})
})
}
}

View File

@@ -4,8 +4,6 @@ use crate::agent::control::render_input_preview;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = SendInputResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -14,72 +12,82 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: SendInputArgs = parse_arguments(&arguments)?;
let receiver_thread_id = parse_agent_id_target(&args.target)?;
let input_items = parse_collab_input(args.message, args.items)?;
let prompt = render_input_preview(&input_items);
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(receiver_thread_id)
.unwrap_or_default();
if args.interrupt {
session
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: SendInputArgs = parse_arguments(&arguments)?;
let receiver_thread_id = parse_agent_id_target(&args.target)?;
let input_items = parse_collab_input(args.message, args.items)?;
let prompt = render_input_preview(&input_items);
let receiver_agent = session
.services
.agent_control
.interrupt_agent(receiver_thread_id)
.get_agent_metadata(receiver_thread_id)
.unwrap_or_default();
if args.interrupt {
session
.services
.agent_control
.interrupt_agent(receiver_thread_id)
.await
.map_err(|err| collab_agent_error(receiver_thread_id, err))?;
}
session
.send_event(
&turn,
CollabAgentInteractionBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
prompt: prompt.clone(),
}
.into(),
)
.await;
let agent_control = session.services.agent_control.clone();
let result = agent_control
.send_input(receiver_thread_id, input_items)
.await
.map_err(|err| collab_agent_error(receiver_thread_id, err))?;
}
session
.send_event(
&turn,
CollabAgentInteractionBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
prompt: prompt.clone(),
}
.into(),
)
.await;
let agent_control = session.services.agent_control.clone();
let result = agent_control
.send_input(receiver_thread_id, input_items)
.await
.map_err(|err| collab_agent_error(receiver_thread_id, err));
let status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
session
.send_event(
&turn,
CollabAgentInteractionEndEvent {
call_id,
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
prompt,
status,
}
.into(),
)
.await;
let submission_id = result?;
.map_err(|err| collab_agent_error(receiver_thread_id, err));
let status = session
.services
.agent_control
.get_status(receiver_thread_id)
.await;
session
.send_event(
&turn,
CollabAgentInteractionEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
prompt,
status,
}
.into(),
)
.await;
let submission_id = result?;
Ok(SendInputResult { submission_id })
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(SendInputResult { submission_id }),
})
})
}
}

View File

@@ -11,8 +11,6 @@ use crate::agent::next_thread_spawn_depth;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = SpawnAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -21,149 +19,159 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: SpawnAgentArgs = parse_arguments(&arguments)?;
let role_name = args
.agent_type
.as_deref()
.map(str::trim)
.filter(|role| !role.is_empty());
let input_items = parse_collab_input(args.message, args.items)?;
let prompt = render_input_preview(&input_items);
let session_source = turn.session_source.clone();
let child_depth = next_thread_spawn_depth(&session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
session
.send_event(
&turn,
CollabAgentSpawnBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
prompt: prompt.clone(),
model: args.model.clone().unwrap_or_default(),
reasoning_effort: args.reasoning_effort.unwrap_or_default(),
}
.into(),
)
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);
let result = session
.services
.agent_control
.spawn_agent_with_metadata(
config,
input_items,
Some(thread_spawn_source(
session.conversation_id,
&turn.session_source,
child_depth,
role_name,
/*task_name*/ None,
)?),
SpawnAgentOptions {
fork_parent_spawn_call_id: args.fork_context.then(|| call_id.clone()),
fork_mode: args.fork_context.then_some(SpawnAgentForkMode::FullHistory),
},
)
.await
.map_err(collab_spawn_error);
let (new_thread_id, new_agent_metadata, status) = match &result {
Ok(spawned_agent) => (
Some(spawned_agent.thread_id),
Some(spawned_agent.metadata.clone()),
spawned_agent.status.clone(),
),
Err(_) => (None, None, AgentStatus::NotFound),
};
let agent_snapshot = match new_thread_id {
Some(thread_id) => {
session
.services
.agent_control
.get_agent_config_snapshot(thread_id)
.await
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: SpawnAgentArgs = parse_arguments(&arguments)?;
let role_name = args
.agent_type
.as_deref()
.map(str::trim)
.filter(|role| !role.is_empty());
let input_items = parse_collab_input(args.message, args.items)?;
let prompt = render_input_preview(&input_items);
let session_source = turn.session_source.clone();
let child_depth = next_thread_spawn_depth(&session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
None => None,
};
let (_new_agent_path, new_agent_nickname, new_agent_role) =
match (&agent_snapshot, new_agent_metadata) {
(Some(snapshot), _) => (
snapshot.session_source.get_agent_path().map(String::from),
snapshot.session_source.get_nickname(),
snapshot.session_source.get_agent_role(),
),
(None, Some(metadata)) => (
metadata.agent_path.map(String::from),
metadata.agent_nickname,
metadata.agent_role,
),
(None, None) => (None, None, None),
};
let effective_model = agent_snapshot
.as_ref()
.map(|snapshot| snapshot.model.clone())
.unwrap_or_else(|| args.model.clone().unwrap_or_default());
let effective_reasoning_effort = agent_snapshot
.as_ref()
.and_then(|snapshot| snapshot.reasoning_effort)
.unwrap_or(args.reasoning_effort.unwrap_or_default());
let nickname = new_agent_nickname.clone();
session
.send_event(
&turn,
CollabAgentSpawnEndEvent {
call_id,
sender_thread_id: session.conversation_id,
new_thread_id,
new_agent_nickname,
new_agent_role,
prompt,
model: effective_model,
reasoning_effort: effective_reasoning_effort,
status,
}
.into(),
session
.send_event(
&turn,
CollabAgentSpawnBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
prompt: prompt.clone(),
model: args.model.clone().unwrap_or_default(),
reasoning_effort: args.reasoning_effort.unwrap_or_default(),
}
.into(),
)
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await;
let new_thread_id = result?.thread_id;
let role_tag = role_name.unwrap_or(DEFAULT_ROLE_NAME);
turn.session_telemetry.counter(
"codex.multi_agent.spawn",
/*inc*/ 1,
&[("role", role_tag)],
);
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);
Ok(SpawnAgentResult {
agent_id: new_thread_id.to_string(),
nickname,
let result = session
.services
.agent_control
.spawn_agent_with_metadata(
config,
input_items,
Some(thread_spawn_source(
session.conversation_id,
&turn.session_source,
child_depth,
role_name,
/*task_name*/ None,
)?),
SpawnAgentOptions {
fork_parent_spawn_call_id: args.fork_context.then(|| call_id.clone()),
fork_mode: args.fork_context.then_some(SpawnAgentForkMode::FullHistory),
},
)
.await
.map_err(collab_spawn_error);
let (new_thread_id, new_agent_metadata, status) = match &result {
Ok(spawned_agent) => (
Some(spawned_agent.thread_id),
Some(spawned_agent.metadata.clone()),
spawned_agent.status.clone(),
),
Err(_) => (None, None, AgentStatus::NotFound),
};
let agent_snapshot = match new_thread_id {
Some(thread_id) => {
session
.services
.agent_control
.get_agent_config_snapshot(thread_id)
.await
}
None => None,
};
let (_new_agent_path, new_agent_nickname, new_agent_role) =
match (&agent_snapshot, new_agent_metadata) {
(Some(snapshot), _) => (
snapshot.session_source.get_agent_path().map(String::from),
snapshot.session_source.get_nickname(),
snapshot.session_source.get_agent_role(),
),
(None, Some(metadata)) => (
metadata.agent_path.map(String::from),
metadata.agent_nickname,
metadata.agent_role,
),
(None, None) => (None, None, None),
};
let effective_model = agent_snapshot
.as_ref()
.map(|snapshot| snapshot.model.clone())
.unwrap_or_else(|| args.model.clone().unwrap_or_default());
let effective_reasoning_effort = agent_snapshot
.as_ref()
.and_then(|snapshot| snapshot.reasoning_effort)
.unwrap_or(args.reasoning_effort.unwrap_or_default());
let nickname = new_agent_nickname.clone();
session
.send_event(
&turn,
CollabAgentSpawnEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
new_thread_id,
new_agent_nickname,
new_agent_role,
prompt,
model: effective_model,
reasoning_effort: effective_reasoning_effort,
status,
}
.into(),
)
.await;
let new_thread_id = result?.thread_id;
let role_tag = role_name.unwrap_or(DEFAULT_ROLE_NAME);
turn.session_telemetry.counter(
"codex.multi_agent.spawn",
/*inc*/ 1,
&[("role", role_tag)],
);
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(SpawnAgentResult {
agent_id: new_thread_id.to_string(),
nickname,
}),
})
})
}
}

View File

@@ -15,8 +15,6 @@ use tokio::time::timeout_at;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = WaitAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -25,162 +23,172 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: WaitArgs = parse_arguments(&arguments)?;
let receiver_thread_ids = parse_agent_id_targets(args.targets)?;
let mut receiver_agents = Vec::with_capacity(receiver_thread_ids.len());
let mut target_by_thread_id = HashMap::with_capacity(receiver_thread_ids.len());
for receiver_thread_id in &receiver_thread_ids {
let agent_metadata = session
.services
.agent_control
.get_agent_metadata(*receiver_thread_id)
.unwrap_or_default();
target_by_thread_id.insert(
*receiver_thread_id,
agent_metadata
.agent_path
.as_ref()
.map(ToString::to_string)
.unwrap_or_else(|| receiver_thread_id.to_string()),
);
receiver_agents.push(CollabAgentRef {
thread_id: *receiver_thread_id,
agent_nickname: agent_metadata.agent_nickname,
agent_role: agent_metadata.agent_role,
});
}
let timeout_ms = args.timeout_ms.unwrap_or(DEFAULT_WAIT_TIMEOUT_MS);
let timeout_ms = match timeout_ms {
ms if ms <= 0 => {
return Err(FunctionCallError::RespondToModel(
"timeout_ms must be greater than zero".to_owned(),
));
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: WaitArgs = parse_arguments(&arguments)?;
let receiver_thread_ids = parse_agent_id_targets(args.targets)?;
let mut receiver_agents = Vec::with_capacity(receiver_thread_ids.len());
let mut target_by_thread_id = HashMap::with_capacity(receiver_thread_ids.len());
for receiver_thread_id in &receiver_thread_ids {
let agent_metadata = session
.services
.agent_control
.get_agent_metadata(*receiver_thread_id)
.unwrap_or_default();
target_by_thread_id.insert(
*receiver_thread_id,
agent_metadata
.agent_path
.as_ref()
.map(ToString::to_string)
.unwrap_or_else(|| receiver_thread_id.to_string()),
);
receiver_agents.push(CollabAgentRef {
thread_id: *receiver_thread_id,
agent_nickname: agent_metadata.agent_nickname,
agent_role: agent_metadata.agent_role,
});
}
ms => ms.clamp(MIN_WAIT_TIMEOUT_MS, MAX_WAIT_TIMEOUT_MS),
};
session
.send_event(
&turn,
CollabWaitingBeginEvent {
sender_thread_id: session.conversation_id,
receiver_thread_ids: receiver_thread_ids.clone(),
receiver_agents: receiver_agents.clone(),
call_id: call_id.clone(),
let timeout_ms = args.timeout_ms.unwrap_or(DEFAULT_WAIT_TIMEOUT_MS);
let timeout_ms = match timeout_ms {
ms if ms <= 0 => {
return Err(FunctionCallError::RespondToModel(
"timeout_ms must be greater than zero".to_owned(),
));
}
.into(),
)
.await;
ms => ms.clamp(MIN_WAIT_TIMEOUT_MS, MAX_WAIT_TIMEOUT_MS),
};
let mut status_rxs = Vec::with_capacity(receiver_thread_ids.len());
let mut initial_final_statuses = Vec::new();
for id in &receiver_thread_ids {
match session.services.agent_control.subscribe_status(*id).await {
Ok(rx) => {
let status = rx.borrow().clone();
if is_final(&status) {
initial_final_statuses.push((*id, status));
session
.send_event(
&turn,
CollabWaitingBeginEvent {
sender_thread_id: session.conversation_id,
receiver_thread_ids: receiver_thread_ids.clone(),
receiver_agents: receiver_agents.clone(),
call_id: call_id.clone(),
}
status_rxs.push((*id, rx));
}
Err(CodexErr::ThreadNotFound(_)) => {
initial_final_statuses.push((*id, AgentStatus::NotFound));
}
Err(err) => {
let mut statuses = HashMap::with_capacity(1);
statuses.insert(*id, session.services.agent_control.get_status(*id).await);
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id: call_id.clone(),
agent_statuses: build_wait_agent_statuses(
&statuses,
&receiver_agents,
),
statuses,
}
.into(),
)
.await;
return Err(collab_agent_error(*id, err));
}
}
}
.into(),
)
.await;
let statuses = if !initial_final_statuses.is_empty() {
initial_final_statuses
} else {
let mut futures = FuturesUnordered::new();
for (id, rx) in status_rxs.into_iter() {
let session = session.clone();
futures.push(wait_for_final_status(session, id, rx));
}
let mut results = Vec::new();
let deadline = Instant::now() + Duration::from_millis(timeout_ms as u64);
loop {
match timeout_at(deadline, futures.next()).await {
Ok(Some(Some(result))) => {
results.push(result);
break;
let mut status_rxs = Vec::with_capacity(receiver_thread_ids.len());
let mut initial_final_statuses = Vec::new();
for id in &receiver_thread_ids {
match session.services.agent_control.subscribe_status(*id).await {
Ok(rx) => {
let status = rx.borrow().clone();
if is_final(&status) {
initial_final_statuses.push((*id, status));
}
status_rxs.push((*id, rx));
}
Err(CodexErr::ThreadNotFound(_)) => {
initial_final_statuses.push((*id, AgentStatus::NotFound));
}
Err(err) => {
let mut statuses = HashMap::with_capacity(1);
statuses.insert(*id, session.services.agent_control.get_status(*id).await);
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id: call_id.clone(),
agent_statuses: build_wait_agent_statuses(
&statuses,
&receiver_agents,
),
statuses,
}
.into(),
)
.await;
return Err(collab_agent_error(*id, err));
}
Ok(Some(None)) => continue,
Ok(None) | Err(_) => break,
}
}
if !results.is_empty() {
let statuses = if !initial_final_statuses.is_empty() {
initial_final_statuses
} else {
let mut futures = FuturesUnordered::new();
for (id, rx) in status_rxs.into_iter() {
let session = session.clone();
futures.push(wait_for_final_status(session, id, rx));
}
let mut results = Vec::new();
let deadline = Instant::now() + Duration::from_millis(timeout_ms as u64);
loop {
match futures.next().now_or_never() {
Some(Some(Some(result))) => results.push(result),
Some(Some(None)) => continue,
Some(None) | None => break,
match timeout_at(deadline, futures.next()).await {
Ok(Some(Some(result))) => {
results.push(result);
break;
}
Ok(Some(None)) => continue,
Ok(None) | Err(_) => break,
}
}
}
results
};
let timed_out = statuses.is_empty();
let statuses_by_id = statuses.clone().into_iter().collect::<HashMap<_, _>>();
let agent_statuses = build_wait_agent_statuses(&statuses_by_id, &receiver_agents);
let result = WaitAgentResult {
status: statuses
.into_iter()
.filter_map(|(thread_id, status)| {
target_by_thread_id
.get(&thread_id)
.cloned()
.map(|target| (target, status))
})
.collect(),
timed_out,
};
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id,
agent_statuses,
statuses: statuses_by_id,
if !results.is_empty() {
loop {
match futures.next().now_or_never() {
Some(Some(Some(result))) => results.push(result),
Some(Some(None)) => continue,
Some(None) | None => break,
}
}
}
.into(),
)
.await;
results
};
Ok(result)
let timed_out = statuses.is_empty();
let statuses_by_id = statuses.clone().into_iter().collect::<HashMap<_, _>>();
let agent_statuses = build_wait_agent_statuses(&statuses_by_id, &receiver_agents);
let result = WaitAgentResult {
status: statuses
.into_iter()
.filter_map(|(thread_id, status)| {
target_by_thread_id
.get(&thread_id)
.cloned()
.map(|target| (target, status))
})
.collect(),
timed_out,
};
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id: call_id.clone(),
agent_statuses,
statuses: statuses_by_id,
}
.into(),
)
.await;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -115,6 +115,7 @@ fn history_contains_inter_agent_communication(
#[derive(Clone, Copy)]
struct NeverEndingTask;
#[async_trait::async_trait]
impl SessionTask for NeverEndingTask {
fn kind(&self) -> TaskKind {
TaskKind::Regular

View File

@@ -9,6 +9,7 @@ use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::multi_agents_common::*;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::AgentPath;
@@ -23,6 +24,7 @@ use codex_protocol::protocol::CollabCloseEndEvent;
use codex_protocol::protocol::CollabWaitingBeginEvent;
use codex_protocol::protocol::CollabWaitingEndEvent;
use codex_protocol::user_input::UserInput;
use futures::future::BoxFuture;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;

View File

@@ -3,8 +3,6 @@ use super::*;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = CloseAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -13,93 +11,103 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: CloseAgentArgs = parse_arguments(&arguments)?;
let agent_id = resolve_agent_target(&session, &turn, &args.target).await?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(agent_id)
.unwrap_or_default();
if receiver_agent
.agent_path
.as_ref()
.is_some_and(AgentPath::is_root)
{
return Err(FunctionCallError::RespondToModel(
"root is not a spawned agent".to_string(),
));
}
session
.send_event(
&turn,
CollabCloseBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
}
.into(),
)
.await;
let status = match session
.services
.agent_control
.subscribe_status(agent_id)
.await
{
Ok(mut status_rx) => status_rx.borrow_and_update().clone(),
Err(err) => {
let status = session.services.agent_control.get_status(agent_id).await;
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
status,
}
.into(),
)
.await;
return Err(collab_agent_error(agent_id, err));
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: CloseAgentArgs = parse_arguments(&arguments)?;
let agent_id = resolve_agent_target(&session, &turn, &args.target).await?;
let receiver_agent = session
.services
.agent_control
.get_agent_metadata(agent_id)
.unwrap_or_default();
if receiver_agent
.agent_path
.as_ref()
.is_some_and(AgentPath::is_root)
{
return Err(FunctionCallError::RespondToModel(
"root is not a spawned agent".to_string(),
));
}
};
let result = session
.services
.agent_control
.close_agent(agent_id)
.await
.map_err(|err| collab_agent_error(agent_id, err))
.map(|_| ());
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id,
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
session
.send_event(
&turn,
CollabCloseBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
}
.into(),
)
.await;
let status = match session
.services
.agent_control
.subscribe_status(agent_id)
.await
{
Ok(mut status_rx) => status_rx.borrow_and_update().clone(),
Err(err) => {
let status = session.services.agent_control.get_status(agent_id).await;
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname.clone(),
receiver_agent_role: receiver_agent.agent_role.clone(),
status,
}
.into(),
)
.await;
return Err(collab_agent_error(agent_id, err));
}
.into(),
)
.await;
result?;
};
let result = session
.services
.agent_control
.close_agent(agent_id)
.await
.map_err(|err| collab_agent_error(agent_id, err))
.map(|_| ());
session
.send_event(
&turn,
CollabCloseEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
receiver_thread_id: agent_id,
receiver_agent_nickname: receiver_agent.agent_nickname,
receiver_agent_role: receiver_agent.agent_role,
status: status.clone(),
}
.into(),
)
.await;
result?;
Ok(CloseAgentResult {
previous_status: status,
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(CloseAgentResult {
previous_status: status,
}),
})
})
}
}

View File

@@ -1,14 +1,11 @@
use super::message_tool::FollowupTaskArgs;
use super::message_tool::MessageDeliveryMode;
use super::message_tool::MessageToolResult;
use super::message_tool::handle_message_string_tool;
use super::*;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = MessageToolResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -17,16 +14,29 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let arguments = function_arguments(invocation.payload.clone())?;
let args: FollowupTaskArgs = parse_arguments(&arguments)?;
handle_message_string_tool(
invocation,
MessageDeliveryMode::TriggerTurn,
args.target,
args.message,
args.interrupt,
)
.await
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let call_id = invocation.call_id.clone();
let payload_for_result = invocation.payload.clone();
let arguments = function_arguments(invocation.payload.clone())?;
let args: FollowupTaskArgs = parse_arguments(&arguments)?;
let result = handle_message_string_tool(
invocation,
MessageDeliveryMode::TriggerTurn,
args.target,
args.message,
args.interrupt,
)
.await?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -4,8 +4,6 @@ use crate::agent::control::ListedAgent;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = ListAgentsResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -14,27 +12,38 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: ListAgentsArgs = parse_arguments(&arguments)?;
session
.services
.agent_control
.register_session_root(session.conversation_id, &turn.session_source);
let agents = session
.services
.agent_control
.list_agents(&turn.session_source, args.path_prefix.as_deref())
.await
.map_err(collab_spawn_error)?;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: ListAgentsArgs = parse_arguments(&arguments)?;
session
.services
.agent_control
.register_session_root(session.conversation_id, &turn.session_source);
let agents = session
.services
.agent_control
.list_agents(&turn.session_source, args.path_prefix.as_deref())
.await
.map_err(collab_spawn_error)?;
Ok(ListAgentsResult { agents })
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ListAgentsResult { agents }),
})
})
}
}

View File

@@ -1,5 +1,4 @@
use super::message_tool::MessageDeliveryMode;
use super::message_tool::MessageToolResult;
use super::message_tool::SendMessageArgs;
use super::message_tool::handle_message_string_tool;
use super::*;
@@ -7,8 +6,6 @@ use super::*;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = MessageToolResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -17,16 +14,29 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let arguments = function_arguments(invocation.payload.clone())?;
let args: SendMessageArgs = parse_arguments(&arguments)?;
handle_message_string_tool(
invocation,
MessageDeliveryMode::QueueOnly,
args.target,
args.message,
/*interrupt*/ false,
)
.await
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let call_id = invocation.call_id.clone();
let payload_for_result = invocation.payload.clone();
let arguments = function_arguments(invocation.payload.clone())?;
let args: SendMessageArgs = parse_arguments(&arguments)?;
let result = handle_message_string_tool(
invocation,
MessageDeliveryMode::QueueOnly,
args.target,
args.message,
/*interrupt*/ false,
)
.await?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -12,8 +12,6 @@ use codex_protocol::protocol::Op;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = SpawnAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -22,178 +20,188 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: SpawnAgentArgs = parse_arguments(&arguments)?;
let fork_mode = args.fork_mode()?;
let role_name = args
.agent_type
.as_deref()
.map(str::trim)
.filter(|role| !role.is_empty());
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: SpawnAgentArgs = parse_arguments(&arguments)?;
let fork_mode = args.fork_mode()?;
let role_name = args
.agent_type
.as_deref()
.map(str::trim)
.filter(|role| !role.is_empty());
let initial_operation = parse_collab_input(Some(args.message), /*items*/ None)?;
let prompt = render_input_preview(&initial_operation);
let initial_operation = parse_collab_input(Some(args.message), /*items*/ None)?;
let prompt = render_input_preview(&initial_operation);
let session_source = turn.session_source.clone();
let child_depth = next_thread_spawn_depth(&session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
session
.send_event(
&turn,
CollabAgentSpawnBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
prompt: prompt.clone(),
model: args.model.clone().unwrap_or_default(),
reasoning_effort: args.reasoning_effort.unwrap_or_default(),
}
.into(),
)
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);
let spawn_source = thread_spawn_source(
session.conversation_id,
&turn.session_source,
child_depth,
role_name,
Some(args.task_name.clone()),
)?;
let result = session
.services
.agent_control
.spawn_agent_with_metadata(
config,
match (spawn_source.get_agent_path(), initial_operation) {
(Some(recipient), Op::UserInput { items, .. })
if items
.iter()
.all(|item| matches!(item, UserInput::Text { .. })) =>
{
Op::InterAgentCommunication {
communication: InterAgentCommunication::new(
turn.session_source
.get_agent_path()
.unwrap_or_else(AgentPath::root),
recipient,
Vec::new(),
prompt.clone(),
/*trigger_turn*/ true,
),
}
}
(_, initial_operation) => initial_operation,
},
Some(spawn_source),
SpawnAgentOptions {
fork_parent_spawn_call_id: fork_mode.as_ref().map(|_| call_id.clone()),
fork_mode,
},
)
.await
.map_err(collab_spawn_error);
let (new_thread_id, new_agent_metadata, status) = match &result {
Ok(spawned_agent) => (
Some(spawned_agent.thread_id),
Some(spawned_agent.metadata.clone()),
spawned_agent.status.clone(),
),
Err(_) => (None, None, AgentStatus::NotFound),
};
let agent_snapshot = match new_thread_id {
Some(thread_id) => {
session
.services
.agent_control
.get_agent_config_snapshot(thread_id)
.await
let session_source = turn.session_source.clone();
let child_depth = next_thread_spawn_depth(&session_source);
let max_depth = turn.config.agent_max_depth;
if exceeds_thread_spawn_depth_limit(child_depth, max_depth) {
return Err(FunctionCallError::RespondToModel(
"Agent depth limit reached. Solve the task yourself.".to_string(),
));
}
None => None,
};
let (new_agent_path, new_agent_nickname, new_agent_role) =
match (&agent_snapshot, new_agent_metadata) {
(Some(snapshot), _) => (
snapshot.session_source.get_agent_path().map(String::from),
snapshot.session_source.get_nickname(),
snapshot.session_source.get_agent_role(),
),
(None, Some(metadata)) => (
metadata.agent_path.map(String::from),
metadata.agent_nickname,
metadata.agent_role,
),
(None, None) => (None, None, None),
};
let effective_model = agent_snapshot
.as_ref()
.map(|snapshot| snapshot.model.clone())
.unwrap_or_else(|| args.model.clone().unwrap_or_default());
let effective_reasoning_effort = agent_snapshot
.as_ref()
.and_then(|snapshot| snapshot.reasoning_effort)
.unwrap_or(args.reasoning_effort.unwrap_or_default());
let nickname = new_agent_nickname.clone();
session
.send_event(
&turn,
CollabAgentSpawnEndEvent {
call_id,
sender_thread_id: session.conversation_id,
new_thread_id,
new_agent_nickname,
new_agent_role,
prompt,
model: effective_model,
reasoning_effort: effective_reasoning_effort,
status,
}
.into(),
session
.send_event(
&turn,
CollabAgentSpawnBeginEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
prompt: prompt.clone(),
model: args.model.clone().unwrap_or_default(),
reasoning_effort: args.reasoning_effort.unwrap_or_default(),
}
.into(),
)
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await;
let _ = result?;
let role_tag = role_name.unwrap_or(DEFAULT_ROLE_NAME);
turn.session_telemetry.counter(
"codex.multi_agent.spawn",
/*inc*/ 1,
&[("role", role_tag)],
);
let task_name = new_agent_path.ok_or_else(|| {
FunctionCallError::RespondToModel(
"spawned agent is missing a canonical task name".to_string(),
)
})?;
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);
Ok(SpawnAgentResult {
agent_id: None,
task_name,
nickname,
let spawn_source = thread_spawn_source(
session.conversation_id,
&turn.session_source,
child_depth,
role_name,
Some(args.task_name.clone()),
)?;
let result = session
.services
.agent_control
.spawn_agent_with_metadata(
config,
match (spawn_source.get_agent_path(), initial_operation) {
(Some(recipient), Op::UserInput { items, .. })
if items
.iter()
.all(|item| matches!(item, UserInput::Text { .. })) =>
{
Op::InterAgentCommunication {
communication: InterAgentCommunication::new(
turn.session_source
.get_agent_path()
.unwrap_or_else(AgentPath::root),
recipient,
Vec::new(),
prompt.clone(),
/*trigger_turn*/ true,
),
}
}
(_, initial_operation) => initial_operation,
},
Some(spawn_source),
SpawnAgentOptions {
fork_parent_spawn_call_id: fork_mode.as_ref().map(|_| call_id.clone()),
fork_mode,
},
)
.await
.map_err(collab_spawn_error);
let (new_thread_id, new_agent_metadata, status) = match &result {
Ok(spawned_agent) => (
Some(spawned_agent.thread_id),
Some(spawned_agent.metadata.clone()),
spawned_agent.status.clone(),
),
Err(_) => (None, None, AgentStatus::NotFound),
};
let agent_snapshot = match new_thread_id {
Some(thread_id) => {
session
.services
.agent_control
.get_agent_config_snapshot(thread_id)
.await
}
None => None,
};
let (new_agent_path, new_agent_nickname, new_agent_role) =
match (&agent_snapshot, new_agent_metadata) {
(Some(snapshot), _) => (
snapshot.session_source.get_agent_path().map(String::from),
snapshot.session_source.get_nickname(),
snapshot.session_source.get_agent_role(),
),
(None, Some(metadata)) => (
metadata.agent_path.map(String::from),
metadata.agent_nickname,
metadata.agent_role,
),
(None, None) => (None, None, None),
};
let effective_model = agent_snapshot
.as_ref()
.map(|snapshot| snapshot.model.clone())
.unwrap_or_else(|| args.model.clone().unwrap_or_default());
let effective_reasoning_effort = agent_snapshot
.as_ref()
.and_then(|snapshot| snapshot.reasoning_effort)
.unwrap_or(args.reasoning_effort.unwrap_or_default());
let nickname = new_agent_nickname.clone();
session
.send_event(
&turn,
CollabAgentSpawnEndEvent {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
new_thread_id,
new_agent_nickname,
new_agent_role,
prompt,
model: effective_model,
reasoning_effort: effective_reasoning_effort,
status,
}
.into(),
)
.await;
let _ = result?;
let role_tag = role_name.unwrap_or(DEFAULT_ROLE_NAME);
turn.session_telemetry.counter(
"codex.multi_agent.spawn",
/*inc*/ 1,
&[("role", role_tag)],
);
let task_name = new_agent_path.ok_or_else(|| {
FunctionCallError::RespondToModel(
"spawned agent is missing a canonical task name".to_string(),
)
})?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(SpawnAgentResult {
agent_id: None,
task_name,
nickname,
}),
})
})
}
}

View File

@@ -7,8 +7,6 @@ use tokio::time::timeout_at;
pub(crate) struct Handler;
impl ToolHandler for Handler {
type Output = WaitAgentResult;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -17,59 +15,69 @@ impl ToolHandler for Handler {
matches!(payload, ToolPayload::Function { .. })
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = function_arguments(payload)?;
let args: WaitArgs = parse_arguments(&arguments)?;
let timeout_ms = args.timeout_ms.unwrap_or(DEFAULT_WAIT_TIMEOUT_MS);
let timeout_ms = match timeout_ms {
ms if ms <= 0 => {
return Err(FunctionCallError::RespondToModel(
"timeout_ms must be greater than zero".to_owned(),
));
}
ms => ms.clamp(MIN_WAIT_TIMEOUT_MS, MAX_WAIT_TIMEOUT_MS),
};
let mut mailbox_seq_rx = session.subscribe_mailbox_seq();
session
.send_event(
&turn,
CollabWaitingBeginEvent {
sender_thread_id: session.conversation_id,
receiver_thread_ids: Vec::new(),
receiver_agents: Vec::new(),
call_id: call_id.clone(),
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = function_arguments(payload)?;
let args: WaitArgs = parse_arguments(&arguments)?;
let timeout_ms = args.timeout_ms.unwrap_or(DEFAULT_WAIT_TIMEOUT_MS);
let timeout_ms = match timeout_ms {
ms if ms <= 0 => {
return Err(FunctionCallError::RespondToModel(
"timeout_ms must be greater than zero".to_owned(),
));
}
.into(),
)
.await;
ms => ms.clamp(MIN_WAIT_TIMEOUT_MS, MAX_WAIT_TIMEOUT_MS),
};
let deadline = Instant::now() + Duration::from_millis(timeout_ms as u64);
let timed_out = !wait_for_mailbox_change(&mut mailbox_seq_rx, deadline).await;
let result = WaitAgentResult::from_timed_out(timed_out);
let mut mailbox_seq_rx = session.subscribe_mailbox_seq();
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id,
agent_statuses: Vec::new(),
statuses: HashMap::new(),
}
.into(),
)
.await;
session
.send_event(
&turn,
CollabWaitingBeginEvent {
sender_thread_id: session.conversation_id,
receiver_thread_ids: Vec::new(),
receiver_agents: Vec::new(),
call_id: call_id.clone(),
}
.into(),
)
.await;
Ok(result)
let deadline = Instant::now() + Duration::from_millis(timeout_ms as u64);
let timed_out = !wait_for_mailbox_change(&mut mailbox_seq_rx, deadline).await;
let result = WaitAgentResult::from_timed_out(timed_out);
session
.send_event(
&turn,
CollabWaitingEndEvent {
sender_thread_id: session.conversation_id,
call_id: call_id.clone(),
agent_statuses: Vec::new(),
statuses: HashMap::new(),
}
.into(),
)
.await;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(result),
})
})
}
}

View File

@@ -4,6 +4,7 @@ use crate::function_tool::FunctionCallError;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::config_types::ModeKind;
@@ -11,6 +12,7 @@ use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::plan_tool::UpdatePlanArgs;
use codex_protocol::protocol::EventMsg;
use futures::future::BoxFuture;
use serde_json::Value as JsonValue;
pub struct PlanHandler;
@@ -44,33 +46,41 @@ impl ToolOutput for PlanToolOutput {
}
impl ToolHandler for PlanHandler {
type Output = PlanToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"update_plan handler received unsupported payload".to_string(),
));
}
};
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"update_plan handler received unsupported payload".to_string(),
));
}
};
handle_update_plan(session.as_ref(), turn.as_ref(), arguments, call_id).await?;
handle_update_plan(session.as_ref(), turn.as_ref(), arguments, call_id.clone()).await?;
Ok(PlanToolOutput)
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(PlanToolOutput),
})
})
}
}

View File

@@ -1,67 +1,77 @@
use codex_protocol::request_permissions::RequestPermissionsArgs;
use codex_sandboxing::policy_transforms::normalize_additional_permissions;
use futures::future::BoxFuture;
use crate::function_tool::FunctionCallError;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments_with_base_path;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
pub struct RequestPermissionsHandler;
impl ToolHandler for RequestPermissionsHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"request_permissions handler received unsupported payload".to_string(),
));
}
};
let mut args: RequestPermissionsArgs =
parse_arguments_with_base_path(&arguments, turn.cwd.as_path())?;
args.permissions = normalize_additional_permissions(args.permissions.into())
.map(codex_protocol::request_permissions::RequestPermissionProfile::from)
.map_err(FunctionCallError::RespondToModel)?;
if args.permissions.is_empty() {
return Err(FunctionCallError::RespondToModel(
"request_permissions handler received unsupported payload".to_string(),
"request_permissions requires at least one permission".to_string(),
));
}
};
let mut args: RequestPermissionsArgs =
parse_arguments_with_base_path(&arguments, turn.cwd.as_path())?;
args.permissions = normalize_additional_permissions(args.permissions.into())
.map(codex_protocol::request_permissions::RequestPermissionProfile::from)
.map_err(FunctionCallError::RespondToModel)?;
if args.permissions.is_empty() {
return Err(FunctionCallError::RespondToModel(
"request_permissions requires at least one permission".to_string(),
));
}
let response = session
.request_permissions(turn.as_ref(), call_id.clone(), args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(
"request_permissions was cancelled before receiving a response".to_string(),
)
})?;
let response = session
.request_permissions(turn.as_ref(), call_id, args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(
"request_permissions was cancelled before receiving a response".to_string(),
)
let content = serde_json::to_string(&response).map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize request_permissions response: {err}"
))
})?;
let content = serde_json::to_string(&response).map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize request_permissions response: {err}"
))
})?;
Ok(FunctionToolOutput::from_text(content, Some(true)))
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_text(content, Some(true))),
})
})
}
}

View File

@@ -3,67 +3,77 @@ use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_tools::REQUEST_USER_INPUT_TOOL_NAME;
use codex_tools::normalize_request_user_input_args;
use codex_tools::request_user_input_unavailable_message;
use futures::future::BoxFuture;
pub struct RequestUserInputHandler {
pub default_mode_request_user_input: bool,
}
impl ToolHandler for RequestUserInputHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
call_id,
payload,
..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(format!(
"{REQUEST_USER_INPUT_TOOL_NAME} handler received unsupported payload"
)));
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(format!(
"{REQUEST_USER_INPUT_TOOL_NAME} handler received unsupported payload"
)));
}
};
let mode = session.collaboration_mode().await.mode;
if let Some(message) =
request_user_input_unavailable_message(mode, self.default_mode_request_user_input)
{
return Err(FunctionCallError::RespondToModel(message));
}
};
let mode = session.collaboration_mode().await.mode;
if let Some(message) =
request_user_input_unavailable_message(mode, self.default_mode_request_user_input)
{
return Err(FunctionCallError::RespondToModel(message));
}
let args: RequestUserInputArgs = parse_arguments(&arguments)?;
let args = normalize_request_user_input_args(args)
.map_err(FunctionCallError::RespondToModel)?;
let response = session
.request_user_input(turn.as_ref(), call_id.clone(), args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(format!(
"{REQUEST_USER_INPUT_TOOL_NAME} was cancelled before receiving a response"
))
})?;
let args: RequestUserInputArgs = parse_arguments(&arguments)?;
let args =
normalize_request_user_input_args(args).map_err(FunctionCallError::RespondToModel)?;
let response = session
.request_user_input(turn.as_ref(), call_id, args)
.await
.ok_or_else(|| {
FunctionCallError::RespondToModel(format!(
"{REQUEST_USER_INPUT_TOOL_NAME} was cancelled before receiving a response"
let content = serde_json::to_string(&response).map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize {REQUEST_USER_INPUT_TOOL_NAME} response: {err}"
))
})?;
let content = serde_json::to_string(&response).map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize {REQUEST_USER_INPUT_TOOL_NAME} response: {err}"
))
})?;
Ok(FunctionToolOutput::from_text(content, Some(true)))
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_text(content, Some(true))),
})
})
}
}

View File

@@ -26,6 +26,7 @@ use crate::tools::handlers::parse_arguments;
use crate::tools::handlers::parse_arguments_with_base_path;
use crate::tools::handlers::resolve_workdir_base_path;
use crate::tools::orchestrator::ToolOrchestrator;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::PostToolUsePayload;
use crate::tools::registry::PreToolUsePayload;
use crate::tools::registry::ToolHandler;
@@ -39,6 +40,7 @@ use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::ExecCommandSource;
use codex_shell_command::is_safe_command::is_known_safe_command;
use codex_tools::ShellCommandBackendConfig;
use futures::future::BoxFuture;
pub struct ShellHandler;
@@ -178,8 +180,6 @@ impl From<ShellCommandBackendConfig> for ShellCommandHandler {
}
impl ToolHandler for ShellHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -191,7 +191,7 @@ impl ToolHandler for ShellHandler {
)
}
async fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
match &invocation.payload {
ToolPayload::Function { arguments } => {
serde_json::from_str::<ShellToolCallParams>(arguments)
@@ -220,66 +220,78 @@ impl ToolHandler for ShellHandler {
})
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
match payload {
ToolPayload::Function { arguments } => {
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let params: ShellToolCallParams =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let prefix_rule = params.prefix_rule.clone();
let exec_params =
Self::to_exec_params(&params, turn.as_ref(), session.conversation_id);
Self::run_exec_like(RunExecLikeArgs {
tool_name: tool_name.clone(),
exec_params,
additional_permissions: params.additional_permissions.clone(),
prefix_rule,
session,
turn,
tracker,
call_id,
freeform: false,
shell_runtime_backend: ShellRuntimeBackend::Generic,
})
.await
}
ToolPayload::LocalShell { params } => {
let exec_params =
Self::to_exec_params(&params, turn.as_ref(), session.conversation_id);
Self::run_exec_like(RunExecLikeArgs {
tool_name: tool_name.clone(),
exec_params,
additional_permissions: None,
prefix_rule: None,
session,
turn,
tracker,
call_id,
freeform: false,
shell_runtime_backend: ShellRuntimeBackend::Generic,
})
.await
}
_ => Err(FunctionCallError::RespondToModel(format!(
"unsupported payload for shell handler: {tool_name}"
))),
}
let output = match payload {
ToolPayload::Function { arguments } => {
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let params: ShellToolCallParams =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let prefix_rule = params.prefix_rule.clone();
let exec_params =
Self::to_exec_params(&params, turn.as_ref(), session.conversation_id);
Self::run_exec_like(RunExecLikeArgs {
tool_name: tool_name.clone(),
exec_params,
additional_permissions: params.additional_permissions.clone(),
prefix_rule,
session,
turn,
tracker,
call_id: call_id.clone(),
freeform: false,
shell_runtime_backend: ShellRuntimeBackend::Generic,
})
.await?
}
ToolPayload::LocalShell { params } => {
let exec_params =
Self::to_exec_params(&params, turn.as_ref(), session.conversation_id);
Self::run_exec_like(RunExecLikeArgs {
tool_name: tool_name.clone(),
exec_params,
additional_permissions: None,
prefix_rule: None,
session,
turn,
tracker,
call_id: call_id.clone(),
freeform: false,
shell_runtime_backend: ShellRuntimeBackend::Generic,
})
.await?
}
_ => {
return Err(FunctionCallError::RespondToModel(format!(
"unsupported payload for shell handler: {tool_name}"
)));
}
};
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(output),
})
})
}
}
impl ToolHandler for ShellCommandHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -288,7 +300,7 @@ impl ToolHandler for ShellCommandHandler {
matches!(payload, ToolPayload::Function { .. })
}
async fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
let ToolPayload::Function { arguments } = &invocation.payload else {
return true;
};
@@ -327,55 +339,67 @@ impl ToolHandler for ShellCommandHandler {
})
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
let ToolPayload::Function { arguments } = payload else {
return Err(FunctionCallError::RespondToModel(format!(
"unsupported payload for shell_command handler: {tool_name}"
)));
};
let ToolPayload::Function { arguments } = payload else {
return Err(FunctionCallError::RespondToModel(format!(
"unsupported payload for shell_command handler: {tool_name}"
)));
};
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let params: ShellCommandToolCallParams =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let workdir = turn.resolve_path(params.workdir.clone());
maybe_emit_implicit_skill_invocation(
session.as_ref(),
turn.as_ref(),
&params.command,
&workdir,
)
.await;
let prefix_rule = params.prefix_rule.clone();
let exec_params = Self::to_exec_params(
&params,
session.as_ref(),
turn.as_ref(),
session.conversation_id,
turn.tools_config.allow_login_shell,
)?;
ShellHandler::run_exec_like(RunExecLikeArgs {
tool_name,
exec_params,
additional_permissions: params.additional_permissions.clone(),
prefix_rule,
session,
turn,
tracker,
call_id,
freeform: true,
shell_runtime_backend: self.shell_runtime_backend(),
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let params: ShellCommandToolCallParams =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let workdir = turn.resolve_path(params.workdir.clone());
maybe_emit_implicit_skill_invocation(
session.as_ref(),
turn.as_ref(),
&params.command,
&workdir,
)
.await;
let prefix_rule = params.prefix_rule.clone();
let exec_params = Self::to_exec_params(
&params,
session.as_ref(),
turn.as_ref(),
session.conversation_id,
turn.tools_config.allow_login_shell,
)?;
let output = ShellHandler::run_exec_like(RunExecLikeArgs {
tool_name,
exec_params,
additional_permissions: params.additional_permissions.clone(),
prefix_rule,
session,
turn,
tracker,
call_id: call_id.clone(),
freeform: true,
shell_runtime_backend: self.shell_runtime_backend(),
})
.await?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(output),
})
})
.await
}
}

View File

@@ -4,6 +4,7 @@ use std::sync::Arc;
use std::sync::OnceLock;
use std::time::Duration;
use futures::future::BoxFuture;
use serde::Deserialize;
use tokio::sync::Barrier;
use tokio::time::sleep;
@@ -13,6 +14,7 @@ use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
@@ -54,43 +56,53 @@ fn barrier_map() -> &'static tokio::sync::Mutex<HashMap<String, BarrierState>> {
}
impl ToolHandler for TestSyncHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation { payload, .. } = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
call_id, payload, ..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"test_sync_tool handler received unsupported payload".to_string(),
));
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"test_sync_tool handler received unsupported payload".to_string(),
));
}
};
let args: TestSyncArgs = parse_arguments(&arguments)?;
if let Some(delay) = args.sleep_before_ms
&& delay > 0
{
sleep(Duration::from_millis(delay)).await;
}
};
let args: TestSyncArgs = parse_arguments(&arguments)?;
if let Some(barrier) = args.barrier {
wait_on_barrier(barrier).await?;
}
if let Some(delay) = args.sleep_before_ms
&& delay > 0
{
sleep(Duration::from_millis(delay)).await;
}
if let Some(delay) = args.sleep_after_ms
&& delay > 0
{
sleep(Duration::from_millis(delay)).await;
}
if let Some(barrier) = args.barrier {
wait_on_barrier(barrier).await?;
}
if let Some(delay) = args.sleep_after_ms
&& delay > 0
{
sleep(Duration::from_millis(delay)).await;
}
Ok(FunctionToolOutput::from_text("ok".to_string(), Some(true)))
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_text("ok".to_string(), Some(true))),
})
})
}
}

View File

@@ -2,6 +2,7 @@ use crate::function_tool::FunctionCallError;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::context::ToolSearchOutput;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use bm25::Document;
@@ -12,6 +13,7 @@ use codex_tools::TOOL_SEARCH_DEFAULT_LIMIT;
use codex_tools::TOOL_SEARCH_TOOL_NAME;
use codex_tools::ToolSearchResultSource;
use codex_tools::collect_tool_search_output_tools;
use futures::future::BoxFuture;
use std::collections::HashMap;
pub struct ToolSearchHandler {
@@ -25,76 +27,84 @@ impl ToolSearchHandler {
}
impl ToolHandler for ToolSearchHandler {
type Output = ToolSearchOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(
fn handle(
&self,
invocation: ToolInvocation,
) -> Result<ToolSearchOutput, FunctionCallError> {
let ToolInvocation { payload, .. } = invocation;
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
call_id, payload, ..
} = invocation;
let args = match payload {
ToolPayload::ToolSearch { arguments } => arguments,
_ => {
return Err(FunctionCallError::Fatal(format!(
"{TOOL_SEARCH_TOOL_NAME} handler received unsupported payload"
)));
let payload_for_result = payload.clone();
let args = match payload {
ToolPayload::ToolSearch { arguments } => arguments,
_ => {
return Err(FunctionCallError::Fatal(format!(
"{TOOL_SEARCH_TOOL_NAME} handler received unsupported payload"
)));
}
};
let query = args.query.trim();
if query.is_empty() {
return Err(FunctionCallError::RespondToModel(
"query must not be empty".to_string(),
));
}
};
let limit = args.limit.unwrap_or(TOOL_SEARCH_DEFAULT_LIMIT);
let query = args.query.trim();
if query.is_empty() {
return Err(FunctionCallError::RespondToModel(
"query must not be empty".to_string(),
));
}
let limit = args.limit.unwrap_or(TOOL_SEARCH_DEFAULT_LIMIT);
if limit == 0 {
return Err(FunctionCallError::RespondToModel(
"limit must be greater than zero".to_string(),
));
}
if limit == 0 {
return Err(FunctionCallError::RespondToModel(
"limit must be greater than zero".to_string(),
));
}
let mut entries: Vec<(String, ToolInfo)> = self.tools.clone().into_iter().collect();
entries.sort_by(|a, b| a.0.cmp(&b.0));
let mut entries: Vec<(String, ToolInfo)> = self.tools.clone().into_iter().collect();
entries.sort_by(|a, b| a.0.cmp(&b.0));
let tools = if entries.is_empty() {
Vec::new()
} else {
let documents: Vec<Document<usize>> = entries
.iter()
.enumerate()
.map(|(idx, (name, info))| Document::new(idx, build_search_text(name, info)))
.collect();
let search_engine =
SearchEngineBuilder::<usize>::with_documents(Language::English, documents)
.build();
let results = search_engine.search(query, limit);
if entries.is_empty() {
return Ok(ToolSearchOutput { tools: Vec::new() });
}
collect_tool_search_output_tools(
results
.into_iter()
.filter_map(|result| entries.get(result.document.id))
.map(|(_name, tool)| ToolSearchResultSource {
tool_namespace: tool.tool_namespace.as_str(),
tool_name: tool.tool_name.as_str(),
tool: &tool.tool,
connector_name: tool.connector_name.as_deref(),
connector_description: tool.connector_description.as_deref(),
}),
)
.map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to encode {TOOL_SEARCH_TOOL_NAME} output: {err}"
))
})?
};
let documents: Vec<Document<usize>> = entries
.iter()
.enumerate()
.map(|(idx, (name, info))| Document::new(idx, build_search_text(name, info)))
.collect();
let search_engine =
SearchEngineBuilder::<usize>::with_documents(Language::English, documents).build();
let results = search_engine.search(query, limit);
let tools = collect_tool_search_output_tools(
results
.into_iter()
.filter_map(|result| entries.get(result.document.id))
.map(|(_name, tool)| ToolSearchResultSource {
tool_namespace: tool.tool_namespace.as_str(),
tool_name: tool.tool_name.as_str(),
tool: &tool.tool,
connector_name: tool.connector_name.as_deref(),
connector_description: tool.connector_description.as_deref(),
}),
)
.map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to encode {TOOL_SEARCH_TOOL_NAME} output: {err}"
))
})?;
Ok(ToolSearchOutput { tools })
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ToolSearchOutput { tools }),
})
})
}
}

View File

@@ -22,135 +22,145 @@ use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use futures::future::BoxFuture;
pub struct ToolSuggestHandler;
impl ToolHandler for ToolSuggestHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
payload,
session,
turn,
call_id,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
payload,
session,
turn,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::Fatal(format!(
"{TOOL_SUGGEST_TOOL_NAME} handler received unsupported payload"
)));
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::Fatal(format!(
"{TOOL_SUGGEST_TOOL_NAME} handler received unsupported payload"
)));
}
};
let args: ToolSuggestArgs = parse_arguments(&arguments)?;
let suggest_reason = args.suggest_reason.trim();
if suggest_reason.is_empty() {
return Err(FunctionCallError::RespondToModel(
"suggest_reason must not be empty".to_string(),
));
}
if args.action_type != DiscoverableToolAction::Install {
return Err(FunctionCallError::RespondToModel(
"tool suggestions currently support only action_type=\"install\"".to_string(),
));
}
if args.tool_type == DiscoverableToolType::Plugin
&& turn.app_server_client_name.as_deref() == Some("codex-tui")
{
return Err(FunctionCallError::RespondToModel(
"plugin tool suggestions are not available in codex-tui yet".to_string(),
));
}
};
let args: ToolSuggestArgs = parse_arguments(&arguments)?;
let suggest_reason = args.suggest_reason.trim();
if suggest_reason.is_empty() {
return Err(FunctionCallError::RespondToModel(
"suggest_reason must not be empty".to_string(),
));
}
if args.action_type != DiscoverableToolAction::Install {
return Err(FunctionCallError::RespondToModel(
"tool suggestions currently support only action_type=\"install\"".to_string(),
));
}
if args.tool_type == DiscoverableToolType::Plugin
&& turn.app_server_client_name.as_deref() == Some("codex-tui")
{
return Err(FunctionCallError::RespondToModel(
"plugin tool suggestions are not available in codex-tui yet".to_string(),
));
}
let auth = session.services.auth_manager.auth().await;
let manager = session.services.mcp_connection_manager.read().await;
let mcp_tools = manager.list_all_tools().await;
drop(manager);
let accessible_connectors = connectors::with_app_enabled_state(
connectors::accessible_connectors_from_mcp_tools(&mcp_tools),
&turn.config,
);
let discoverable_tools = connectors::list_tool_suggest_discoverable_tools_with_auth(
&turn.config,
auth.as_ref(),
&accessible_connectors,
)
.await
.map(|discoverable_tools| {
filter_tool_suggest_discoverable_tools_for_client(
discoverable_tools,
turn.app_server_client_name.as_deref(),
let auth = session.services.auth_manager.auth().await;
let manager = session.services.mcp_connection_manager.read().await;
let mcp_tools = manager.list_all_tools().await;
drop(manager);
let accessible_connectors = connectors::with_app_enabled_state(
connectors::accessible_connectors_from_mcp_tools(&mcp_tools),
&turn.config,
);
let discoverable_tools = connectors::list_tool_suggest_discoverable_tools_with_auth(
&turn.config,
auth.as_ref(),
&accessible_connectors,
)
})
.map_err(|err| {
FunctionCallError::RespondToModel(format!(
"tool suggestions are unavailable right now: {err}"
))
})?;
let tool = discoverable_tools
.into_iter()
.find(|tool| tool.tool_type() == args.tool_type && tool.id() == args.tool_id)
.ok_or_else(|| {
.await
.map(|discoverable_tools| {
filter_tool_suggest_discoverable_tools_for_client(
discoverable_tools,
turn.app_server_client_name.as_deref(),
)
})
.map_err(|err| {
FunctionCallError::RespondToModel(format!(
"tool_id must match one of the discoverable tools exposed by {TOOL_SUGGEST_TOOL_NAME}"
"tool suggestions are unavailable right now: {err}"
))
})?;
let request_id = RequestId::String(format!("tool_suggestion_{call_id}").into());
let params = build_tool_suggestion_elicitation_request(
CODEX_APPS_MCP_SERVER_NAME,
session.conversation_id.to_string(),
turn.sub_id.clone(),
&args,
suggest_reason,
&tool,
);
let response = session
.request_mcp_server_elicitation(turn.as_ref(), request_id, params)
.await;
let user_confirmed = response
.as_ref()
.is_some_and(|response| response.action == ElicitationAction::Accept);
let tool = discoverable_tools
.into_iter()
.find(|tool| tool.tool_type() == args.tool_type && tool.id() == args.tool_id)
.ok_or_else(|| {
FunctionCallError::RespondToModel(format!(
"tool_id must match one of the discoverable tools exposed by {TOOL_SUGGEST_TOOL_NAME}"
))
})?;
let completed = if user_confirmed {
verify_tool_suggestion_completed(&session, &turn, &tool, auth.as_ref()).await
} else {
false
};
if completed && let DiscoverableTool::Connector(connector) = &tool {
session
.merge_connector_selection(HashSet::from([connector.id.clone()]))
let request_id = RequestId::String(format!("tool_suggestion_{call_id}").into());
let params = build_tool_suggestion_elicitation_request(
CODEX_APPS_MCP_SERVER_NAME,
session.conversation_id.to_string(),
turn.sub_id.clone(),
&args,
suggest_reason,
&tool,
);
let response = session
.request_mcp_server_elicitation(turn.as_ref(), request_id, params)
.await;
}
let user_confirmed = response
.as_ref()
.is_some_and(|response| response.action == ElicitationAction::Accept);
let content = serde_json::to_string(&ToolSuggestResult {
completed,
user_confirmed,
tool_type: args.tool_type,
action_type: args.action_type,
tool_id: tool.id().to_string(),
tool_name: tool.name().to_string(),
suggest_reason: suggest_reason.to_string(),
let completed = if user_confirmed {
verify_tool_suggestion_completed(&session, &turn, &tool, auth.as_ref()).await
} else {
false
};
if completed && let DiscoverableTool::Connector(connector) = &tool {
session
.merge_connector_selection(HashSet::from([connector.id.clone()]))
.await;
}
let content = serde_json::to_string(&ToolSuggestResult {
completed,
user_confirmed,
tool_type: args.tool_type,
action_type: args.action_type,
tool_id: tool.id().to_string(),
tool_name: tool.name().to_string(),
suggest_reason: suggest_reason.to_string(),
})
.map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize {TOOL_SUGGEST_TOOL_NAME} response: {err}"
))
})?;
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(FunctionToolOutput::from_text(content, Some(true))),
})
})
.map_err(|err| {
FunctionCallError::Fatal(format!(
"failed to serialize {TOOL_SUGGEST_TOOL_NAME} response: {err}"
))
})?;
Ok(FunctionToolOutput::from_text(content, Some(true)))
}
}

View File

@@ -14,6 +14,7 @@ use crate::tools::handlers::normalize_and_validate_additional_permissions;
use crate::tools::handlers::parse_arguments;
use crate::tools::handlers::parse_arguments_with_base_path;
use crate::tools::handlers::resolve_workdir_base_path;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::PostToolUsePayload;
use crate::tools::registry::PreToolUsePayload;
use crate::tools::registry::ToolHandler;
@@ -30,6 +31,7 @@ use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::TerminalInteractionEvent;
use codex_shell_command::is_safe_command::is_known_safe_command;
use codex_tools::UnifiedExecShellMode;
use futures::future::BoxFuture;
use serde::Deserialize;
use std::path::PathBuf;
use std::sync::Arc;
@@ -86,8 +88,6 @@ fn default_tty() -> bool {
}
impl ToolHandler for UnifiedExecHandler {
type Output = ExecCommandToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
@@ -96,7 +96,7 @@ impl ToolHandler for UnifiedExecHandler {
matches!(payload, ToolPayload::Function { .. })
}
async fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
fn is_mutating(&self, invocation: &ToolInvocation) -> bool {
let ToolPayload::Function { arguments } = &invocation.payload else {
tracing::error!(
"This should never happen, invocation payload is wrong: {:?}",
@@ -156,211 +156,226 @@ impl ToolHandler for UnifiedExecHandler {
})
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let ToolInvocation {
session,
turn,
tracker,
call_id,
tool_name,
payload,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"unified_exec handler received unsupported payload".to_string(),
));
}
};
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"unified_exec handler received unsupported payload".to_string(),
));
}
};
let manager: &UnifiedExecProcessManager = &session.services.unified_exec_manager;
let context = UnifiedExecContext::new(session.clone(), turn.clone(), call_id.clone());
let manager: &UnifiedExecProcessManager = &session.services.unified_exec_manager;
let context = UnifiedExecContext::new(session.clone(), turn.clone(), call_id.clone());
let response = match tool_name.as_str() {
"exec_command" => {
let cwd = resolve_workdir_base_path(&arguments, context.turn.cwd.as_path())?;
let args: ExecCommandArgs =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let workdir = context.turn.resolve_path(args.workdir.clone());
maybe_emit_implicit_skill_invocation(
session.as_ref(),
context.turn.as_ref(),
&args.cmd,
&workdir,
)
.await;
let process_id = manager.allocate_process_id().await;
let command = get_command(
&args,
session.user_shell(),
&turn.tools_config.unified_exec_shell_mode,
turn.tools_config.allow_login_shell,
)
.map_err(FunctionCallError::RespondToModel)?;
let command_for_display = codex_shell_command::parse_command::shlex_join(&command);
let ExecCommandArgs {
workdir,
tty,
yield_time_ms,
max_output_tokens,
sandbox_permissions,
additional_permissions,
justification,
prefix_rule,
..
} = args;
let exec_permission_approvals_enabled =
session.features().enabled(Feature::ExecPermissionApprovals);
let requested_additional_permissions = additional_permissions.clone();
let effective_additional_permissions = apply_granted_turn_permissions(
context.session.as_ref(),
sandbox_permissions,
additional_permissions,
)
.await;
let additional_permissions_allowed = exec_permission_approvals_enabled
|| (session.features().enabled(Feature::RequestPermissionsTool)
&& effective_additional_permissions.permissions_preapproved);
// Sticky turn permissions have already been approved, so they should
// continue through the normal exec approval flow for the command.
if effective_additional_permissions
.sandbox_permissions
.requests_sandbox_override()
&& !effective_additional_permissions.permissions_preapproved
&& !matches!(
context.turn.approval_policy.value(),
codex_protocol::protocol::AskForApproval::OnRequest
let response = match tool_name.as_str() {
"exec_command" => {
let cwd = resolve_workdir_base_path(&arguments, context.turn.cwd.as_path())?;
let args: ExecCommandArgs =
parse_arguments_with_base_path(&arguments, cwd.as_path())?;
let workdir = context.turn.resolve_path(args.workdir.clone());
maybe_emit_implicit_skill_invocation(
session.as_ref(),
context.turn.as_ref(),
&args.cmd,
&workdir,
)
{
let approval_policy = context.turn.approval_policy.value();
manager.release_process_id(process_id).await;
.await;
let process_id = manager.allocate_process_id().await;
let command = get_command(
&args,
session.user_shell(),
&turn.tools_config.unified_exec_shell_mode,
turn.tools_config.allow_login_shell,
)
.map_err(FunctionCallError::RespondToModel)?;
let command_for_display =
codex_shell_command::parse_command::shlex_join(&command);
let ExecCommandArgs {
workdir,
tty,
yield_time_ms,
max_output_tokens,
sandbox_permissions,
additional_permissions,
justification,
prefix_rule,
..
} = args;
let exec_permission_approvals_enabled =
session.features().enabled(Feature::ExecPermissionApprovals);
let requested_additional_permissions = additional_permissions.clone();
let effective_additional_permissions = apply_granted_turn_permissions(
context.session.as_ref(),
sandbox_permissions,
additional_permissions,
)
.await;
let additional_permissions_allowed = exec_permission_approvals_enabled
|| (session.features().enabled(Feature::RequestPermissionsTool)
&& effective_additional_permissions.permissions_preapproved);
// Sticky turn permissions have already been approved, so they should
// continue through the normal exec approval flow for the command.
if effective_additional_permissions
.sandbox_permissions
.requests_sandbox_override()
&& !effective_additional_permissions.permissions_preapproved
&& !matches!(
context.turn.approval_policy.value(),
codex_protocol::protocol::AskForApproval::OnRequest
)
{
let approval_policy = context.turn.approval_policy.value();
manager.release_process_id(process_id).await;
return Err(FunctionCallError::RespondToModel(format!(
"approval policy is {approval_policy:?}; reject command — you cannot ask for escalated permissions if the approval policy is {approval_policy:?}"
)));
}
let workdir = workdir.filter(|value| !value.is_empty());
let workdir = workdir.map(|dir| context.turn.resolve_path(Some(dir)));
let cwd = workdir.clone().unwrap_or(cwd);
let normalized_additional_permissions = match implicit_granted_permissions(
sandbox_permissions,
requested_additional_permissions.as_ref(),
&effective_additional_permissions,
)
.map_or_else(
|| {
normalize_and_validate_additional_permissions(
additional_permissions_allowed,
context.turn.approval_policy.value(),
effective_additional_permissions.sandbox_permissions,
effective_additional_permissions.additional_permissions,
effective_additional_permissions.permissions_preapproved,
&cwd,
)
},
|permissions| Ok(Some(permissions)),
) {
Ok(normalized) => normalized,
Err(err) => {
manager.release_process_id(process_id).await;
return Err(FunctionCallError::RespondToModel(err));
}
};
if let Some(output) = intercept_apply_patch(
&command,
&cwd,
Some(yield_time_ms),
context.session.clone(),
context.turn.clone(),
Some(&tracker),
&context.call_id,
tool_name.as_str(),
)
.await?
{
manager.release_process_id(process_id).await;
return Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ExecCommandToolOutput {
event_call_id: String::new(),
chunk_id: String::new(),
wall_time: std::time::Duration::ZERO,
raw_output: output.into_text().into_bytes(),
max_output_tokens: None,
process_id: None,
exit_code: None,
original_token_count: None,
session_command: None,
}),
});
}
emit_unified_exec_tty_metric(&turn.session_telemetry, tty);
manager
.exec_command(
ExecCommandRequest {
command,
process_id,
yield_time_ms,
max_output_tokens,
workdir,
network: context.turn.network.clone(),
tty,
sandbox_permissions: effective_additional_permissions
.sandbox_permissions,
additional_permissions: normalized_additional_permissions,
additional_permissions_preapproved:
effective_additional_permissions.permissions_preapproved,
justification,
prefix_rule,
},
&context,
)
.await
.map_err(|err| {
FunctionCallError::RespondToModel(format!(
"exec_command failed for `{command_for_display}`: {err:?}"
))
})?
}
"write_stdin" => {
let args: WriteStdinArgs = parse_arguments(&arguments)?;
let response = manager
.write_stdin(WriteStdinRequest {
process_id: args.session_id,
input: &args.chars,
yield_time_ms: args.yield_time_ms,
max_output_tokens: args.max_output_tokens,
})
.await
.map_err(|err| {
FunctionCallError::RespondToModel(format!("write_stdin failed: {err}"))
})?;
let interaction = TerminalInteractionEvent {
call_id: response.event_call_id.clone(),
process_id: args.session_id.to_string(),
stdin: args.chars.clone(),
};
session
.send_event(turn.as_ref(), EventMsg::TerminalInteraction(interaction))
.await;
response
}
other => {
return Err(FunctionCallError::RespondToModel(format!(
"approval policy is {approval_policy:?}; reject command — you cannot ask for escalated permissions if the approval policy is {approval_policy:?}"
"unsupported unified exec function {other}"
)));
}
};
let workdir = workdir.filter(|value| !value.is_empty());
let workdir = workdir.map(|dir| context.turn.resolve_path(Some(dir)));
let cwd = workdir.clone().unwrap_or(cwd);
let normalized_additional_permissions = match implicit_granted_permissions(
sandbox_permissions,
requested_additional_permissions.as_ref(),
&effective_additional_permissions,
)
.map_or_else(
|| {
normalize_and_validate_additional_permissions(
additional_permissions_allowed,
context.turn.approval_policy.value(),
effective_additional_permissions.sandbox_permissions,
effective_additional_permissions.additional_permissions,
effective_additional_permissions.permissions_preapproved,
&cwd,
)
},
|permissions| Ok(Some(permissions)),
) {
Ok(normalized) => normalized,
Err(err) => {
manager.release_process_id(process_id).await;
return Err(FunctionCallError::RespondToModel(err));
}
};
if let Some(output) = intercept_apply_patch(
&command,
&cwd,
Some(yield_time_ms),
context.session.clone(),
context.turn.clone(),
Some(&tracker),
&context.call_id,
tool_name.as_str(),
)
.await?
{
manager.release_process_id(process_id).await;
return Ok(ExecCommandToolOutput {
event_call_id: String::new(),
chunk_id: String::new(),
wall_time: std::time::Duration::ZERO,
raw_output: output.into_text().into_bytes(),
max_output_tokens: None,
process_id: None,
exit_code: None,
original_token_count: None,
session_command: None,
});
}
emit_unified_exec_tty_metric(&turn.session_telemetry, tty);
manager
.exec_command(
ExecCommandRequest {
command,
process_id,
yield_time_ms,
max_output_tokens,
workdir,
network: context.turn.network.clone(),
tty,
sandbox_permissions: effective_additional_permissions
.sandbox_permissions,
additional_permissions: normalized_additional_permissions,
additional_permissions_preapproved: effective_additional_permissions
.permissions_preapproved,
justification,
prefix_rule,
},
&context,
)
.await
.map_err(|err| {
FunctionCallError::RespondToModel(format!(
"exec_command failed for `{command_for_display}`: {err:?}"
))
})?
}
"write_stdin" => {
let args: WriteStdinArgs = parse_arguments(&arguments)?;
let response = manager
.write_stdin(WriteStdinRequest {
process_id: args.session_id,
input: &args.chars,
yield_time_ms: args.yield_time_ms,
max_output_tokens: args.max_output_tokens,
})
.await
.map_err(|err| {
FunctionCallError::RespondToModel(format!("write_stdin failed: {err}"))
})?;
let interaction = TerminalInteractionEvent {
call_id: response.event_call_id.clone(),
process_id: args.session_id.to_string(),
stdin: args.chars.clone(),
};
session
.send_event(turn.as_ref(), EventMsg::TerminalInteraction(interaction))
.await;
response
}
other => {
return Err(FunctionCallError::RespondToModel(format!(
"unsupported unified exec function {other}"
)));
}
};
Ok(response)
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(response),
})
})
}
}

View File

@@ -15,10 +15,12 @@ use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::registry::AnyToolResult;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::ViewImageToolCallEvent;
use futures::future::BoxFuture;
pub struct ViewImageHandler;
@@ -37,125 +39,136 @@ enum ViewImageDetail {
}
impl ToolHandler for ViewImageHandler {
type Output = ViewImageOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
if !invocation
.turn
.model_info
.input_modalities
.contains(&InputModality::Image)
{
return Err(FunctionCallError::RespondToModel(
VIEW_IMAGE_UNSUPPORTED_MESSAGE.to_string(),
));
}
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
fn handle(
&self,
invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
if !invocation
.turn
.model_info
.input_modalities
.contains(&InputModality::Image)
{
return Err(FunctionCallError::RespondToModel(
"view_image handler received unsupported payload".to_string(),
VIEW_IMAGE_UNSUPPORTED_MESSAGE.to_string(),
));
}
};
let args: ViewImageArgs = parse_arguments(&arguments)?;
// `view_image` accepts only its documented detail values: omit
// `detail` for the default path or set it to `original`.
// Other string values remain invalid rather than being silently
// reinterpreted.
let detail = match args.detail.as_deref() {
None => None,
Some("original") => Some(ViewImageDetail::Original),
Some(detail) => {
let ToolInvocation {
session,
turn,
payload,
call_id,
..
} = invocation;
let payload_for_result = payload.clone();
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"view_image handler received unsupported payload".to_string(),
));
}
};
let args: ViewImageArgs = parse_arguments(&arguments)?;
// `view_image` accepts only its documented detail values: omit
// `detail` for the default path or set it to `original`.
// Other string values remain invalid rather than being silently
// reinterpreted.
let detail = match args.detail.as_deref() {
None => None,
Some("original") => Some(ViewImageDetail::Original),
Some(detail) => {
return Err(FunctionCallError::RespondToModel(format!(
"view_image.detail only supports `original`; omit `detail` for default resized behavior, got `{detail}`"
)));
}
};
let abs_path =
AbsolutePathBuf::try_from(turn.resolve_path(Some(args.path))).map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to resolve image path: {error}"
))
})?;
let metadata = turn
.environment
.get_filesystem()
.get_metadata(&abs_path)
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to locate image at `{}`: {error}",
abs_path.display()
))
})?;
if !metadata.is_file {
return Err(FunctionCallError::RespondToModel(format!(
"view_image.detail only supports `original`; omit `detail` for default resized behavior, got `{detail}`"
"image path `{}` is not a file",
abs_path.display()
)));
}
};
let file_bytes = turn
.environment
.get_filesystem()
.read_file(&abs_path)
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to read image at `{}`: {error}",
abs_path.display()
))
})?;
let event_path = abs_path.to_path_buf();
let abs_path =
AbsolutePathBuf::try_from(turn.resolve_path(Some(args.path))).map_err(|error| {
FunctionCallError::RespondToModel(format!("unable to resolve image path: {error}"))
})?;
let can_request_original_detail =
can_request_original_image_detail(turn.features.get(), &turn.model_info);
let use_original_detail =
can_request_original_detail && matches!(detail, Some(ViewImageDetail::Original));
let image_mode = if use_original_detail {
PromptImageMode::Original
} else {
PromptImageMode::ResizeToFit
};
let image_detail = use_original_detail.then_some(ImageDetail::Original);
let metadata = turn
.environment
.get_filesystem()
.get_metadata(&abs_path)
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to locate image at `{}`: {error}",
abs_path.display()
))
})?;
let image = load_for_prompt_bytes(abs_path.as_path(), file_bytes, image_mode).map_err(
|error| {
FunctionCallError::RespondToModel(format!(
"unable to process image at `{}`: {error}",
abs_path.display()
))
},
)?;
let image_url = image.into_data_url();
if !metadata.is_file {
return Err(FunctionCallError::RespondToModel(format!(
"image path `{}` is not a file",
abs_path.display()
)));
}
let file_bytes = turn
.environment
.get_filesystem()
.read_file(&abs_path)
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to read image at `{}`: {error}",
abs_path.display()
))
})?;
let event_path = abs_path.to_path_buf();
session
.send_event(
turn.as_ref(),
EventMsg::ViewImageToolCall(ViewImageToolCallEvent {
call_id: call_id.clone(),
path: event_path,
}),
)
.await;
let can_request_original_detail =
can_request_original_image_detail(turn.features.get(), &turn.model_info);
let use_original_detail =
can_request_original_detail && matches!(detail, Some(ViewImageDetail::Original));
let image_mode = if use_original_detail {
PromptImageMode::Original
} else {
PromptImageMode::ResizeToFit
};
let image_detail = use_original_detail.then_some(ImageDetail::Original);
let image =
load_for_prompt_bytes(abs_path.as_path(), file_bytes, image_mode).map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to process image at `{}`: {error}",
abs_path.display()
))
})?;
let image_url = image.into_data_url();
session
.send_event(
turn.as_ref(),
EventMsg::ViewImageToolCall(ViewImageToolCallEvent {
call_id,
path: event_path,
Ok(AnyToolResult {
call_id,
payload: payload_for_result,
result: Box::new(ViewImageOutput {
image_url,
image_detail,
}),
)
.await;
Ok(ViewImageOutput {
image_url,
image_detail,
})
})
}
}

View File

@@ -36,8 +36,6 @@ pub enum ToolKind {
}
pub trait ToolHandler: Send + Sync {
type Output: ToolOutput + 'static;
fn kind(&self) -> ToolKind;
fn matches_kind(&self, payload: &ToolPayload) -> bool {
@@ -53,11 +51,8 @@ pub trait ToolHandler: Send + Sync {
/// user (through file system, OS operations, ...).
/// This function must remain defensive and return `true` if any doubt exists about the
/// exact effect of a ToolInvocation.
fn is_mutating(
&self,
_invocation: &ToolInvocation,
) -> impl std::future::Future<Output = bool> + Send {
async { false }
fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
false
}
fn pre_tool_use_payload(&self, _invocation: &ToolInvocation) -> Option<PreToolUsePayload> {
@@ -78,7 +73,7 @@ pub trait ToolHandler: Send + Sync {
fn handle(
&self,
invocation: ToolInvocation,
) -> impl std::future::Future<Output = Result<Self::Output, FunctionCallError>> + Send;
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>>;
}
pub(crate) struct AnyToolResult {
@@ -87,7 +82,26 @@ pub(crate) struct AnyToolResult {
pub(crate) result: Box<dyn ToolOutput>,
}
impl std::fmt::Debug for AnyToolResult {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AnyToolResult")
.field("call_id", &self.call_id)
.field("payload", &self.payload)
.field("log_preview", &self.result.log_preview())
.field("success", &self.result.success_for_logging())
.finish()
}
}
impl AnyToolResult {
pub(crate) fn new(invocation: &ToolInvocation, result: impl ToolOutput + 'static) -> Self {
Self {
call_id: invocation.call_id.clone(),
payload: invocation.payload.clone(),
result: Box::new(result),
}
}
pub(crate) fn into_response(self) -> ResponseInputItem {
let Self {
call_id,
@@ -106,6 +120,28 @@ impl AnyToolResult {
}
}
impl ToolOutput for AnyToolResult {
fn log_preview(&self) -> String {
self.result.log_preview()
}
fn success_for_logging(&self) -> bool {
self.result.success_for_logging()
}
fn to_response_item(&self, call_id: &str, payload: &ToolPayload) -> ResponseInputItem {
self.result.to_response_item(call_id, payload)
}
fn post_tool_use_response(&self, call_id: &str, payload: &ToolPayload) -> Option<Value> {
self.result.post_tool_use_response(call_id, payload)
}
fn code_mode_result(&self, payload: &ToolPayload) -> serde_json::Value {
self.result.code_mode_result(payload)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct PreToolUsePayload {
pub(crate) command: String,
@@ -117,68 +153,6 @@ pub(crate) struct PostToolUsePayload {
pub(crate) tool_response: Value,
}
trait AnyToolHandler: Send + Sync {
fn matches_kind(&self, payload: &ToolPayload) -> bool;
fn is_mutating<'a>(&'a self, invocation: &'a ToolInvocation) -> BoxFuture<'a, bool>;
fn pre_tool_use_payload(&self, invocation: &ToolInvocation) -> Option<PreToolUsePayload>;
fn post_tool_use_payload(
&self,
call_id: &str,
payload: &ToolPayload,
result: &dyn ToolOutput,
) -> Option<PostToolUsePayload>;
fn handle_any<'a>(
&'a self,
invocation: ToolInvocation,
) -> BoxFuture<'a, Result<AnyToolResult, FunctionCallError>>;
}
impl<T> AnyToolHandler for T
where
T: ToolHandler,
{
fn matches_kind(&self, payload: &ToolPayload) -> bool {
ToolHandler::matches_kind(self, payload)
}
fn is_mutating<'a>(&'a self, invocation: &'a ToolInvocation) -> BoxFuture<'a, bool> {
Box::pin(ToolHandler::is_mutating(self, invocation))
}
fn pre_tool_use_payload(&self, invocation: &ToolInvocation) -> Option<PreToolUsePayload> {
ToolHandler::pre_tool_use_payload(self, invocation)
}
fn post_tool_use_payload(
&self,
call_id: &str,
payload: &ToolPayload,
result: &dyn ToolOutput,
) -> Option<PostToolUsePayload> {
ToolHandler::post_tool_use_payload(self, call_id, payload, result)
}
fn handle_any<'a>(
&'a self,
invocation: ToolInvocation,
) -> BoxFuture<'a, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move {
let call_id = invocation.call_id.clone();
let payload = invocation.payload.clone();
let output = self.handle(invocation).await?;
Ok(AnyToolResult {
call_id,
payload,
result: Box::new(output),
})
})
}
}
pub(crate) fn tool_handler_key(tool_name: &str, namespace: Option<&str>) -> String {
if let Some(namespace) = namespace {
format!("{namespace}:{tool_name}")
@@ -188,15 +162,15 @@ pub(crate) fn tool_handler_key(tool_name: &str, namespace: Option<&str>) -> Stri
}
pub struct ToolRegistry {
handlers: HashMap<String, Arc<dyn AnyToolHandler>>,
handlers: HashMap<String, Arc<dyn ToolHandler>>,
}
impl ToolRegistry {
fn new(handlers: HashMap<String, Arc<dyn AnyToolHandler>>) -> Self {
fn new(handlers: HashMap<String, Arc<dyn ToolHandler>>) -> Self {
Self { handlers }
}
fn handler(&self, name: &str, namespace: Option<&str>) -> Option<Arc<dyn AnyToolHandler>> {
fn handler(&self, name: &str, namespace: Option<&str>) -> Option<Arc<dyn ToolHandler>> {
self.handlers
.get(&tool_handler_key(name, namespace))
.map(Arc::clone)
@@ -316,7 +290,7 @@ impl ToolRegistry {
)));
}
let is_mutating = handler.is_mutating(&invocation).await;
let is_mutating = handler.is_mutating(&invocation);
let response_cell = tokio::sync::Mutex::new(None);
let invocation_for_tool = invocation.clone();
@@ -338,7 +312,7 @@ impl ToolRegistry {
invocation_for_tool.turn.tool_call_gate.wait_ready().await;
tracing::trace!("tool gate released");
}
match handler.handle_any(invocation_for_tool).await {
match handler.handle(invocation_for_tool).await {
Ok(result) => {
let preview = result.result.log_preview();
let success = result.result.success_for_logging();
@@ -443,7 +417,7 @@ impl ToolRegistry {
}
pub struct ToolRegistryBuilder {
handlers: HashMap<String, Arc<dyn AnyToolHandler>>,
handlers: HashMap<String, Arc<dyn ToolHandler>>,
specs: Vec<ConfiguredToolSpec>,
}
@@ -473,7 +447,7 @@ impl ToolRegistryBuilder {
H: ToolHandler + 'static,
{
let name = name.into();
let handler: Arc<dyn AnyToolHandler> = handler;
let handler: Arc<dyn ToolHandler> = handler;
if self
.handlers
.insert(name.clone(), handler.clone())

View File

@@ -1,24 +1,26 @@
use super::*;
use futures::future::BoxFuture;
use pretty_assertions::assert_eq;
struct TestHandler;
impl ToolHandler for TestHandler {
type Output = crate::tools::context::FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
async fn handle(&self, _invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
unreachable!("test handler should not be invoked")
fn handle(
&self,
_invocation: ToolInvocation,
) -> BoxFuture<'_, Result<AnyToolResult, FunctionCallError>> {
Box::pin(async move { unreachable!("test handler should not be invoked") })
}
}
#[test]
fn handler_looks_up_namespaced_aliases_explicitly() {
let plain_handler = Arc::new(TestHandler) as Arc<dyn AnyToolHandler>;
let namespaced_handler = Arc::new(TestHandler) as Arc<dyn AnyToolHandler>;
let plain_handler = Arc::new(TestHandler) as Arc<dyn ToolHandler>;
let namespaced_handler = Arc::new(TestHandler) as Arc<dyn ToolHandler>;
let namespace = "mcp__codex_apps__gmail";
let tool_name = "gmail_get_recent_emails";
let namespaced_name = tool_handler_key(tool_name, Some(namespace));

View File

@@ -59,8 +59,7 @@ async fn js_repl_tools_only_blocks_direct_tool_calls() -> anyhow::Result<()> {
ToolCallSource::Direct,
)
.await
.err()
.expect("direct tool calls should be blocked");
.expect_err("direct tool calls should be blocked");
let FunctionCallError::RespondToModel(message) = err else {
panic!("expected RespondToModel, got {err:?}");
};
@@ -117,8 +116,7 @@ async fn js_repl_tools_only_allows_js_repl_source_calls() -> anyhow::Result<()>
ToolCallSource::JsRepl,
)
.await
.err()
.expect("shell call with empty args should fail");
.expect_err("shell call with empty args should fail");
let message = err.to_string();
assert!(
!message.contains("direct tool calls are disabled"),

View File

@@ -355,12 +355,10 @@ impl ProviderAuthScript {
fn new(tokens: &[&str]) -> std::io::Result<Self> {
let tempdir = tempfile::tempdir()?;
let token_file = tempdir.path().join("tokens.txt");
// `cmd.exe`'s `set /p` treats LF-only input as one line, so use CRLF on Windows.
let token_line_ending = if cfg!(windows) { "\r\n" } else { "\n" };
let mut token_file_contents = String::new();
for token in tokens {
token_file_contents.push_str(token);
token_file_contents.push_str(token_line_ending);
token_file_contents.push('\n');
}
std::fs::write(&token_file, token_file_contents)?;
@@ -387,28 +385,23 @@ mv tokens.next tokens.txt
#[cfg(windows)]
let (command, args) = {
let script_path = tempdir.path().join("print-token.cmd");
let script_path = tempdir.path().join("print-token.ps1");
std::fs::write(
&script_path,
r#"@echo off
setlocal EnableExtensions DisableDelayedExpansion
set "first_line="
<tokens.txt set /p "first_line="
if not defined first_line exit /b 1
setlocal EnableDelayedExpansion
echo(!first_line!
endlocal
more +1 tokens.txt > tokens.next
move /y tokens.next tokens.txt >nul
r#"$lines = @(Get-Content -Path tokens.txt)
if ($lines.Count -eq 0) { exit 1 }
Write-Output $lines[0]
$lines | Select-Object -Skip 1 | Set-Content -Path tokens.txt
"#,
)?;
(
"cmd.exe".to_string(),
"powershell.exe".to_string(),
vec![
"/d".to_string(),
"/s".to_string(),
"/c".to_string(),
".\\print-token.cmd".to_string(),
"-NoProfile".to_string(),
"-ExecutionPolicy".to_string(),
"Bypass".to_string(),
"-File".to_string(),
".\\print-token.ps1".to_string(),
],
)
};
@@ -443,12 +436,13 @@ exit 1
#[cfg(windows)]
let (command, args) = (
"cmd.exe".to_string(),
"powershell.exe".to_string(),
vec![
"/d".to_string(),
"/s".to_string(),
"/c".to_string(),
"exit /b 1".to_string(),
"-NoProfile".to_string(),
"-ExecutionPolicy".to_string(),
"Bypass".to_string(),
"-Command".to_string(),
"exit 1".to_string(),
],
);
@@ -463,8 +457,8 @@ exit 1
serde_json::from_value(json!({
"command": self.command,
"args": self.args,
// Process startup can be slow on loaded Windows CI workers, so leave enough slack to
// avoid turning these auth-cache assertions into a process-launch timing test.
// `powershell.exe` startup can be slow on loaded Windows CI workers, so leave enough
// slack to avoid turning these auth-cache assertions into a process-launch timing test.
"timeout_ms": 10_000,
"refresh_interval_ms": 60000,
"cwd": self.tempdir.path(),

View File

@@ -569,10 +569,11 @@ impl Tui {
terminal.invalidate_viewport();
}
let area = terminal.viewport_area;
// Update the y position for suspending so Ctrl-Z can place the cursor correctly.
#[cfg(unix)]
{
let area = terminal.viewport_area;
let inline_area_bottom = if self.alt_screen_active.load(Ordering::Relaxed) {
self.alt_saved_viewport
.map(|r| r.bottom().saturating_sub(1))

View File

@@ -1,328 +0,0 @@
---
name: codex-applied-devbox
description: Sync a local Codex worktree from `~/code/codex-worktrees/` to a mirrored path on a remote host, then run a reproducible remote build or exec command there.
---
# Codex Applied Devbox
Use this skill when you want local file editing/search on your laptop, but want
the actual build or execution to happen on a remote host such as `dev`.
This skill assumes:
- remote host alias: `dev`
- local Codex worktree root: `~/code/codex-worktrees`
- remote mirror root: `/tmp/codex-worktrees`
If the box itself needs to be created, resumed, suspended, or inspected, use
the `applied-devbox` skill first.
## Objective
1. Create or reuse a local worktree under `~/code/codex-worktrees/`.
2. Mirror that worktree to the remote host under `/tmp/codex-worktrees/`.
3. Run one configurable remote Bazel command against the mirrored copy.
4. Keep the flow reproducible by excluding build artifacts and local repo state.
## Operator Defaults
When using this skill interactively, the operator should bias toward immediate
execution over setup-heavy preflights.
Default posture:
- If the user asks for a specific PR or branch, create a fresh worktree first.
- Do not spend time checking whether an equivalent worktree already exists
unless the user explicitly asked to reuse one.
- Assume `dev` is reachable and run the sync directly; only debug SSH or remote
prereqs after the real command fails.
- Avoid separate "can I reach the host?" or "does rsync exist remotely?"
checks unless there is a known problem pattern.
- Prefer one end-to-end attempt over multiple speculative probes.
In practice, that means the operator should usually do this:
1. Fetch the requested PR or ref.
2. Create a new local worktree under `~/code/codex-worktrees/`.
3. Run `sync-worktree-and-run` immediately.
4. Only inspect host reachability, missing tools, or conflicting paths if that
end-to-end run fails.
### PR Fast Path
For a request like "build PR 16620 on devbox", prefer this shape:
```bash
mkdir -p ~/code/codex-worktrees
git -C ~/code/codex fetch origin pull/16620/head
git -C ~/code/codex worktree add -b pr-16620 \
~/code/codex-worktrees/pr-16620 FETCH_HEAD
skills/codex-applied-devbox/scripts/sync-worktree-and-run \
~/code/codex-worktrees/pr-16620
```
This is intentionally direct. It skips separate validation steps and lets the
real sync/build path prove whether the environment is healthy.
## Key rule for concurrent builds
- Keep each worktree as its own Bazel workspace path.
- Let Bazel derive a separate `output_base` per worktree automatically.
- Reuse the shared caches from `.bazelrc`:
- `~/.cache/bazel-disk-cache`
- `~/.cache/bazel-repo-cache`
- `~/.cache/bazel-repo-contents-cache`
- Do not force a shared `--output_base` across two live worktrees.
On `dev`, this has already been validated with two mirrored worktrees:
- both builds started at the same second
- each worktree got its own Bazel server and `output_base`
- both builds reused shared cache state and completed successfully
## Script
The script lives at:
`skills/codex-applied-devbox/scripts/sync-worktree-and-run`
Default behavior:
- host: `dev`
- local worktree root: `~/code/codex-worktrees`
- remote mirror root: `/tmp/codex-worktrees`
- remote command:
`cd codex-rs && export PATH=$HOME/code/openai/project/dotslash-gen/bin:$HOME/.local/bin:$PATH && bazel build --bes_backend= --bes_results_url= //codex-rs/cli:cli`
- prints the exact copy-paste remote Codex Bazel run command for the mirrored checkout
- the helper command intentionally stays on the mirrored-worktree Bazel path and uses `//codex-rs/cli:codex`
Example:
```bash
skills/codex-applied-devbox/scripts/sync-worktree-and-run \
~/code/codex-worktrees/my-feature
```
This will mirror:
- local: `~/code/codex-worktrees/my-feature`
- remote: `/tmp/codex-worktrees/my-feature`
It will print:
```bash
ssh -t dev 'cd /tmp/codex-worktrees/my-feature/codex-rs && export PATH=$HOME/code/openai/project/dotslash-gen/bin:$HOME/.local/bin:$PATH && bazel run --bes_backend= --bes_results_url= //codex-rs/cli:codex --'
```
Custom host, remote root, and command:
```bash
skills/codex-applied-devbox/scripts/sync-worktree-and-run \
--host dev \
--remote-root /tmp/codex-worktrees \
--command 'cd codex-rs && export PATH=$HOME/code/openai/project/dotslash-gen/bin:$HOME/.local/bin:$PATH && bazel build --bes_backend= --bes_results_url= //codex-rs/tui:tui' \
~/code/codex-worktrees/my-feature
```
## Recommended setup
1. Create the local worktree from your main Codex checkout.
```bash
mkdir -p ~/code/codex-worktrees
git -C ~/code/codex worktree add -b my-feature \
~/code/codex-worktrees/my-feature origin/main
```
2. Edit locally or fetch the PR/ref you want to test.
3. Sync and build remotely immediately:
```bash
skills/codex-applied-devbox/scripts/sync-worktree-and-run \
~/code/codex-worktrees/my-feature
```
4. Repeat sync/build as needed after local edits.
## Retrospective Notes
The main friction in a real run was not rsync itself. It was operator delay
before the first real attempt:
- checking whether a matching worktree already existed before simply creating
the one needed for the task
- verifying host reachability before letting the real sync prove it
- remembering the mirrored remote path after the sync
- hand-writing the SSH command needed to run Codex in that mirrored checkout
- waiting through a cold Bazel build with no simple "jump in here yourself"
command printed by the script
The current script update addresses the third and fourth issues by printing the
exact `ssh -t ...` command for running Codex in the mirrored checkout.
This skill update addresses the first two issues by telling the operator to
start the end-to-end flow sooner and only investigate after an actual failure.
The next improvements worth making, if you want this flow to feel faster and
more automatic, are:
- add `--pr <number>` so the script can fetch `pull/<n>/head` and create or
reuse `~/code/codex-worktrees/pr-<n>` itself
- add `--tmux-window <name>` support so the remote command can start in a named
tmux session/window and print the exact follow/log command
- add an optional "sync only changed files" mode driven by git status or
`git diff --name-only` for large worktrees
- add an optional `--bazel-target <label>` shortcut so users do not have to
remember the common labels
## Validated run paths on `dev`
What has been verified:
- `sync-worktree-and-run` can mirror the local worktree and complete a remote
Bazel build with:
`bazel build --bes_backend= --bes_results_url= //codex-rs/cli:cli`
- on current `main`, `bazel run --bes_backend= --bes_results_url=
//codex-rs/cli:codex --` builds successfully on `dev`
Practical note:
- older pre-`#16634` checkouts could fail on `dev` when launching
`//codex-rs/cli:codex`; treat current `main` as the baseline before carrying
that older caveat forward
## Bazel defaults on the devbox
Use this decision rule:
- Default to Bazel for remote builds in mirrored worktrees.
- Keep the existing `.bazelrc` cache settings; they already share the useful
cache layers across worktrees.
- On `dev`, clear the BES flags for routine builds:
`--bes_backend= --bes_results_url=`
- Prepend both common Bazel locations to `PATH`:
`export PATH=$HOME/code/openai/project/dotslash-gen/bin:$HOME/.local/bin:$PATH`
- Prefer labels that have already been validated on the host:
- `//codex-rs/cli:cli`
- `//codex-rs/tui:tui`
- `//codex-rs/utils/absolute-path:absolute-path`
Current practical note:
- older pre-`#16634` checkouts could fail on `dev` when launching
`//codex-rs/cli:codex`; re-test current `main` before treating that older
caveat as still active
What is shared versus isolated:
- Shared across worktrees:
- `~/.cache/bazel-disk-cache`
- `~/.cache/bazel-repo-cache`
- `~/.cache/bazel-repo-contents-cache`
- the Bazel install base under `~/.cache/bazel/_bazel_dev-user/install`
- Still per worktree:
- each `output_base`
- each Bazel server
- mutable workspace-specific state under
`~/.cache/bazel/_bazel_dev-user/<hash>`
That means this setup saves disk space compared with giving every worktree its
own completely separate Bazel root, but it does not eliminate the large
per-worktree `output_base` directories.
## Fresh default devbox bootstrap
This was validated against a fresh box created with a temporary minimal config
override, not your personal `~/.config/applied-devbox/config.toml`.
Validated sequence:
1. Create a minimal config file locally and point `APPLIED_DEVBOX_CONFIG` at it.
An empty file is enough if you want the CLI's built-in defaults without your
personal apt/git/custom-setup additions.
2. Create the box:
```bash
APPLIED_DEVBOX_CONFIG=/tmp/applied-devbox-default-config.toml \
a devbox new codex-bazel-0402-1800 \
--sku cpu64 \
--home-size 2Ti \
--skip-secret-setup \
--skip-tool-setup
```
If you expect large Bazel output trees or long-lived mirrored worktrees, prefer
`--sku cpu64 --home-size 2Ti` over the smaller defaults.
3. If the first `a devbox ssh` fails on websocket transport, establish
connectivity with:
```bash
APPLIED_DEVBOX_CONFIG=/tmp/applied-devbox-default-config.toml \
a devbox ssh codex-bazel-0402-1800 --no-ws --no-tmux -- bash -lc 'hostname && whoami'
```
After that, direct `ssh codex-bazel-0402-1800` was available on this machine.
4. Install `rsync` once on the new box:
```bash
ssh codex-bazel-0402-1800 'sudo apt-get update && sudo apt-get install -y rsync'
```
5. Run the mirrored Bazel build:
```bash
skills/codex-applied-devbox/scripts/sync-worktree-and-run \
--host codex-bazel-0402-1800 \
~/code/codex-worktrees/my-feature
```
What was validated on the fresh box:
- the box came up from a default-style config override
- the first websocket-based SSH attempt failed, but `--no-ws` succeeded
- plain `ssh <box>` worked after the first successful `--no-ws` connection
- `rsync` was the only package that had to be installed manually
- Bazel was already available from the default OpenAI clone at
`~/code/openai/project/dotslash-gen/bin`
- the first mirrored `//codex-rs/cli:cli` build completed successfully in
`68.24s`
## Sync exclusions
The script excludes:
- `.git`
- `.sl`
- `.jj`
- `target`
- `node_modules`
- `.venv`, `venv`
- `dist`, `build`, `.next`
- `.pytest_cache`, `.mypy_cache`, `__pycache__`, `.ruff_cache`
- `.DS_Store`
## Cleanup
Remove a stale remote mirror:
```bash
ssh dev 'rm -rf /tmp/codex-worktrees/my-feature'
```
Remove the local worktree when finished:
```bash
git -C ~/code/codex worktree remove ~/code/codex-worktrees/my-feature
git -C ~/code/codex branch -D my-feature
```
## Guardrails
- Treat the local worktree as the editing source of truth.
- Treat the mirrored remote copy as disposable build state.
- Do not sync `.git` or build outputs.
- Keep the local worktree under `~/code/codex-worktrees/` so the mirror path is
stable and easy to clean up.

View File

@@ -1,165 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
usage() {
# Print CLI usage/help text to stdout. The heredoc delimiter is quoted
# ('EOF') so the $HOME/$PATH references in the default command are shown
# literally instead of being expanded here.
cat <<'EOF'
Usage:
sync-worktree-and-run [options] <local-worktree>
Sync a local Codex worktree to a mirrored path on a remote host, then run a
command there.
Options:
--host <host> Remote SSH host. Default: dev
--local-root <path> Expected local worktree root.
Default: ~/code/codex-worktrees
--remote-root <path> Remote mirror root.
Default: /tmp/codex-worktrees
--command <command> Command to run on the remote copy.
Default: cd codex-rs &&
export PATH=$HOME/code/openai/project/dotslash-gen/bin:
$HOME/.local/bin:$PATH &&
bazel build --bes_backend= --bes_results_url=
//codex-rs/cli:cli
Prints the exact Bazel-backed Codex SSH run command for the mirrored
checkout on every run.
-h, --help Show this help text.
EOF
}
shell_single_quote() {
    # Return $1 wrapped in single quotes, safe to paste into a shell command
    # line. Each embedded single quote is rewritten as '"'"' (close quote,
    # double-quoted quote, reopen quote).
    local escaped="$1"
    escaped=${escaped//\'/\'\"\'\"\'}
    printf "'%s'" "$escaped"
}
# Defaults: sync to host `dev`, mirror ~/code/codex-worktrees into
# /tmp/codex-worktrees, and run a Bazel build of //codex-rs/cli:cli remotely.
host="dev"
local_root="$HOME/code/codex-worktrees"
remote_root="/tmp/codex-worktrees"
# Single-quoted on purpose: $HOME/$PATH must expand on the remote host when
# the command runs there, not on this machine.
command_to_run='cd codex-rs && export PATH=$HOME/code/openai/project/dotslash-gen/bin:$HOME/.local/bin:$PATH && bazel build --bes_backend= --bes_results_url= //codex-rs/cli:cli'
local_worktree=""
# Parse flags; exactly one positional argument (the local worktree path) is
# accepted. Exit code 2 marks CLI misuse, distinct from runtime failures (1).
while [[ $# -gt 0 ]]; do
case "$1" in
--host)
host="$2"
shift 2
;;
--local-root)
local_root="$2"
shift 2
;;
--remote-root)
remote_root="$2"
shift 2
;;
--command)
command_to_run="$2"
shift 2
;;
-h|--help)
usage
exit 0
;;
-*)
# Unknown flag: report and show usage on stderr.
echo "unknown option: $1" >&2
usage >&2
exit 2
;;
*)
# Positional argument: a second worktree path is an error.
if [[ -n "$local_worktree" ]]; then
echo "expected exactly one local worktree path" >&2
usage >&2
exit 2
fi
local_worktree="$1"
shift
;;
esac
done
# The worktree argument is mandatory.
if [[ -z "$local_worktree" ]]; then
echo "missing local worktree path" >&2
usage >&2
exit 2
fi
# Validate both directories exist before resolving physical paths.
if [[ ! -d "$local_worktree" ]]; then
echo "local worktree does not exist: $local_worktree" >&2
exit 1
fi
if [[ ! -d "$local_root" ]]; then
echo "local root does not exist: $local_root" >&2
exit 1
fi
# Resolve symlink-free absolute paths (pwd -P) so the prefix match below is
# reliable even when either path was given via a symlink.
local_root_abs="$(cd "$local_root" && pwd -P)"
local_worktree_abs="$(cd "$local_worktree" && pwd -P)"
# The worktree must live under the local root; its path relative to the root
# determines the mirrored location under the remote root.
case "$local_worktree_abs/" in
"$local_root_abs"/*)
relative_path="${local_worktree_abs#$local_root_abs/}"
;;
*)
echo "local worktree must live under local root: $local_root_abs" >&2
exit 1
;;
esac
remote_worktree="${remote_root%/}/$relative_path"
remote_codex_dir="${remote_worktree%/}/codex-rs"
# The \$-escaped HOME/PATH expand on the remote host when the printed command
# is eventually run there.
remote_codex_run_command="cd $remote_codex_dir && export PATH=\$HOME/code/openai/project/dotslash-gen/bin:\$HOME/.local/bin:\$PATH && bazel run --bes_backend= --bes_results_url= //codex-rs/cli:codex --"
# Always print a copy-paste command for running Codex in the mirrored checkout.
echo "# Shared-worktree Bazel Codex run command:"
echo "ssh -t $host $(shell_single_quote "$remote_codex_run_command")"
# rsync is required on both ends; fail fast with actionable guidance.
if ! command -v rsync >/dev/null 2>&1; then
echo "local rsync is not installed or not on PATH" >&2
exit 1
fi
if ! ssh "$host" 'command -v rsync >/dev/null 2>&1'; then
echo "remote rsync is not installed on $host" >&2
echo "try: ssh $host 'sudo apt-get update && sudo apt-get install -y rsync'" >&2
exit 1
fi
# Create the remote mirror directory. The quoted heredoc ('EOF') prevents
# local expansion; the path travels as positional "$1", so it needs no extra
# quoting on the remote side.
ssh "$host" bash -s -- "$remote_worktree" <<'EOF'
set -euo pipefail
remote_worktree="$1"
mkdir -p "$remote_worktree"
EOF
# Mirror the worktree. --delete removes stale remote files; the excludes keep
# VCS state and build artifacts out so the copy stays small and reproducible.
rsync -a --delete \
--exclude='.git' \
--exclude='.sl' \
--exclude='.jj' \
--exclude='target' \
--exclude='node_modules' \
--exclude='.venv' \
--exclude='venv' \
--exclude='dist' \
--exclude='build' \
--exclude='.next' \
--exclude='.pytest_cache' \
--exclude='.mypy_cache' \
--exclude='__pycache__' \
--exclude='.ruff_cache' \
--exclude='.DS_Store' \
-e ssh \
"$local_worktree_abs/" \
"$host:$remote_worktree/"
# %q-quote both values so they survive embedding in the unquoted heredoc below.
printf -v remote_worktree_q '%q' "$remote_worktree"
printf -v command_to_run_q '%q' "$command_to_run"
# Unquoted heredoc: the *_q values expand locally into the remote script;
# the \$-escaped variables expand on the remote host after assignment.
ssh "$host" "bash -s" <<EOF
set -euo pipefail
remote_worktree=$remote_worktree_q
command_to_run=$command_to_run_q
cd "\$remote_worktree"
echo "REMOTE_PWD=\$PWD"
eval "\$command_to_run"
EOF