Compare commits


1 Commit

Author: Dylan Hurd
SHA1: 2760cadbf7
Message: [tools] Simplify tools config
Date: 2025-08-19 13:54:49 -07:00
10 changed files with 92 additions and 84 deletions

View File

@@ -14,6 +14,7 @@ use codex_apply_patch::ApplyPatchAction;
use codex_apply_patch::MaybeApplyPatchVerified;
use codex_apply_patch::maybe_parse_apply_patch_verified;
use codex_login::CodexAuth;
use codex_protocol::config_types::CodexTool;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::protocol::TurnAbortedEvent;
use futures::prelude::*;
@@ -474,13 +475,7 @@ impl Session {
);
let turn_context = TurnContext {
client,
tools_config: ToolsConfig::new(
&config.model_family,
approval_policy,
sandbox_policy.clone(),
config.include_plan_tool,
config.include_apply_patch_tool,
),
tools_config: ToolsConfig::from(config.as_ref()),
user_instructions,
base_instructions,
approval_policy,
@@ -1043,12 +1038,18 @@ async fn submission_loop(
.unwrap_or(prev.sandbox_policy.clone());
let new_cwd = cwd.clone().unwrap_or_else(|| prev.cwd.clone());
let codex_tools: HashSet<CodexTool> = config
.codex_tools
.as_ref()
.unwrap_or(&vec![])
.iter()
.cloned()
.collect();
let tools_config = ToolsConfig::new(
&effective_family,
new_approval_policy,
new_sandbox_policy.clone(),
config.include_plan_tool,
config.include_apply_patch_tool,
codex_tools,
);
let new_turn_context = TurnContext {
@@ -1117,14 +1118,20 @@ async fn submission_loop(
sess.session_id,
);
let codex_tools: HashSet<CodexTool> = config
.codex_tools
.as_ref()
.unwrap_or(&vec![])
.iter()
.cloned()
.collect();
let fresh_turn_context = TurnContext {
client,
tools_config: ToolsConfig::new(
&model_family,
approval_policy,
sandbox_policy.clone(),
config.include_plan_tool,
config.include_apply_patch_tool,
codex_tools,
),
user_instructions: turn_context.user_instructions.clone(),
base_instructions: turn_context.base_instructions.clone(),

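Both submission_loop sites above rebuild the ToolsConfig for the next turn, and each first collapses the optional codex_tools list from the config into a HashSet<CodexTool>. A minimal standalone sketch of that idiom, with Config pared down to the one field that matters (everything else here is illustrative only):

use std::collections::HashSet;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
enum CodexTool {
    UpdatePlan,
    ApplyPatch,
}

// Stand-in for the real Config; only codex_tools is relevant to this sketch.
struct Config {
    codex_tools: Option<Vec<CodexTool>>,
}

fn collect_codex_tools(config: &Config) -> HashSet<CodexTool> {
    // None and an empty list both yield an empty set; duplicate entries collapse.
    let tools: HashSet<CodexTool> = config
        .codex_tools
        .as_ref()
        .unwrap_or(&vec![])
        .iter()
        .cloned()
        .collect();
    tools
}

Collecting into a set rather than keeping the Vec means repeated entries in a user's config are harmless.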
View File

@@ -14,6 +14,7 @@ use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use codex_login::AuthMode;
use codex_protocol::config_types::CodexTool;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -154,13 +155,8 @@ pub struct Config {
/// Experimental rollout resume path (absolute path to .jsonl; undocumented).
pub experimental_resume: Option<PathBuf>,
/// Include an experimental plan tool that the model can use to update its current plan and status of each step.
pub include_plan_tool: bool,
/// Include the `apply_patch` tool for models that benefit from invoking
/// file edits as a structured tool call. When unset, this falls back to the
/// model family's default preference.
pub include_apply_patch_tool: bool,
/// List of additional Codex-provided tools to enable for the model
pub codex_tools: Option<Vec<CodexTool>>,
/// The value for the `originator` header included with Responses API requests.
pub internal_originator: Option<String>,
@@ -412,6 +408,11 @@ pub struct ConfigToml {
/// The value for the `originator` header included with Responses API requests.
pub internal_originator: Option<String>,
/// List of additional Codex-provided tools to enable for the model.
pub codex_tools: Option<Vec<CodexTool>>,
/// Used internally for project settings, e.g. has the user trusted codex to
/// run in the current directory.
pub projects: Option<HashMap<String, ProjectConfig>>,
/// If set to `true`, the API key will be signed with the `originator` header.
@@ -491,10 +492,9 @@ pub struct ConfigOverrides {
pub config_profile: Option<String>,
pub codex_linux_sandbox_exe: Option<PathBuf>,
pub base_instructions: Option<String>,
pub include_plan_tool: Option<bool>,
pub include_apply_patch_tool: Option<bool>,
pub disable_response_storage: Option<bool>,
pub show_raw_agent_reasoning: Option<bool>,
pub codex_tools: Option<Vec<CodexTool>>,
}
impl Config {
@@ -517,8 +517,7 @@ impl Config {
config_profile: config_profile_key,
codex_linux_sandbox_exe,
base_instructions,
include_plan_tool,
include_apply_patch_tool,
codex_tools: codex_tools_override,
disable_response_storage,
show_raw_agent_reasoning,
} = overrides;
@@ -622,9 +621,6 @@ impl Config {
Self::get_base_instructions(experimental_instructions_path, &resolved_cwd)?;
let base_instructions = base_instructions.or(file_base_instructions);
let include_apply_patch_tool_val =
include_apply_patch_tool.unwrap_or(model_family.uses_apply_patch_tool);
let config = Self {
model,
model_family,
@@ -676,8 +672,7 @@ impl Config {
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
experimental_resume,
include_plan_tool: include_plan_tool.unwrap_or(false),
include_apply_patch_tool: include_apply_patch_tool_val,
codex_tools: codex_tools_override.or(cfg.codex_tools),
internal_originator: cfg.internal_originator,
preferred_auth_method: cfg.preferred_auth_method.unwrap_or(AuthMode::ChatGPT),
};
@@ -1041,8 +1036,7 @@ disable_response_storage = true
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
experimental_resume: None,
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
codex_tools: None,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
},
@@ -1094,8 +1088,7 @@ disable_response_storage = true
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
experimental_resume: None,
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
codex_tools: None,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
};
@@ -1162,8 +1155,7 @@ disable_response_storage = true
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
experimental_resume: None,
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
codex_tools: None,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
};

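In the Config constructor above, the two include_* booleans are replaced by a single optional list, and its effective value is resolved as codex_tools_override.or(cfg.codex_tools): a caller-supplied override wins, otherwise the value parsed from config.toml is used, otherwise the field stays None. A small self-contained sketch of that precedence (the variable names are illustrative only):

#[derive(Debug, Clone, PartialEq)]
enum CodexTool {
    UpdatePlan,
    ApplyPatch,
}

fn main() {
    // Override supplied by the caller (CLI, MCP request, ...) takes priority.
    let codex_tools_override: Option<Vec<CodexTool>> = Some(vec![CodexTool::UpdatePlan]);
    // Value deserialized from config.toml, if present.
    let cfg_codex_tools: Option<Vec<CodexTool>> = Some(vec![CodexTool::ApplyPatch]);

    // Mirrors codex_tools: codex_tools_override.or(cfg.codex_tools) in the hunk above.
    let effective = codex_tools_override.or(cfg_codex_tools);
    assert_eq!(effective, Some(vec![CodexTool::UpdatePlan]));
}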
View File

@@ -41,7 +41,7 @@ pub use conversation_manager::NewConversation;
pub mod model_family;
mod models;
mod openai_model_info;
mod openai_tools;
pub mod openai_tools;
pub mod plan_tool;
mod project_doc;
mod rollout;

View File

@@ -1,10 +1,13 @@
use codex_protocol::config_types::CodexTool;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;
use serde_json::json;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use crate::config::Config;
use crate::model_family::ModelFamily;
use crate::plan_tool::PLAN_TOOL;
use crate::protocol::AskForApproval;
@@ -32,6 +35,7 @@ pub(crate) enum OpenAiTool {
LocalShell {},
}
// Which shell tool to use
#[derive(Debug, Clone)]
pub enum ConfigShellToolType {
DefaultShell,
@@ -42,8 +46,7 @@ pub enum ConfigShellToolType {
#[derive(Debug, Clone)]
pub struct ToolsConfig {
pub shell_type: ConfigShellToolType,
pub plan_tool: bool,
pub apply_patch_tool: bool,
pub codex_tools: HashSet<CodexTool>,
}
impl ToolsConfig {
@@ -51,8 +54,7 @@ impl ToolsConfig {
model_family: &ModelFamily,
approval_policy: AskForApproval,
sandbox_policy: SandboxPolicy,
include_plan_tool: bool,
include_apply_patch_tool: bool,
codex_tools: HashSet<CodexTool>,
) -> Self {
let mut shell_type = if model_family.uses_local_shell_tool {
ConfigShellToolType::LocalShell
@@ -67,12 +69,28 @@ impl ToolsConfig {
Self {
shell_type,
plan_tool: include_plan_tool,
apply_patch_tool: include_apply_patch_tool || model_family.uses_apply_patch_tool,
codex_tools,
}
}
}
impl From<&Config> for ToolsConfig {
fn from(config: &Config) -> Self {
Self::new(
&config.model_family,
config.approval_policy,
config.sandbox_policy.clone(),
config
.codex_tools
.as_ref()
.unwrap_or(&vec![])
.iter()
.cloned()
.collect(),
)
}
}
/// Generic JSONSchema subset needed for our tool definitions
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
@@ -535,12 +553,11 @@ pub(crate) fn get_openai_tools(
}
}
if config.plan_tool {
tools.push(PLAN_TOOL.clone());
}
if config.apply_patch_tool {
tools.push(create_apply_patch_tool());
for tool in &config.codex_tools {
match tool {
CodexTool::UpdatePlan => tools.push(PLAN_TOOL.clone()),
CodexTool::ApplyPatch => tools.push(create_apply_patch_tool()),
}
}
if let Some(mcp_tools) = mcp_tools {
@@ -595,8 +612,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
true,
model_family.uses_apply_patch_tool,
HashSet::from([CodexTool::UpdatePlan]),
);
let tools = get_openai_tools(&config, Some(HashMap::new()));
@@ -610,8 +626,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
true,
model_family.uses_apply_patch_tool,
HashSet::from([CodexTool::UpdatePlan]),
);
let tools = get_openai_tools(&config, Some(HashMap::new()));
@@ -625,8 +640,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
false,
model_family.uses_apply_patch_tool,
HashSet::new(),
);
let tools = get_openai_tools(
&config,
@@ -719,8 +733,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
false,
model_family.uses_apply_patch_tool,
HashSet::new(),
);
let tools = get_openai_tools(
@@ -775,8 +788,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
false,
model_family.uses_apply_patch_tool,
HashSet::new(),
);
let tools = get_openai_tools(
@@ -826,8 +838,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
false,
model_family.uses_apply_patch_tool,
HashSet::new(),
);
let tools = get_openai_tools(
@@ -880,8 +891,7 @@ mod tests {
&model_family,
AskForApproval::Never,
SandboxPolicy::ReadOnly,
false,
model_family.uses_apply_patch_tool,
HashSet::new(),
);
let tools = get_openai_tools(

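With codex_tools carried as a set on ToolsConfig, get_openai_tools now expands each entry into its concrete definition (PLAN_TOOL for UpdatePlan, the synthetic apply_patch tool for ApplyPatch) after the shell tool has been chosen. A fragment modeled on the tests above, showing both optional tools enabled at once; as in those tests, model_family is assumed to be provided by the surrounding test setup:

let tools_config = ToolsConfig::new(
    &model_family,
    AskForApproval::Never,
    SandboxPolicy::ReadOnly,
    HashSet::from([CodexTool::UpdatePlan, CodexTool::ApplyPatch]),
);
// With no MCP tools registered, the result is the shell tool plus entries for
// update_plan and apply_patch.
let tools = get_openai_tools(&tools_config, Some(HashMap::new()));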
View File

@@ -144,9 +144,8 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
cwd: cwd.map(|p| p.canonicalize().unwrap_or(p)),
model_provider,
codex_linux_sandbox_exe,
codex_tools: None,
base_instructions: None,
include_plan_tool: None,
include_apply_patch_tool: None,
disable_response_storage: oss.then_some(true),
show_raw_agent_reasoning: oss.then_some(true),
};

View File

@@ -602,8 +602,7 @@ fn derive_config_from_params(
sandbox: sandbox_mode,
config: cli_overrides,
base_instructions,
include_plan_tool,
include_apply_patch_tool,
codex_tools,
} = params;
let overrides = ConfigOverrides {
model,
@@ -614,8 +613,7 @@ fn derive_config_from_params(
model_provider: None,
codex_linux_sandbox_exe,
base_instructions,
include_plan_tool,
include_apply_patch_tool,
codex_tools,
disable_response_storage: None,
show_raw_agent_reasoning: None,
};

View File

@@ -50,10 +50,6 @@ pub struct CodexToolCallParam {
/// The set of instructions to use instead of the default ones.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub base_instructions: Option<String>,
/// Whether to include the plan tool in the conversation.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub include_plan_tool: Option<bool>,
}
/// Custom enum mirroring [`AskForApproval`], but has an extra dependency on
@@ -146,7 +142,6 @@ impl CodexToolCallParam {
sandbox,
config: cli_overrides,
base_instructions,
include_plan_tool,
} = self;
// Build the `ConfigOverrides` recognized by codex-core.
@@ -159,8 +154,7 @@ impl CodexToolCallParam {
model_provider: None,
codex_linux_sandbox_exe,
base_instructions,
include_plan_tool,
include_apply_patch_tool: None,
codex_tools: None,
disable_response_storage: None,
show_raw_agent_reasoning: None,
};

View File

@@ -46,3 +46,14 @@ pub enum SandboxMode {
#[serde(rename = "danger-full-access")]
DangerFullAccess,
}
/// Additional tools available within Codex, beyond the default shells.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, TS)]
#[serde(rename_all = "snake_case")]
pub enum CodexTool {
/// Provides a session-persistent planning mechanism
UpdatePlan,
/// Matches the `apply_patch` synthetic shell command, for models that
/// benefit from invoking file edits as a structured tool call.
ApplyPatch,
}

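Because of the snake_case rename, the variants cross any serde boundary as "update_plan" and "apply_patch", which is what codex_tools entries in config.toml would use and what the MCP protocol test later in this diff shows. A self-contained sketch of that naming (serde_json is used here purely for illustration):

use serde::Serialize;

// Local copy of the enum above, trimmed to the serde attributes that matter here.
#[derive(Serialize)]
#[serde(rename_all = "snake_case")]
enum CodexTool {
    UpdatePlan,
    ApplyPatch,
}

fn main() {
    let wire = serde_json::to_value([CodexTool::UpdatePlan, CodexTool::ApplyPatch]).unwrap();
    assert_eq!(wire, serde_json::json!(["update_plan", "apply_patch"]));
}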
View File

@@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::fmt::Display;
use std::path::PathBuf;
use crate::config_types::CodexTool;
use crate::config_types::ReasoningEffort;
use crate::config_types::ReasoningSummary;
use crate::config_types::SandboxMode;
@@ -105,13 +106,9 @@ pub struct NewConversationParams {
#[serde(skip_serializing_if = "Option::is_none")]
pub base_instructions: Option<String>,
/// Whether to include the plan tool in the conversation.
/// List of additional Codex-provided tools to enable for the model
#[serde(skip_serializing_if = "Option::is_none")]
pub include_plan_tool: Option<bool>,
/// Whether to include the apply patch tool in the conversation.
#[serde(skip_serializing_if = "Option::is_none")]
pub include_apply_patch_tool: Option<bool>,
pub codex_tools: Option<Vec<CodexTool>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
@@ -311,8 +308,7 @@ mod tests {
sandbox: None,
config: None,
base_instructions: None,
include_plan_tool: None,
include_apply_patch_tool: None,
codex_tools: Some(vec![CodexTool::UpdatePlan]),
},
};
assert_eq!(
@@ -321,7 +317,8 @@ mod tests {
"id": 42,
"params": {
"model": "gpt-5",
"approvalPolicy": "on-request"
"approvalPolicy": "on-request",
"codex_tools": ["update_plan"]
}
}),
serde_json::to_value(&request).unwrap(),

View File

@@ -15,6 +15,7 @@ use codex_core::protocol::SandboxPolicy;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use codex_ollama::DEFAULT_OSS_MODEL;
use codex_protocol::config_types::CodexTool;
use codex_protocol::config_types::SandboxMode;
use std::fs::OpenOptions;
use std::path::PathBuf;
@@ -116,8 +117,7 @@ pub async fn run_main(
config_profile: cli.config_profile.clone(),
codex_linux_sandbox_exe,
base_instructions: None,
include_plan_tool: Some(true),
include_apply_patch_tool: None,
codex_tools: Some(vec![CodexTool::UpdatePlan]),
disable_response_storage: cli.oss.then_some(true),
show_raw_agent_reasoning: cli.oss.then_some(true),
};