feat: add Codex Apps sediment file remapping (#15197)

## Summary
- bridge Codex Apps tools that declare `_meta["openai/fileParams"]`
through the OpenAI file upload flow
- mask those file params in model-visible tool schemas so the model
provides absolute local file paths instead of raw file payload objects
- rewrite those local file path arguments client-side into
`ProvidedFilePayload`-shaped objects before the normal MCP tool call

## Details
- applies to scalar and array file params declared in
`openai/fileParams`
- Codex uploads local files directly to the backend and uses the
uploaded file metadata to build the MCP tool arguments locally
- this PR is input-only: it rewrites tool-call arguments before the MCP call is made; tool results/outputs are returned unchanged

## Verification
- `just fmt`
- `cargo test -p codex-core mcp_tool_call -- --nocapture`

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Casey Chow
2026-04-09 14:10:44 -04:00
committed by GitHub
parent 25a0f6784d
commit 244b15c95d
15 changed files with 1313 additions and 35 deletions

View File

@@ -23,6 +23,7 @@ In the codex-rs folder where the rust code lives:
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- Prefer private modules and explicitly exported public crate API.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls.
- If you change Rust dependencies (`Cargo.toml` or `Cargo.lock`), run `just bazel-lock-update` from the
repo root to refresh `MODULE.bazel.lock`, and include that lockfile update in the same change.
- After dependency changes, run `just bazel-lock-check` from the repo root so lockfile drift is caught early, before CI.

1
codex-rs/Cargo.lock generated
View File

@@ -1414,6 +1414,7 @@ dependencies = [
"reqwest",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tokio-test",

View File

@@ -14,22 +14,24 @@ codex-protocol = { workspace = true }
codex-utils-rustls-provider = { workspace = true }
futures = { workspace = true }
http = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "net", "rt", "sync", "time"] }
tokio = { workspace = true, features = ["fs", "macros", "net", "rt", "sync", "time"] }
tokio-tungstenite = { workspace = true }
tungstenite = { workspace = true }
tracing = { workspace = true }
eventsource-stream = { workspace = true }
regex-lite = { workspace = true }
tokio-util = { workspace = true, features = ["codec"] }
tokio-util = { workspace = true, features = ["codec", "io"] }
url = { workspace = true }
[dev-dependencies]
anyhow = { workspace = true }
assert_matches = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
tokio-test = { workspace = true }
wiremock = { workspace = true }
reqwest = { workspace = true }

View File

@@ -0,0 +1,370 @@
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use crate::AuthProvider;
use codex_client::build_reqwest_client_with_custom_ca;
use reqwest::StatusCode;
use reqwest::header::CONTENT_LENGTH;
use serde::Deserialize;
use tokio::fs::File;
use tokio::time::Instant;
use tokio_util::io::ReaderStream;
/// URI scheme prefix for files stored in OpenAI file storage ("sediment").
pub const OPENAI_FILE_URI_PREFIX: &str = "sediment://";
/// Maximum accepted size for a single upload (512 MiB).
pub const OPENAI_FILE_UPLOAD_LIMIT_BYTES: u64 = 512 * 1024 * 1024;
// Per-request timeout applied to the create/upload/finalize HTTP calls.
const OPENAI_FILE_REQUEST_TIMEOUT: Duration = Duration::from_secs(60);
// Total budget for polling the finalize endpoint before giving up.
const OPENAI_FILE_FINALIZE_TIMEOUT: Duration = Duration::from_secs(30);
// Delay between finalize polls while the backend reports "retry".
const OPENAI_FILE_FINALIZE_RETRY_DELAY: Duration = Duration::from_millis(250);
// `use_case` value sent when creating the backend file record.
const OPENAI_FILE_USE_CASE: &str = "codex";
/// Metadata for a local file that was successfully uploaded to OpenAI file
/// storage, as returned by [`upload_local_file`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct UploadedOpenAiFile {
    /// Backend-assigned file identifier.
    pub file_id: String,
    /// Canonical `sediment://<file_id>` URI (see [`openai_file_uri`]).
    pub uri: String,
    /// Signed URL from which the uploaded content can be fetched.
    pub download_url: String,
    /// Backend-reported file name, falling back to the local file name.
    pub file_name: String,
    /// Size of the local file in bytes, taken from filesystem metadata.
    pub file_size_bytes: u64,
    /// MIME type as reported by the backend, when available.
    pub mime_type: Option<String>,
    /// The local path that was uploaded.
    pub path: PathBuf,
}
/// Errors that can occur while validating, uploading, or finalizing an
/// OpenAI file upload. Local-filesystem problems are reported before any
/// network activity; HTTP problems carry the URL that failed.
#[derive(Debug, thiserror::Error)]
pub enum OpenAiFileError {
    /// The given path does not exist on the local filesystem.
    #[error("path `{path}` does not exist")]
    MissingPath { path: PathBuf },
    /// The path exists but is not a regular file (e.g. a directory).
    #[error("path `{path}` is not a file")]
    NotAFile { path: PathBuf },
    /// Reading the file (or its metadata) failed for an I/O reason other
    /// than "not found".
    #[error("path `{path}` cannot be read: {source}")]
    ReadFile {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    /// The file exceeds `OPENAI_FILE_UPLOAD_LIMIT_BYTES`.
    #[error(
        "file `{path}` is too large: {size_bytes} bytes exceeds the limit of {limit_bytes} bytes"
    )]
    FileTooLarge {
        path: PathBuf,
        size_bytes: u64,
        limit_bytes: u64,
    },
    /// The HTTP request could not be sent (connect/timeout/transport error).
    #[error("failed to send OpenAI file request to {url}: {source}")]
    Request {
        url: String,
        #[source]
        source: reqwest::Error,
    },
    /// The backend answered with a non-success HTTP status.
    #[error("OpenAI file request to {url} failed with status {status}: {body}")]
    UnexpectedStatus {
        url: String,
        status: StatusCode,
        body: String,
    },
    /// The backend's response body could not be deserialized.
    #[error("failed to parse OpenAI file response from {url}: {source}")]
    Decode {
        url: String,
        #[source]
        source: serde_json::Error,
    },
    /// Finalization kept reporting "retry" until the polling budget expired.
    #[error("OpenAI file upload for `{file_id}` is not ready yet")]
    UploadNotReady { file_id: String },
    /// Finalization reported a terminal failure (or a malformed success).
    #[error("OpenAI file upload for `{file_id}` failed: {message}")]
    UploadFailed { file_id: String, message: String },
}
/// Response body of `POST {base_url}/files`: the new file's id and the
/// signed URL the raw bytes should be PUT to.
#[derive(Deserialize)]
struct CreateFileResponse {
    file_id: String,
    upload_url: String,
}
/// Response body of `POST {base_url}/files/{id}/uploaded` (finalize).
/// `status` is compared against "success"/"retry" by the caller; all other
/// fields are optional and only meaningful on success or failure.
#[derive(Deserialize)]
#[serde(rename_all = "snake_case")]
struct DownloadLinkResponse {
    status: String,
    download_url: Option<String>,
    file_name: Option<String>,
    mime_type: Option<String>,
    error_message: Option<String>,
}
pub fn openai_file_uri(file_id: &str) -> String {
format!("{OPENAI_FILE_URI_PREFIX}{file_id}")
}
/// Uploads a local file to OpenAI file storage and returns its metadata.
///
/// Flow (three backend round trips):
/// 1. `POST {base_url}/files` creates a file record and yields a signed
///    upload URL.
/// 2. The raw file bytes are streamed via `PUT` to that upload URL.
/// 3. `POST {base_url}/files/{id}/uploaded` finalizes the upload, polling
///    while the backend reports `"retry"` until success, error, or the
///    `OPENAI_FILE_FINALIZE_TIMEOUT` budget is exhausted.
///
/// # Errors
/// Returns an [`OpenAiFileError`] when the path is missing, not a regular
/// file, exceeds `OPENAI_FILE_UPLOAD_LIMIT_BYTES`, or when any HTTP step
/// fails, returns a non-success status, or cannot be decoded.
pub async fn upload_local_file(
    base_url: &str,
    auth: &impl AuthProvider,
    path: &Path,
) -> Result<UploadedOpenAiFile, OpenAiFileError> {
    // Validate the local path before performing any network activity.
    let metadata = tokio::fs::metadata(path)
        .await
        .map_err(|source| match source.kind() {
            std::io::ErrorKind::NotFound => OpenAiFileError::MissingPath {
                path: path.to_path_buf(),
            },
            _ => OpenAiFileError::ReadFile {
                path: path.to_path_buf(),
                source,
            },
        })?;
    if !metadata.is_file() {
        return Err(OpenAiFileError::NotAFile {
            path: path.to_path_buf(),
        });
    }
    if metadata.len() > OPENAI_FILE_UPLOAD_LIMIT_BYTES {
        return Err(OpenAiFileError::FileTooLarge {
            path: path.to_path_buf(),
            size_bytes: metadata.len(),
            limit_bytes: OPENAI_FILE_UPLOAD_LIMIT_BYTES,
        });
    }
    // Fall back to a generic name when the path has no UTF-8 file name.
    let file_name = path
        .file_name()
        .and_then(|value| value.to_str())
        .unwrap_or("file")
        .to_string();
    // Step 1: create the file record and obtain the signed upload URL.
    let create_url = format!("{}/files", base_url.trim_end_matches('/'));
    let create_response = authorized_request(auth, reqwest::Method::POST, &create_url)
        .json(&serde_json::json!({
            "file_name": file_name,
            "file_size": metadata.len(),
            "use_case": OPENAI_FILE_USE_CASE,
        }))
        .send()
        .await
        .map_err(|source| OpenAiFileError::Request {
            url: create_url.clone(),
            source,
        })?;
    let create_status = create_response.status();
    // Read the body even on error so it can be included in the message.
    let create_body = create_response.text().await.unwrap_or_default();
    if !create_status.is_success() {
        return Err(OpenAiFileError::UnexpectedStatus {
            url: create_url,
            status: create_status,
            body: create_body,
        });
    }
    let create_payload: CreateFileResponse =
        serde_json::from_str(&create_body).map_err(|source| OpenAiFileError::Decode {
            url: create_url.clone(),
            source,
        })?;
    // Step 2: stream the file bytes to the upload URL so large files are
    // not buffered entirely in memory.
    let upload_file = File::open(path)
        .await
        .map_err(|source| OpenAiFileError::ReadFile {
            path: path.to_path_buf(),
            source,
        })?;
    let upload_response = build_reqwest_client()
        .put(&create_payload.upload_url)
        .timeout(OPENAI_FILE_REQUEST_TIMEOUT)
        // NOTE(review): `x-ms-blob-type` suggests the signed URL targets
        // Azure Blob Storage — confirm against the backend contract.
        .header("x-ms-blob-type", "BlockBlob")
        .header(CONTENT_LENGTH, metadata.len())
        .body(reqwest::Body::wrap_stream(ReaderStream::new(upload_file)))
        .send()
        .await
        .map_err(|source| OpenAiFileError::Request {
            url: create_payload.upload_url.clone(),
            source,
        })?;
    let upload_status = upload_response.status();
    let upload_body = upload_response.text().await.unwrap_or_default();
    if !upload_status.is_success() {
        return Err(OpenAiFileError::UnexpectedStatus {
            url: create_payload.upload_url.clone(),
            status: upload_status,
            body: upload_body,
        });
    }
    // Step 3: finalize, polling while the backend reports "retry".
    let finalize_url = format!(
        "{}/files/{}/uploaded",
        base_url.trim_end_matches('/'),
        create_payload.file_id,
    );
    let finalize_started_at = Instant::now();
    loop {
        let finalize_response = authorized_request(auth, reqwest::Method::POST, &finalize_url)
            .json(&serde_json::json!({}))
            .send()
            .await
            .map_err(|source| OpenAiFileError::Request {
                url: finalize_url.clone(),
                source,
            })?;
        let finalize_status = finalize_response.status();
        let finalize_body = finalize_response.text().await.unwrap_or_default();
        if !finalize_status.is_success() {
            return Err(OpenAiFileError::UnexpectedStatus {
                url: finalize_url.clone(),
                status: finalize_status,
                body: finalize_body,
            });
        }
        let finalize_payload: DownloadLinkResponse =
            serde_json::from_str(&finalize_body).map_err(|source| OpenAiFileError::Decode {
                url: finalize_url.clone(),
                source,
            })?;
        match finalize_payload.status.as_str() {
            "success" => {
                return Ok(UploadedOpenAiFile {
                    file_id: create_payload.file_id.clone(),
                    uri: openai_file_uri(&create_payload.file_id),
                    // A success without a download URL is treated as a
                    // failed upload rather than a partial result.
                    download_url: finalize_payload.download_url.ok_or_else(|| {
                        OpenAiFileError::UploadFailed {
                            file_id: create_payload.file_id.clone(),
                            message: "missing download_url".to_string(),
                        }
                    })?,
                    // Prefer the backend-reported name; fall back to local.
                    file_name: finalize_payload.file_name.unwrap_or(file_name),
                    file_size_bytes: metadata.len(),
                    mime_type: finalize_payload.mime_type,
                    path: path.to_path_buf(),
                });
            }
            "retry" => {
                // Bounded polling: stop retrying once the budget elapses.
                if finalize_started_at.elapsed() >= OPENAI_FILE_FINALIZE_TIMEOUT {
                    return Err(OpenAiFileError::UploadNotReady {
                        file_id: create_payload.file_id,
                    });
                }
                tokio::time::sleep(OPENAI_FILE_FINALIZE_RETRY_DELAY).await;
            }
            // Any other status string is a terminal failure.
            _ => {
                return Err(OpenAiFileError::UploadFailed {
                    file_id: create_payload.file_id,
                    message: finalize_payload
                        .error_message
                        .unwrap_or_else(|| "upload finalization returned an error".to_string()),
                });
            }
        }
    }
}
/// Starts a request to the OpenAI files backend with the caller's bearer
/// token and ChatGPT account id attached when the auth provider has them.
fn authorized_request(
    auth: &impl AuthProvider,
    method: reqwest::Method,
    url: &str,
) -> reqwest::RequestBuilder {
    let mut builder = build_reqwest_client()
        .request(method, url)
        .timeout(OPENAI_FILE_REQUEST_TIMEOUT);
    if let Some(token) = auth.bearer_token() {
        builder = builder.bearer_auth(token);
    }
    if let Some(account_id) = auth.account_id() {
        builder = builder.header("chatgpt-account-id", account_id);
    }
    builder
}
/// Builds the HTTP client used for OpenAI file requests, honoring any custom
/// CA configuration; falls back to a default client if that setup fails.
fn build_reqwest_client() -> reqwest::Client {
    match build_reqwest_client_with_custom_ca(reqwest::Client::builder()) {
        Ok(client) => client,
        Err(error) => {
            tracing::warn!(error = %error, "failed to build OpenAI file upload client");
            reqwest::Client::new()
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::CoreAuthProvider;
    use pretty_assertions::assert_eq;
    use std::sync::Arc;
    use std::sync::atomic::AtomicUsize;
    use std::sync::atomic::Ordering;
    use tempfile::TempDir;
    use wiremock::Mock;
    use wiremock::MockServer;
    use wiremock::Request;
    use wiremock::ResponseTemplate;
    use wiremock::matchers::body_json;
    use wiremock::matchers::header;
    use wiremock::matchers::method;
    use wiremock::matchers::path;

    // Test auth provider carrying a bearer token and ChatGPT account id.
    fn chatgpt_auth() -> CoreAuthProvider {
        CoreAuthProvider::for_test(Some("token"), Some("account_id"))
    }

    // Mirrors the production base URL shape, which ends in `/backend-api`.
    fn base_url_for(server: &MockServer) -> String {
        format!("{}/backend-api", server.uri())
    }

    // Happy path through create -> PUT -> finalize, including one "retry"
    // finalize response before "success" to exercise the polling loop.
    #[tokio::test]
    async fn upload_local_file_returns_canonical_uri() {
        let server = MockServer::start().await;
        Mock::given(method("POST"))
            .and(path("/backend-api/files"))
            .and(header("chatgpt-account-id", "account_id"))
            .and(body_json(serde_json::json!({
                "file_name": "hello.txt",
                "file_size": 5,
                "use_case": "codex",
            })))
            .respond_with(
                ResponseTemplate::new(200)
                    .set_body_json(serde_json::json!({"file_id": "file_123", "upload_url": format!("{}/upload/file_123", server.uri())})),
            )
            .mount(&server)
            .await;
        Mock::given(method("PUT"))
            .and(path("/upload/file_123"))
            .and(header("content-length", "5"))
            .respond_with(ResponseTemplate::new(200))
            .mount(&server)
            .await;
        // First finalize call returns "retry"; the second returns "success".
        let finalize_attempts = Arc::new(AtomicUsize::new(0));
        let finalize_attempts_responder = Arc::clone(&finalize_attempts);
        let download_url = format!("{}/download/file_123", server.uri());
        Mock::given(method("POST"))
            .and(path("/backend-api/files/file_123/uploaded"))
            .respond_with(move |_request: &Request| {
                if finalize_attempts_responder.fetch_add(1, Ordering::SeqCst) == 0 {
                    return ResponseTemplate::new(200).set_body_json(serde_json::json!({
                        "status": "retry"
                    }));
                }
                ResponseTemplate::new(200).set_body_json(serde_json::json!({
                    "status": "success",
                    "download_url": download_url,
                    "file_name": "hello.txt",
                    "mime_type": "text/plain",
                    "file_size_bytes": 5
                }))
            })
            .mount(&server)
            .await;
        let base_url = base_url_for(&server);
        let dir = TempDir::new().expect("temp dir");
        let path = dir.path().join("hello.txt");
        tokio::fs::write(&path, b"hello").await.expect("write file");
        let uploaded = upload_local_file(&base_url, &chatgpt_auth(), &path)
            .await
            .expect("upload succeeds");
        assert_eq!(uploaded.file_id, "file_123");
        assert_eq!(uploaded.uri, "sediment://file_123");
        assert_eq!(
            uploaded.download_url,
            format!("{}/download/file_123", server.uri())
        );
        assert_eq!(uploaded.file_name, "hello.txt");
        assert_eq!(uploaded.mime_type, Some("text/plain".to_string()));
        // Exactly two finalize attempts: one "retry", one "success".
        assert_eq!(finalize_attempts.load(Ordering::SeqCst), 2);
    }
}

View File

@@ -3,6 +3,7 @@ pub(crate) mod auth;
pub(crate) mod common;
pub(crate) mod endpoint;
pub(crate) mod error;
pub(crate) mod files;
pub(crate) mod provider;
pub(crate) mod rate_limits;
pub(crate) mod requests;
@@ -52,6 +53,7 @@ pub use crate::endpoint::ResponsesWebsocketClient;
pub use crate::endpoint::ResponsesWebsocketConnection;
pub use crate::endpoint::session_update_session_json;
pub use crate::error::ApiError;
pub use crate::files::upload_local_file;
pub use crate::provider::Provider;
pub use crate::provider::RetryConfig;
pub use crate::provider::is_azure_responses_wire_base_url;

View File

@@ -38,4 +38,5 @@ pub use mcp_connection_manager::McpConnectionManager;
pub use mcp_connection_manager::SandboxState;
pub use mcp_connection_manager::ToolInfo;
pub use mcp_connection_manager::codex_apps_tools_cache_key;
pub use mcp_connection_manager::declared_openai_file_input_param_names;
pub use mcp_connection_manager::filter_non_codex_apps_mcp_tools_only;

View File

@@ -74,6 +74,8 @@ use rmcp::model::Tool;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value as JsonValue;
use sha1::Digest;
use sha1::Sha1;
use tokio::sync::Mutex;
@@ -197,6 +199,89 @@ pub struct ToolInfo {
pub connector_description: Option<String>,
}
const META_OPENAI_FILE_PARAMS: &str = "openai/fileParams";
/// Extracts the tool argument names declared as file inputs via the
/// `_meta["openai/fileParams"]` array. Non-string and empty entries are
/// skipped; a missing meta map or key yields an empty list.
pub fn declared_openai_file_input_param_names(
    meta: Option<&Map<String, JsonValue>>,
) -> Vec<String> {
    let declared = meta
        .and_then(|meta| meta.get(META_OPENAI_FILE_PARAMS))
        .and_then(JsonValue::as_array);
    let Some(declared) = declared else {
        return Vec::new();
    };
    declared
        .iter()
        .filter_map(JsonValue::as_str)
        .filter(|name| !name.is_empty())
        .map(str::to_string)
        .collect()
}
/// Returns the model-visible view of a tool while preserving the raw metadata
/// used by execution. Keep cache entries raw and call this at manager return
/// boundaries.
fn tool_with_model_visible_input_schema(tool: &Tool) -> Tool {
    let file_params = declared_openai_file_input_param_names(tool.meta.as_deref());
    // Tools that declare no file params are returned unchanged.
    if file_params.is_empty() {
        return tool.clone();
    }
    let mut tool = tool.clone();
    // Work on an owned JSON value so the masking helper can mutate in place.
    let mut input_schema = JsonValue::Object(tool.input_schema.as_ref().clone());
    mask_input_schema_for_file_path_params(&mut input_schema, &file_params);
    // Masking preserves the object shape, so this branch is always taken.
    if let JsonValue::Object(input_schema) = input_schema {
        tool.input_schema = Arc::new(input_schema);
    }
    tool
}
/// Masks every declared file param inside the schema's `properties` map.
/// Schemas without an object-valued `properties` entry are left untouched,
/// as are declared names that have no matching property.
fn mask_input_schema_for_file_path_params(input_schema: &mut JsonValue, file_params: &[String]) {
    let properties = input_schema
        .as_object_mut()
        .and_then(|schema| schema.get_mut("properties"))
        .and_then(JsonValue::as_object_mut);
    if let Some(properties) = properties {
        for field_name in file_params {
            if let Some(property_schema) = properties.get_mut(field_name) {
                mask_input_property_schema(property_schema);
            }
        }
    }
}
/// Rewrites one property schema so the model sees a string (or string-array)
/// path parameter instead of the raw file payload shape, appending guidance
/// to the author's description exactly once.
fn mask_input_property_schema(schema: &mut JsonValue) {
    const GUIDANCE: &str = "This parameter expects an absolute local file path. If you want to upload a file, provide the absolute path to that file here.";
    let Some(object) = schema.as_object_mut() else {
        return;
    };
    // Preserve the author's description and append the path guidance unless
    // it is already present.
    let existing = object
        .get("description")
        .and_then(JsonValue::as_str)
        .unwrap_or("");
    let description = if existing.is_empty() {
        GUIDANCE.to_string()
    } else if existing.contains(GUIDANCE) {
        existing.to_string()
    } else {
        format!("{existing} {GUIDANCE}")
    };
    // Array-ness is inferred from an explicit `"type": "array"` or from the
    // presence of an `items` schema.
    let is_array = object.get("type").and_then(JsonValue::as_str) == Some("array")
        || object.get("items").is_some();
    // Drop every original constraint: the model only provides path strings.
    object.clear();
    object.insert("description".to_string(), JsonValue::String(description));
    if is_array {
        object.insert("type".to_string(), JsonValue::String("array".to_string()));
        object.insert("items".to_string(), serde_json::json!({ "type": "string" }));
    } else {
        object.insert("type".to_string(), JsonValue::String("string".to_string()));
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct CodexAppsToolsCacheKey {
account_id: Option<String>,
@@ -534,6 +619,10 @@ impl AsyncManagedClient {
let annotate_tools = |tools: Vec<ToolInfo>| {
let mut tools = tools;
for tool in &mut tools {
if tool.server_name == CODEX_APPS_MCP_SERVER_NAME {
tool.tool = tool_with_model_visible_input_schema(&tool.tool);
}
let plugin_names = match tool.connector_id.as_deref() {
Some(connector_id) => self
.tool_plugin_provenance
@@ -914,10 +1003,13 @@ impl McpConnectionManager {
list_start.elapsed(),
&[("cache", "miss")],
);
Ok(qualify_tools(filter_tools(
tools,
&managed_client.tool_filter,
)))
let tools = filter_tools(tools, &managed_client.tool_filter)
.into_iter()
.map(|mut tool| {
tool.tool = tool_with_model_visible_input_schema(&tool.tool);
tool
});
Ok(qualify_tools(tools))
}
/// Returns a single map that contains all resources. Each key is the
@@ -1419,6 +1511,12 @@ async fn start_server_task(
.await
.map_err(StartupOutcomeError::from)?;
let server_supports_sandbox_state_capability = initialize_result
.capabilities
.experimental
.as_ref()
.and_then(|exp| exp.get(MCP_SANDBOX_STATE_CAPABILITY))
.is_some();
let list_start = Instant::now();
let fetch_start = Instant::now();
let tools = list_tools_for_client_uncached(
@@ -1448,12 +1546,6 @@ async fn start_server_task(
}
let tools = filter_tools(tools, &tool_filter);
let server_supports_sandbox_state_capability = initialize_result
.capabilities
.experimental
.as_ref()
.and_then(|exp| exp.get(MCP_SANDBOX_STATE_CAPABILITY))
.is_some();
let managed = ManagedClient {
client: Arc::clone(&client),
tools,

View File

@@ -3,6 +3,7 @@ use codex_protocol::protocol::GranularApprovalConfig;
use codex_protocol::protocol::McpAuthStatus;
use pretty_assertions::assert_eq;
use rmcp::model::JsonObject;
use rmcp::model::Meta;
use rmcp::model::NumberOrString;
use std::collections::HashSet;
use std::sync::Arc;
@@ -63,6 +64,86 @@ fn create_codex_apps_tools_cache_context(
}
}
// `openai/fileParams` entries are used verbatim as argument names — no
// normalization, globbing, or pattern matching is applied.
#[test]
fn declared_openai_file_fields_treat_names_literally() {
    let meta = serde_json::json!({
        "openai/fileParams": ["file", "input_file", "attachments"]
    });
    let meta = meta.as_object().expect("meta object");
    assert_eq!(
        declared_openai_file_input_param_names(Some(meta)),
        vec![
            "file".to_string(),
            "input_file".to_string(),
            "attachments".to_string(),
        ]
    );
}
// Declared scalar params are masked to strings and declared array params to
// string arrays; original descriptions are kept with guidance appended.
#[test]
fn tool_with_model_visible_input_schema_masks_file_params() {
    let mut tool = create_test_tool(CODEX_APPS_MCP_SERVER_NAME, "upload").tool;
    tool.input_schema = Arc::new(
        serde_json::json!({
            "type": "object",
            "properties": {
                "file": {
                    "type": "object",
                    "description": "Original file payload."
                },
                "files": {
                    "type": "array",
                    "items": {"type": "object"}
                }
            }
        })
        .as_object()
        .expect("object")
        .clone(),
    );
    tool.meta = Some(Meta(
        serde_json::json!({
            "openai/fileParams": ["file", "files"]
        })
        .as_object()
        .expect("object")
        .clone(),
    ));
    let tool = tool_with_model_visible_input_schema(&tool);
    assert_eq!(
        *tool.input_schema,
        serde_json::json!({
            "type": "object",
            "properties": {
                "file": {
                    "type": "string",
                    "description": "Original file payload. This parameter expects an absolute local file path. If you want to upload a file, provide the absolute path to that file here."
                },
                "files": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "This parameter expects an absolute local file path. If you want to upload a file, provide the absolute path to that file here."
                }
            }
        })
        .as_object()
        .expect("object")
        .clone()
    );
}
// Tools without `openai/fileParams` metadata must round-trip unchanged.
#[test]
fn tool_with_model_visible_input_schema_leaves_tools_without_file_params_unchanged() {
    let original_tool = create_test_tool("custom", "upload").tool;
    let tool = tool_with_model_visible_input_schema(&original_tool);
    assert_eq!(tool, original_tool);
}
#[test]
fn elicitation_granular_policy_defaults_to_prompting() {
assert!(!elicitation_is_rejected_by_policy(

View File

@@ -56,6 +56,7 @@ mod original_image_detail;
pub use codex_mcp::MCP_SANDBOX_STATE_CAPABILITY;
pub use codex_mcp::MCP_SANDBOX_STATE_METHOD;
pub use codex_mcp::SandboxState;
mod mcp_openai_file;
mod mcp_tool_call;
mod memories;
pub(crate) mod mention_syntax;

View File

@@ -0,0 +1,473 @@
//! Bridges Apps SDK-style `openai/fileParams` metadata into Codex's MCP flow.
//!
//! Strategy:
//! - Inspect `_meta["openai/fileParams"]` to discover which tool arguments are
//! file inputs.
//! - At tool execution time, upload those local files to OpenAI file storage
//! and rewrite only the declared arguments into the provided-file payload
//! shape expected by the downstream Apps tool.
//!
//! Model-visible schema masking is owned by `codex-mcp` alongside MCP tool
//! inventory, so this module only handles the execution-time argument rewrite.
use crate::codex::Session;
use crate::codex::TurnContext;
use codex_api::CoreAuthProvider;
use codex_api::upload_local_file;
use codex_login::CodexAuth;
use serde_json::Value as JsonValue;
/// Rewrites MCP tool-call arguments for params declared in
/// `_meta["openai/fileParams"]`: each declared argument holding a local file
/// path (or array of paths) is uploaded and replaced with the uploaded-file
/// payload object.
///
/// Passes arguments through unchanged when no file params are declared, when
/// there are no arguments, or when the arguments are not a JSON object.
/// Returns `Err` with a human-readable message when an upload fails.
pub(crate) async fn rewrite_mcp_tool_arguments_for_openai_files(
    sess: &Session,
    turn_context: &TurnContext,
    arguments_value: Option<JsonValue>,
    openai_file_input_params: Option<&[String]>,
) -> Result<Option<JsonValue>, String> {
    let Some(openai_file_input_params) = openai_file_input_params else {
        return Ok(arguments_value);
    };
    let Some(arguments_value) = arguments_value else {
        return Ok(None);
    };
    let Some(arguments) = arguments_value.as_object() else {
        return Ok(Some(arguments_value));
    };
    let auth = sess.services.auth_manager.auth().await;
    let mut rewritten_arguments = arguments.clone();
    for field_name in openai_file_input_params {
        // Declared params that are absent from the call are simply skipped.
        let Some(value) = arguments.get(field_name) else {
            continue;
        };
        // `None` means the value's shape was not rewritable (not a string or
        // string array); leave the original value in place.
        let Some(uploaded_value) =
            rewrite_argument_value_for_openai_files(turn_context, auth.as_ref(), field_name, value)
                .await?
        else {
            continue;
        };
        rewritten_arguments.insert(field_name.clone(), uploaded_value);
    }
    // Return the original value untouched when nothing actually changed.
    if rewritten_arguments == *arguments {
        return Ok(Some(arguments_value));
    }
    Ok(Some(JsonValue::Object(rewritten_arguments)))
}
/// Rewrites one declared file-param value into its uploaded-file payload.
///
/// - A string value is treated as a single local path and uploaded.
/// - An array value must contain only strings; each is uploaded in order.
/// - Any other shape returns `Ok(None)`, telling the caller to keep the
///   original value unchanged.
///
/// # Errors
/// Propagates upload failures as human-readable strings (including the
/// field name, and the index for array items).
async fn rewrite_argument_value_for_openai_files(
    turn_context: &TurnContext,
    auth: Option<&CodexAuth>,
    field_name: &str,
    value: &JsonValue,
) -> Result<Option<JsonValue>, String> {
    match value {
        JsonValue::String(path_or_file_ref) => {
            let rewritten = build_uploaded_local_argument_value(
                turn_context,
                auth,
                field_name,
                /*index*/ None,
                path_or_file_ref,
            )
            .await?;
            Ok(Some(rewritten))
        }
        JsonValue::Array(values) => {
            // Validate the whole array up front: a non-string element means
            // the value is passed through untouched, and checking first
            // avoids uploading earlier elements only to discard the result.
            let paths: Option<Vec<&str>> = values.iter().map(JsonValue::as_str).collect();
            let Some(paths) = paths else {
                return Ok(None);
            };
            let mut rewritten_values = Vec::with_capacity(paths.len());
            for (index, path_or_file_ref) in paths.into_iter().enumerate() {
                let rewritten = build_uploaded_local_argument_value(
                    turn_context,
                    auth,
                    field_name,
                    Some(index),
                    path_or_file_ref,
                )
                .await?;
                rewritten_values.push(rewritten);
            }
            Ok(Some(JsonValue::Array(rewritten_values)))
        }
        // Objects, numbers, booleans, and null are not rewritable shapes.
        _ => Ok(None),
    }
}
/// Uploads a single local file referenced by a tool argument and returns the
/// `ProvidedFilePayload`-shaped JSON object expected by the downstream Apps
/// tool.
///
/// `file_path` is resolved relative to the turn's working directory. `index`
/// is `Some` when the value came from an array param and is only used to
/// make error messages more precise. Requires ChatGPT auth; all failures are
/// rendered as strings for the model-facing tool error.
async fn build_uploaded_local_argument_value(
    turn_context: &TurnContext,
    auth: Option<&CodexAuth>,
    field_name: &str,
    index: Option<usize>,
    file_path: &str,
) -> Result<JsonValue, String> {
    // Resolve relative paths against the turn's working directory.
    let resolved_path = turn_context.resolve_path(Some(file_path.to_string()));
    let Some(auth) = auth else {
        return Err(
            "ChatGPT auth is required to upload local files for Codex Apps tools".to_string(),
        );
    };
    let token_data = auth
        .get_token_data()
        .map_err(|error| format!("failed to read ChatGPT auth for file upload: {error}"))?;
    let upload_auth = CoreAuthProvider {
        token: Some(token_data.access_token),
        account_id: token_data.account_id,
    };
    let uploaded = upload_local_file(
        turn_context.config.chatgpt_base_url.trim_end_matches('/'),
        &upload_auth,
        &resolved_path,
    )
    .await
    .map_err(|error| match index {
        Some(index) => {
            format!("failed to upload `{file_path}` for `{field_name}[{index}]`: {error}")
        }
        None => format!("failed to upload `{file_path}` for `{field_name}`: {error}"),
    })?;
    // Field set mirrors the provided-file payload shape the Apps tool reads.
    Ok(serde_json::json!({
        "download_url": uploaded.download_url,
        "file_id": uploaded.file_id,
        "mime_type": uploaded.mime_type,
        "file_name": uploaded.file_name,
        "uri": uploaded.uri,
        "file_size_bytes": uploaded.file_size_bytes,
    }))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::codex::make_session_and_context;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use std::sync::Arc;
use tempfile::tempdir;
#[tokio::test]
async fn openai_file_argument_rewrite_requires_declared_file_params() {
let (session, turn_context) = make_session_and_context().await;
let arguments = Some(serde_json::json!({
"file": "/tmp/codex-smoke-file.txt"
}));
let rewritten = rewrite_mcp_tool_arguments_for_openai_files(
&session,
&Arc::new(turn_context),
arguments.clone(),
/*openai_file_input_params*/ None,
)
.await
.expect("rewrite should succeed");
assert_eq!(rewritten, arguments);
}
#[tokio::test]
async fn build_uploaded_local_argument_value_uploads_local_file_path() {
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_json;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
let server = MockServer::start().await;
Mock::given(method("POST"))
.and(path("/backend-api/files"))
.and(header("chatgpt-account-id", "account_id"))
.and(body_json(serde_json::json!({
"file_name": "file_report.csv",
"file_size": 5,
"use_case": "codex",
})))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"file_id": "file_123",
"upload_url": format!("{}/upload/file_123", server.uri()),
})))
.expect(1)
.mount(&server)
.await;
Mock::given(method("PUT"))
.and(path("/upload/file_123"))
.respond_with(ResponseTemplate::new(200))
.expect(1)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/backend-api/files/file_123/uploaded"))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"status": "success",
"download_url": format!("{}/download/file_123", server.uri()),
"file_name": "file_report.csv",
"mime_type": "text/csv",
"file_size_bytes": 5,
})))
.expect(1)
.mount(&server)
.await;
let (_, mut turn_context) = make_session_and_context().await;
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let dir = tempdir().expect("temp dir");
let local_path = dir.path().join("file_report.csv");
tokio::fs::write(&local_path, b"hello")
.await
.expect("write local file");
turn_context.cwd = AbsolutePathBuf::try_from(dir.path()).expect("absolute path");
let mut config = (*turn_context.config).clone();
config.chatgpt_base_url = format!("{}/backend-api", server.uri());
turn_context.config = Arc::new(config);
let rewritten = build_uploaded_local_argument_value(
&turn_context,
Some(&auth),
"file",
/*index*/ None,
"file_report.csv",
)
.await
.expect("rewrite should upload the local file");
assert_eq!(
rewritten,
serde_json::json!({
"download_url": format!("{}/download/file_123", server.uri()),
"file_id": "file_123",
"mime_type": "text/csv",
"file_name": "file_report.csv",
"uri": "sediment://file_123",
"file_size_bytes": 5,
})
);
}
#[tokio::test]
async fn rewrite_argument_value_for_openai_files_rewrites_scalar_path() {
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_json;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
let server = MockServer::start().await;
Mock::given(method("POST"))
.and(path("/backend-api/files"))
.and(header("chatgpt-account-id", "account_id"))
.and(body_json(serde_json::json!({
"file_name": "file_report.csv",
"file_size": 5,
"use_case": "codex",
})))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"file_id": "file_123",
"upload_url": format!("{}/upload/file_123", server.uri()),
})))
.expect(1)
.mount(&server)
.await;
Mock::given(method("PUT"))
.and(path("/upload/file_123"))
.respond_with(ResponseTemplate::new(200))
.expect(1)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/backend-api/files/file_123/uploaded"))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"status": "success",
"download_url": format!("{}/download/file_123", server.uri()),
"file_name": "file_report.csv",
"mime_type": "text/csv",
"file_size_bytes": 5,
})))
.expect(1)
.mount(&server)
.await;
let (_, mut turn_context) = make_session_and_context().await;
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let dir = tempdir().expect("temp dir");
let local_path = dir.path().join("file_report.csv");
tokio::fs::write(&local_path, b"hello")
.await
.expect("write local file");
turn_context.cwd = AbsolutePathBuf::try_from(dir.path()).expect("absolute path");
let mut config = (*turn_context.config).clone();
config.chatgpt_base_url = format!("{}/backend-api", server.uri());
turn_context.config = Arc::new(config);
let rewritten = rewrite_argument_value_for_openai_files(
&turn_context,
Some(&auth),
"file",
&serde_json::json!("file_report.csv"),
)
.await
.expect("rewrite should succeed");
assert_eq!(
rewritten,
Some(serde_json::json!({
"download_url": format!("{}/download/file_123", server.uri()),
"file_id": "file_123",
"mime_type": "text/csv",
"file_name": "file_report.csv",
"uri": "sediment://file_123",
"file_size_bytes": 5,
}))
);
}
#[tokio::test]
async fn rewrite_argument_value_for_openai_files_rewrites_array_paths() {
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_json;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
let server = MockServer::start().await;
Mock::given(method("POST"))
.and(path("/backend-api/files"))
.and(header("chatgpt-account-id", "account_id"))
.and(body_json(serde_json::json!({
"file_name": "one.csv",
"file_size": 3,
"use_case": "codex",
})))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"file_id": "file_1",
"upload_url": format!("{}/upload/file_1", server.uri()),
})))
.expect(1)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/backend-api/files"))
.and(header("chatgpt-account-id", "account_id"))
.and(body_json(serde_json::json!({
"file_name": "two.csv",
"file_size": 3,
"use_case": "codex",
})))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"file_id": "file_2",
"upload_url": format!("{}/upload/file_2", server.uri()),
})))
.expect(1)
.mount(&server)
.await;
Mock::given(method("PUT"))
.and(path("/upload/file_1"))
.respond_with(ResponseTemplate::new(200))
.expect(1)
.mount(&server)
.await;
Mock::given(method("PUT"))
.and(path("/upload/file_2"))
.respond_with(ResponseTemplate::new(200))
.expect(1)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/backend-api/files/file_1/uploaded"))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"status": "success",
"download_url": format!("{}/download/file_1", server.uri()),
"file_name": "one.csv",
"mime_type": "text/csv",
"file_size_bytes": 3,
})))
.expect(1)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/backend-api/files/file_2/uploaded"))
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
"status": "success",
"download_url": format!("{}/download/file_2", server.uri()),
"file_name": "two.csv",
"mime_type": "text/csv",
"file_size_bytes": 3,
})))
.expect(1)
.mount(&server)
.await;
let (_, mut turn_context) = make_session_and_context().await;
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let dir = tempdir().expect("temp dir");
tokio::fs::write(dir.path().join("one.csv"), b"one")
.await
.expect("write first local file");
tokio::fs::write(dir.path().join("two.csv"), b"two")
.await
.expect("write second local file");
turn_context.cwd = AbsolutePathBuf::try_from(dir.path()).expect("absolute path");
let mut config = (*turn_context.config).clone();
config.chatgpt_base_url = format!("{}/backend-api", server.uri());
turn_context.config = Arc::new(config);
let rewritten = rewrite_argument_value_for_openai_files(
&turn_context,
Some(&auth),
"files",
&serde_json::json!(["one.csv", "two.csv"]),
)
.await
.expect("rewrite should succeed");
assert_eq!(
rewritten,
Some(serde_json::json!([
{
"download_url": format!("{}/download/file_1", server.uri()),
"file_id": "file_1",
"mime_type": "text/csv",
"file_name": "one.csv",
"uri": "sediment://file_1",
"file_size_bytes": 3,
},
{
"download_url": format!("{}/download/file_2", server.uri()),
"file_id": "file_2",
"mime_type": "text/csv",
"file_name": "two.csv",
"uri": "sediment://file_2",
"file_size_bytes": 3,
}
]))
);
}
/// Verifies that a declared file param pointing at a nonexistent local path
/// makes the argument rewrite fail, and that the surfaced error names both
/// the upload failure and the offending parameter.
#[tokio::test]
async fn rewrite_mcp_tool_arguments_for_openai_files_surfaces_upload_failures() {
    let (mut session, turn_context) = make_session_and_context().await;
    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
    session.services.auth_manager = crate::test_support::auth_manager_from_auth(auth);

    // A path that cannot exist, so the upload step must fail.
    let arguments = serde_json::json!({
        "file": "/definitely/missing/file.csv",
    });
    let file_params = ["file".to_string()];

    let error = rewrite_mcp_tool_arguments_for_openai_files(
        &session,
        &turn_context,
        Some(arguments),
        Some(&file_params),
    )
    .await
    .expect_err("missing file should fail");

    assert!(error.contains("failed to upload"));
    assert!(error.contains("file"));
}
}

View File

@@ -26,6 +26,7 @@ use crate::guardian::guardian_approval_request_to_json;
use crate::guardian::guardian_rejection_message;
use crate::guardian::review_approval_request;
use crate::guardian::routes_approval_to_guardian;
use crate::mcp_openai_file::rewrite_mcp_tool_arguments_for_openai_files;
use crate::mcp_tool_approval_templates::RenderedMcpToolApprovalParam;
use crate::mcp_tool_approval_templates::render_mcp_tool_approval_template;
use codex_analytics::AppInvocation;
@@ -34,6 +35,7 @@ use codex_analytics::build_track_events_context;
use codex_config::types::AppToolApproval;
use codex_features::Feature;
use codex_mcp::CODEX_APPS_MCP_SERVER_NAME;
use codex_mcp::declared_openai_file_input_param_names;
use codex_mcp::mcp_permission_prompt_is_auto_approved;
use codex_otel::sanitize_metric_tag_value;
use codex_protocol::mcp::CallToolResult;
@@ -178,14 +180,16 @@ pub(crate) async fn handle_mcp_tool_call(
let start = Instant::now();
let result = async {
sess.call_tool(
execute_mcp_tool_call(
sess.as_ref(),
turn_context.as_ref(),
&server,
&tool_name,
arguments_value.clone(),
metadata.as_ref(),
request_meta.clone(),
)
.await
.map_err(|e| format!("tool call error: {e:?}"))
}
.instrument(mcp_tool_call_span(
sess.as_ref(),
@@ -200,13 +204,6 @@ pub(crate) async fn handle_mcp_tool_call(
},
))
.await;
let result = sanitize_mcp_tool_result_for_model(
turn_context
.model_info
.input_modalities
.contains(&InputModality::Image),
result,
);
if let Err(error) = &result {
tracing::warn!("MCP tool call error: {error:?}");
}
@@ -294,11 +291,17 @@ pub(crate) async fn handle_mcp_tool_call(
maybe_mark_thread_memory_mode_polluted(sess.as_ref(), turn_context.as_ref()).await;
let start = Instant::now();
// Perform the tool call.
let result = async {
sess.call_tool(&server, &tool_name, arguments_value.clone(), request_meta)
.await
.map_err(|e| format!("tool call error: {e:?}"))
execute_mcp_tool_call(
sess.as_ref(),
turn_context.as_ref(),
&server,
&tool_name,
arguments_value.clone(),
metadata.as_ref(),
request_meta,
)
.await
}
.instrument(mcp_tool_call_span(
sess.as_ref(),
@@ -313,13 +316,6 @@ pub(crate) async fn handle_mcp_tool_call(
},
))
.await;
let result = sanitize_mcp_tool_result_for_model(
turn_context
.model_info
.input_modalities
.contains(&InputModality::Image),
result,
);
if let Err(error) = &result {
tracing::warn!("MCP tool call error: {error:?}");
}
@@ -453,6 +449,35 @@ fn record_server_fields(span: &Span, url: Option<&str>) {
}
}
/// Runs one MCP tool call end to end: rewrites any arguments declared as
/// `openai/fileParams` in the tool's approval metadata, invokes the tool on
/// `server`, and sanitizes the result for the model.
///
/// Returns the sanitized `CallToolResult`, or a human-readable error string
/// when either the file-argument rewrite or the tool call itself fails.
async fn execute_mcp_tool_call(
    sess: &Session,
    turn_context: &TurnContext,
    server: &str,
    tool_name: &str,
    arguments_value: Option<serde_json::Value>,
    metadata: Option<&McpToolApprovalMetadata>,
    request_meta: Option<serde_json::Value>,
) -> Result<CallToolResult, String> {
    // Only tools whose metadata declared file input params get rewritten;
    // passing `None` leaves the arguments untouched. A rewrite failure
    // (e.g. a missing local file) aborts the call via `?` before the
    // server is contacted.
    let rewritten_arguments = rewrite_mcp_tool_arguments_for_openai_files(
        sess,
        turn_context,
        arguments_value,
        metadata.and_then(|metadata| metadata.openai_file_input_params.as_deref()),
    )
    .await?;
    let result = sess
        .call_tool(server, tool_name, rewritten_arguments, request_meta)
        .await
        .map_err(|e| format!("tool call error: {e:?}"))?;
    // Strip content the active model cannot consume: the flag tells the
    // sanitizer whether image input is supported.
    sanitize_mcp_tool_result_for_model(
        turn_context
            .model_info
            .input_modalities
            .contains(&InputModality::Image),
        Ok(result),
    )
}
async fn maybe_mark_thread_memory_mode_polluted(sess: &Session, turn_context: &TurnContext) {
if !turn_context
.config
@@ -566,6 +591,7 @@ pub(crate) struct McpToolApprovalMetadata {
tool_title: Option<String>,
tool_description: Option<String>,
codex_apps_meta: Option<serde_json::Map<String, serde_json::Value>>,
openai_file_input_params: Option<Vec<String>>,
}
const MCP_TOOL_CODEX_APPS_META_KEY: &str = "_codex_apps";
@@ -983,7 +1009,6 @@ pub(crate) async fn lookup_mcp_tool_metadata(
.await
.list_all_tools()
.await;
let tool_info = tools
.into_values()
.find(|tool_info| tool_info.server_name == server && tool_info.tool.name == tool_name)?;
@@ -1025,6 +1050,10 @@ pub(crate) async fn lookup_mcp_tool_metadata(
.and_then(|meta| meta.get(MCP_TOOL_CODEX_APPS_META_KEY))
.and_then(serde_json::Value::as_object)
.cloned(),
openai_file_input_params: Some(declared_openai_file_input_param_names(
tool_info.tool.meta.as_deref(),
))
.filter(|params| !params.is_empty()),
})
}

View File

@@ -59,6 +59,7 @@ fn approval_metadata(
tool_title: tool_title.map(str::to_string),
tool_description: tool_description.map(str::to_string),
codex_apps_meta: None,
openai_file_input_params: None,
}
}
@@ -597,6 +598,7 @@ async fn codex_apps_tool_call_request_meta_includes_turn_metadata_and_codex_apps
.cloned()
.expect("_codex_apps metadata should be an object"),
),
openai_file_input_params: None,
};
assert_eq!(
@@ -744,6 +746,7 @@ fn guardian_mcp_review_request_includes_annotations_when_present() {
tool_title: None,
tool_description: None,
codex_apps_meta: None,
openai_file_input_params: None,
};
let request = build_guardian_mcp_tool_review_request("call-1", &invocation, Some(&metadata));
@@ -1254,6 +1257,7 @@ async fn approve_mode_skips_when_annotations_do_not_require_approval() {
tool_title: Some("Read Only Tool".to_string()),
tool_description: None,
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1321,6 +1325,7 @@ async fn guardian_mode_skips_auto_when_annotations_do_not_require_approval() {
tool_title: Some("Read Only Tool".to_string()),
tool_description: None,
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1391,6 +1396,7 @@ async fn guardian_mode_mcp_denial_returns_rationale_message() {
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Reads calendar data.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1441,6 +1447,7 @@ async fn prompt_mode_waits_for_approval_when_annotations_do_not_require_approval
tool_title: Some("Read Only Tool".to_string()),
tool_description: None,
codex_apps_meta: None,
openai_file_input_params: None,
};
let mut approval_task = {
@@ -1517,6 +1524,7 @@ async fn approve_mode_blocks_when_arc_returns_interrupt_for_model() {
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Performs a risky action.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1586,6 +1594,7 @@ async fn custom_approve_mode_blocks_when_arc_returns_interrupt_for_model() {
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Performs a risky action.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1655,6 +1664,7 @@ async fn approve_mode_blocks_when_arc_returns_interrupt_without_annotations() {
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Performs a risky action.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(
@@ -1732,6 +1742,7 @@ async fn full_access_mode_skips_arc_monitor_for_all_approval_modes() {
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Performs a risky action.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
for approval_mode in [
@@ -1833,6 +1844,7 @@ async fn approve_mode_routes_arc_ask_user_to_guardian_when_guardian_reviewer_is_
tool_title: Some("Dangerous Tool".to_string()),
tool_description: Some("Performs a risky action.".to_string()),
codex_apps_meta: None,
openai_file_input_params: None,
};
let decision = maybe_request_mcp_tool_approval(

View File

@@ -22,6 +22,8 @@ const SEARCHABLE_TOOL_COUNT: usize = 100;
pub const CALENDAR_CREATE_EVENT_RESOURCE_URI: &str =
"connector://calendar/tools/calendar_create_event";
const CALENDAR_LIST_EVENTS_RESOURCE_URI: &str = "connector://calendar/tools/calendar_list_events";
pub const DOCUMENT_EXTRACT_TEXT_RESOURCE_URI: &str =
"connector://calendar/tools/calendar_extract_text";
#[derive(Clone)]
pub struct AppsTestServer {
@@ -235,6 +237,39 @@ impl Respond for CodexAppsJsonRpcResponder {
"connector_id": CONNECTOR_ID
}
}
},
{
"name": "calendar_extract_text",
"description": "Extract text from an uploaded document.",
"annotations": {
"readOnlyHint": false
},
"inputSchema": {
"type": "object",
"properties": {
"file": {
"type": "object",
"description": "Document file payload.",
"properties": {
"file_id": { "type": "string" }
},
"required": ["file_id"]
}
},
"required": ["file"],
"additionalProperties": false
},
"_meta": {
"connector_id": CONNECTOR_ID,
"connector_name": self.connector_name.clone(),
"connector_description": self.connector_description.clone(),
"openai/fileParams": ["file"],
"_codex_apps": {
"resource_uri": DOCUMENT_EXTRACT_TEXT_RESOURCE_URI,
"contains_mcp_source": true,
"connector_id": CONNECTOR_ID
}
}
}
],
"nextCursor": null
@@ -245,7 +280,7 @@ impl Respond for CodexAppsJsonRpcResponder {
.pointer_mut("/result/tools")
.and_then(Value::as_array_mut)
{
for index in 2..SEARCHABLE_TOOL_COUNT {
for index in 3..SEARCHABLE_TOOL_COUNT {
tools.push(json!({
"name": format!("calendar_timezone_option_{index}"),
"description": format!("Read timezone option {index}."),
@@ -283,6 +318,10 @@ impl Respond for CodexAppsJsonRpcResponder {
.pointer("/params/arguments/starts_at")
.and_then(Value::as_str)
.unwrap_or_default();
let file_id = body
.pointer("/params/arguments/file/file_id")
.and_then(Value::as_str)
.unwrap_or_default();
let codex_apps_meta = body.pointer("/params/_meta/_codex_apps").cloned();
ResponseTemplate::new(200).set_body_json(json!({
@@ -291,7 +330,7 @@ impl Respond for CodexAppsJsonRpcResponder {
"result": {
"content": [{
"type": "text",
"text": format!("called {tool_name} for {title} at {starts_at}")
"text": format!("called {tool_name} for {title} at {starts_at} with {file_id}")
}],
"structuredContent": {
"_codex_apps": codex_apps_meta,

View File

@@ -106,6 +106,7 @@ mod model_switching;
mod model_visible_layout;
mod models_cache_ttl;
mod models_etag_responses;
mod openai_file_mcp;
mod otel;
mod pending_input;
mod permissions_messages;

View File

@@ -0,0 +1,173 @@
#![cfg(not(target_os = "windows"))]
use anyhow::Result;
use codex_core::config::Config;
use codex_features::Feature;
use codex_login::CodexAuth;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use core_test_support::apps_test_server::AppsTestServer;
use core_test_support::apps_test_server::DOCUMENT_EXTRACT_TEXT_RESOURCE_URI;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
use core_test_support::responses::start_mock_server;
use core_test_support::test_codex::test_codex;
use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;
use wiremock::Mock;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_json;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
const DOCUMENT_EXTRACT_TOOL: &str = "mcp__codex_apps__calendar_extract_text";
/// Enables the Apps feature on a test `Config` and points it at the given
/// mock ChatGPT backend. Panics if the feature cannot be enabled, since the
/// test cannot proceed without it.
fn configure_apps(config: &mut Config, chatgpt_base_url: &str) {
    config
        .features
        .enable(Feature::Apps)
        .unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
    config.chatgpt_base_url = chatgpt_base_url.to_string();
}
/// Looks up a tool entry in a captured `/v1/responses` request body, matching
/// either its `name` field or its `type` field (built-in tools use `type`).
/// Panics when no entry matches so the calling test fails with the full
/// request body available for debugging.
fn tool_by_name<'a>(body: &'a Value, name: &str) -> &'a Value {
    let matches = |tool: &&Value| {
        let by_name = tool.get("name").and_then(Value::as_str) == Some(name);
        let by_type = tool.get("type").and_then(Value::as_str) == Some(name);
        by_name || by_type
    };
    let found = match body.get("tools").and_then(Value::as_array) {
        Some(tools) => tools.iter().find(matches),
        None => None,
    };
    match found {
        Some(tool) => tool,
        None => panic!("missing tool {name} in /v1/responses request: {body:?}"),
    }
}
/// End-to-end check of the Codex Apps `openai/fileParams` bridge:
/// the model-visible tool schema masks the file param as an absolute-path
/// string, the local file is uploaded through the backend `/files` flow,
/// and the MCP `tools/call` receives the rewritten uploaded-file payload
/// in place of the path.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn codex_apps_file_params_upload_local_paths_before_mcp_tool_call() -> Result<()> {
    let server = start_mock_server().await;
    let apps_server = AppsTestServer::mount(&server).await?;
    // Upload flow, step 1: create the file record. "hello world" is 11 bytes,
    // so the create request must carry file_size 11.
    Mock::given(method("POST"))
        .and(path("/files"))
        .and(header("chatgpt-account-id", "account_id"))
        .and(body_json(json!({
            "file_name": "report.txt",
            "file_size": 11,
            "use_case": "codex",
        })))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "file_id": "file_123",
            "upload_url": format!("{}/upload/file_123", server.uri()),
        })))
        .expect(1)
        .mount(&server)
        .await;
    // Upload flow, step 2: PUT the raw bytes to the issued upload URL.
    Mock::given(method("PUT"))
        .and(path("/upload/file_123"))
        .and(header("content-length", "11"))
        .respond_with(ResponseTemplate::new(200))
        .expect(1)
        .mount(&server)
        .await;
    // Upload flow, step 3: finalize; the response metadata is what gets
    // spliced into the MCP tool arguments below.
    Mock::given(method("POST"))
        .and(path("/files/file_123/uploaded"))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "status": "success",
            "download_url": format!("{}/download/file_123", server.uri()),
            "file_name": "report.txt",
            "mime_type": "text/plain",
            "file_size_bytes": 11,
        })))
        .expect(1)
        .mount(&server)
        .await;
    let call_id = "extract-call-1";
    // Scripted model turns: first response asks to call the extract tool with
    // a bare relative path (resolved against the test cwd), second completes.
    let mock = mount_sse_sequence(
        &server,
        vec![
            sse(vec![
                ev_response_created("resp-1"),
                ev_function_call(
                    call_id,
                    DOCUMENT_EXTRACT_TOOL,
                    &json!({"file": "report.txt"}).to_string(),
                ),
                ev_completed("resp-1"),
            ]),
            sse(vec![
                ev_response_created("resp-2"),
                ev_assistant_message("msg-1", "done"),
                ev_completed("resp-2"),
            ]),
        ],
    )
    .await;
    let mut builder = test_codex()
        .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
        .with_config(move |config| configure_apps(config, apps_server.chatgpt_base_url.as_str()));
    let test = builder.build(&server).await?;
    tokio::fs::write(test.cwd.path().join("report.txt"), b"hello world").await?;
    test.submit_turn_with_policies(
        "Extract the report text with the app tool.",
        AskForApproval::Never,
        SandboxPolicy::DangerFullAccess,
    )
    .await?;
    let requests = mock.requests();
    let body = requests[0].body_json();
    // The model-visible schema must mask the object-typed file param as a
    // plain string and instruct the model to supply an absolute local path.
    let extract_tool = tool_by_name(&body, DOCUMENT_EXTRACT_TOOL);
    assert_eq!(
        extract_tool.pointer("/parameters/properties/file"),
        Some(&json!({
            "type": "string",
            "description": "Document file payload. This parameter expects an absolute local file path. If you want to upload a file, provide the absolute path to that file here."
        }))
    );
    // Locate the recorded apps tools/call request for the extract tool.
    let apps_tool_call = server
        .received_requests()
        .await
        .unwrap_or_default()
        .into_iter()
        .find_map(|request| {
            let body: Value = serde_json::from_slice(&request.body).ok()?;
            (request.url.path() == "/api/codex/apps"
                && body.get("method").and_then(Value::as_str) == Some("tools/call")
                && body.pointer("/params/name").and_then(Value::as_str)
                    == Some("calendar_extract_text"))
            .then_some(body)
        })
        .expect("apps calendar_extract_text tools/call request should be recorded");
    // The local path must have been rewritten client-side into the uploaded
    // file's metadata object before reaching the MCP server.
    assert_eq!(
        apps_tool_call.pointer("/params/arguments/file"),
        Some(&json!({
            "download_url": format!("{}/download/file_123", server.uri()),
            "file_id": "file_123",
            "mime_type": "text/plain",
            "file_name": "report.txt",
            "uri": "sediment://file_123",
            "file_size_bytes": 11,
        }))
    );
    // Codex Apps metadata should be forwarded untouched alongside the call.
    assert_eq!(
        apps_tool_call.pointer("/params/_meta/_codex_apps"),
        Some(&json!({
            "resource_uri": DOCUMENT_EXTRACT_TEXT_RESOURCE_URI,
            "contains_mcp_source": true,
            "connector_id": "calendar",
        }))
    );
    server.verify().await;
    Ok(())
}