Compare commits

...

7 Commits

Author SHA1 Message Date
Ahmed Ibrahim
53e3afad32 Fix stale MCP config test fields
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:36:22 +00:00
Ahmed Ibrahim
58c5839266 Drop unused MCP crate dependency
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 04:59:02 +00:00
Ahmed Ibrahim
e62a2fd26c Extract MCP core crate
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 04:53:51 +00:00
Ahmed Ibrahim
1cf68f940c codex: fix CI failure on PR #15010
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 04:05:05 +00:00
Ahmed Ibrahim
0f406c3de0 codex: address PR review feedback (#15010)
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 03:49:51 +00:00
Ahmed Ibrahim
8b3fc35e0b fix: unblock config loader split CI
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 03:34:46 +00:00
Ahmed Ibrahim
38a28973a8 refactor: move config loader internals into codex-config
Extract config-layer IO and managed requirements loading into codex-config so codex-core keeps a thinner config loader facade.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 02:30:22 +00:00
29 changed files with 3113 additions and 2738 deletions

33
codex-rs/Cargo.lock generated
View File

@@ -1430,6 +1430,7 @@ dependencies = [
"codex-feedback",
"codex-file-search",
"codex-login",
"codex-mcp",
"codex-otel",
"codex-protocol",
"codex-rmcp-client",
@@ -1786,10 +1787,12 @@ name = "codex-config"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"codex-app-server-protocol",
"codex-execpolicy",
"codex-protocol",
"codex-utils-absolute-path",
"core-foundation 0.9.4",
"futures",
"multimap",
"pretty_assertions",
@@ -1802,6 +1805,7 @@ dependencies = [
"toml 0.9.11+spec-1.1.0",
"toml_edit 0.24.0+spec-1.1.0",
"tracing",
"windows-sys 0.52.0",
]
[[package]]
@@ -1846,6 +1850,7 @@ dependencies = [
"codex-git",
"codex-hooks",
"codex-keyring-store",
"codex-mcp",
"codex-network-proxy",
"codex-otel",
"codex-protocol",
@@ -1866,7 +1871,6 @@ dependencies = [
"codex-utils-stream-parser",
"codex-utils-string",
"codex-windows-sandbox",
"core-foundation 0.9.4",
"core_test_support",
"csv",
"ctor 0.6.3",
@@ -1926,7 +1930,6 @@ dependencies = [
"walkdir",
"which",
"wildmatch",
"windows-sys 0.52.0",
"wiremock",
"zip",
"zstd",
@@ -2166,6 +2169,32 @@ dependencies = [
"wiremock",
]
[[package]]
name = "codex-mcp"
version = "0.0.0"
dependencies = [
"anyhow",
"async-channel",
"codex-async-utils",
"codex-config",
"codex-otel",
"codex-protocol",
"codex-rmcp-client",
"futures",
"pretty_assertions",
"regex-lite",
"rmcp",
"serde",
"serde_json",
"sha1",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tokio-util",
"tracing",
"url",
]
[[package]]
name = "codex-mcp-server"
version = "0.0.0"

View File

@@ -32,6 +32,7 @@ members = [
"linux-sandbox",
"lmstudio",
"login",
"mcp-core",
"mcp-server",
"network-proxy",
"ollama",
@@ -114,6 +115,7 @@ codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-mcp = { path = "mcp-core" }
codex-mcp-server = { path = "mcp-server" }
codex-network-proxy = { path = "network-proxy" }
codex-ollama = { path = "ollama" }

View File

@@ -40,6 +40,7 @@ codex-backend-client = { workspace = true }
codex-file-search = { workspace = true }
codex-chatgpt = { workspace = true }
codex-login = { workspace = true }
codex-mcp = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-feedback = { workspace = true }

View File

@@ -216,7 +216,6 @@ use codex_core::git_info::git_diff_to_remote;
use codex_core::mcp::auth::discover_supported_scopes;
use codex_core::mcp::auth::resolve_oauth_scopes;
use codex_core::mcp::collect_mcp_snapshot;
use codex_core::mcp::group_tools_by_server;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::parse_cursor;
use codex_core::plugins::MarketplaceError;
@@ -240,6 +239,7 @@ use codex_feedback::CodexFeedback;
use codex_login::ServerOptions as LoginServerOptions;
use codex_login::ShutdownHandle;
use codex_login::run_login_server;
use codex_mcp::group_tools_by_server;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ForcedLoginMethod;

View File

@@ -4,10 +4,14 @@ version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-execpolicy = { workspace = true }
codex-protocol = { workspace = true }
@@ -24,6 +28,16 @@ toml = { workspace = true }
toml_edit = { workspace = true }
tracing = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"
[target.'cfg(target_os = "windows")'.dependencies]
windows-sys = { version = "0.52", features = [
"Win32_Foundation",
"Win32_System_Com",
"Win32_UI_Shell",
] }
[dev-dependencies]
anyhow = { workspace = true }
pretty_assertions = { workspace = true }

View File

@@ -1,10 +1,10 @@
use super::LoaderOverrides;
use crate::LoaderOverrides;
use crate::config_error_from_toml;
use crate::io_error_from_config_error;
#[cfg(target_os = "macos")]
use super::macos::ManagedAdminConfigLayer;
use crate::macos::ManagedAdminConfigLayer;
#[cfg(target_os = "macos")]
use super::macos::load_managed_admin_config_layer;
use codex_config::config_error_from_toml;
use codex_config::io_error_from_config_error;
use crate::macos::load_managed_admin_config_layer;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::io;
use std::path::Path;
@@ -16,26 +16,26 @@ use toml::Value as TomlValue;
const CODEX_MANAGED_CONFIG_SYSTEM_PATH: &str = "/etc/codex/managed_config.toml";
#[derive(Debug, Clone)]
pub(super) struct MangedConfigFromFile {
pub struct ManagedConfigFromFile {
pub managed_config: TomlValue,
pub file: AbsolutePathBuf,
}
#[derive(Debug, Clone)]
pub(super) struct ManagedConfigFromMdm {
pub struct ManagedConfigFromMdm {
pub managed_config: TomlValue,
pub raw_toml: String,
}
#[derive(Debug, Clone)]
pub(super) struct LoadedConfigLayers {
pub struct LoadedConfigLayers {
/// If present, data read from a file such as `/etc/codex/managed_config.toml`.
pub managed_config: Option<MangedConfigFromFile>,
pub managed_config: Option<ManagedConfigFromFile>,
/// If present, data read from managed preferences (macOS only).
pub managed_config_from_mdm: Option<ManagedConfigFromMdm>,
}
pub(super) async fn load_config_layers_internal(
pub async fn load_config_layers_internal(
codex_home: &Path,
overrides: LoaderOverrides,
) -> io::Result<LoadedConfigLayers> {
@@ -59,7 +59,7 @@ pub(super) async fn load_config_layers_internal(
let managed_config =
read_config_from_path(&managed_config_path, /*log_missing_as_info*/ false)
.await?
.map(|managed_config| MangedConfigFromFile {
.map(|managed_config| ManagedConfigFromFile {
managed_config,
file: managed_config_path.clone(),
});
@@ -88,7 +88,7 @@ fn map_managed_admin_layer(layer: ManagedAdminConfigLayer) -> ManagedConfigFromM
}
}
pub(super) async fn read_config_from_path(
async fn read_config_from_path(
path: impl AsRef<Path>,
log_missing_as_info: bool,
) -> io::Result<Option<TomlValue>> {
@@ -120,8 +120,7 @@ pub(super) async fn read_config_from_path(
}
}
/// Return the default managed config path.
pub(super) fn managed_config_default_path(codex_home: &Path) -> PathBuf {
fn managed_config_default_path(codex_home: &Path) -> PathBuf {
#[cfg(unix)]
{
let _ = codex_home;

View File

@@ -3,6 +3,10 @@ mod config_requirements;
mod constraint;
mod diagnostics;
mod fingerprint;
mod layer_io;
mod loader;
#[cfg(target_os = "macos")]
mod macos;
mod merge;
mod overrides;
mod requirements_exec_policy;
@@ -44,6 +48,15 @@ pub use diagnostics::format_config_error;
pub use diagnostics::format_config_error_with_source;
pub use diagnostics::io_error_from_config_error;
pub use fingerprint::version_for_toml;
pub use layer_io::LoadedConfigLayers;
pub use layer_io::ManagedConfigFromFile;
pub use layer_io::ManagedConfigFromMdm;
pub use layer_io::load_config_layers_internal;
pub use loader::load_managed_admin_requirements;
pub use loader::load_requirements_from_legacy_scheme;
pub use loader::load_requirements_toml;
pub use loader::system_config_toml_file;
pub use loader::system_requirements_toml_file;
pub use merge::merge_toml_values;
pub use overrides::build_cli_overrides_layer;
pub use requirements_exec_policy::RequirementsExecPolicy;

View File

@@ -0,0 +1,236 @@
use crate::ConfigRequirementsToml;
use crate::ConfigRequirementsWithSources;
use crate::LoadedConfigLayers;
use crate::RequirementSource;
#[cfg(target_os = "macos")]
use crate::macos::load_managed_admin_requirements_toml;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::protocol::AskForApproval;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use std::io;
use std::path::Path;
#[cfg(windows)]
use std::path::PathBuf;
#[cfg(unix)]
pub const SYSTEM_CONFIG_TOML_FILE_UNIX: &str = "/etc/codex/config.toml";
#[cfg(windows)]
const DEFAULT_PROGRAM_DATA_DIR_WINDOWS: &str = r"C:\ProgramData";
/// If available, apply requirements from the platform system
/// `requirements.toml` location to `config_requirements_toml` by filling in
/// any unset fields.
///
/// A missing file is not an error (there are simply no system requirements);
/// an unreadable or unparsable file is surfaced as an `io::Error` that names
/// the offending path.
pub async fn load_requirements_toml(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    requirements_toml_file: impl AsRef<Path>,
) -> io::Result<()> {
    let requirements_toml_file =
        AbsolutePathBuf::from_absolute_path(requirements_toml_file.as_ref())?;
    match tokio::fs::read_to_string(&requirements_toml_file).await {
        Ok(contents) => {
            let requirements_config: ConfigRequirementsToml =
                toml::from_str(&contents).map_err(|err| {
                    // Parse failures are reported as InvalidData with the file
                    // path so admins can find the broken requirements file.
                    io::Error::new(
                        io::ErrorKind::InvalidData,
                        format!(
                            "Error parsing requirements file {}: {err}",
                            requirements_toml_file.as_ref().display(),
                        ),
                    )
                })?;
            // Fill in only fields that no earlier source has already set.
            config_requirements_toml.merge_unset_fields(
                RequirementSource::SystemRequirementsToml {
                    file: requirements_toml_file.clone(),
                },
                requirements_config,
            );
        }
        // No system requirements file present: nothing to merge.
        Err(err) if err.kind() == io::ErrorKind::NotFound => {}
        Err(err) => {
            return Err(io::Error::new(
                err.kind(),
                format!(
                    "Failed to read requirements file {}: {err}",
                    requirements_toml_file.as_ref().display(),
                ),
            ));
        }
    }
    Ok(())
}
/// Merge managed-admin (MDM) requirements into `config_requirements_toml`.
///
/// On macOS this delegates to `load_managed_admin_requirements_toml`,
/// passing through the optional base64-encoded override; on every other
/// platform it is a no-op that returns `Ok(())`.
pub async fn load_managed_admin_requirements(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    managed_config_requirements_base64: Option<&str>,
) -> io::Result<()> {
    #[cfg(target_os = "macos")]
    {
        load_managed_admin_requirements_toml(
            config_requirements_toml,
            managed_config_requirements_base64,
        )
        .await
    }
    #[cfg(not(target_os = "macos"))]
    {
        // Silence unused-argument warnings on non-macOS targets.
        let _ = config_requirements_toml;
        let _ = managed_config_requirements_base64;
        Ok(())
    }
}
/// Platform-specific path of the system-wide `requirements.toml`.
#[cfg(unix)]
pub fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    AbsolutePathBuf::from_absolute_path(Path::new("/etc/codex/requirements.toml"))
}

/// Platform-specific path of the system-wide `requirements.toml`
/// (resolved under the Windows ProgramData known folder).
#[cfg(windows)]
pub fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    windows_system_requirements_toml_file()
}
/// Platform-specific path of the system-wide `config.toml`.
#[cfg(unix)]
pub fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    AbsolutePathBuf::from_absolute_path(Path::new(SYSTEM_CONFIG_TOML_FILE_UNIX))
}

/// Platform-specific path of the system-wide `config.toml`
/// (resolved under the Windows ProgramData known folder).
#[cfg(windows)]
pub fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    windows_system_config_toml_file()
}
/// Directory holding Codex system-level configuration on Windows:
/// `<ProgramData>\OpenAI\Codex`. If the known-folder lookup fails, a
/// warning is logged and the conventional `C:\ProgramData` is used instead.
#[cfg(windows)]
fn windows_codex_system_dir() -> PathBuf {
    let program_data = windows_program_data_dir_from_known_folder().unwrap_or_else(|err| {
        tracing::warn!(
            error = %err,
            "Failed to resolve ProgramData known folder; using default path"
        );
        PathBuf::from(DEFAULT_PROGRAM_DATA_DIR_WINDOWS)
    });
    program_data.join("OpenAI").join("Codex")
}
/// `requirements.toml` inside the Windows Codex system directory.
#[cfg(windows)]
fn windows_system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    let requirements_toml_file = windows_codex_system_dir().join("requirements.toml");
    AbsolutePathBuf::try_from(requirements_toml_file)
}

/// `config.toml` inside the Windows Codex system directory.
#[cfg(windows)]
fn windows_system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    let config_toml_file = windows_codex_system_dir().join("config.toml");
    AbsolutePathBuf::try_from(config_toml_file)
}
/// Resolve the Windows ProgramData directory via the Known Folder API
/// (`SHGetKnownFolderPath` with `FOLDERID_ProgramData`).
#[cfg(windows)]
fn windows_program_data_dir_from_known_folder() -> io::Result<PathBuf> {
    use std::ffi::OsString;
    use std::os::windows::ffi::OsStringExt;
    use windows_sys::Win32::System::Com::CoTaskMemFree;
    use windows_sys::Win32::UI::Shell::FOLDERID_ProgramData;
    use windows_sys::Win32::UI::Shell::KF_FLAG_DEFAULT;
    use windows_sys::Win32::UI::Shell::SHGetKnownFolderPath;
    let mut path_ptr = std::ptr::null_mut::<u16>();
    let known_folder_flags = u32::try_from(KF_FLAG_DEFAULT).map_err(|_| {
        io::Error::other(format!(
            "KF_FLAG_DEFAULT did not fit in u32: {KF_FLAG_DEFAULT}"
        ))
    })?;
    // Known folder IDs reference:
    // https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
    // SAFETY: SHGetKnownFolderPath initializes path_ptr with a CoTaskMem-allocated,
    // null-terminated UTF-16 string on success.
    let hr = unsafe {
        SHGetKnownFolderPath(&FOLDERID_ProgramData, known_folder_flags, 0, &mut path_ptr)
    };
    if hr != 0 {
        return Err(io::Error::other(format!(
            "SHGetKnownFolderPath(FOLDERID_ProgramData) failed with HRESULT {hr:#010x}"
        )));
    }
    if path_ptr.is_null() {
        return Err(io::Error::other(
            "SHGetKnownFolderPath(FOLDERID_ProgramData) returned a null pointer",
        ));
    }
    // SAFETY: path_ptr is a valid null-terminated UTF-16 string allocated by
    // SHGetKnownFolderPath and must be freed with CoTaskMemFree.
    let path = unsafe {
        let mut len = 0usize;
        while *path_ptr.add(len) != 0 {
            len += 1;
        }
        let wide = std::slice::from_raw_parts(path_ptr, len);
        let path = PathBuf::from(OsString::from_wide(wide));
        CoTaskMemFree(path_ptr.cast());
        path
    };
    Ok(path)
}
/// Re-interpret legacy `managed_config.toml` layers as requirements and merge
/// them into `config_requirements_toml`.
pub async fn load_requirements_from_legacy_scheme(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    loaded_config_layers: LoadedConfigLayers,
) -> io::Result<()> {
    // In this implementation, earlier layers cannot be overwritten by later
    // layers, so list managed_config_from_mdm first because it has the highest
    // precedence.
    let LoadedConfigLayers {
        managed_config,
        managed_config_from_mdm,
    } = loaded_config_layers;
    for (source, config) in managed_config_from_mdm
        .map(|config| {
            (
                RequirementSource::LegacyManagedConfigTomlFromMdm,
                config.managed_config,
            )
        })
        .into_iter()
        .chain(managed_config.map(|config| {
            (
                RequirementSource::LegacyManagedConfigTomlFromFile { file: config.file },
                config.managed_config,
            )
        }))
    {
        let legacy_config: LegacyManagedConfigToml =
            config.try_into().map_err(|err: toml::de::Error| {
                io::Error::new(
                    io::ErrorKind::InvalidData,
                    format!("Failed to parse config requirements as TOML: {err}"),
                )
            })?;
        let requirements = ConfigRequirementsToml::from(legacy_config);
        config_requirements_toml.merge_unset_fields(source, requirements);
    }
    Ok(())
}
/// The legacy mechanism for specifying admin-enforced configuration is to read
/// from a file like `/etc/codex/managed_config.toml` that has the same
/// structure as `config.toml` where fields like `approval_policy` can specify
/// exactly one value rather than a list of allowed values.
///
/// If present, re-interpret `managed_config.toml` as a `requirements.toml`
/// where each specified field is treated as a constraint allowing only that
/// value.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
struct LegacyManagedConfigToml {
    approval_policy: Option<AskForApproval>,
    sandbox_mode: Option<SandboxMode>,
}
impl From<LegacyManagedConfigToml> for ConfigRequirementsToml {
    /// Convert a single-valued legacy config into the list-valued
    /// requirements form.
    fn from(legacy: LegacyManagedConfigToml) -> Self {
        let mut config_requirements_toml = ConfigRequirementsToml::default();
        let LegacyManagedConfigToml {
            approval_policy,
            sandbox_mode,
        } = legacy;
        if let Some(approval_policy) = approval_policy {
            config_requirements_toml.allowed_approval_policies = Some(vec![approval_policy]);
        }
        if let Some(sandbox_mode) = sandbox_mode {
            let required_mode = sandbox_mode.into();
            // Allowing read-only is a requirement for Codex to function correctly.
            // So in this backfill path, we append read-only if it's not already specified.
            let mut allowed_modes = vec![crate::SandboxModeRequirement::ReadOnly];
            if required_mode != crate::SandboxModeRequirement::ReadOnly {
                allowed_modes.push(required_mode);
            }
            config_requirements_toml.allowed_sandbox_modes = Some(allowed_modes);
        }
        config_requirements_toml
    }
}

View File

@@ -1,6 +1,6 @@
use super::ConfigRequirementsToml;
use super::ConfigRequirementsWithSources;
use super::RequirementSource;
use crate::ConfigRequirementsToml;
use crate::ConfigRequirementsWithSources;
use crate::RequirementSource;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use core_foundation::base::TCFType;
@@ -16,19 +16,19 @@ const MANAGED_PREFERENCES_CONFIG_KEY: &str = "config_toml_base64";
const MANAGED_PREFERENCES_REQUIREMENTS_KEY: &str = "requirements_toml_base64";
#[derive(Debug, Clone)]
pub(super) struct ManagedAdminConfigLayer {
pub struct ManagedAdminConfigLayer {
pub config: TomlValue,
pub raw_toml: String,
}
pub(super) fn managed_preferences_requirements_source() -> RequirementSource {
fn managed_preferences_requirements_source() -> RequirementSource {
RequirementSource::MdmManagedPreferences {
domain: MANAGED_PREFERENCES_APPLICATION_ID.to_string(),
key: MANAGED_PREFERENCES_REQUIREMENTS_KEY.to_string(),
}
}
pub(crate) async fn load_managed_admin_config_layer(
pub async fn load_managed_admin_config_layer(
override_base64: Option<&str>,
) -> io::Result<Option<ManagedAdminConfigLayer>> {
if let Some(encoded) = override_base64 {
@@ -61,7 +61,7 @@ fn load_managed_admin_config() -> io::Result<Option<ManagedAdminConfigLayer>> {
.transpose()
}
pub(crate) async fn load_managed_admin_requirements_toml(
pub async fn load_managed_admin_requirements_toml(
target: &mut ConfigRequirementsWithSources,
override_base64: Option<&str>,
) -> io::Result<()> {

View File

@@ -41,6 +41,7 @@ codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-hooks = { workspace = true }
codex-keyring-store = { workspace = true }
codex-mcp = { workspace = true }
codex-network-proxy = { workspace = true }
codex-otel = { workspace = true }
codex-artifacts = { workspace = true }
@@ -123,7 +124,6 @@ landlock = { workspace = true }
seccompiler = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"
keyring = { workspace = true, features = ["apple-native"] }
# Build OpenSSL from source for musl builds.
@@ -136,11 +136,6 @@ openssl-sys = { workspace = true, features = ["vendored"] }
[target.'cfg(target_os = "windows")'.dependencies]
keyring = { workspace = true, features = ["windows-native"] }
windows-sys = { version = "0.52", features = [
"Win32_Foundation",
"Win32_System_Com",
"Win32_UI_Shell",
] }
[target.'cfg(any(target_os = "freebsd", target_os = "openbsd"))'.dependencies]
keyring = { workspace = true, features = ["sync-secret-service"] }

View File

@@ -214,6 +214,7 @@ use crate::mcp::with_codex_apps_mcp;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::mcp_connection_manager::codex_apps_tools_cache_key;
use crate::mcp_connection_manager::filter_non_codex_apps_mcp_tools_only;
use crate::mcp_connection_manager::to_mcp_server_configs;
use crate::memories;
use crate::mentions::build_connector_slug_counts;
use crate::mentions::build_skill_name_counts;
@@ -1890,8 +1891,9 @@ impl Session {
cancel_guard.cancel();
*cancel_guard = CancellationToken::new();
}
let extracted_mcp_servers = to_mcp_server_configs(&mcp_servers);
let (mcp_connection_manager, cancel_token) = McpConnectionManager::new(
&mcp_servers,
&extracted_mcp_servers,
config.mcp_oauth_credentials_store_mode,
auth_statuses.clone(),
&session_configuration.approval_policy,
@@ -4006,8 +4008,9 @@ impl Session {
guard.cancel();
*guard = CancellationToken::new();
}
let extracted_mcp_servers = to_mcp_server_configs(&mcp_servers);
let (refreshed_manager, cancel_token) = McpConnectionManager::new(
&mcp_servers,
&extracted_mcp_servers,
store_mode,
auth_statuses,
&turn_context.config.permissions.approval_policy,

View File

@@ -1,27 +1,18 @@
mod layer_io;
#[cfg(target_os = "macos")]
mod macos;
#[cfg(test)]
mod tests;
use crate::config::ConfigToml;
use crate::config_loader::layer_io::LoadedConfigLayers;
use crate::git_info::resolve_root_git_project_for_trust;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::CONFIG_TOML_FILE;
use codex_config::ConfigRequirementsWithSources;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::protocol::AskForApproval;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dunce::canonicalize as normalize_path;
use serde::Deserialize;
use std::io;
use std::path::Path;
#[cfg(windows)]
use std::path::PathBuf;
use toml::Value as TomlValue;
pub use codex_config::AppRequirementToml;
@@ -38,6 +29,7 @@ pub use codex_config::ConfigRequirements;
pub use codex_config::ConfigRequirementsToml;
pub use codex_config::ConstrainedWithSource;
pub use codex_config::FeatureRequirementsToml;
use codex_config::LoadedConfigLayers;
pub use codex_config::LoaderOverrides;
pub use codex_config::McpServerIdentity;
pub use codex_config::McpServerRequirement;
@@ -55,18 +47,16 @@ pub(crate) use codex_config::config_error_from_toml;
pub use codex_config::format_config_error;
pub use codex_config::format_config_error_with_source;
pub(crate) use codex_config::io_error_from_config_error;
use codex_config::load_config_layers_internal;
use codex_config::load_managed_admin_requirements;
use codex_config::load_requirements_from_legacy_scheme;
pub(crate) use codex_config::load_requirements_toml;
pub use codex_config::merge_toml_values;
use codex_config::system_config_toml_file;
use codex_config::system_requirements_toml_file;
#[cfg(test)]
pub(crate) use codex_config::version_for_toml;
/// On Unix systems, load default settings from this file path, if present.
/// Note that /etc/codex/ is treated as a "config folder," so subfolders such
/// as skills/ and rules/ will also be honored.
pub const SYSTEM_CONFIG_TOML_FILE_UNIX: &str = "/etc/codex/config.toml";
#[cfg(windows)]
const DEFAULT_PROGRAM_DATA_DIR_WINDOWS: &str = r"C:\ProgramData";
const DEFAULT_PROJECT_ROOT_MARKERS: &[&str] = &[".git"];
pub(crate) async fn first_layer_config_error(layers: &ConfigLayerStack) -> Option<ConfigError> {
@@ -125,8 +115,7 @@ pub async fn load_config_layers_state(
.merge_unset_fields(RequirementSource::CloudRequirements, requirements);
}
#[cfg(target_os = "macos")]
macos::load_managed_admin_requirements_toml(
load_managed_admin_requirements(
&mut config_requirements_toml,
overrides
.macos_managed_config_requirements_base64
@@ -140,7 +129,7 @@ pub async fn load_config_layers_state(
// Make a best-effort to support the legacy `managed_config.toml` as a
// requirements specification.
let loaded_config_layers = layer_io::load_config_layers_internal(codex_home, overrides).await?;
let loaded_config_layers = load_config_layers_internal(codex_home, overrides).await?;
load_requirements_from_legacy_scheme(
&mut config_requirements_toml,
loaded_config_layers.clone(),
@@ -343,185 +332,6 @@ async fn load_config_toml_for_required_layer(
Ok(create_entry(toml_value))
}
/// If available, apply requirements from the platform system
/// `requirements.toml` location to `config_requirements_toml` by filling in
/// any unset fields.
async fn load_requirements_toml(
config_requirements_toml: &mut ConfigRequirementsWithSources,
requirements_toml_file: impl AsRef<Path>,
) -> io::Result<()> {
let requirements_toml_file =
AbsolutePathBuf::from_absolute_path(requirements_toml_file.as_ref())?;
match tokio::fs::read_to_string(&requirements_toml_file).await {
Ok(contents) => {
let requirements_config: ConfigRequirementsToml =
toml::from_str(&contents).map_err(|e| {
io::Error::new(
io::ErrorKind::InvalidData,
format!(
"Error parsing requirements file {}: {e}",
requirements_toml_file.as_ref().display(),
),
)
})?;
config_requirements_toml.merge_unset_fields(
RequirementSource::SystemRequirementsToml {
file: requirements_toml_file.clone(),
},
requirements_config,
);
}
Err(e) => {
if e.kind() != io::ErrorKind::NotFound {
return Err(io::Error::new(
e.kind(),
format!(
"Failed to read requirements file {}: {e}",
requirements_toml_file.as_ref().display(),
),
));
}
}
}
Ok(())
}
#[cfg(unix)]
fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
AbsolutePathBuf::from_absolute_path(Path::new("/etc/codex/requirements.toml"))
}
#[cfg(windows)]
fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
windows_system_requirements_toml_file()
}
#[cfg(unix)]
fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
AbsolutePathBuf::from_absolute_path(Path::new(SYSTEM_CONFIG_TOML_FILE_UNIX))
}
#[cfg(windows)]
fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
windows_system_config_toml_file()
}
#[cfg(windows)]
fn windows_codex_system_dir() -> PathBuf {
let program_data = windows_program_data_dir_from_known_folder().unwrap_or_else(|err| {
tracing::warn!(
error = %err,
"Failed to resolve ProgramData known folder; using default path"
);
PathBuf::from(DEFAULT_PROGRAM_DATA_DIR_WINDOWS)
});
program_data.join("OpenAI").join("Codex")
}
#[cfg(windows)]
fn windows_system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
let requirements_toml_file = windows_codex_system_dir().join("requirements.toml");
AbsolutePathBuf::try_from(requirements_toml_file)
}
#[cfg(windows)]
fn windows_system_config_toml_file() -> io::Result<AbsolutePathBuf> {
let config_toml_file = windows_codex_system_dir().join("config.toml");
AbsolutePathBuf::try_from(config_toml_file)
}
#[cfg(windows)]
fn windows_program_data_dir_from_known_folder() -> io::Result<PathBuf> {
use std::ffi::OsString;
use std::os::windows::ffi::OsStringExt;
use windows_sys::Win32::System::Com::CoTaskMemFree;
use windows_sys::Win32::UI::Shell::FOLDERID_ProgramData;
use windows_sys::Win32::UI::Shell::KF_FLAG_DEFAULT;
use windows_sys::Win32::UI::Shell::SHGetKnownFolderPath;
let mut path_ptr = std::ptr::null_mut::<u16>();
let known_folder_flags = u32::try_from(KF_FLAG_DEFAULT).map_err(|_| {
io::Error::other(format!(
"KF_FLAG_DEFAULT did not fit in u32: {KF_FLAG_DEFAULT}"
))
})?;
// Known folder IDs reference:
// https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
// SAFETY: SHGetKnownFolderPath initializes path_ptr with a CoTaskMem-allocated,
// null-terminated UTF-16 string on success.
let hr = unsafe {
SHGetKnownFolderPath(&FOLDERID_ProgramData, known_folder_flags, 0, &mut path_ptr)
};
if hr != 0 {
return Err(io::Error::other(format!(
"SHGetKnownFolderPath(FOLDERID_ProgramData) failed with HRESULT {hr:#010x}"
)));
}
if path_ptr.is_null() {
return Err(io::Error::other(
"SHGetKnownFolderPath(FOLDERID_ProgramData) returned a null pointer",
));
}
// SAFETY: path_ptr is a valid null-terminated UTF-16 string allocated by
// SHGetKnownFolderPath and must be freed with CoTaskMemFree.
let path = unsafe {
let mut len = 0usize;
while *path_ptr.add(len) != 0 {
len += 1;
}
let wide = std::slice::from_raw_parts(path_ptr, len);
let path = PathBuf::from(OsString::from_wide(wide));
CoTaskMemFree(path_ptr.cast());
path
};
Ok(path)
}
async fn load_requirements_from_legacy_scheme(
config_requirements_toml: &mut ConfigRequirementsWithSources,
loaded_config_layers: LoadedConfigLayers,
) -> io::Result<()> {
// In this implementation, earlier layers cannot be overwritten by later
// layers, so list managed_config_from_mdm first because it has the highest
// precedence.
let LoadedConfigLayers {
managed_config,
managed_config_from_mdm,
} = loaded_config_layers;
for (source, config) in managed_config_from_mdm
.map(|config| {
(
RequirementSource::LegacyManagedConfigTomlFromMdm,
config.managed_config,
)
})
.into_iter()
.chain(managed_config.map(|c| {
(
RequirementSource::LegacyManagedConfigTomlFromFile { file: c.file },
c.managed_config,
)
}))
{
let legacy_config: LegacyManagedConfigToml =
config.try_into().map_err(|err: toml::de::Error| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("Failed to parse config requirements as TOML: {err}"),
)
})?;
let new_requirements_toml = ConfigRequirementsToml::from(legacy_config);
config_requirements_toml.merge_unset_fields(source, new_requirements_toml);
}
Ok(())
}
/// Reads `project_root_markers` from the [toml::Value] produced by merging
/// `config.toml` from the config layers in the stack preceding
/// [ConfigLayerSource::Project].
@@ -895,51 +705,12 @@ async fn load_project_layers(
Ok(layers)
}
/// The legacy mechanism for specifying admin-enforced configuration is to read
/// from a file like `/etc/codex/managed_config.toml` that has the same
/// structure as `config.toml` where fields like `approval_policy` can specify
/// exactly one value rather than a list of allowed values.
///
/// If present, re-interpret `managed_config.toml` as a `requirements.toml`
/// where each specified field is treated as a constraint allowing only that
/// value.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
struct LegacyManagedConfigToml {
approval_policy: Option<AskForApproval>,
sandbox_mode: Option<SandboxMode>,
}
impl From<LegacyManagedConfigToml> for ConfigRequirementsToml {
fn from(legacy: LegacyManagedConfigToml) -> Self {
let mut config_requirements_toml = ConfigRequirementsToml::default();
let LegacyManagedConfigToml {
approval_policy,
sandbox_mode,
} = legacy;
if let Some(approval_policy) = approval_policy {
config_requirements_toml.allowed_approval_policies = Some(vec![approval_policy]);
}
if let Some(sandbox_mode) = sandbox_mode {
let required_mode: SandboxModeRequirement = sandbox_mode.into();
// Allowing read-only is a requirement for Codex to function correctly.
// So in this backfill path, we append read-only if it's not already specified.
let mut allowed_modes = vec![SandboxModeRequirement::ReadOnly];
if required_mode != SandboxModeRequirement::ReadOnly {
allowed_modes.push(required_mode);
}
config_requirements_toml.allowed_sandbox_modes = Some(allowed_modes);
}
config_requirements_toml
}
}
// Cannot name this `mod tests` because of tests.rs in this folder.
#[cfg(test)]
mod unit_tests {
use super::*;
#[cfg(windows)]
use std::path::Path;
use codex_config::ManagedConfigFromFile;
use codex_protocol::protocol::SandboxPolicy;
use tempfile::tempdir;
#[test]
@@ -979,65 +750,81 @@ foo = "xyzzy"
Ok(())
}
#[test]
fn legacy_managed_config_backfill_includes_read_only_sandbox_mode() {
let legacy = LegacyManagedConfigToml {
approval_policy: None,
sandbox_mode: Some(SandboxMode::WorkspaceWrite),
#[tokio::test]
async fn legacy_managed_config_backfill_includes_read_only_sandbox_mode() {
let tmp = tempdir().expect("tempdir");
let managed_path = AbsolutePathBuf::try_from(tmp.path().join("managed_config.toml"))
.expect("managed path");
let loaded_layers = LoadedConfigLayers {
managed_config: Some(ManagedConfigFromFile {
managed_config: toml::toml! {
sandbox_mode = "workspace-write"
}
.into(),
file: managed_path.clone(),
}),
managed_config_from_mdm: None,
};
let requirements = ConfigRequirementsToml::from(legacy);
let mut requirements_with_sources = ConfigRequirementsWithSources::default();
load_requirements_from_legacy_scheme(&mut requirements_with_sources, loaded_layers)
.await
.expect("load legacy requirements");
let requirements: ConfigRequirements = requirements_with_sources
.try_into()
.expect("requirements parse");
assert_eq!(
requirements.allowed_sandbox_modes,
Some(vec![
SandboxModeRequirement::ReadOnly,
SandboxModeRequirement::WorkspaceWrite
])
requirements.sandbox_policy.get(),
&SandboxPolicy::new_read_only_policy()
);
assert!(
requirements
.sandbox_policy
.can_set(&SandboxPolicy::new_workspace_write_policy())
.is_ok()
);
assert_eq!(
requirements
.sandbox_policy
.can_set(&SandboxPolicy::DangerFullAccess),
Err(codex_config::ConstraintError::InvalidValue {
field_name: "sandbox_mode",
candidate: "DangerFullAccess".into(),
allowed: "[ReadOnly, WorkspaceWrite]".into(),
requirement_source: RequirementSource::LegacyManagedConfigTomlFromFile {
file: managed_path,
},
})
);
}
#[cfg(windows)]
#[test]
fn windows_system_requirements_toml_file_uses_expected_suffix() {
let expected = windows_program_data_dir_from_known_folder()
.unwrap_or_else(|_| PathBuf::from(DEFAULT_PROGRAM_DATA_DIR_WINDOWS))
.join("OpenAI")
.join("Codex")
.join("requirements.toml");
assert_eq!(
windows_system_requirements_toml_file()
.expect("requirements.toml path")
.as_path(),
expected.as_path()
);
assert!(
windows_system_requirements_toml_file()
system_requirements_toml_file()
.expect("requirements.toml path")
.as_path()
.ends_with(Path::new("OpenAI").join("Codex").join("requirements.toml"))
.ends_with(
std::path::Path::new("OpenAI")
.join("Codex")
.join("requirements.toml")
)
);
}
#[cfg(windows)]
#[test]
fn windows_system_config_toml_file_uses_expected_suffix() {
let expected = windows_program_data_dir_from_known_folder()
.unwrap_or_else(|_| PathBuf::from(DEFAULT_PROGRAM_DATA_DIR_WINDOWS))
.join("OpenAI")
.join("Codex")
.join("config.toml");
assert_eq!(
windows_system_config_toml_file()
.expect("config.toml path")
.as_path(),
expected.as_path()
);
assert!(
windows_system_config_toml_file()
system_config_toml_file()
.expect("config.toml path")
.as_path()
.ends_with(Path::new("OpenAI").join("Codex").join("config.toml"))
.ends_with(
std::path::Path::new("OpenAI")
.join("Codex")
.join("config.toml")
)
);
}
}

View File

@@ -40,6 +40,7 @@ use crate::mcp::auth::compute_auth_statuses;
use crate::mcp::with_codex_apps_mcp;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::mcp_connection_manager::codex_apps_tools_cache_key;
use crate::mcp_connection_manager::to_mcp_server_configs;
use crate::plugins::AppConnectorId;
use crate::plugins::PluginsManager;
use crate::plugins::list_tool_suggest_discoverable_plugins;
@@ -215,8 +216,9 @@ pub async fn list_accessible_connectors_from_mcp_tools_with_options_and_status(
use_legacy_landlock: config.features.use_legacy_landlock(),
};
let extracted_mcp_servers = to_mcp_server_configs(&mcp_servers);
let (mcp_connection_manager, cancel_token) = McpConnectionManager::new(
&mcp_servers,
&extracted_mcp_servers,
config.mcp_oauth_credentials_store_mode,
auth_status_entries,
&config.permissions.approval_policy,

View File

@@ -1,126 +1,28 @@
use std::collections::HashMap;
use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
pub use codex_mcp::auth::McpAuthStatusEntry;
pub use codex_mcp::auth::McpOAuthLoginConfig;
pub use codex_mcp::auth::McpOAuthLoginSupport;
pub use codex_mcp::auth::McpOAuthScopesSource;
pub use codex_mcp::auth::ResolvedMcpOAuthScopes;
pub use codex_mcp::auth::resolve_oauth_scopes;
pub use codex_mcp::auth::should_retry_without_scopes;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::OAuthProviderError;
use codex_rmcp_client::determine_streamable_http_auth_status;
use codex_rmcp_client::discover_streamable_http_oauth;
use futures::future::join_all;
use tracing::warn;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
#[derive(Debug, Clone)]
pub struct McpOAuthLoginConfig {
pub url: String,
pub http_headers: Option<HashMap<String, String>>,
pub env_http_headers: Option<HashMap<String, String>>,
pub discovered_scopes: Option<Vec<String>>,
}
#[derive(Debug)]
pub enum McpOAuthLoginSupport {
Supported(McpOAuthLoginConfig),
Unsupported,
Unknown(anyhow::Error),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum McpOAuthScopesSource {
Explicit,
Configured,
Discovered,
Empty,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolvedMcpOAuthScopes {
pub scopes: Vec<String>,
pub source: McpOAuthScopesSource,
}
use crate::mcp_connection_manager::to_mcp_server_config;
use crate::mcp_connection_manager::to_mcp_server_transport_config;
pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
let McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var,
http_headers,
env_http_headers,
} = transport
else {
return McpOAuthLoginSupport::Unsupported;
};
if bearer_token_env_var.is_some() {
return McpOAuthLoginSupport::Unsupported;
}
match discover_streamable_http_oauth(url, http_headers.clone(), env_http_headers.clone()).await
{
Ok(Some(discovery)) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
url: url.clone(),
http_headers: http_headers.clone(),
env_http_headers: env_http_headers.clone(),
discovered_scopes: discovery.scopes_supported,
}),
Ok(None) => McpOAuthLoginSupport::Unsupported,
Err(err) => McpOAuthLoginSupport::Unknown(err),
}
codex_mcp::auth::oauth_login_support(&to_mcp_server_transport_config(transport)).await
}
pub async fn discover_supported_scopes(
transport: &McpServerTransportConfig,
) -> Option<Vec<String>> {
match oauth_login_support(transport).await {
McpOAuthLoginSupport::Supported(config) => config.discovered_scopes,
McpOAuthLoginSupport::Unsupported | McpOAuthLoginSupport::Unknown(_) => None,
}
}
pub fn resolve_oauth_scopes(
explicit_scopes: Option<Vec<String>>,
configured_scopes: Option<Vec<String>>,
discovered_scopes: Option<Vec<String>>,
) -> ResolvedMcpOAuthScopes {
if let Some(scopes) = explicit_scopes {
return ResolvedMcpOAuthScopes {
scopes,
source: McpOAuthScopesSource::Explicit,
};
}
if let Some(scopes) = configured_scopes {
return ResolvedMcpOAuthScopes {
scopes,
source: McpOAuthScopesSource::Configured,
};
}
if let Some(scopes) = discovered_scopes
&& !scopes.is_empty()
{
return ResolvedMcpOAuthScopes {
scopes,
source: McpOAuthScopesSource::Discovered,
};
}
ResolvedMcpOAuthScopes {
scopes: Vec::new(),
source: McpOAuthScopesSource::Empty,
}
}
pub fn should_retry_without_scopes(scopes: &ResolvedMcpOAuthScopes, error: &anyhow::Error) -> bool {
scopes.source == McpOAuthScopesSource::Discovered
&& error.downcast_ref::<OAuthProviderError>().is_some()
}
#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
pub config: McpServerConfig,
pub auth_status: McpAuthStatus,
codex_mcp::auth::discover_supported_scopes(&to_mcp_server_transport_config(transport)).await
}
pub async fn compute_auth_statuses<'a, I>(
@@ -130,159 +32,9 @@ pub async fn compute_auth_statuses<'a, I>(
where
I: IntoIterator<Item = (&'a String, &'a McpServerConfig)>,
{
let futures = servers.into_iter().map(|(name, config)| {
let name = name.clone();
let config = config.clone();
async move {
let auth_status = match compute_auth_status(&name, &config, store_mode).await {
Ok(status) => status,
Err(error) => {
warn!("failed to determine auth status for MCP server `{name}`: {error:?}");
McpAuthStatus::Unsupported
}
};
let entry = McpAuthStatusEntry {
config,
auth_status,
};
(name, entry)
}
});
join_all(futures).await.into_iter().collect()
}
async fn compute_auth_status(
server_name: &str,
config: &McpServerConfig,
store_mode: OAuthCredentialsStoreMode,
) -> Result<McpAuthStatus> {
match &config.transport {
McpServerTransportConfig::Stdio { .. } => Ok(McpAuthStatus::Unsupported),
McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var,
http_headers,
env_http_headers,
} => {
determine_streamable_http_auth_status(
server_name,
url,
bearer_token_env_var.as_deref(),
http_headers.clone(),
env_http_headers.clone(),
store_mode,
)
.await
}
}
}
#[cfg(test)]
mod tests {
use anyhow::anyhow;
use pretty_assertions::assert_eq;
use super::McpOAuthScopesSource;
use super::OAuthProviderError;
use super::ResolvedMcpOAuthScopes;
use super::resolve_oauth_scopes;
use super::should_retry_without_scopes;
#[test]
fn resolve_oauth_scopes_prefers_explicit() {
let resolved = resolve_oauth_scopes(
Some(vec!["explicit".to_string()]),
Some(vec!["configured".to_string()]),
Some(vec!["discovered".to_string()]),
);
assert_eq!(
resolved,
ResolvedMcpOAuthScopes {
scopes: vec!["explicit".to_string()],
source: McpOAuthScopesSource::Explicit,
}
);
}
#[test]
fn resolve_oauth_scopes_prefers_configured_over_discovered() {
let resolved = resolve_oauth_scopes(
None,
Some(vec!["configured".to_string()]),
Some(vec!["discovered".to_string()]),
);
assert_eq!(
resolved,
ResolvedMcpOAuthScopes {
scopes: vec!["configured".to_string()],
source: McpOAuthScopesSource::Configured,
}
);
}
#[test]
fn resolve_oauth_scopes_uses_discovered_when_needed() {
let resolved = resolve_oauth_scopes(None, None, Some(vec!["discovered".to_string()]));
assert_eq!(
resolved,
ResolvedMcpOAuthScopes {
scopes: vec!["discovered".to_string()],
source: McpOAuthScopesSource::Discovered,
}
);
}
#[test]
fn resolve_oauth_scopes_preserves_explicitly_empty_configured_scopes() {
let resolved = resolve_oauth_scopes(None, Some(Vec::new()), Some(vec!["ignored".into()]));
assert_eq!(
resolved,
ResolvedMcpOAuthScopes {
scopes: Vec::new(),
source: McpOAuthScopesSource::Configured,
}
);
}
#[test]
fn resolve_oauth_scopes_falls_back_to_empty() {
let resolved = resolve_oauth_scopes(None, None, None);
assert_eq!(
resolved,
ResolvedMcpOAuthScopes {
scopes: Vec::new(),
source: McpOAuthScopesSource::Empty,
}
);
}
#[test]
fn should_retry_without_scopes_only_for_discovered_provider_errors() {
let discovered = ResolvedMcpOAuthScopes {
scopes: vec!["scope".to_string()],
source: McpOAuthScopesSource::Discovered,
};
let provider_error = anyhow!(OAuthProviderError::new(
Some("invalid_scope".to_string()),
Some("scope rejected".to_string()),
));
assert!(should_retry_without_scopes(&discovered, &provider_error));
let configured = ResolvedMcpOAuthScopes {
scopes: vec!["scope".to_string()],
source: McpOAuthScopesSource::Configured,
};
assert!(!should_retry_without_scopes(&configured, &provider_error));
assert!(!should_retry_without_scopes(
&discovered,
&anyhow!("timed out waiting for OAuth callback"),
));
}
let servers = servers
.into_iter()
.map(|(name, config)| (name.clone(), to_mcp_server_config(config)))
.collect::<HashMap<_, _>>();
codex_mcp::auth::compute_auth_statuses(servers.iter(), store_mode).await
}

View File

@@ -21,76 +21,22 @@ use crate::CodexAuth;
use crate::config::Config;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
use crate::mcp::auth::McpAuthStatusEntry;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::mcp_connection_manager::SandboxState;
use crate::mcp_connection_manager::codex_apps_tools_cache_key;
use crate::mcp_connection_manager::to_mcp_server_configs;
use crate::plugins::PluginCapabilitySummary;
use crate::plugins::PluginsManager;
const MCP_TOOL_NAME_PREFIX: &str = "mcp";
const MCP_TOOL_NAME_DELIMITER: &str = "__";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
pub use codex_mcp::CODEX_APPS_MCP_SERVER_NAME;
pub use codex_mcp::ToolPluginProvenance;
pub use codex_mcp::group_tools_by_server;
pub use codex_mcp::split_qualified_tool_name;
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct ToolPluginProvenance {
plugin_display_names_by_connector_id: HashMap<String, Vec<String>>,
plugin_display_names_by_mcp_server_name: HashMap<String, Vec<String>>,
}
impl ToolPluginProvenance {
pub fn plugin_display_names_for_connector_id(&self, connector_id: &str) -> &[String] {
self.plugin_display_names_by_connector_id
.get(connector_id)
.map(Vec::as_slice)
.unwrap_or(&[])
}
pub fn plugin_display_names_for_mcp_server_name(&self, server_name: &str) -> &[String] {
self.plugin_display_names_by_mcp_server_name
.get(server_name)
.map(Vec::as_slice)
.unwrap_or(&[])
}
fn from_capability_summaries(capability_summaries: &[PluginCapabilitySummary]) -> Self {
let mut tool_plugin_provenance = Self::default();
for plugin in capability_summaries {
for connector_id in &plugin.app_connector_ids {
tool_plugin_provenance
.plugin_display_names_by_connector_id
.entry(connector_id.0.clone())
.or_default()
.push(plugin.display_name.clone());
}
for server_name in &plugin.mcp_server_names {
tool_plugin_provenance
.plugin_display_names_by_mcp_server_name
.entry(server_name.clone())
.or_default()
.push(plugin.display_name.clone());
}
}
for plugin_names in tool_plugin_provenance
.plugin_display_names_by_connector_id
.values_mut()
.chain(
tool_plugin_provenance
.plugin_display_names_by_mcp_server_name
.values_mut(),
)
{
plugin_names.sort_unstable();
plugin_names.dedup();
}
tool_plugin_provenance
}
}
fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
match env::var(CODEX_CONNECTORS_TOKEN_ENV_VAR) {
Ok(value) if !value.trim().is_empty() => Some(CODEX_CONNECTORS_TOKEN_ENV_VAR.to_string()),
@@ -219,10 +165,29 @@ impl McpManager {
pub fn tool_plugin_provenance(&self, config: &Config) -> ToolPluginProvenance {
let loaded_plugins = self.plugins_manager.plugins_for_config(config);
ToolPluginProvenance::from_capability_summaries(loaded_plugins.capability_summaries())
tool_plugin_provenance_from_capability_summaries(loaded_plugins.capability_summaries())
}
}
fn tool_plugin_provenance_from_capability_summaries(
capability_summaries: &[PluginCapabilitySummary],
) -> ToolPluginProvenance {
let mut tool_plugin_provenance = ToolPluginProvenance::default();
for plugin in capability_summaries {
for connector_id in &plugin.app_connector_ids {
tool_plugin_provenance
.record_connector_plugin_name(connector_id.0.clone(), plugin.display_name.clone());
}
for server_name in &plugin.mcp_server_names {
tool_plugin_provenance
.record_server_plugin_name(server_name.clone(), plugin.display_name.clone());
}
}
tool_plugin_provenance.sort_and_dedup();
tool_plugin_provenance
}
fn configured_mcp_servers(
config: &Config,
plugins_manager: &PluginsManager,
@@ -282,8 +247,9 @@ pub async fn collect_mcp_snapshot(config: &Config) -> McpListToolsResponseEvent
use_legacy_landlock: config.features.use_legacy_landlock(),
};
let extracted_mcp_servers = to_mcp_server_configs(&mcp_servers);
let (mcp_connection_manager, cancel_token) = McpConnectionManager::new(
&mcp_servers,
&extracted_mcp_servers,
config.mcp_oauth_credentials_store_mode,
auth_status_entries.clone(),
&config.permissions.approval_policy,
@@ -302,39 +268,9 @@ pub async fn collect_mcp_snapshot(config: &Config) -> McpListToolsResponseEvent
snapshot
}
pub fn split_qualified_tool_name(qualified_name: &str) -> Option<(String, String)> {
let mut parts = qualified_name.split(MCP_TOOL_NAME_DELIMITER);
let prefix = parts.next()?;
if prefix != MCP_TOOL_NAME_PREFIX {
return None;
}
let server_name = parts.next()?;
let tool_name: String = parts.collect::<Vec<_>>().join(MCP_TOOL_NAME_DELIMITER);
if tool_name.is_empty() {
return None;
}
Some((server_name.to_string(), tool_name))
}
pub fn group_tools_by_server(
tools: &HashMap<String, Tool>,
) -> HashMap<String, HashMap<String, Tool>> {
let mut grouped = HashMap::new();
for (qualified_name, tool) in tools {
if let Some((server_name, tool_name)) = split_qualified_tool_name(qualified_name) {
grouped
.entry(server_name)
.or_insert_with(HashMap::new)
.insert(tool_name, tool.clone());
}
}
grouped
}
pub(crate) async fn collect_mcp_snapshot_from_manager(
mcp_connection_manager: &McpConnectionManager,
auth_status_entries: HashMap<String, crate::mcp::auth::McpAuthStatusEntry>,
auth_status_entries: HashMap<String, McpAuthStatusEntry>,
) -> McpListToolsResponseEvent {
let (tools, resources, resource_templates) = tokio::join!(
mcp_connection_manager.list_all_tools(),

View File

@@ -84,7 +84,7 @@ fn group_tools_by_server_strips_prefix_and_groups() {
#[test]
fn tool_plugin_provenance_collects_app_and_mcp_sources() {
let provenance = ToolPluginProvenance::from_capability_summaries(&[
let provenance = tool_plugin_provenance_from_capability_summaries(&[
PluginCapabilitySummary {
display_name: "alpha-plugin".to_string(),
app_connector_ids: vec![AppConnectorId("connector_example".to_string())],

File diff suppressed because it is too large Load Diff

View File

@@ -1,371 +1,3 @@
use std::collections::HashSet;
use std::sync::LazyLock;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use tracing::warn;
/// Schema version that the bundled `consequential_tool_message_templates.json`
/// must declare; a mismatch causes the templates to be rejected at load time.
const CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION: u8 = 4;
/// Placeholder token inside a question template that gets substituted with the
/// connector's display name.
const CONNECTOR_NAME_TEMPLATE_VAR: &str = "{connector_name}";
/// Lazily parsed bundled approval templates. `None` when the embedded JSON
/// fails to parse or carries an unexpected schema version (both logged as
/// warnings by the loader).
static CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES: LazyLock<
    Option<Vec<ConsequentialToolMessageTemplate>>,
> = LazyLock::new(load_consequential_tool_message_templates);
/// Fully rendered approval prompt for a consequential MCP tool call.
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct RenderedMcpToolApprovalTemplate {
    // Question shown to the user; rendered identically to `elicitation_message`.
    pub(crate) question: String,
    // Message used for the elicitation flow.
    pub(crate) elicitation_message: String,
    // Original tool parameters, passed through as a JSON object when present.
    pub(crate) tool_params: Option<Value>,
    // Per-parameter display entries, labeled via the template's param labels.
    pub(crate) tool_params_display: Vec<RenderedMcpToolApprovalParam>,
}
/// One displayable tool parameter: its raw name, JSON value, and display label.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub(crate) struct RenderedMcpToolApprovalParam {
    pub(crate) name: String,
    pub(crate) value: Value,
    pub(crate) display_name: String,
}
/// On-disk shape of `consequential_tool_message_templates.json`.
#[derive(Debug, Deserialize)]
struct ConsequentialToolMessageTemplatesFile {
    schema_version: u8,
    templates: Vec<ConsequentialToolMessageTemplate>,
}
/// A single approval template, keyed by (connector_id, server_name, tool_title).
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
struct ConsequentialToolMessageTemplate {
    connector_id: String,
    server_name: String,
    tool_title: String,
    // May contain `{connector_name}`; substituted at render time.
    template: String,
    template_params: Vec<ConsequentialToolTemplateParam>,
}
/// Maps a tool parameter `name` to the human-readable `label` shown in prompts.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
struct ConsequentialToolTemplateParam {
    name: String,
    label: String,
}
/// Renders the approval prompt for a consequential MCP tool call using the
/// bundled templates.
///
/// Returns `None` when the bundled templates failed to load, or when no
/// template matches / rendering fails (see
/// `render_mcp_tool_approval_template_from_templates`).
pub(crate) fn render_mcp_tool_approval_template(
    server_name: &str,
    connector_id: Option<&str>,
    connector_name: Option<&str>,
    tool_title: Option<&str>,
    tool_params: Option<&Value>,
) -> Option<RenderedMcpToolApprovalTemplate> {
    // `?` short-circuits if the embedded JSON was rejected at load time.
    let templates = CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES.as_ref()?;
    render_mcp_tool_approval_template_from_templates(
        templates,
        server_name,
        connector_id,
        connector_name,
        tool_title,
        tool_params,
    )
}
/// Parses the embedded `consequential_tool_message_templates.json`.
///
/// Returns `None` (and logs a warning) when the JSON cannot be parsed or when
/// its `schema_version` differs from the compiled-in expected version, so a
/// stale bundle degrades to "no templates" rather than failing the process.
fn load_consequential_tool_message_templates() -> Option<Vec<ConsequentialToolMessageTemplate>> {
    let templates = match serde_json::from_str::<ConsequentialToolMessageTemplatesFile>(
        include_str!("consequential_tool_message_templates.json"),
    ) {
        Ok(templates) => templates,
        Err(err) => {
            warn!(error = %err, "failed to parse consequential tool approval templates");
            return None;
        }
    };
    if templates.schema_version != CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION {
        warn!(
            found_schema_version = templates.schema_version,
            expected_schema_version = CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION,
            "unexpected consequential tool approval templates schema version"
        );
        return None;
    }
    Some(templates.templates)
}
/// Finds the template that exactly matches (server_name, connector_id,
/// tool_title) and renders the approval prompt from it.
///
/// Returns `None` when: the connector id or a non-empty tool title is missing,
/// no template matches, the question template fails to render, or
/// `tool_params` is a non-object JSON value / fails parameter rendering.
fn render_mcp_tool_approval_template_from_templates(
    templates: &[ConsequentialToolMessageTemplate],
    server_name: &str,
    connector_id: Option<&str>,
    connector_name: Option<&str>,
    tool_title: Option<&str>,
    tool_params: Option<&Value>,
) -> Option<RenderedMcpToolApprovalTemplate> {
    let connector_id = connector_id?;
    // A blank tool title never matches any template.
    let tool_title = tool_title.map(str::trim).filter(|name| !name.is_empty())?;
    let template = templates.iter().find(|template| {
        template.server_name == server_name
            && template.connector_id == connector_id
            && template.tool_title == tool_title
    })?;
    let elicitation_message = render_question_template(&template.template, connector_name)?;
    let (tool_params, tool_params_display) = match tool_params {
        Some(Value::Object(tool_params)) => {
            render_tool_params(tool_params, &template.template_params)?
        }
        // Non-object params (arrays, scalars) are not renderable.
        Some(_) => return None,
        None => (None, Vec::new()),
    };
    Some(RenderedMcpToolApprovalTemplate {
        // Question and elicitation message are intentionally identical.
        question: elicitation_message.clone(),
        elicitation_message,
        tool_params,
        tool_params_display,
    })
}
/// Renders a question template, substituting `{connector_name}` when present.
///
/// Returns `None` for a blank template, or when the template requires a
/// connector name but none (or only a blank one) was supplied.
fn render_question_template(template: &str, connector_name: Option<&str>) -> Option<String> {
    let trimmed = template.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Literal templates pass through untouched.
    if !trimmed.contains(CONNECTOR_NAME_TEMPLATE_VAR) {
        return Some(trimmed.to_string());
    }
    let substitution = connector_name
        .map(str::trim)
        .filter(|candidate| !candidate.is_empty())?;
    Some(trimmed.replace(CONNECTOR_NAME_TEMPLATE_VAR, substitution))
}
/// Builds the display list for tool parameters.
///
/// Template-labeled params come first (in template order), followed by any
/// remaining params sorted by name and displayed under their raw names.
///
/// Returns `None` when a template label is blank, or when relabeling would
/// produce two entries with the same display name (collision).
fn render_tool_params(
    tool_params: &Map<String, Value>,
    template_params: &[ConsequentialToolTemplateParam],
) -> Option<(Option<Value>, Vec<RenderedMcpToolApprovalParam>)> {
    let mut display_params = Vec::new();
    // Display names already used; duplicates abort rendering entirely.
    let mut display_names = HashSet::new();
    // Raw param names consumed by a template entry.
    let mut handled_names = HashSet::new();
    for template_param in template_params {
        let label = template_param.label.trim();
        if label.is_empty() {
            return None;
        }
        // Template entries for params the caller did not supply are skipped.
        let Some(value) = tool_params.get(&template_param.name) else {
            continue;
        };
        if !display_names.insert(label.to_string()) {
            return None;
        }
        display_params.push(RenderedMcpToolApprovalParam {
            name: template_param.name.clone(),
            value: value.clone(),
            display_name: label.to_string(),
        });
        handled_names.insert(template_param.name.as_str());
    }
    let mut remaining_params = tool_params
        .iter()
        .filter(|(name, _)| !handled_names.contains(name.as_str()))
        .collect::<Vec<_>>();
    // Deterministic ordering for the unlabeled tail.
    remaining_params.sort_by(|(left_name, _), (right_name, _)| left_name.cmp(right_name));
    for (name, value) in remaining_params {
        // NOTE(review): already excluded by the filter above — presumably a
        // defensive re-check; confirm before removing.
        if handled_names.contains(name.as_str()) {
            continue;
        }
        // A raw name colliding with a template label is also a collision.
        if !display_names.insert(name.clone()) {
            return None;
        }
        display_params.push(RenderedMcpToolApprovalParam {
            name: name.clone(),
            value: value.clone(),
            display_name: name.clone(),
        });
    }
    Some((Some(Value::Object(tool_params.clone())), display_params))
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use serde_json::json;
use super::*;
#[test]
fn renders_exact_match_with_readable_param_labels() {
let templates = vec![ConsequentialToolMessageTemplate {
connector_id: "calendar".to_string(),
server_name: "codex_apps".to_string(),
tool_title: "create_event".to_string(),
template: "Allow {connector_name} to create an event?".to_string(),
template_params: vec![
ConsequentialToolTemplateParam {
name: "calendar_id".to_string(),
label: "Calendar".to_string(),
},
ConsequentialToolTemplateParam {
name: "title".to_string(),
label: "Title".to_string(),
},
],
}];
let rendered = render_mcp_tool_approval_template_from_templates(
&templates,
"codex_apps",
Some("calendar"),
Some("Calendar"),
Some("create_event"),
Some(&json!({
"title": "Roadmap review",
"calendar_id": "primary",
"timezone": "UTC",
})),
);
assert_eq!(
rendered,
Some(RenderedMcpToolApprovalTemplate {
question: "Allow Calendar to create an event?".to_string(),
elicitation_message: "Allow Calendar to create an event?".to_string(),
tool_params: Some(json!({
"title": "Roadmap review",
"calendar_id": "primary",
"timezone": "UTC",
})),
tool_params_display: vec![
RenderedMcpToolApprovalParam {
name: "calendar_id".to_string(),
value: json!("primary"),
display_name: "Calendar".to_string(),
},
RenderedMcpToolApprovalParam {
name: "title".to_string(),
value: json!("Roadmap review"),
display_name: "Title".to_string(),
},
RenderedMcpToolApprovalParam {
name: "timezone".to_string(),
value: json!("UTC"),
display_name: "timezone".to_string(),
},
],
})
);
}
#[test]
fn returns_none_when_no_exact_match_exists() {
let templates = vec![ConsequentialToolMessageTemplate {
connector_id: "calendar".to_string(),
server_name: "codex_apps".to_string(),
tool_title: "create_event".to_string(),
template: "Allow {connector_name} to create an event?".to_string(),
template_params: Vec::new(),
}];
assert_eq!(
render_mcp_tool_approval_template_from_templates(
&templates,
"codex_apps",
Some("calendar"),
Some("Calendar"),
Some("delete_event"),
Some(&json!({})),
),
None
);
}
#[test]
fn returns_none_when_relabeling_would_collide() {
let templates = vec![ConsequentialToolMessageTemplate {
connector_id: "calendar".to_string(),
server_name: "codex_apps".to_string(),
tool_title: "create_event".to_string(),
template: "Allow {connector_name} to create an event?".to_string(),
template_params: vec![ConsequentialToolTemplateParam {
name: "calendar_id".to_string(),
label: "timezone".to_string(),
}],
}];
assert_eq!(
render_mcp_tool_approval_template_from_templates(
&templates,
"codex_apps",
Some("calendar"),
Some("Calendar"),
Some("create_event"),
Some(&json!({
"calendar_id": "primary",
"timezone": "UTC",
})),
),
None
);
}
#[test]
fn bundled_templates_load() {
assert_eq!(CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES.is_some(), true);
}
#[test]
fn renders_literal_template_without_connector_substitution() {
let templates = vec![ConsequentialToolMessageTemplate {
connector_id: "github".to_string(),
server_name: "codex_apps".to_string(),
tool_title: "add_comment".to_string(),
template: "Allow GitHub to add a comment to a pull request?".to_string(),
template_params: Vec::new(),
}];
let rendered = render_mcp_tool_approval_template_from_templates(
&templates,
"codex_apps",
Some("github"),
None,
Some("add_comment"),
Some(&json!({})),
);
assert_eq!(
rendered,
Some(RenderedMcpToolApprovalTemplate {
question: "Allow GitHub to add a comment to a pull request?".to_string(),
elicitation_message: "Allow GitHub to add a comment to a pull request?".to_string(),
tool_params: Some(json!({})),
tool_params_display: Vec::new(),
})
);
}
#[test]
fn returns_none_when_connector_placeholder_has_no_value() {
let templates = vec![ConsequentialToolMessageTemplate {
connector_id: "calendar".to_string(),
server_name: "codex_apps".to_string(),
tool_title: "create_event".to_string(),
template: "Allow {connector_name} to create an event?".to_string(),
template_params: Vec::new(),
}];
assert_eq!(
render_mcp_tool_approval_template_from_templates(
&templates,
"codex_apps",
Some("calendar"),
None,
Some("create_event"),
Some(&json!({})),
),
None
);
}
}
pub use codex_mcp::tool_approval_templates::RenderedMcpToolApprovalParam;
pub use codex_mcp::tool_approval_templates::RenderedMcpToolApprovalTemplate;
pub use codex_mcp::tool_approval_templates::render_mcp_tool_approval_template;

View File

@@ -51,6 +51,12 @@ use std::path::Path;
use std::sync::Arc;
use toml_edit::value;
pub(crate) use codex_mcp::MCP_TOOL_APPROVAL_ACCEPT;
pub(crate) use codex_mcp::MCP_TOOL_APPROVAL_ACCEPT_FOR_SESSION;
pub(crate) use codex_mcp::MCP_TOOL_APPROVAL_DECLINE_SYNTHETIC;
use codex_mcp::MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX;
pub(crate) use codex_mcp::is_mcp_tool_approval_question_id;
/// Handles the specified tool call dispatches the appropriate
/// `McpToolCallBegin` and `McpToolCallEnd` events to the `Session`.
pub(crate) async fn handle_mcp_tool_call(
@@ -420,14 +426,6 @@ struct McpToolApprovalElicitationRequest<'a> {
prompt_options: McpToolApprovalPromptOptions,
}
pub(crate) const MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX: &str = "mcp_tool_call_approval";
pub(crate) const MCP_TOOL_APPROVAL_ACCEPT: &str = "Allow";
pub(crate) const MCP_TOOL_APPROVAL_ACCEPT_FOR_SESSION: &str = "Allow for this session";
// Internal-only token used when guardian auto-reviews delegated MCP approvals on the
// RequestUserInput compatibility path. That legacy MCP prompt has allow/cancel labels but no
// real "Decline" answer, so this lets guardian denials round-trip distinctly from user cancel.
// This is not a user-facing option.
pub(crate) const MCP_TOOL_APPROVAL_DECLINE_SYNTHETIC: &str = "__codex_mcp_decline__";
const MCP_TOOL_APPROVAL_ACCEPT_AND_REMEMBER: &str = "Allow and don't ask me again";
const MCP_TOOL_APPROVAL_CANCEL: &str = "Cancel";
const MCP_TOOL_APPROVAL_KIND_KEY: &str = "codex_approval_kind";
@@ -445,12 +443,6 @@ const MCP_TOOL_APPROVAL_TOOL_DESCRIPTION_KEY: &str = "tool_description";
const MCP_TOOL_APPROVAL_TOOL_PARAMS_KEY: &str = "tool_params";
const MCP_TOOL_APPROVAL_TOOL_PARAMS_DISPLAY_KEY: &str = "tool_params_display";
pub(crate) fn is_mcp_tool_approval_question_id(question_id: &str) -> bool {
question_id
.strip_prefix(MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX)
.is_some_and(|suffix| suffix.starts_with('_'))
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
struct McpToolApprovalKey {
server: String,

View File

@@ -0,0 +1,9 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "mcp-core",
crate_name = "codex_mcp",
compile_data = [
"src/consequential_tool_message_templates.json",
],
)

View File

@@ -0,0 +1,41 @@
[package]
name = "codex-mcp"
version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
name = "codex_mcp"
path = "src/lib.rs"
[lints]
workspace = true
[dependencies]
anyhow = { workspace = true }
async-channel = { workspace = true }
codex-async-utils = { workspace = true }
codex-config = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
futures = { workspace = true }
regex-lite = { workspace = true }
rmcp = { workspace = true, default-features = false, features = [
"base64",
"macros",
"schemars",
"server",
] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha1 = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread", "time"] }
tokio-util = { workspace = true, features = ["rt"] }
tracing = { workspace = true, features = ["log"] }
url = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -0,0 +1,288 @@
use std::collections::HashMap;
use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::OAuthProviderError;
use codex_rmcp_client::determine_streamable_http_auth_status;
use codex_rmcp_client::discover_streamable_http_oauth;
use futures::future::join_all;
use tracing::warn;
use crate::McpServerConfig;
use crate::McpServerTransportConfig;
/// Connection details needed to start an OAuth login against a
/// streamable-HTTP MCP server, as discovered by `oauth_login_support`.
#[derive(Debug, Clone)]
pub struct McpOAuthLoginConfig {
    pub url: String,
    pub http_headers: Option<HashMap<String, String>>,
    pub env_http_headers: Option<HashMap<String, String>>,
    // Scopes advertised by the server's OAuth discovery document, if any.
    pub discovered_scopes: Option<Vec<String>>,
}
/// Whether OAuth login is available for a given MCP server transport.
#[derive(Debug)]
pub enum McpOAuthLoginSupport {
    Supported(McpOAuthLoginConfig),
    Unsupported,
    // Discovery failed; carries the underlying error.
    Unknown(anyhow::Error),
}
/// Where a resolved set of OAuth scopes came from (see `resolve_oauth_scopes`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum McpOAuthScopesSource {
    Explicit,
    Configured,
    Discovered,
    Empty,
}
/// OAuth scopes chosen for a login attempt plus their provenance.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolvedMcpOAuthScopes {
    pub scopes: Vec<String>,
    pub source: McpOAuthScopesSource,
}
/// Determines whether OAuth login is supported for an MCP server transport.
///
/// Only streamable-HTTP transports can support OAuth; stdio transports and
/// transports that already authenticate via `bearer_token_env_var` are
/// `Unsupported`. Otherwise OAuth discovery is attempted against the server:
/// a successful discovery yields `Supported` with the connection details,
/// a clean "no OAuth" result yields `Unsupported`, and a discovery failure
/// yields `Unknown` with the error.
pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
    let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var,
        http_headers,
        env_http_headers,
    } = transport
    else {
        return McpOAuthLoginSupport::Unsupported;
    };
    // A configured bearer token takes precedence over OAuth.
    if bearer_token_env_var.is_some() {
        return McpOAuthLoginSupport::Unsupported;
    }
    match discover_streamable_http_oauth(url, http_headers.clone(), env_http_headers.clone()).await
    {
        Ok(Some(discovery)) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
            url: url.clone(),
            http_headers: http_headers.clone(),
            env_http_headers: env_http_headers.clone(),
            discovered_scopes: discovery.scopes_supported,
        }),
        Ok(None) => McpOAuthLoginSupport::Unsupported,
        Err(err) => McpOAuthLoginSupport::Unknown(err),
    }
}
/// Returns the OAuth scopes advertised by the server's discovery document.
///
/// Yields `None` when the transport does not support OAuth login or when
/// discovery could not be completed.
pub async fn discover_supported_scopes(
    transport: &McpServerTransportConfig,
) -> Option<Vec<String>> {
    if let McpOAuthLoginSupport::Supported(login_config) = oauth_login_support(transport).await {
        login_config.discovered_scopes
    } else {
        None
    }
}
/// Picks the OAuth scopes for a login attempt by precedence:
/// explicit > configured > discovered (non-empty only) > empty.
///
/// Explicit and configured scope lists win even when empty; discovered scopes
/// are only used when the discovery document advertised at least one scope.
pub fn resolve_oauth_scopes(
    explicit_scopes: Option<Vec<String>>,
    configured_scopes: Option<Vec<String>>,
    discovered_scopes: Option<Vec<String>>,
) -> ResolvedMcpOAuthScopes {
    let (scopes, source) = match (explicit_scopes, configured_scopes, discovered_scopes) {
        (Some(scopes), _, _) => (scopes, McpOAuthScopesSource::Explicit),
        (None, Some(scopes), _) => (scopes, McpOAuthScopesSource::Configured),
        (None, None, Some(scopes)) if !scopes.is_empty() => {
            (scopes, McpOAuthScopesSource::Discovered)
        }
        _ => (Vec::new(), McpOAuthScopesSource::Empty),
    };
    ResolvedMcpOAuthScopes { scopes, source }
}
pub fn should_retry_without_scopes(scopes: &ResolvedMcpOAuthScopes, error: &anyhow::Error) -> bool {
scopes.source == McpOAuthScopesSource::Discovered
&& error.downcast_ref::<OAuthProviderError>().is_some()
}
/// An MCP server's configuration paired with its computed auth status.
#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
    pub config: McpServerConfig,
    pub auth_status: McpAuthStatus,
}
/// Computes auth status for every configured MCP server, concurrently.
///
/// Each server's status is resolved independently via `compute_auth_status`;
/// a per-server failure is logged and downgraded to
/// `McpAuthStatus::Unsupported` rather than failing the whole call. Returns a
/// map from server name to its config + status.
pub async fn compute_auth_statuses<'a, I>(
    servers: I,
    store_mode: OAuthCredentialsStoreMode,
) -> HashMap<String, McpAuthStatusEntry>
where
    I: IntoIterator<Item = (&'a String, &'a McpServerConfig)>,
{
    let futures = servers.into_iter().map(|(name, config)| {
        // Clone so each future owns its data across the await.
        let name = name.clone();
        let config = config.clone();
        async move {
            let auth_status = match compute_auth_status(&name, &config, store_mode).await {
                Ok(status) => status,
                Err(error) => {
                    warn!("failed to determine auth status for MCP server `{name}`: {error:?}");
                    McpAuthStatus::Unsupported
                }
            };
            let entry = McpAuthStatusEntry {
                config,
                auth_status,
            };
            (name, entry)
        }
    });
    // All servers are queried concurrently.
    join_all(futures).await.into_iter().collect()
}
/// Computes the auth status for a single MCP server.
///
/// Stdio transports never support auth; streamable-HTTP transports are probed
/// via `determine_streamable_http_auth_status` using the transport's bearer
/// token env var, headers, and the given credentials store mode.
async fn compute_auth_status(
    server_name: &str,
    config: &McpServerConfig,
    store_mode: OAuthCredentialsStoreMode,
) -> Result<McpAuthStatus> {
    match &config.transport {
        McpServerTransportConfig::Stdio { .. } => Ok(McpAuthStatus::Unsupported),
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers,
        } => {
            determine_streamable_http_auth_status(
                server_name,
                url,
                bearer_token_env_var.as_deref(),
                http_headers.clone(),
                env_http_headers.clone(),
                store_mode,
            )
            .await
        }
    }
}
#[cfg(test)]
mod tests {
    use anyhow::anyhow;
    use pretty_assertions::assert_eq;
    use super::McpOAuthScopesSource;
    use super::OAuthProviderError;
    use super::ResolvedMcpOAuthScopes;
    use super::resolve_oauth_scopes;
    use super::should_retry_without_scopes;

    /// Shorthand for building the expected resolution outcome.
    fn resolved(scopes: Vec<String>, source: McpOAuthScopesSource) -> ResolvedMcpOAuthScopes {
        ResolvedMcpOAuthScopes { scopes, source }
    }

    #[test]
    fn resolve_oauth_scopes_prefers_explicit() {
        let result = resolve_oauth_scopes(
            Some(vec!["explicit".to_string()]),
            Some(vec!["configured".to_string()]),
            Some(vec!["discovered".to_string()]),
        );
        assert_eq!(
            result,
            resolved(vec!["explicit".to_string()], McpOAuthScopesSource::Explicit)
        );
    }

    #[test]
    fn resolve_oauth_scopes_prefers_configured_over_discovered() {
        let result = resolve_oauth_scopes(
            None,
            Some(vec!["configured".to_string()]),
            Some(vec!["discovered".to_string()]),
        );
        assert_eq!(
            result,
            resolved(
                vec!["configured".to_string()],
                McpOAuthScopesSource::Configured
            )
        );
    }

    #[test]
    fn resolve_oauth_scopes_uses_discovered_when_needed() {
        let result = resolve_oauth_scopes(None, None, Some(vec!["discovered".to_string()]));
        assert_eq!(
            result,
            resolved(
                vec!["discovered".to_string()],
                McpOAuthScopesSource::Discovered
            )
        );
    }

    #[test]
    fn resolve_oauth_scopes_preserves_explicitly_empty_configured_scopes() {
        let result = resolve_oauth_scopes(None, Some(Vec::new()), Some(vec!["ignored".into()]));
        assert_eq!(
            result,
            resolved(Vec::new(), McpOAuthScopesSource::Configured)
        );
    }

    #[test]
    fn resolve_oauth_scopes_falls_back_to_empty() {
        let result = resolve_oauth_scopes(None, None, None);
        assert_eq!(result, resolved(Vec::new(), McpOAuthScopesSource::Empty));
    }

    #[test]
    fn should_retry_without_scopes_only_for_discovered_provider_errors() {
        let discovered = resolved(vec!["scope".to_string()], McpOAuthScopesSource::Discovered);
        let provider_error = anyhow!(OAuthProviderError::new(
            Some("invalid_scope".to_string()),
            Some("scope rejected".to_string()),
        ));
        // Discovered scopes plus a provider error: retry is warranted.
        assert!(should_retry_without_scopes(&discovered, &provider_error));

        // Configured scopes never trigger a retry, even for provider errors.
        let configured = resolved(vec!["scope".to_string()], McpOAuthScopesSource::Configured);
        assert!(!should_retry_without_scopes(&configured, &provider_error));

        // Non-provider errors never trigger a retry.
        assert!(!should_retry_without_scopes(
            &discovered,
            &anyhow!("timed out waiting for OAuth callback"),
        ));
    }
}

View File

@@ -0,0 +1,36 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::time::Duration;
use serde::Deserialize;
use serde::Serialize;
/// Configuration for a single MCP server entry.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct McpServerConfig {
    /// How to reach the server: spawned stdio process or streamable HTTP.
    pub transport: McpServerTransportConfig,
    /// Whether the server is active.
    pub enabled: bool,
    /// Whether the server is considered required.
    /// NOTE(review): the consequence of a required server failing is enforced
    /// elsewhere (loader/connection manager) — confirm there.
    pub required: bool,
    /// Optional timeout applied to server startup.
    pub startup_timeout_sec: Option<Duration>,
    /// Optional timeout applied to individual tool calls.
    pub tool_timeout_sec: Option<Duration>,
    /// Allow-list of tool names; `None` means no allow-list.
    pub enabled_tools: Option<Vec<String>>,
    /// Deny-list of tool names; `None` means no deny-list.
    pub disabled_tools: Option<Vec<String>>,
    /// OAuth scopes configured for this server, when set.
    pub scopes: Option<Vec<String>>,
    /// OAuth resource indicator, when set.
    /// NOTE(review): presumably the RFC 8707 `resource` parameter — confirm
    /// against the OAuth flow that consumes it.
    pub oauth_resource: Option<String>,
}
/// Transport used to communicate with an MCP server.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum McpServerTransportConfig {
    /// Launch the server as a child process and speak MCP over stdio.
    Stdio {
        /// Executable to spawn.
        command: String,
        /// Arguments passed to the command.
        args: Vec<String>,
        /// Environment variables set for the child process.
        /// NOTE(review): whether these replace or extend the parent env is
        /// decided by the spawner — confirm there.
        env: Option<HashMap<String, String>>,
        /// Names of environment variables to forward from the parent process.
        /// NOTE(review): semantics inferred from the name — confirm with the
        /// transport implementation.
        env_vars: Vec<String>,
        /// Working directory for the child process, when set.
        cwd: Option<PathBuf>,
    },
    /// Connect to a remote server over streamable HTTP.
    StreamableHttp {
        /// Endpoint URL of the server.
        url: String,
        /// Name of an env var whose value is used as a bearer token, when set.
        bearer_token_env_var: Option<String>,
        /// Static HTTP headers added to requests.
        http_headers: Option<HashMap<String, String>>,
        /// Headers whose values are read from the named environment variables.
        /// NOTE(review): inferred from the name — confirm with the HTTP client.
        env_http_headers: Option<HashMap<String, String>>,
    },
}

File diff suppressed because it is too large Load Diff

View File

@@ -535,7 +535,6 @@ fn mcp_init_error_display_prompts_for_github_pat() {
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
@@ -583,7 +582,6 @@ fn mcp_init_error_display_reports_generic_errors() {
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,

View File

@@ -0,0 +1,114 @@
mod config;
mod connection_manager;
mod tool_call;
pub mod auth;
pub mod tool_approval_templates;
use std::collections::HashMap;
use codex_protocol::mcp::Tool;
pub use config::McpServerConfig;
pub use config::McpServerTransportConfig;
pub use connection_manager::CodexAppsToolsCacheKey;
pub use connection_manager::MCP_SANDBOX_STATE_CAPABILITY;
pub use connection_manager::MCP_SANDBOX_STATE_METHOD;
pub use connection_manager::McpConnectionManager;
pub use connection_manager::SandboxState;
pub use connection_manager::ToolInfo;
pub use connection_manager::codex_apps_tools_cache_key_from_token_data;
pub use connection_manager::filter_non_codex_apps_mcp_tools_only;
pub use tool_call::MCP_TOOL_APPROVAL_ACCEPT;
pub use tool_call::MCP_TOOL_APPROVAL_ACCEPT_FOR_SESSION;
pub use tool_call::MCP_TOOL_APPROVAL_DECLINE_SYNTHETIC;
pub use tool_call::MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX;
pub use tool_call::is_mcp_tool_approval_question_id;
// Fully qualified MCP tool names have the shape `mcp__<server>__<tool>`.
const MCP_TOOL_NAME_PREFIX: &str = "mcp";
// Separator between the prefix, server name, and tool name segments.
const MCP_TOOL_NAME_DELIMITER: &str = "__";
/// Reserved server name for the built-in Codex Apps MCP server.
pub const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
/// Tracks which plugins contributed tools, keyed two ways: by connector id
/// and by MCP server name.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct ToolPluginProvenance {
    /// Plugin display names grouped by connector id.
    pub plugin_display_names_by_connector_id: HashMap<String, Vec<String>>,
    /// Plugin display names grouped by MCP server name.
    pub plugin_display_names_by_mcp_server_name: HashMap<String, Vec<String>>,
}
impl ToolPluginProvenance {
    /// Returns the plugin display names recorded for `connector_id`, or an
    /// empty slice when none were recorded.
    pub fn plugin_display_names_for_connector_id(&self, connector_id: &str) -> &[String] {
        match self.plugin_display_names_by_connector_id.get(connector_id) {
            Some(names) => names,
            None => &[],
        }
    }
    /// Returns the plugin display names recorded for `server_name`, or an
    /// empty slice when none were recorded.
    pub fn plugin_display_names_for_mcp_server_name(&self, server_name: &str) -> &[String] {
        match self.plugin_display_names_by_mcp_server_name.get(server_name) {
            Some(names) => names,
            None => &[],
        }
    }
    /// Records that `plugin_display_name` contributed tools for `connector_id`.
    pub fn record_connector_plugin_name(
        &mut self,
        connector_id: impl Into<String>,
        plugin_display_name: impl Into<String>,
    ) {
        let names = self
            .plugin_display_names_by_connector_id
            .entry(connector_id.into())
            .or_default();
        names.push(plugin_display_name.into());
    }
    /// Records that `plugin_display_name` contributed tools for `server_name`.
    pub fn record_server_plugin_name(
        &mut self,
        server_name: impl Into<String>,
        plugin_display_name: impl Into<String>,
    ) {
        let names = self
            .plugin_display_names_by_mcp_server_name
            .entry(server_name.into())
            .or_default();
        names.push(plugin_display_name.into());
    }
    /// Sorts every name list and drops duplicates, across both maps.
    pub fn sort_and_dedup(&mut self) {
        let connector_lists = self.plugin_display_names_by_connector_id.values_mut();
        let server_lists = self.plugin_display_names_by_mcp_server_name.values_mut();
        for names in connector_lists.chain(server_lists) {
            names.sort_unstable();
            names.dedup();
        }
    }
}
/// Splits a fully qualified tool name (`mcp__<server>__<tool>`) into its
/// server and tool parts.
///
/// The tool part keeps any embedded delimiters (`mcp__srv__a__b` yields
/// `("srv", "a__b")`). Returns `None` when the prefix is missing or the tool
/// segment is empty.
pub fn split_qualified_tool_name(qualified_name: &str) -> Option<(String, String)> {
    let after_prefix = qualified_name
        .strip_prefix(MCP_TOOL_NAME_PREFIX)?
        .strip_prefix(MCP_TOOL_NAME_DELIMITER)?;
    let (server_name, tool_name) = after_prefix.split_once(MCP_TOOL_NAME_DELIMITER)?;
    if tool_name.is_empty() {
        return None;
    }
    Some((server_name.to_string(), tool_name.to_string()))
}
/// Groups a flat map of qualified tool names into per-server maps of
/// unqualified tool names. Entries whose keys are not valid qualified MCP
/// tool names are silently dropped.
pub fn group_tools_by_server(
    tools: &HashMap<String, Tool>,
) -> HashMap<String, HashMap<String, Tool>> {
    tools
        .iter()
        .filter_map(|(qualified_name, tool)| {
            let (server_name, tool_name) = split_qualified_tool_name(qualified_name)?;
            Some((server_name, tool_name, tool))
        })
        .fold(HashMap::new(), |mut grouped, (server_name, tool_name, tool)| {
            grouped
                .entry(server_name)
                .or_insert_with(HashMap::new)
                .insert(tool_name, tool.clone());
            grouped
        })
}

View File

@@ -0,0 +1,371 @@
use std::collections::HashSet;
use std::sync::LazyLock;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use tracing::warn;
// Schema version the bundled templates JSON must declare to be accepted.
const CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION: u8 = 4;
// Placeholder replaced with the connector's display name when rendering.
const CONNECTOR_NAME_TEMPLATE_VAR: &str = "{connector_name}";
// Templates parsed once from the bundled JSON; `None` when the file is
// malformed or carries an unexpected schema version.
static CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES: LazyLock<
    Option<Vec<ConsequentialToolMessageTemplate>>,
> = LazyLock::new(load_consequential_tool_message_templates);
/// A fully rendered approval prompt for a consequential MCP tool call.
#[derive(Clone, Debug, PartialEq)]
pub struct RenderedMcpToolApprovalTemplate {
    /// Question shown to the user (same text as `elicitation_message`).
    pub question: String,
    /// Message used for the elicitation request.
    pub elicitation_message: String,
    /// The raw tool params object, when the call had params.
    pub tool_params: Option<Value>,
    /// Params to display: template-labeled params first (template order),
    /// then remaining params sorted by name.
    pub tool_params_display: Vec<RenderedMcpToolApprovalParam>,
}
/// One displayable tool parameter in an approval prompt.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct RenderedMcpToolApprovalParam {
    /// Raw parameter name from the tool call.
    pub name: String,
    /// Raw parameter value from the tool call.
    pub value: Value,
    /// Human-readable label: the template's label, or the raw name.
    pub display_name: String,
}
/// On-disk shape of the bundled templates JSON file.
#[derive(Debug, Deserialize)]
struct ConsequentialToolMessageTemplatesFile {
    // Must equal CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION.
    schema_version: u8,
    templates: Vec<ConsequentialToolMessageTemplate>,
}
/// Approval message template for one (connector, server, tool title) triple.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
struct ConsequentialToolMessageTemplate {
    /// Connector id the template applies to.
    connector_id: String,
    /// MCP server name the template applies to.
    server_name: String,
    /// Tool title the template applies to (matched against trimmed input).
    tool_title: String,
    /// Question text; may contain the `{connector_name}` placeholder.
    template: String,
    /// Params to surface with human-readable labels.
    template_params: Vec<ConsequentialToolTemplateParam>,
}
/// Maps a raw tool parameter name to a display label.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
struct ConsequentialToolTemplateParam {
    /// Raw parameter name in the tool call arguments.
    name: String,
    /// Human-readable label shown in the approval prompt.
    label: String,
}
/// Renders the approval prompt for a consequential MCP tool call using the
/// bundled templates, or `None` when no matching template exists (or the
/// bundled templates failed to load).
pub fn render_mcp_tool_approval_template(
    server_name: &str,
    connector_id: Option<&str>,
    connector_name: Option<&str>,
    tool_title: Option<&str>,
    tool_params: Option<&Value>,
) -> Option<RenderedMcpToolApprovalTemplate> {
    CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES
        .as_ref()
        .and_then(|templates| {
            render_mcp_tool_approval_template_from_templates(
                templates,
                server_name,
                connector_id,
                connector_name,
                tool_title,
                tool_params,
            )
        })
}
/// Parses the bundled templates JSON, returning `None` (after logging) when
/// it fails to parse or declares an unexpected schema version.
fn load_consequential_tool_message_templates() -> Option<Vec<ConsequentialToolMessageTemplate>> {
    let raw = include_str!("consequential_tool_message_templates.json");
    let file = match serde_json::from_str::<ConsequentialToolMessageTemplatesFile>(raw) {
        Ok(file) => file,
        Err(err) => {
            warn!(error = %err, "failed to parse consequential tool approval templates");
            return None;
        }
    };
    if file.schema_version == CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION {
        Some(file.templates)
    } else {
        warn!(
            found_schema_version = file.schema_version,
            expected_schema_version = CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES_SCHEMA_VERSION,
            "unexpected consequential tool approval templates schema version"
        );
        None
    }
}
/// Looks up an exact (connector id, server name, tool title) template match
/// and renders it. `None` when no template matches, the connector id or tool
/// title is missing/blank, the question cannot be rendered, or the params
/// are not a JSON object.
fn render_mcp_tool_approval_template_from_templates(
    templates: &[ConsequentialToolMessageTemplate],
    server_name: &str,
    connector_id: Option<&str>,
    connector_name: Option<&str>,
    tool_title: Option<&str>,
    tool_params: Option<&Value>,
) -> Option<RenderedMcpToolApprovalTemplate> {
    let connector_id = connector_id?;
    let tool_title = tool_title.map(str::trim).filter(|title| !title.is_empty())?;
    let matching = templates.iter().find(|candidate| {
        candidate.connector_id == connector_id
            && candidate.server_name == server_name
            && candidate.tool_title == tool_title
    })?;
    let elicitation_message = render_question_template(&matching.template, connector_name)?;
    let (tool_params, tool_params_display) = match tool_params {
        None => (None, Vec::new()),
        Some(Value::Object(params)) => render_tool_params(params, &matching.template_params)?,
        // Non-object params cannot be displayed per-field; refuse to render.
        Some(_) => return None,
    };
    let question = elicitation_message.clone();
    Some(RenderedMcpToolApprovalTemplate {
        question,
        elicitation_message,
        tool_params,
        tool_params_display,
    })
}
/// Renders a question template, substituting `{connector_name}` when the
/// template uses it. Returns `None` for a blank template, or when the
/// placeholder is present but no non-blank connector name was supplied.
fn render_question_template(template: &str, connector_name: Option<&str>) -> Option<String> {
    let trimmed = template.trim();
    if trimmed.is_empty() {
        return None;
    }
    if !trimmed.contains(CONNECTOR_NAME_TEMPLATE_VAR) {
        return Some(trimmed.to_string());
    }
    let connector_name = connector_name
        .map(str::trim)
        .filter(|name| !name.is_empty())?;
    Some(trimmed.replace(CONNECTOR_NAME_TEMPLATE_VAR, connector_name))
}
/// Builds the display list for a tool call's params: template-labeled params
/// first (in template order), then every remaining param sorted by name with
/// its raw name as the label.
///
/// Returns `None` when a template label is blank or when two display entries
/// would share the same label (an ambiguous prompt). Also returns the raw
/// params object so it can be echoed back alongside the display list.
fn render_tool_params(
    tool_params: &Map<String, Value>,
    template_params: &[ConsequentialToolTemplateParam],
) -> Option<(Option<Value>, Vec<RenderedMcpToolApprovalParam>)> {
    let mut display_params = Vec::new();
    let mut display_names = HashSet::new();
    let mut handled_names = HashSet::new();
    // First pass: params the template explicitly labels; params absent from
    // the call are skipped.
    for template_param in template_params {
        let label = template_param.label.trim();
        if label.is_empty() {
            return None;
        }
        let Some(value) = tool_params.get(&template_param.name) else {
            continue;
        };
        // A duplicate display label would make the prompt ambiguous.
        if !display_names.insert(label.to_string()) {
            return None;
        }
        display_params.push(RenderedMcpToolApprovalParam {
            name: template_param.name.clone(),
            value: value.clone(),
            display_name: label.to_string(),
        });
        handled_names.insert(template_param.name.as_str());
    }
    // Second pass: the untemplated remainder, sorted by name for stable
    // output. Handled names are already excluded by the filter below, so no
    // per-iteration re-check is needed.
    let mut remaining_params = tool_params
        .iter()
        .filter(|(name, _)| !handled_names.contains(name.as_str()))
        .collect::<Vec<_>>();
    remaining_params.sort_by(|(left_name, _), (right_name, _)| left_name.cmp(right_name));
    for (name, value) in remaining_params {
        if !display_names.insert(name.clone()) {
            return None;
        }
        display_params.push(RenderedMcpToolApprovalParam {
            name: name.clone(),
            value: value.clone(),
            display_name: name.clone(),
        });
    }
    Some((Some(Value::Object(tool_params.clone())), display_params))
}
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use super::*;

    // Exact (connector, server, title) match: templated params are relabeled
    // in template order, and leftover params follow sorted by name with
    // their raw names as labels.
    #[test]
    fn renders_exact_match_with_readable_param_labels() {
        let templates = vec![ConsequentialToolMessageTemplate {
            connector_id: "calendar".to_string(),
            server_name: "codex_apps".to_string(),
            tool_title: "create_event".to_string(),
            template: "Allow {connector_name} to create an event?".to_string(),
            template_params: vec![
                ConsequentialToolTemplateParam {
                    name: "calendar_id".to_string(),
                    label: "Calendar".to_string(),
                },
                ConsequentialToolTemplateParam {
                    name: "title".to_string(),
                    label: "Title".to_string(),
                },
            ],
        }];
        let rendered = render_mcp_tool_approval_template_from_templates(
            &templates,
            "codex_apps",
            Some("calendar"),
            Some("Calendar"),
            Some("create_event"),
            Some(&json!({
                "title": "Roadmap review",
                "calendar_id": "primary",
                "timezone": "UTC",
            })),
        );
        assert_eq!(
            rendered,
            Some(RenderedMcpToolApprovalTemplate {
                question: "Allow Calendar to create an event?".to_string(),
                elicitation_message: "Allow Calendar to create an event?".to_string(),
                tool_params: Some(json!({
                    "title": "Roadmap review",
                    "calendar_id": "primary",
                    "timezone": "UTC",
                })),
                tool_params_display: vec![
                    RenderedMcpToolApprovalParam {
                        name: "calendar_id".to_string(),
                        value: json!("primary"),
                        display_name: "Calendar".to_string(),
                    },
                    RenderedMcpToolApprovalParam {
                        name: "title".to_string(),
                        value: json!("Roadmap review"),
                        display_name: "Title".to_string(),
                    },
                    RenderedMcpToolApprovalParam {
                        name: "timezone".to_string(),
                        value: json!("UTC"),
                        display_name: "timezone".to_string(),
                    },
                ],
            })
        );
    }

    // A different tool title must not fall back to another template.
    #[test]
    fn returns_none_when_no_exact_match_exists() {
        let templates = vec![ConsequentialToolMessageTemplate {
            connector_id: "calendar".to_string(),
            server_name: "codex_apps".to_string(),
            tool_title: "create_event".to_string(),
            template: "Allow {connector_name} to create an event?".to_string(),
            template_params: Vec::new(),
        }];
        assert_eq!(
            render_mcp_tool_approval_template_from_templates(
                &templates,
                "codex_apps",
                Some("calendar"),
                Some("Calendar"),
                Some("delete_event"),
                Some(&json!({})),
            ),
            None
        );
    }

    // Relabeling "calendar_id" to "timezone" collides with the real
    // "timezone" param, which would make the prompt ambiguous.
    #[test]
    fn returns_none_when_relabeling_would_collide() {
        let templates = vec![ConsequentialToolMessageTemplate {
            connector_id: "calendar".to_string(),
            server_name: "codex_apps".to_string(),
            tool_title: "create_event".to_string(),
            template: "Allow {connector_name} to create an event?".to_string(),
            template_params: vec![ConsequentialToolTemplateParam {
                name: "calendar_id".to_string(),
                label: "timezone".to_string(),
            }],
        }];
        assert_eq!(
            render_mcp_tool_approval_template_from_templates(
                &templates,
                "codex_apps",
                Some("calendar"),
                Some("Calendar"),
                Some("create_event"),
                Some(&json!({
                    "calendar_id": "primary",
                    "timezone": "UTC",
                })),
            ),
            None
        );
    }

    // The bundled JSON must parse and carry the expected schema version.
    #[test]
    fn bundled_templates_load() {
        assert!(CONSEQUENTIAL_TOOL_MESSAGE_TEMPLATES.is_some());
    }

    // A template without the placeholder renders verbatim even when no
    // connector name is available.
    #[test]
    fn renders_literal_template_without_connector_substitution() {
        let templates = vec![ConsequentialToolMessageTemplate {
            connector_id: "github".to_string(),
            server_name: "codex_apps".to_string(),
            tool_title: "add_comment".to_string(),
            template: "Allow GitHub to add a comment to a pull request?".to_string(),
            template_params: Vec::new(),
        }];
        let rendered = render_mcp_tool_approval_template_from_templates(
            &templates,
            "codex_apps",
            Some("github"),
            None,
            Some("add_comment"),
            Some(&json!({})),
        );
        assert_eq!(
            rendered,
            Some(RenderedMcpToolApprovalTemplate {
                question: "Allow GitHub to add a comment to a pull request?".to_string(),
                elicitation_message: "Allow GitHub to add a comment to a pull request?".to_string(),
                tool_params: Some(json!({})),
                tool_params_display: Vec::new(),
            })
        );
    }

    // A template that needs {connector_name} cannot render without one.
    #[test]
    fn returns_none_when_connector_placeholder_has_no_value() {
        let templates = vec![ConsequentialToolMessageTemplate {
            connector_id: "calendar".to_string(),
            server_name: "codex_apps".to_string(),
            tool_title: "create_event".to_string(),
            template: "Allow {connector_name} to create an event?".to_string(),
            template_params: Vec::new(),
        }];
        assert_eq!(
            render_mcp_tool_approval_template_from_templates(
                &templates,
                "codex_apps",
                Some("calendar"),
                None,
                Some("create_event"),
                Some(&json!({})),
            ),
            None
        );
    }
}

View File

@@ -0,0 +1,10 @@
/// Prefix shared by every MCP tool-call approval question id.
pub const MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX: &str = "mcp_tool_call_approval";
/// Option label for approving a single tool call.
pub const MCP_TOOL_APPROVAL_ACCEPT: &str = "Allow";
/// Option label for approving the tool for the rest of the session.
pub const MCP_TOOL_APPROVAL_ACCEPT_FOR_SESSION: &str = "Allow for this session";
/// Synthetic marker value representing a declined MCP tool approval.
pub const MCP_TOOL_APPROVAL_DECLINE_SYNTHETIC: &str = "__codex_mcp_decline__";
/// Returns true when `question_id` is an MCP tool approval id: the shared
/// prefix followed immediately by an underscore-delimited suffix.
pub fn is_mcp_tool_approval_question_id(question_id: &str) -> bool {
    match question_id.strip_prefix(MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX) {
        Some(suffix) => suffix.starts_with('_'),
        None => false,
    }
}