Compare commits

..

5 Commits

Author SHA1 Message Date
Ahmed Ibrahim
4cc0d1c053 Add Bazel target for extensions crate
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:46:47 +00:00
Ahmed Ibrahim
3848e8e43e Fix extensions extraction regressions
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:36:22 +00:00
Ahmed Ibrahim
d174c6ad4c Restore plugin capability summary adapters
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:25:51 +00:00
Ahmed Ibrahim
bb9dcc5982 codex: fix CI failure on PR #15028
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:16:41 +00:00
Ahmed Ibrahim
342b8e40a1 Extract codex-extensions from codex-core
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:06:44 +00:00
44 changed files with 1680 additions and 1945 deletions

View File

@@ -86,7 +86,8 @@ jobs:
- uses: dtolnay/rust-toolchain@1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear@1.5.1
tool: cargo-shear
version: 1.5.1
- name: cargo shear
run: cargo shear
@@ -290,7 +291,8 @@ jobs:
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache@0.7.5
tool: sccache
version: 0.7.5
- name: Configure sccache backend
if: ${{ env.USE_SCCACHE == 'true' }}
@@ -419,7 +421,8 @@ jobs:
if: ${{ matrix.profile == 'release' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-chef@0.1.71
tool: cargo-chef
version: 0.1.71
- name: Pre-warm dependency cache (cargo-chef)
if: ${{ matrix.profile == 'release' }}
@@ -590,7 +593,8 @@ jobs:
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache@0.7.5
tool: sccache
version: 0.7.5
- name: Configure sccache backend
if: ${{ env.USE_SCCACHE == 'true' }}
@@ -624,7 +628,8 @@ jobs:
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: nextest@0.9.103
tool: nextest
version: 0.9.103
- name: Enable unprivileged user namespaces (Linux)
if: runner.os == 'Linux'

37
codex-rs/Cargo.lock generated
View File

@@ -1845,11 +1845,11 @@ dependencies = [
"codex-config",
"codex-connectors",
"codex-execpolicy",
"codex-extensions",
"codex-file-search",
"codex-git",
"codex-hooks",
"codex-keyring-store",
"codex-models",
"codex-network-proxy",
"codex-otel",
"codex-protocol",
@@ -2037,6 +2037,18 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "codex-extensions"
version = "0.0.0"
dependencies = [
"codex-protocol",
"codex-utils-absolute-path",
"serde",
"serde_json",
"thiserror 2.0.18",
"tracing",
]
[[package]]
name = "codex-feedback"
version = "0.0.0"
@@ -2195,29 +2207,6 @@ dependencies = [
"wiremock",
]
[[package]]
name = "codex-models"
version = "0.0.0"
dependencies = [
"async-trait",
"base64 0.22.1",
"chrono",
"codex-api",
"codex-protocol",
"http 1.4.0",
"maplit",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"toml 0.9.11+spec-1.1.0",
"tracing",
"wiremock",
]
[[package]]
name = "codex-network-proxy"
version = "0.0.0"

View File

@@ -27,12 +27,12 @@ members = [
"exec",
"execpolicy",
"execpolicy-legacy",
"extensions",
"keyring-store",
"file-search",
"linux-sandbox",
"lmstudio",
"login",
"models",
"mcp-server",
"network-proxy",
"ollama",
@@ -106,6 +106,7 @@ codex-config = { path = "config" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }
codex-extensions = { path = "extensions" }
codex-experimental-api-macros = { path = "codex-experimental-api-macros" }
codex-feedback = { path = "feedback" }
codex-file-search = { path = "file-search" }
@@ -115,7 +116,6 @@ codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-models = { path = "models" }
codex-mcp-server = { path = "mcp-server" }
codex-network-proxy = { path = "network-proxy" }
codex-ollama = { path = "ollama" }

View File

@@ -37,11 +37,11 @@ codex-config = { workspace = true }
codex-shell-command = { workspace = true }
codex-skills = { workspace = true }
codex-execpolicy = { workspace = true }
codex-extensions = { workspace = true }
codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-hooks = { workspace = true }
codex-keyring-store = { workspace = true }
codex-models = { workspace = true }
codex-network-proxy = { workspace = true }
codex-otel = { workspace = true }
codex-artifacts = { workspace = true }

View File

@@ -1,10 +1,9 @@
use codex_models::ModelProviderInfo;
use codex_models::WireApi;
use codex_otel::AuthEnvTelemetryMetadata;
use crate::auth::CODEX_API_KEY_ENV_VAR;
use crate::auth::OPENAI_API_KEY_ENV_VAR;
use crate::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use crate::model_provider_info::ModelProviderInfo;
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct AuthEnvTelemetry {
@@ -65,7 +64,7 @@ mod tests {
env_key: Some("sk-should-not-leak".to_string()),
env_key_instructions: None,
experimental_bearer_token: None,
wire_api: WireApi::Responses,
wire_api: crate::model_provider_info::WireApi::Responses,
query_params: None,
http_headers: None,
env_http_headers: None,

View File

@@ -541,10 +541,10 @@ impl ModelClient {
Some(manager) => manager.auth().await,
None => None,
};
let api_provider = self.state.provider.to_api_provider(matches!(
auth.as_ref().map(CodexAuth::auth_mode),
Some(crate::auth::AuthMode::Chatgpt)
));
let api_provider = self
.state
.provider
.to_api_provider(auth.as_ref().map(CodexAuth::auth_mode))?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
Ok(CurrentClientSetup {
auth,

View File

@@ -191,15 +191,6 @@ impl From<CancelErr> for CodexErr {
}
}
impl From<codex_models::EnvVarError> for CodexErr {
fn from(error: codex_models::EnvVarError) -> Self {
Self::EnvVar(EnvVarError {
var: error.var,
instructions: error.instructions,
})
}
}
impl CodexErr {
pub fn is_retryable(&self) -> bool {
match self {

View File

@@ -1 +1,356 @@
pub use codex_models::model_provider_info::*;
//! Registry of model providers supported by Codex.
//!
//! Providers can be defined in two places:
//! 1. Built-in defaults compiled into the binary so Codex works out-of-the-box.
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
//! key. These override or extend the defaults at runtime.
use crate::auth::AuthMode;
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::fmt;
use std::time::Duration;
/// Default idle timeout for streaming responses (5 minutes).
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
/// Default number of times to retry reconnecting a dropped stream.
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
/// Default number of times to retry a failed HTTP request.
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
/// Default timeout for establishing a websocket connection.
pub(crate) const DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS: u64 = 15_000;
/// Hard cap for user-configured `stream_max_retries`.
const MAX_STREAM_MAX_RETRIES: u64 = 100;
/// Hard cap for user-configured `request_max_retries`.
const MAX_REQUEST_MAX_RETRIES: u64 = 100;
/// Display name of the built-in OpenAI provider.
const OPENAI_PROVIDER_NAME: &str = "OpenAI";
/// Stable id of the built-in OpenAI provider.
pub const OPENAI_PROVIDER_ID: &str = "openai";
/// Migration error shown when a config still uses the removed `chat` wire API.
const CHAT_WIRE_API_REMOVED_ERROR: &str = "`wire_api = \"chat\"` is no longer supported.\nHow to fix: set `wire_api = \"responses\"` in your provider config.\nMore info: https://github.com/openai/codex/discussions/7782";
/// Legacy provider id retained only so we can emit a targeted removal error.
pub(crate) const LEGACY_OLLAMA_CHAT_PROVIDER_ID: &str = "ollama-chat";
/// Migration error shown when a config still references the removed `ollama-chat` provider.
pub(crate) const OLLAMA_CHAT_PROVIDER_REMOVED_ERROR: &str = "`ollama-chat` is no longer supported.\nHow to fix: replace `ollama-chat` with `ollama` in `model_provider`, `oss_provider`, or `--local-provider`.\nMore info: https://github.com/openai/codex/discussions/7782";
/// Wire protocol that the provider speaks.
///
/// `Deserialize` is implemented manually (below) so that the removed `chat`
/// value produces a targeted migration error instead of a generic
/// unknown-variant message.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum WireApi {
    /// The Responses API exposed by OpenAI at `/v1/responses`.
    #[default]
    Responses,
}
impl fmt::Display for WireApi {
    /// Renders the wire API in its lowercase config-file form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Responses => f.write_str("responses"),
        }
    }
}
impl<'de> Deserialize<'de> for WireApi {
    /// Manual deserializer so that the historically-supported `chat` value
    /// yields a dedicated migration message pointing users at `responses`,
    /// rather than serde's generic unknown-variant error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        match value.as_str() {
            "responses" => Ok(Self::Responses),
            // `chat` used to be a valid variant; surface the removal error.
            "chat" => Err(serde::de::Error::custom(CHAT_WIRE_API_REMOVED_ERROR)),
            _ => Err(serde::de::Error::unknown_variant(&value, &["responses"])),
        }
    }
}
/// Serializable representation of a provider definition.
///
/// Instances come either from the built-in defaults
/// ([`built_in_model_providers`]) or from `model_providers` entries in the
/// user's `config.toml`. The generated JSON schema disallows unknown fields
/// (`deny_unknown_fields`).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ModelProviderInfo {
    /// Friendly display name.
    pub name: String,
    /// Base URL for the provider's OpenAI-compatible API.
    pub base_url: Option<String>,
    /// Environment variable that stores the user's API key for this provider.
    pub env_key: Option<String>,
    /// Optional instructions to help the user get a valid value for the
    /// variable and set it.
    pub env_key_instructions: Option<String>,
    /// Value to use with `Authorization: Bearer <token>` header. Use of this
    /// config is discouraged in favor of `env_key` for security reasons, but
    /// this may be necessary when using this programmatically.
    pub experimental_bearer_token: Option<String>,
    /// Which wire protocol this provider expects.
    #[serde(default)]
    pub wire_api: WireApi,
    /// Optional query parameters to append to the base URL.
    pub query_params: Option<HashMap<String, String>>,
    /// Additional HTTP headers to include in requests to this provider where
    /// the (key, value) pairs are the header name and value.
    pub http_headers: Option<HashMap<String, String>>,
    /// Optional HTTP headers to include in requests to this provider where the
    /// (key, value) pairs are the header name and _environment variable_ whose
    /// value should be used. If the environment variable is not set, or the
    /// value is empty, the header will not be included in the request.
    pub env_http_headers: Option<HashMap<String, String>>,
    /// Maximum number of times to retry a failed HTTP request to this provider.
    /// Capped at compile time; see [`ModelProviderInfo::request_max_retries`].
    pub request_max_retries: Option<u64>,
    /// Number of times to retry reconnecting a dropped streaming response before failing.
    pub stream_max_retries: Option<u64>,
    /// Idle timeout (in milliseconds) to wait for activity on a streaming response before treating
    /// the connection as lost.
    pub stream_idle_timeout_ms: Option<u64>,
    /// Maximum time (in milliseconds) to wait for a websocket connection attempt before treating
    /// it as failed.
    pub websocket_connect_timeout_ms: Option<u64>,
    /// Does this provider require an OpenAI API Key or ChatGPT login token? If true,
    /// user is presented with login screen on first run, and login preference and token/key
    /// are stored in auth.json. If false (which is the default), login screen is skipped,
    /// and API key (if needed) comes from the "env_key" environment variable.
    #[serde(default)]
    pub requires_openai_auth: bool,
    /// Whether this provider supports the Responses API WebSocket transport.
    #[serde(default)]
    pub supports_websockets: bool,
}
impl ModelProviderInfo {
    /// Builds the static header map for outgoing requests.
    ///
    /// Header names/values that fail to parse are silently skipped rather than
    /// failing the request, as are env-sourced headers whose variable is unset
    /// or blank.
    fn build_header_map(&self) -> crate::error::Result<HeaderMap> {
        let capacity = self.http_headers.as_ref().map_or(0, HashMap::len)
            + self.env_http_headers.as_ref().map_or(0, HashMap::len);
        let mut headers = HeaderMap::with_capacity(capacity);
        if let Some(extra) = &self.http_headers {
            for (k, v) in extra {
                if let (Ok(name), Ok(value)) = (HeaderName::try_from(k), HeaderValue::try_from(v)) {
                    headers.insert(name, value);
                }
            }
        }
        if let Some(env_headers) = &self.env_http_headers {
            for (header, env_var) in env_headers {
                // Skip the header entirely when the env var is missing/blank
                // or the header name/value cannot be parsed.
                if let Ok(val) = std::env::var(env_var)
                    && !val.trim().is_empty()
                    && let (Ok(name), Ok(value)) =
                        (HeaderName::try_from(header), HeaderValue::try_from(val))
                {
                    headers.insert(name, value);
                }
            }
        }
        Ok(headers)
    }

    /// Converts this configuration into the transport-level [`ApiProvider`].
    ///
    /// When `base_url` is unset, the default depends on `auth_mode`: ChatGPT
    /// logins go through the ChatGPT backend API, everything else through the
    /// public OpenAI API.
    pub(crate) fn to_api_provider(
        &self,
        auth_mode: Option<AuthMode>,
    ) -> crate::error::Result<ApiProvider> {
        let default_base_url = if matches!(auth_mode, Some(AuthMode::Chatgpt)) {
            "https://chatgpt.com/backend-api/codex"
        } else {
            "https://api.openai.com/v1"
        };
        let base_url = self
            .base_url
            .clone()
            .unwrap_or_else(|| default_base_url.to_string());
        let headers = self.build_header_map()?;
        let retry = ApiRetryConfig {
            max_attempts: self.request_max_retries(),
            base_delay: Duration::from_millis(200),
            retry_429: false,
            retry_5xx: true,
            retry_transport: true,
        };
        Ok(ApiProvider {
            name: self.name.clone(),
            base_url,
            query_params: self.query_params.clone(),
            headers,
            retry,
            stream_idle_timeout: self.stream_idle_timeout(),
        })
    }

    /// If `env_key` is Some, returns the API key for this provider if present
    /// (and non-empty) in the environment. If `env_key` is required but
    /// cannot be found, returns an error.
    pub fn api_key(&self) -> crate::error::Result<Option<String>> {
        match &self.env_key {
            Some(env_key) => {
                let api_key = std::env::var(env_key)
                    .ok()
                    .filter(|v| !v.trim().is_empty())
                    .ok_or_else(|| {
                        crate::error::CodexErr::EnvVar(EnvVarError {
                            var: env_key.clone(),
                            instructions: self.env_key_instructions.clone(),
                        })
                    })?;
                Ok(Some(api_key))
            }
            None => Ok(None),
        }
    }

    /// Effective maximum number of request retries for this provider.
    /// User-configured values are capped at `MAX_REQUEST_MAX_RETRIES`.
    pub fn request_max_retries(&self) -> u64 {
        self.request_max_retries
            .unwrap_or(DEFAULT_REQUEST_MAX_RETRIES)
            .min(MAX_REQUEST_MAX_RETRIES)
    }

    /// Effective maximum number of stream reconnection attempts for this provider.
    /// User-configured values are capped at `MAX_STREAM_MAX_RETRIES`.
    pub fn stream_max_retries(&self) -> u64 {
        self.stream_max_retries
            .unwrap_or(DEFAULT_STREAM_MAX_RETRIES)
            .min(MAX_STREAM_MAX_RETRIES)
    }

    /// Effective idle timeout for streaming responses.
    pub fn stream_idle_timeout(&self) -> Duration {
        self.stream_idle_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
    }

    /// Effective timeout for websocket connect attempts.
    pub fn websocket_connect_timeout(&self) -> Duration {
        self.websocket_connect_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS))
    }

    /// Builds the built-in OpenAI provider, optionally overriding the base URL.
    ///
    /// Sends the client version header, plus `OpenAI-Organization` /
    /// `OpenAI-Project` headers sourced from the corresponding env vars when
    /// set, and requires OpenAI auth.
    pub fn create_openai_provider(base_url: Option<String>) -> ModelProviderInfo {
        ModelProviderInfo {
            name: OPENAI_PROVIDER_NAME.into(),
            base_url,
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: Some(
                [("version".to_string(), env!("CARGO_PKG_VERSION").to_string())]
                    .into_iter()
                    .collect(),
            ),
            env_http_headers: Some(
                [
                    (
                        "OpenAI-Organization".to_string(),
                        "OPENAI_ORGANIZATION".to_string(),
                    ),
                    ("OpenAI-Project".to_string(), "OPENAI_PROJECT".to_string()),
                ]
                .into_iter()
                .collect(),
            ),
            // Use global defaults for retry/timeout unless overridden in config.toml.
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            websocket_connect_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }

    /// Returns true when this provider is the built-in OpenAI provider
    /// (compared by display name).
    pub fn is_openai(&self) -> bool {
        self.name == OPENAI_PROVIDER_NAME
    }
}
/// Default local port for an LM Studio server.
pub const DEFAULT_LMSTUDIO_PORT: u16 = 1234;
/// Default local port for an Ollama server.
pub const DEFAULT_OLLAMA_PORT: u16 = 11434;
/// Provider id for the LM Studio open-source provider.
pub const LMSTUDIO_OSS_PROVIDER_ID: &str = "lmstudio";
/// Provider id for the Ollama open-source provider.
pub const OLLAMA_OSS_PROVIDER_ID: &str = "ollama";
/// Built-in default provider list: OpenAI plus the Ollama and LM Studio
/// open-source providers, keyed by provider id.
pub fn built_in_model_providers(
    openai_base_url: Option<String>,
) -> HashMap<String, ModelProviderInfo> {
    use ModelProviderInfo as P;
    let openai_provider = P::create_openai_provider(openai_base_url);
    // We do not want to be in the business of adjudicating which third-party
    // providers are bundled with Codex CLI, so we only include the OpenAI and
    // open source ("oss") providers by default. Users are encouraged to add to
    // `model_providers` in config.toml to add their own providers.
    [
        (OPENAI_PROVIDER_ID, openai_provider),
        (
            OLLAMA_OSS_PROVIDER_ID,
            create_oss_provider(DEFAULT_OLLAMA_PORT, WireApi::Responses),
        ),
        (
            LMSTUDIO_OSS_PROVIDER_ID,
            create_oss_provider(DEFAULT_LMSTUDIO_PORT, WireApi::Responses),
        ),
    ]
    .into_iter()
    .map(|(k, v)| (k.to_string(), v))
    .collect()
}
/// Builds an open-source provider pointed at a local server.
///
/// The port may be overridden via `CODEX_OSS_PORT`, and the full base URL via
/// `CODEX_OSS_BASE_URL`; blank or unparsable values fall back to the defaults.
pub fn create_oss_provider(default_provider_port: u16, wire_api: WireApi) -> ModelProviderInfo {
    // These CODEX_OSS_ environment variables are experimental: we may
    // switch to reading values from config.toml instead.
    let port = std::env::var("CODEX_OSS_PORT")
        .ok()
        .filter(|value| !value.trim().is_empty())
        .and_then(|value| value.parse::<u16>().ok())
        .unwrap_or(default_provider_port);
    let fallback_base_url = format!("http://localhost:{port}/v1");
    let base_url = match std::env::var("CODEX_OSS_BASE_URL") {
        Ok(url) if !url.trim().is_empty() => url,
        _ => fallback_base_url,
    };
    create_oss_provider_with_base_url(&base_url, wire_api)
}
/// Builds the "gpt-oss" provider definition for the given base URL.
///
/// No API key or OpenAI auth is required, and all retry/timeout settings use
/// the global defaults.
pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> ModelProviderInfo {
    ModelProviderInfo {
        name: "gpt-oss".into(),
        base_url: Some(base_url.into()),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: None,
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        websocket_connect_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}
#[cfg(test)]
#[path = "model_provider_info_tests.rs"]
mod tests;

View File

@@ -11,24 +11,27 @@ use tokio::fs;
use tracing::error;
use tracing::info;
/// Manages loading and saving of models cache to disk.
#[derive(Debug)]
pub struct ModelsCacheManager {
pub(crate) struct ModelsCacheManager {
cache_path: PathBuf,
cache_ttl: Duration,
}
impl ModelsCacheManager {
pub fn new(cache_path: PathBuf, cache_ttl: Duration) -> Self {
/// Create a new cache manager with the given path and TTL.
pub(crate) fn new(cache_path: PathBuf, cache_ttl: Duration) -> Self {
Self {
cache_path,
cache_ttl,
}
}
pub async fn load_fresh(&self, expected_version: &str) -> Option<ModelsCache> {
/// Attempt to load a fresh cache entry. Returns `None` if the cache doesn't exist or is stale.
pub(crate) async fn load_fresh(&self, expected_version: &str) -> Option<ModelsCache> {
info!(
cache_path = %self.cache_path.display(),
expected_version,
cache_path = %self.cache_path.display(),
expected_version,
"models cache: attempting load_fresh"
);
let cache = match self.load().await {
@@ -70,7 +73,8 @@ impl ModelsCacheManager {
Some(cache)
}
pub async fn persist_cache(
/// Persist the cache to disk, creating parent directories as needed.
pub(crate) async fn persist_cache(
&self,
models: &[ModelInfo],
etag: Option<String>,
@@ -87,7 +91,8 @@ impl ModelsCacheManager {
}
}
pub async fn renew_cache_ttl(&self) -> io::Result<()> {
/// Renew the cache TTL by updating the fetched_at timestamp to now.
pub(crate) async fn renew_cache_ttl(&self) -> io::Result<()> {
let mut cache = match self.load().await? {
Some(cache) => cache,
None => return Err(io::Error::new(ErrorKind::NotFound, "cache not found")),
@@ -118,12 +123,14 @@ impl ModelsCacheManager {
}
#[cfg(test)]
pub fn set_ttl(&mut self, ttl: Duration) {
/// Set the cache TTL.
pub(crate) fn set_ttl(&mut self, ttl: Duration) {
self.cache_ttl = ttl;
}
#[cfg(test)]
pub async fn manipulate_cache_for_test<F>(&self, f: F) -> io::Result<()>
/// Manipulate cache file for testing. Allows setting a custom fetched_at timestamp.
pub(crate) async fn manipulate_cache_for_test<F>(&self, f: F) -> io::Result<()>
where
F: FnOnce(&mut DateTime<Utc>),
{
@@ -136,7 +143,8 @@ impl ModelsCacheManager {
}
#[cfg(test)]
pub async fn mutate_cache_for_test<F>(&self, f: F) -> io::Result<()>
/// Mutate the full cache contents for testing.
pub(crate) async fn mutate_cache_for_test<F>(&self, f: F) -> io::Result<()>
where
F: FnOnce(&mut ModelsCache),
{
@@ -149,17 +157,19 @@ impl ModelsCacheManager {
}
}
/// Serialized snapshot of models and metadata cached on disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelsCache {
pub fetched_at: DateTime<Utc>,
pub(crate) struct ModelsCache {
pub(crate) fetched_at: DateTime<Utc>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub etag: Option<String>,
pub(crate) etag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub client_version: Option<String>,
pub models: Vec<ModelInfo>,
pub(crate) client_version: Option<String>,
pub(crate) models: Vec<ModelInfo>,
}
impl ModelsCache {
/// Returns `true` when the cache entry has not exceeded the configured TTL.
fn is_fresh(&self, ttl: Duration) -> bool {
if ttl.is_zero() {
return false;

View File

@@ -10,12 +10,18 @@ const KNOWN_MODE_NAMES_PLACEHOLDER: &str = "{{KNOWN_MODE_NAMES}}";
const REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER: &str = "{{REQUEST_USER_INPUT_AVAILABILITY}}";
const ASKING_QUESTIONS_GUIDANCE_PLACEHOLDER: &str = "{{ASKING_QUESTIONS_GUIDANCE}}";
/// Stores feature flags that control collaboration-mode behavior.
///
/// Keep mode-related flags here so new collaboration-mode capabilities can be
/// added without large cross-cutting diffs to constructor and call-site
/// signatures.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct CollaborationModesConfig {
/// Enables `request_user_input` availability in Default mode.
pub default_mode_request_user_input: bool,
}
pub fn builtin_collaboration_mode_presets(
pub(crate) fn builtin_collaboration_mode_presets(
collaboration_modes_config: CollaborationModesConfig,
) -> Vec<CollaborationModeMask> {
vec![plan_preset(), default_preset(collaboration_modes_config)]

View File

@@ -1,3 +1,4 @@
use super::cache::ModelsCacheManager;
use crate::api_bridge::auth_provider_from_auth;
use crate::api_bridge::map_api_error;
use crate::auth::AuthManager;
@@ -9,19 +10,18 @@ use crate::config::Config;
use crate::default_client::build_reqwest_client;
use crate::error::CodexErr;
use crate::error::Result as CoreResult;
use crate::model_provider_info::ModelProviderInfo;
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::models_manager::collaboration_mode_presets::builtin_collaboration_mode_presets;
use crate::models_manager::model_info;
use crate::response_debug_context::extract_response_debug_context;
use crate::response_debug_context::telemetry_transport_error_message;
use crate::util::FeedbackRequestTags;
use crate::util::emit_feedback_request_tags_with_auth_env;
use codex_api::ModelsClient;
use codex_api::RequestTelemetry;
use codex_api::ReqwestTransport;
use codex_api::TransportError;
use codex_models::ModelProviderInfo;
use codex_models::models_manager::cache::ModelsCacheManager;
use codex_models::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_models::models_manager::collaboration_mode_presets::builtin_collaboration_mode_presets;
use codex_models::response_debug_context::extract_response_debug_context;
use codex_models::response_debug_context::telemetry_transport_error_message;
use codex_otel::TelemetryAuthMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
@@ -433,9 +433,7 @@ impl ModelsManager {
codex_otel::start_global_timer("codex.remote_models.fetch_update.duration_ms", &[]);
let auth = self.auth_manager.auth().await;
let auth_mode = auth.as_ref().map(CodexAuth::auth_mode);
let api_provider = self
.provider
.to_api_provider(matches!(auth_mode, Some(AuthMode::Chatgpt)));
let api_provider = self.provider.to_api_provider(auth_mode)?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.provider)?;
let auth_env = collect_auth_env_telemetry(
&self.provider,

View File

@@ -1,16 +1,15 @@
pub mod cache;
pub mod collaboration_mode_presets;
pub mod manager;
pub mod model_info;
pub mod cache {
pub use codex_models::models_manager::cache::*;
}
pub mod collaboration_mode_presets {
pub use codex_models::models_manager::collaboration_mode_presets::*;
}
pub mod model_presets {
pub use codex_models::models_manager::model_presets::*;
}
pub mod model_presets;
/// Convert the client version string to a whole version string (e.g. "1.2.3-alpha.4" -> "1.2.3").
pub fn client_version_to_whole() -> String {
codex_models::models_manager::client_version_to_whole()
format!(
"{}.{}.{}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH")
)
}

View File

@@ -46,6 +46,9 @@ use crate::skills::loader::SkillRoot;
use crate::skills::loader::load_skills_from_roots;
use codex_app_server_protocol::ConfigValueWriteParams;
use codex_app_server_protocol::MergeStrategy;
use codex_extensions::plugins::AppConnectorId;
use codex_extensions::plugins::PluginCapabilitySummary;
use codex_extensions::plugins::PluginTelemetryMetadata;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
@@ -70,10 +73,7 @@ const DEFAULT_MCP_CONFIG_FILE: &str = ".mcp.json";
const DEFAULT_APP_CONFIG_FILE: &str = ".app.json";
pub const OPENAI_CURATED_MARKETPLACE_NAME: &str = "openai-curated";
static CURATED_REPO_SYNC_STARTED: AtomicBool = AtomicBool::new(false);
const MAX_CAPABILITY_SUMMARY_DESCRIPTION_LEN: usize = 1024;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AppConnectorId(pub String);
const MAX_CAPABILITY_SUMMARY_DESCRIPTION_LEN: usize = 140;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginInstallRequest {
@@ -157,98 +157,6 @@ impl LoadedPlugin {
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginCapabilitySummary {
pub config_name: String,
pub display_name: String,
pub description: Option<String>,
pub has_skills: bool,
pub mcp_server_names: Vec<String>,
pub app_connector_ids: Vec<AppConnectorId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginTelemetryMetadata {
pub plugin_id: PluginId,
pub capability_summary: Option<PluginCapabilitySummary>,
}
impl PluginTelemetryMetadata {
pub fn from_plugin_id(plugin_id: &PluginId) -> Self {
Self {
plugin_id: plugin_id.clone(),
capability_summary: None,
}
}
}
impl PluginCapabilitySummary {
fn from_plugin(plugin: &LoadedPlugin) -> Option<Self> {
if !plugin.is_active() {
return None;
}
let mut mcp_server_names: Vec<String> = plugin.mcp_servers.keys().cloned().collect();
mcp_server_names.sort_unstable();
let summary = Self {
config_name: plugin.config_name.clone(),
display_name: plugin
.manifest_name
.clone()
.unwrap_or_else(|| plugin.config_name.clone()),
description: prompt_safe_plugin_description(plugin.manifest_description.as_deref()),
has_skills: !plugin.skill_roots.is_empty(),
mcp_server_names,
app_connector_ids: plugin.apps.clone(),
};
(summary.has_skills
|| !summary.mcp_server_names.is_empty()
|| !summary.app_connector_ids.is_empty())
.then_some(summary)
}
pub fn telemetry_metadata(&self) -> Option<PluginTelemetryMetadata> {
PluginId::parse(&self.config_name)
.ok()
.map(|plugin_id| PluginTelemetryMetadata {
plugin_id,
capability_summary: Some(self.clone()),
})
}
}
impl From<PluginDetailSummary> for PluginCapabilitySummary {
fn from(value: PluginDetailSummary) -> Self {
Self {
config_name: value.id,
display_name: value.name,
description: prompt_safe_plugin_description(value.description.as_deref()),
has_skills: !value.skills.is_empty(),
mcp_server_names: value.mcp_server_names,
app_connector_ids: value.apps,
}
}
}
fn prompt_safe_plugin_description(description: Option<&str>) -> Option<String> {
let description = description?
.split_whitespace()
.collect::<Vec<_>>()
.join(" ");
if description.is_empty() {
return None;
}
Some(
description
.chars()
.take(MAX_CAPABILITY_SUMMARY_DESCRIPTION_LEN)
.collect(),
)
}
#[derive(Debug, Clone, PartialEq)]
pub struct PluginLoadOutcome {
plugins: Vec<LoadedPlugin>,
@@ -265,7 +173,7 @@ impl PluginLoadOutcome {
fn from_plugins(plugins: Vec<LoadedPlugin>) -> Self {
let capability_summaries = plugins
.iter()
.filter_map(PluginCapabilitySummary::from_plugin)
.filter_map(plugin_capability_summary_from_loaded_plugin)
.collect::<Vec<_>>();
Self {
plugins,
@@ -321,6 +229,64 @@ impl PluginLoadOutcome {
}
}
/// Builds a capability summary for an active plugin, or `None` when the
/// plugin is inactive or exposes no capabilities (no skills, MCP servers, or
/// app connectors).
fn plugin_capability_summary_from_loaded_plugin(
    plugin: &LoadedPlugin,
) -> Option<PluginCapabilitySummary> {
    if !plugin.is_active() {
        return None;
    }
    // Sort for deterministic output regardless of map iteration order.
    let mut mcp_server_names: Vec<String> = plugin.mcp_servers.keys().cloned().collect();
    mcp_server_names.sort_unstable();
    let summary = PluginCapabilitySummary {
        config_name: plugin.config_name.clone(),
        // Fall back to the config name when the manifest omits a name.
        display_name: plugin
            .manifest_name
            .clone()
            .unwrap_or_else(|| plugin.config_name.clone()),
        description: prompt_safe_plugin_description(plugin.manifest_description.as_deref()),
        has_skills: !plugin.skill_roots.is_empty(),
        mcp_server_names,
        app_connector_ids: plugin.apps.clone(),
    };
    // Only report plugins that actually surface at least one capability.
    (summary.has_skills
        || !summary.mcp_server_names.is_empty()
        || !summary.app_connector_ids.is_empty())
    .then_some(summary)
}
impl From<PluginDetailSummary> for PluginCapabilitySummary {
    /// Converts a detail summary into a capability summary, sanitizing the
    /// description so it is safe to embed in a prompt.
    fn from(value: PluginDetailSummary) -> Self {
        Self {
            config_name: value.id,
            display_name: value.name,
            description: prompt_safe_plugin_description(value.description.as_deref()),
            has_skills: !value.skills.is_empty(),
            mcp_server_names: value.mcp_server_names,
            app_connector_ids: value.apps,
        }
    }
}
/// Normalizes a plugin description for prompt embedding: collapses all
/// whitespace runs (including newlines) to single spaces and truncates to
/// `MAX_CAPABILITY_SUMMARY_DESCRIPTION_LEN` characters. Returns `None` for a
/// missing or whitespace-only description.
fn prompt_safe_plugin_description(description: Option<&str>) -> Option<String> {
    let mut normalized = String::new();
    for word in description?.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    if normalized.is_empty() {
        return None;
    }
    Some(
        normalized
            .chars()
            .take(MAX_CAPABILITY_SUMMARY_DESCRIPTION_LEN)
            .collect(),
    )
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct RemotePluginSyncResult {
/// Plugin ids newly installed into the local plugin cache.

View File

@@ -1,454 +1,6 @@
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use serde_json::Value as JsonValue;
use std::fs;
use std::path::Component;
use std::path::Path;
pub(crate) const PLUGIN_MANIFEST_PATH: &str = ".codex-plugin/plugin.json";
const MAX_DEFAULT_PROMPT_COUNT: usize = 3;
const MAX_DEFAULT_PROMPT_LEN: usize = 128;
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct PluginManifest {
#[serde(default)]
pub(crate) name: String,
#[serde(default)]
pub(crate) description: Option<String>,
// Keep manifest paths as raw strings so we can validate the required `./...` syntax before
// resolving them under the plugin root.
#[serde(default)]
skills: Option<String>,
#[serde(default)]
mcp_servers: Option<String>,
#[serde(default)]
apps: Option<String>,
#[serde(default)]
interface: Option<PluginManifestInterface>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginManifestPaths {
pub skills: Option<AbsolutePathBuf>,
pub mcp_servers: Option<AbsolutePathBuf>,
pub apps: Option<AbsolutePathBuf>,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginManifestInterfaceSummary {
pub display_name: Option<String>,
pub short_description: Option<String>,
pub long_description: Option<String>,
pub developer_name: Option<String>,
pub category: Option<String>,
pub capabilities: Vec<String>,
pub website_url: Option<String>,
pub privacy_policy_url: Option<String>,
pub terms_of_service_url: Option<String>,
pub default_prompt: Option<Vec<String>>,
pub brand_color: Option<String>,
pub composer_icon: Option<AbsolutePathBuf>,
pub logo: Option<AbsolutePathBuf>,
pub screenshots: Vec<AbsolutePathBuf>,
}
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PluginManifestInterface {
#[serde(default)]
display_name: Option<String>,
#[serde(default)]
short_description: Option<String>,
#[serde(default)]
long_description: Option<String>,
#[serde(default)]
developer_name: Option<String>,
#[serde(default)]
category: Option<String>,
#[serde(default)]
capabilities: Vec<String>,
#[serde(default)]
#[serde(alias = "websiteURL")]
website_url: Option<String>,
#[serde(default)]
#[serde(alias = "privacyPolicyURL")]
privacy_policy_url: Option<String>,
#[serde(default)]
#[serde(alias = "termsOfServiceURL")]
terms_of_service_url: Option<String>,
#[serde(default)]
default_prompt: Option<PluginManifestDefaultPrompt>,
#[serde(default)]
brand_color: Option<String>,
#[serde(default)]
composer_icon: Option<String>,
#[serde(default)]
logo: Option<String>,
#[serde(default)]
screenshots: Vec<String>,
}
/// `interface.defaultPrompt` may be a single string (legacy) or an array.
/// Untagged variants are tried in order, so `Invalid` (which matches any
/// JSON value) must stay last to capture unexpected shapes for warning.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PluginManifestDefaultPrompt {
    String(String),
    List(Vec<PluginManifestDefaultPromptEntry>),
    Invalid(JsonValue),
}
/// One entry in a `defaultPrompt` array; non-string entries are preserved as
/// `Invalid` so they can be reported (with their JSON type) instead of
/// failing the whole manifest parse.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PluginManifestDefaultPromptEntry {
    String(String),
    Invalid(JsonValue),
}
/// Reads and parses `plugin.json` under `plugin_root`.
///
/// Returns `None` when the manifest file is absent, unreadable, or fails to
/// parse; a parse failure is additionally logged so broken plugins remain
/// diagnosable.
pub(crate) fn load_plugin_manifest(plugin_root: &Path) -> Option<PluginManifest> {
    let manifest_path = plugin_root.join(PLUGIN_MANIFEST_PATH);
    if !manifest_path.is_file() {
        return None;
    }
    let raw = fs::read_to_string(&manifest_path).ok()?;
    serde_json::from_str(&raw)
        .map_err(|err| {
            tracing::warn!(
                path = %manifest_path.display(),
                "failed to parse plugin manifest: {err}"
            );
        })
        .ok()
}
/// Chooses the plugin's name: the manifest `name` when it is non-blank,
/// otherwise the plugin directory's file name, falling back to the raw
/// manifest `name` when no usable directory name exists.
pub(crate) fn plugin_manifest_name(manifest: &PluginManifest, plugin_root: &Path) -> String {
    if !manifest.name.trim().is_empty() {
        return manifest.name.clone();
    }
    match plugin_root.file_name().and_then(|name| name.to_str()) {
        Some(dir_name) => dir_name.to_string(),
        None => manifest.name.clone(),
    }
}
/// Builds the resolved interface summary for a plugin manifest.
///
/// Asset paths are validated and resolved relative to `plugin_root`, and the
/// `defaultPrompt` field is normalized. Returns `None` when the manifest has
/// no `interface` section or when every resolved field ends up empty.
pub(crate) fn plugin_manifest_interface(
    manifest: &PluginManifest,
    plugin_root: &Path,
) -> Option<PluginManifestInterfaceSummary> {
    let raw = manifest.interface.as_ref()?;
    let summary = PluginManifestInterfaceSummary {
        display_name: raw.display_name.clone(),
        short_description: raw.short_description.clone(),
        long_description: raw.long_description.clone(),
        developer_name: raw.developer_name.clone(),
        category: raw.category.clone(),
        capabilities: raw.capabilities.clone(),
        website_url: raw.website_url.clone(),
        privacy_policy_url: raw.privacy_policy_url.clone(),
        terms_of_service_url: raw.terms_of_service_url.clone(),
        default_prompt: resolve_default_prompts(plugin_root, raw.default_prompt.as_ref()),
        brand_color: raw.brand_color.clone(),
        composer_icon: resolve_interface_asset_path(
            plugin_root,
            "interface.composerIcon",
            raw.composer_icon.as_deref(),
        ),
        logo: resolve_interface_asset_path(plugin_root, "interface.logo", raw.logo.as_deref()),
        screenshots: raw
            .screenshots
            .iter()
            .filter_map(|screenshot| {
                resolve_interface_asset_path(plugin_root, "interface.screenshots", Some(screenshot))
            })
            .collect(),
    };
    // Treat an interface section with no usable content as absent.
    let is_empty = summary.display_name.is_none()
        && summary.short_description.is_none()
        && summary.long_description.is_none()
        && summary.developer_name.is_none()
        && summary.category.is_none()
        && summary.capabilities.is_empty()
        && summary.website_url.is_none()
        && summary.privacy_policy_url.is_none()
        && summary.terms_of_service_url.is_none()
        && summary.default_prompt.is_none()
        && summary.brand_color.is_none()
        && summary.composer_icon.is_none()
        && summary.logo.is_none()
        && summary.screenshots.is_empty();
    if is_empty { None } else { Some(summary) }
}
/// Resolves the manifest's `skills`, `mcpServers`, and `apps` entries to
/// absolute paths under `plugin_root`; entries that are absent or fail
/// validation become `None`.
pub(crate) fn plugin_manifest_paths(
    manifest: &PluginManifest,
    plugin_root: &Path,
) -> PluginManifestPaths {
    let skills = resolve_manifest_path(plugin_root, "skills", manifest.skills.as_deref());
    let mcp_servers =
        resolve_manifest_path(plugin_root, "mcpServers", manifest.mcp_servers.as_deref());
    let apps = resolve_manifest_path(plugin_root, "apps", manifest.apps.as_deref());
    PluginManifestPaths {
        skills,
        mcp_servers,
        apps,
    }
}
/// Resolves an `interface.*` asset path; currently a thin delegation that
/// shares the same `./`-relative validation rules as `resolve_manifest_path`.
fn resolve_interface_asset_path(
    plugin_root: &Path,
    field: &'static str,
    path: Option<&str>,
) -> Option<AbsolutePathBuf> {
    resolve_manifest_path(plugin_root, field, path)
}
/// Normalizes `interface.defaultPrompt`, which may be a single string
/// (legacy shape) or an array of strings.
///
/// Invalid or oversized entries are warned about and skipped, at most
/// `MAX_DEFAULT_PROMPT_COUNT` prompts are kept, and `None` is returned when
/// nothing usable remains.
fn resolve_default_prompts(
    plugin_root: &Path,
    value: Option<&PluginManifestDefaultPrompt>,
) -> Option<Vec<String>> {
    match value? {
        // Legacy shape: one bare string becomes a single-element list.
        PluginManifestDefaultPrompt::String(prompt) => {
            resolve_default_prompt_str(plugin_root, "interface.defaultPrompt", prompt)
                .map(|prompt| vec![prompt])
        }
        PluginManifestDefaultPrompt::List(values) => {
            let mut prompts = Vec::new();
            for (index, item) in values.iter().enumerate() {
                // Checked per item so the warning fires exactly once, as soon
                // as an entry beyond the cap is encountered.
                if prompts.len() >= MAX_DEFAULT_PROMPT_COUNT {
                    warn_invalid_default_prompt(
                        plugin_root,
                        "interface.defaultPrompt",
                        &format!("maximum of {MAX_DEFAULT_PROMPT_COUNT} prompts is supported"),
                    );
                    break;
                }
                match item {
                    PluginManifestDefaultPromptEntry::String(prompt) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        if let Some(prompt) =
                            resolve_default_prompt_str(plugin_root, &field, prompt)
                        {
                            prompts.push(prompt);
                        }
                    }
                    // Non-string entries are reported with their JSON type.
                    PluginManifestDefaultPromptEntry::Invalid(value) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        warn_invalid_default_prompt(
                            plugin_root,
                            &field,
                            &format!("expected a string, found {}", json_value_type(value)),
                        );
                    }
                }
            }
            (!prompts.is_empty()).then_some(prompts)
        }
        PluginManifestDefaultPrompt::Invalid(value) => {
            warn_invalid_default_prompt(
                plugin_root,
                "interface.defaultPrompt",
                &format!(
                    "expected a string or array of strings, found {}",
                    json_value_type(value)
                ),
            );
            None
        }
    }
}
/// Normalizes one default prompt: collapses every run of whitespace to a
/// single space, then rejects (with a manifest warning) prompts that are
/// empty or longer than `MAX_DEFAULT_PROMPT_LEN` characters.
fn resolve_default_prompt_str(plugin_root: &Path, field: &str, prompt: &str) -> Option<String> {
    let normalized = prompt.split_whitespace().collect::<Vec<_>>().join(" ");
    if normalized.is_empty() {
        warn_invalid_default_prompt(plugin_root, field, "prompt must not be empty");
        None
    } else if normalized.chars().count() > MAX_DEFAULT_PROMPT_LEN {
        warn_invalid_default_prompt(
            plugin_root,
            field,
            &format!("prompt must be at most {MAX_DEFAULT_PROMPT_LEN} characters"),
        );
        None
    } else {
        Some(normalized)
    }
}
fn warn_invalid_default_prompt(plugin_root: &Path, field: &str, message: &str) {
let manifest_path = plugin_root.join(PLUGIN_MANIFEST_PATH);
tracing::warn!(
path = %manifest_path.display(),
"ignoring {field}: {message}"
);
}
/// Maps a JSON value to the human-readable type name used in manifest
/// warning messages.
fn json_value_type(value: &JsonValue) -> &'static str {
    match value {
        JsonValue::Null => "null",
        JsonValue::Bool(_) => "boolean",
        JsonValue::Number(_) => "number",
        JsonValue::String(_) => "string",
        JsonValue::Array(_) => "array",
        JsonValue::Object(_) => "object",
    }
}
/// Validates a manifest-declared path and resolves it under `plugin_root`.
///
/// Accepted paths must start with `./`, must not be bare `./`, and may only
/// contain normal components (no `..`, no root/prefix components), which
/// keeps every resolved path inside the plugin root. Violations are logged
/// and yield `None`.
fn resolve_manifest_path(
    plugin_root: &Path,
    field: &'static str,
    path: Option<&str>,
) -> Option<AbsolutePathBuf> {
    let raw = path?;
    if raw.is_empty() {
        return None;
    }
    let Some(relative) = raw.strip_prefix("./") else {
        tracing::warn!("ignoring {field}: path must start with `./` relative to plugin root");
        return None;
    };
    if relative.is_empty() {
        tracing::warn!("ignoring {field}: path must not be `./`");
        return None;
    }
    // Rebuild the path component by component so anything other than plain
    // names is rejected before touching the filesystem.
    let mut normalized = std::path::PathBuf::new();
    for component in Path::new(relative).components() {
        match component {
            Component::Normal(part) => normalized.push(part),
            Component::ParentDir => {
                tracing::warn!("ignoring {field}: path must not contain '..'");
                return None;
            }
            _ => {
                tracing::warn!("ignoring {field}: path must stay within the plugin root");
                return None;
            }
        }
    }
    match AbsolutePathBuf::try_from(plugin_root.join(normalized)) {
        Ok(absolute) => Some(absolute),
        Err(err) => {
            tracing::warn!("ignoring {field}: path must resolve to an absolute path: {err}");
            None
        }
    }
}
// Tests for `defaultPrompt` normalization in `plugin_manifest_interface`:
// legacy string shape, array shape (with invalid/oversized/blank entries and
// the prompt-count cap), and an unsupported object shape.
#[cfg(test)]
mod tests {
    use super::MAX_DEFAULT_PROMPT_LEN;
    use super::PluginManifest;
    use super::plugin_manifest_interface;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::path::Path;
    use tempfile::tempdir;
    // Writes `.codex-plugin/plugin.json` with the given `interface` JSON body.
    fn write_manifest(plugin_root: &Path, interface: &str) {
        fs::create_dir_all(plugin_root.join(".codex-plugin")).expect("create manifest dir");
        fs::write(
            plugin_root.join(".codex-plugin/plugin.json"),
            format!(
                r#"{{
    "name": "demo-plugin",
    "interface": {interface}
}}"#
            ),
        )
        .expect("write manifest");
    }
    // Reads back and parses the manifest previously written by `write_manifest`.
    fn load_manifest(plugin_root: &Path) -> PluginManifest {
        let manifest_path = plugin_root.join(".codex-plugin/plugin.json");
        let contents = fs::read_to_string(manifest_path).expect("read manifest");
        serde_json::from_str(&contents).expect("parse manifest")
    }
    #[test]
    fn plugin_manifest_interface_accepts_legacy_default_prompt_string() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        write_manifest(
            &plugin_root,
            r#"{
    "displayName": "Demo Plugin",
    "defaultPrompt": "  Summarize   my inbox  "
}"#,
        );
        let manifest = load_manifest(&plugin_root);
        let interface =
            plugin_manifest_interface(&manifest, &plugin_root).expect("plugin interface");
        // Whitespace runs collapse to single spaces and the ends are trimmed.
        assert_eq!(
            interface.default_prompt,
            Some(vec!["Summarize my inbox".to_string()])
        );
    }
    #[test]
    fn plugin_manifest_interface_normalizes_default_prompt_array() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        let too_long = "x".repeat(MAX_DEFAULT_PROMPT_LEN + 1);
        write_manifest(
            &plugin_root,
            &format!(
                r#"{{
    "displayName": "Demo Plugin",
    "defaultPrompt": [
        "  Summarize   my inbox  ",
        123,
        "{too_long}",
        "   ",
        "Draft the   reply  ",
        "Find my next action",
        "Archive old mail"
    ]
}}"#
            ),
        );
        let manifest = load_manifest(&plugin_root);
        let interface =
            plugin_manifest_interface(&manifest, &plugin_root).expect("plugin interface");
        // Non-string, too-long, and blank entries are dropped; the remaining
        // prompts keep their order (and the cap cuts the rest).
        assert_eq!(
            interface.default_prompt,
            Some(vec![
                "Summarize my inbox".to_string(),
                "Draft the reply".to_string(),
                "Find my next action".to_string(),
            ])
        );
    }
    #[test]
    fn plugin_manifest_interface_ignores_invalid_default_prompt_shape() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        write_manifest(
            &plugin_root,
            r#"{
    "displayName": "Demo Plugin",
    "defaultPrompt": { "text": "Summarize my inbox" }
}"#,
        );
        let manifest = load_manifest(&plugin_root);
        let interface =
            plugin_manifest_interface(&manifest, &plugin_root).expect("plugin interface");
        // An object is neither string nor array, so the field is dropped.
        assert_eq!(interface.default_prompt, None);
    }
}
pub use codex_extensions::plugins::PluginManifestInterfaceSummary;
pub(crate) use codex_extensions::plugins::PluginManifestPaths;
pub(crate) use codex_extensions::plugins::load_plugin_manifest;
pub(crate) use codex_extensions::plugins::plugin_manifest_interface;
pub(crate) use codex_extensions::plugins::plugin_manifest_name;
pub(crate) use codex_extensions::plugins::plugin_manifest_paths;

View File

@@ -11,17 +11,20 @@ mod store;
pub(crate) mod test_support;
mod toggles;
pub use codex_extensions::plugins::AppConnectorId;
pub use codex_extensions::plugins::PluginCapabilitySummary;
pub use codex_extensions::plugins::PluginId;
pub use codex_extensions::plugins::PluginManifestInterfaceSummary;
pub use codex_extensions::plugins::PluginTelemetryMetadata;
pub(crate) use curated_repo::curated_plugins_repo_path;
pub(crate) use curated_repo::read_curated_plugins_sha;
pub(crate) use curated_repo::sync_openai_plugins_repo;
pub(crate) use discoverable::list_tool_suggest_discoverable_plugins;
pub(crate) use injection::build_plugin_injections;
pub use manager::AppConnectorId;
pub use manager::ConfiguredMarketplacePluginSummary;
pub use manager::ConfiguredMarketplaceSummary;
pub use manager::LoadedPlugin;
pub use manager::OPENAI_CURATED_MARKETPLACE_NAME;
pub use manager::PluginCapabilitySummary;
pub use manager::PluginDetailSummary;
pub use manager::PluginInstallError;
pub use manager::PluginInstallOutcome;
@@ -30,7 +33,6 @@ pub use manager::PluginLoadOutcome;
pub use manager::PluginReadOutcome;
pub use manager::PluginReadRequest;
pub use manager::PluginRemoteSyncError;
pub use manager::PluginTelemetryMetadata;
pub use manager::PluginUninstallError;
pub use manager::PluginsManager;
pub use manager::RemotePluginSyncResult;
@@ -38,7 +40,6 @@ pub use manager::installed_plugin_telemetry_metadata;
pub use manager::load_plugin_apps;
pub(crate) use manager::plugin_namespace_for_skill_path;
pub use manager::plugin_telemetry_metadata_from_root;
pub use manifest::PluginManifestInterfaceSummary;
pub(crate) use manifest::PluginManifestPaths;
pub(crate) use manifest::load_plugin_manifest;
pub(crate) use manifest::plugin_manifest_interface;
@@ -50,5 +51,4 @@ pub use marketplace::MarketplacePluginInstallPolicy;
pub use marketplace::MarketplacePluginSourceSummary;
pub(crate) use render::render_explicit_plugin_instructions;
pub(crate) use render::render_plugins_section;
pub use store::PluginId;
pub use toggles::collect_plugin_enabled_candidates;

View File

@@ -1,91 +1,5 @@
use crate::plugins::PluginCapabilitySummary;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_OPEN_TAG;
/// Renders the "## Plugins" instructions section for the system prompt,
/// wrapped in the protocol's plugin-instruction tags.
///
/// Returns `None` when no plugins are enabled so callers can omit the
/// section entirely.
pub(crate) fn render_plugins_section(plugins: &[PluginCapabilitySummary]) -> Option<String> {
    if plugins.is_empty() {
        return None;
    }
    let mut lines: Vec<String> = Vec::new();
    lines.push("## Plugins".to_string());
    lines.push("A plugin is a local bundle of skills, MCP servers, and apps. Below is the list of plugins that are enabled and available in this session.".to_string());
    lines.push("### Available plugins".to_string());
    for plugin in plugins {
        let entry = match plugin.description.as_deref() {
            Some(description) => format!("- `{}`: {description}", plugin.display_name),
            None => format!("- `{}`", plugin.display_name),
        };
        lines.push(entry);
    }
    lines.push("### How to use plugins".to_string());
    lines.push(
        r###"- Discovery: The list above is the plugins available in this session.
- Skill naming: If a plugin contributes skills, those skill entries are prefixed with `plugin_name:` in the Skills list.
- Trigger rules: If the user explicitly names a plugin, prefer capabilities associated with that plugin for that turn.
- Relationship to capabilities: Plugins are not invoked directly. Use their underlying skills, MCP tools, and app tools to help solve the task.
- Preference: When a relevant plugin is available, prefer using capabilities associated with that plugin over standalone capabilities that provide similar functionality.
- Missing/blocked: If the user requests a plugin that is not listed above, or the plugin does not have relevant callable capabilities for the task, say so briefly and continue with the best fallback."###
            .to_string(),
    );
    let body = lines.join("\n");
    Some(format!(
        "{PLUGINS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{PLUGINS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}
/// Renders per-turn instructions for a plugin the user explicitly named,
/// listing its skills prefix, MCP servers, and apps.
///
/// Returns `None` when the plugin contributes nothing usable this session
/// (no skills, no MCP servers, no apps).
pub(crate) fn render_explicit_plugin_instructions(
    plugin: &PluginCapabilitySummary,
    available_mcp_servers: &[String],
    available_apps: &[String],
) -> Option<String> {
    // Renders `a, b` as "`a`, `b`" for the bullet lists below.
    let backtick_list = |names: &[String]| {
        names
            .iter()
            .map(|name| format!("`{name}`"))
            .collect::<Vec<_>>()
            .join(", ")
    };
    let mut lines = vec![format!(
        "Capabilities from the `{}` plugin:",
        plugin.display_name
    )];
    if plugin.has_skills {
        lines.push(format!(
            "- Skills from this plugin are prefixed with `{}:`.",
            plugin.display_name
        ));
    }
    if !available_mcp_servers.is_empty() {
        lines.push(format!(
            "- MCP servers from this plugin available in this session: {}.",
            backtick_list(available_mcp_servers)
        ));
    }
    if !available_apps.is_empty() {
        lines.push(format!(
            "- Apps from this plugin available in this session: {}.",
            backtick_list(available_apps)
        ));
    }
    // Only the header line means there is nothing to instruct about.
    if lines.len() == 1 {
        return None;
    }
    lines.push("Use these plugin-associated capabilities to help solve the task.".to_string());
    Some(lines.join("\n"))
}
pub(crate) use codex_extensions::plugins::render_explicit_plugin_instructions;
pub(crate) use codex_extensions::plugins::render_plugins_section;
#[cfg(test)]
#[path = "render_tests.rs"]

View File

@@ -1,346 +1,6 @@
use super::load_plugin_manifest;
use super::manifest::PLUGIN_MANIFEST_PATH;
use super::plugin_manifest_name;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
// Version label used for plugins installed without an explicit version.
pub(crate) const DEFAULT_PLUGIN_VERSION: &str = "local";
// Plugin cache directory, relative to `codex_home`.
pub(crate) const PLUGINS_CACHE_DIR: &str = "plugins/cache";
/// Validation error for [`PluginId`] segments or keys; the message already
/// names the offending segment.
#[derive(Debug, thiserror::Error)]
pub enum PluginIdError {
    #[error("{0}")]
    Invalid(String),
}
/// Identity of a plugin, keyed as `<plugin_name>@<marketplace_name>`.
/// Both segments are validated to be safe directory-name components.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginId {
    pub plugin_name: String,
    pub marketplace_name: String,
}
impl PluginId {
    /// Validates both segments and builds the id.
    pub fn new(plugin_name: String, marketplace_name: String) -> Result<Self, PluginIdError> {
        validate_plugin_segment(&plugin_name, "plugin name").map_err(PluginIdError::Invalid)?;
        validate_plugin_segment(&marketplace_name, "marketplace name")
            .map_err(PluginIdError::Invalid)?;
        Ok(Self {
            plugin_name,
            marketplace_name,
        })
    }
    /// Parses a `<plugin>@<marketplace>` key, splitting on the *last* `@`,
    /// and annotates any segment-validation error with the original key.
    pub fn parse(plugin_key: &str) -> Result<Self, PluginIdError> {
        let malformed = || {
            PluginIdError::Invalid(format!(
                "invalid plugin key `{plugin_key}`; expected <plugin>@<marketplace>"
            ))
        };
        let (plugin_name, marketplace_name) =
            plugin_key.rsplit_once('@').ok_or_else(malformed)?;
        if plugin_name.is_empty() || marketplace_name.is_empty() {
            return Err(malformed());
        }
        Self::new(plugin_name.to_string(), marketplace_name.to_string()).map_err(|err| {
            let PluginIdError::Invalid(message) = err;
            PluginIdError::Invalid(format!("{message} in `{plugin_key}`"))
        })
    }
    /// Renders the canonical `<plugin>@<marketplace>` key.
    pub fn as_key(&self) -> String {
        format!("{}@{}", self.plugin_name, self.marketplace_name)
    }
}
/// Outcome of a successful install: which plugin/version was installed and
/// where its cache entry now lives.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginInstallResult {
    pub plugin_id: PluginId,
    pub plugin_version: String,
    pub installed_path: AbsolutePathBuf,
}
/// Filesystem-backed plugin cache rooted at `<codex_home>/plugins/cache`,
/// laid out as `<marketplace>/<plugin>/<version>/`.
#[derive(Debug, Clone)]
pub struct PluginStore {
    root: AbsolutePathBuf,
}
impl PluginStore {
    /// Creates a store rooted at `<codex_home>/plugins/cache`.
    ///
    /// Panics when the joined path is not absolute; `codex_home` is expected
    /// to already be an absolute path.
    pub fn new(codex_home: PathBuf) -> Self {
        Self {
            root: AbsolutePathBuf::try_from(codex_home.join(PLUGINS_CACHE_DIR))
                .unwrap_or_else(|err| panic!("plugin cache root should be absolute: {err}")),
        }
    }
    /// Absolute root of the plugin cache.
    pub fn root(&self) -> &AbsolutePathBuf {
        &self.root
    }
    /// `<root>/<marketplace>/<plugin>` — parent directory of all cached
    /// versions of one plugin.
    pub fn plugin_base_root(&self, plugin_id: &PluginId) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.root
                .as_path()
                .join(&plugin_id.marketplace_name)
                .join(&plugin_id.plugin_name),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// `<root>/<marketplace>/<plugin>/<version>` — cache entry for one version.
    pub fn plugin_root(&self, plugin_id: &PluginId, plugin_version: &str) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.plugin_base_root(plugin_id)
                .as_path()
                .join(plugin_version),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// Returns the installed version, but only when exactly one valid version
    /// directory exists; zero or multiple candidates yield `None`.
    pub fn active_plugin_version(&self, plugin_id: &PluginId) -> Option<String> {
        let mut discovered_versions = fs::read_dir(self.plugin_base_root(plugin_id).as_path())
            .ok()?
            .filter_map(Result::ok)
            .filter_map(|entry| {
                // Keep only directories whose names are valid version segments.
                entry.file_type().ok().filter(std::fs::FileType::is_dir)?;
                entry.file_name().into_string().ok()
            })
            .filter(|version| validate_plugin_segment(version, "plugin version").is_ok())
            .collect::<Vec<_>>();
        discovered_versions.sort_unstable();
        if discovered_versions.len() == 1 {
            discovered_versions.pop()
        } else {
            None
        }
    }
    /// Cache directory of the single active version, if unambiguous.
    pub fn active_plugin_root(&self, plugin_id: &PluginId) -> Option<AbsolutePathBuf> {
        self.active_plugin_version(plugin_id)
            .map(|plugin_version| self.plugin_root(plugin_id, &plugin_version))
    }
    /// True when exactly one valid version is cached for this plugin.
    pub fn is_installed(&self, plugin_id: &PluginId) -> bool {
        self.active_plugin_version(plugin_id).is_some()
    }
    /// Installs from `source_path` under the default ("local") version label.
    pub fn install(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        self.install_with_version(source_path, plugin_id, DEFAULT_PLUGIN_VERSION.to_string())
    }
    /// Validates the source (directory, manifest name matching `plugin_id`,
    /// valid version segment) and atomically replaces the plugin's cache
    /// entry with a copy of the source tree.
    pub fn install_with_version(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
        plugin_version: String,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        if !source_path.as_path().is_dir() {
            return Err(PluginStoreError::Invalid(format!(
                "plugin source path is not a directory: {}",
                source_path.display()
            )));
        }
        // The manifest-derived name must match the id we are installing under.
        let plugin_name = plugin_name_for_source(source_path.as_path())?;
        if plugin_name != plugin_id.plugin_name {
            return Err(PluginStoreError::Invalid(format!(
                "plugin manifest name `{plugin_name}` does not match marketplace plugin name `{}`",
                plugin_id.plugin_name
            )));
        }
        validate_plugin_segment(&plugin_version, "plugin version")
            .map_err(PluginStoreError::Invalid)?;
        let installed_path = self.plugin_root(&plugin_id, &plugin_version);
        replace_plugin_root_atomically(
            source_path.as_path(),
            self.plugin_base_root(&plugin_id).as_path(),
            &plugin_version,
        )?;
        Ok(PluginInstallResult {
            plugin_id,
            plugin_version,
            installed_path,
        })
    }
    /// Removes every cached version of the plugin; missing entries are Ok.
    pub fn uninstall(&self, plugin_id: &PluginId) -> Result<(), PluginStoreError> {
        remove_existing_target(self.plugin_base_root(plugin_id).as_path())
    }
}
/// Errors from plugin cache operations: I/O failures carry a static context
/// string plus the underlying error; validation failures carry a message.
#[derive(Debug, thiserror::Error)]
pub enum PluginStoreError {
    #[error("{context}: {source}")]
    Io {
        context: &'static str,
        #[source]
        source: io::Error,
    },
    #[error("{0}")]
    Invalid(String),
}
impl PluginStoreError {
    // Shorthand constructor for the `Io` variant.
    fn io(context: &'static str, source: io::Error) -> Self {
        Self::Io { context, source }
    }
}
/// Derives and validates the plugin name for an install source directory.
///
/// Distinguishes a missing manifest from an unreadable/unparseable one so
/// the error message points at the actual problem.
fn plugin_name_for_source(source_path: &Path) -> Result<String, PluginStoreError> {
    let manifest_path = source_path.join(PLUGIN_MANIFEST_PATH);
    if !manifest_path.is_file() {
        return Err(PluginStoreError::Invalid(format!(
            "missing plugin manifest: {}",
            manifest_path.display()
        )));
    }
    let Some(manifest) = load_plugin_manifest(source_path) else {
        return Err(PluginStoreError::Invalid(format!(
            "missing or invalid plugin manifest: {}",
            manifest_path.display()
        )));
    };
    let plugin_name = plugin_manifest_name(&manifest, source_path);
    validate_plugin_segment(&plugin_name, "plugin name").map_err(PluginStoreError::Invalid)?;
    Ok(plugin_name)
}
/// Checks that a plugin-id segment (name, marketplace, or version) is
/// non-empty and limited to ASCII letters, digits, `_`, and `-`; segments
/// become directory names, so the charset is deliberately strict.
fn validate_plugin_segment(segment: &str, kind: &str) -> Result<(), String> {
    if segment.is_empty() {
        return Err(format!("invalid {kind}: must not be empty"));
    }
    let allowed = |ch: char| ch.is_ascii_alphanumeric() || ch == '-' || ch == '_';
    if segment.chars().all(allowed) {
        Ok(())
    } else {
        Err(format!(
            "invalid {kind}: only ASCII letters, digits, `_`, and `-` are allowed"
        ))
    }
}
fn remove_existing_target(path: &Path) -> Result<(), PluginStoreError> {
if !path.exists() {
return Ok(());
}
if path.is_dir() {
fs::remove_dir_all(path).map_err(|err| {
PluginStoreError::io("failed to remove existing plugin cache entry", err)
})
} else {
fs::remove_file(path).map_err(|err| {
PluginStoreError::io("failed to remove existing plugin cache entry", err)
})
}
}
/// Replaces `<target_root>/<plugin_version>` with a copy of `source` using a
/// stage-then-rename scheme so readers never observe a half-copied tree.
///
/// Steps: copy into a temp staging dir next to the target, move any existing
/// entry aside into a temp backup dir, rename the staged tree into place, and
/// roll the backup back if that final rename fails. If even the rollback
/// fails, the backup is kept on disk and its path is reported in the error.
fn replace_plugin_root_atomically(
    source: &Path,
    target_root: &Path,
    plugin_version: &str,
) -> Result<(), PluginStoreError> {
    let Some(parent) = target_root.parent() else {
        return Err(PluginStoreError::Invalid(format!(
            "plugin cache path has no parent: {}",
            target_root.display()
        )));
    };
    fs::create_dir_all(parent)
        .map_err(|err| PluginStoreError::io("failed to create plugin cache directory", err))?;
    let Some(plugin_dir_name) = target_root.file_name() else {
        return Err(PluginStoreError::Invalid(format!(
            "plugin cache path has no directory name: {}",
            target_root.display()
        )));
    };
    // Stage in the same parent directory so the final rename stays on one
    // filesystem (cross-device renames would fail).
    let staged_dir = tempfile::Builder::new()
        .prefix("plugin-install-")
        .tempdir_in(parent)
        .map_err(|err| {
            PluginStoreError::io("failed to create temporary plugin cache directory", err)
        })?;
    let staged_root = staged_dir.path().join(plugin_dir_name);
    let staged_version_root = staged_root.join(plugin_version);
    copy_dir_recursive(source, &staged_version_root)?;
    if target_root.exists() {
        // Move the current entry aside first so it can be restored on failure.
        let backup_dir = tempfile::Builder::new()
            .prefix("plugin-backup-")
            .tempdir_in(parent)
            .map_err(|err| {
                PluginStoreError::io("failed to create plugin cache backup directory", err)
            })?;
        let backup_root = backup_dir.path().join(plugin_dir_name);
        fs::rename(target_root, &backup_root)
            .map_err(|err| PluginStoreError::io("failed to back up plugin cache entry", err))?;
        if let Err(err) = fs::rename(&staged_root, target_root) {
            // Activation failed: try to put the previous entry back.
            let rollback_result = fs::rename(&backup_root, target_root);
            return match rollback_result {
                Ok(()) => Err(PluginStoreError::io(
                    "failed to activate updated plugin cache entry",
                    err,
                )),
                Err(rollback_err) => {
                    // Keep the backup from being deleted on drop and tell the
                    // caller where the previous entry survived.
                    let backup_path = backup_dir.keep().join(plugin_dir_name);
                    Err(PluginStoreError::Invalid(format!(
                        "failed to activate updated plugin cache entry at {}: {err}; failed to restore previous cache entry (left at {}): {rollback_err}",
                        target_root.display(),
                        backup_path.display()
                    )))
                }
            };
        }
    } else {
        fs::rename(&staged_root, target_root)
            .map_err(|err| PluginStoreError::io("failed to activate plugin cache entry", err))?;
    }
    Ok(())
}
/// Recursively copies regular files and directories from `source` into
/// `target`, creating `target` first.
///
/// NOTE(review): entries that are neither regular files nor directories
/// (e.g. symlinks — `DirEntry::file_type` does not follow links) are
/// silently skipped; confirm that is intended for plugin sources.
fn copy_dir_recursive(source: &Path, target: &Path) -> Result<(), PluginStoreError> {
    fs::create_dir_all(target)
        .map_err(|err| PluginStoreError::io("failed to create plugin target directory", err))?;
    let entries = fs::read_dir(source)
        .map_err(|err| PluginStoreError::io("failed to read plugin source directory", err))?;
    for entry in entries {
        let entry =
            entry.map_err(|err| PluginStoreError::io("failed to enumerate plugin source", err))?;
        let kind = entry
            .file_type()
            .map_err(|err| PluginStoreError::io("failed to inspect plugin source entry", err))?;
        let from = entry.path();
        let to = target.join(entry.file_name());
        if kind.is_dir() {
            copy_dir_recursive(&from, &to)?;
        } else if kind.is_file() {
            fs::copy(&from, &to)
                .map_err(|err| PluginStoreError::io("failed to copy plugin file", err))?;
        }
    }
    Ok(())
}
#[cfg(test)]
#[path = "store_tests.rs"]
mod tests;
pub(crate) use codex_extensions::plugins::DEFAULT_PLUGIN_VERSION;
pub use codex_extensions::plugins::PluginId;
pub use codex_extensions::plugins::PluginIdError;
pub(crate) use codex_extensions::plugins::PluginInstallResult;
pub(crate) use codex_extensions::plugins::PluginStore;
pub use codex_extensions::plugins::PluginStoreError;

View File

@@ -348,7 +348,7 @@ pub(crate) async fn handle_start(
let provider = sess.provider().await;
let auth = sess.services.auth_manager.auth().await;
let realtime_api_key = realtime_api_key(auth.as_ref(), &provider)?;
let mut api_provider = provider.to_api_provider(/*use_chatgpt_base_url*/ false);
let mut api_provider = provider.to_api_provider(Some(crate::auth::AuthMode::ApiKey))?;
let config = sess.get_config().await;
if let Some(realtime_ws_base_url) = &config.experimental_realtime_ws_base_url {
api_provider.base_url = realtime_ws_base_url.clone();

View File

@@ -1 +1,167 @@
pub use codex_models::response_debug_context::*;
use base64::Engine;
use codex_api::TransportError;
use codex_api::error::ApiError;
// Response headers used to correlate failures with server-side logs.
const REQUEST_ID_HEADER: &str = "x-request-id";
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
const CF_RAY_HEADER: &str = "cf-ray";
// Auth diagnostics; `x-error-json` carries base64-encoded JSON.
const AUTH_ERROR_HEADER: &str = "x-openai-authorization-error";
const X_ERROR_JSON_HEADER: &str = "x-error-json";
/// Debug identifiers extracted from an HTTP error response's headers; every
/// field is best-effort and `None` when the header is absent or unreadable.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub(crate) struct ResponseDebugContext {
    pub(crate) request_id: Option<String>,
    pub(crate) cf_ray: Option<String>,
    pub(crate) auth_error: Option<String>,
    // `error.code` parsed out of the base64-encoded `x-error-json` header.
    pub(crate) auth_error_code: Option<String>,
}
pub(crate) fn extract_response_debug_context(transport: &TransportError) -> ResponseDebugContext {
let mut context = ResponseDebugContext::default();
let TransportError::Http {
headers, body: _, ..
} = transport
else {
return context;
};
let extract_header = |name: &str| {
headers
.as_ref()
.and_then(|headers| headers.get(name))
.and_then(|value| value.to_str().ok())
.map(str::to_string)
};
context.request_id =
extract_header(REQUEST_ID_HEADER).or_else(|| extract_header(OAI_REQUEST_ID_HEADER));
context.cf_ray = extract_header(CF_RAY_HEADER);
context.auth_error = extract_header(AUTH_ERROR_HEADER);
context.auth_error_code = extract_header(X_ERROR_JSON_HEADER).and_then(|encoded| {
let decoded = base64::engine::general_purpose::STANDARD
.decode(encoded)
.ok()?;
let parsed = serde_json::from_slice::<serde_json::Value>(&decoded).ok()?;
parsed
.get("error")
.and_then(|error| error.get("code"))
.and_then(serde_json::Value::as_str)
.map(str::to_string)
});
context
}
pub(crate) fn extract_response_debug_context_from_api_error(
error: &ApiError,
) -> ResponseDebugContext {
match error {
ApiError::Transport(transport) => extract_response_debug_context(transport),
_ => ResponseDebugContext::default(),
}
}
/// Short transport-error description for telemetry.
///
/// HTTP errors are reduced to their status code only — bodies and headers
/// may carry sensitive data and must not reach telemetry.
pub(crate) fn telemetry_transport_error_message(error: &TransportError) -> String {
    match error {
        TransportError::Http { status, .. } => format!("http {}", status.as_u16()),
        TransportError::RetryLimit => "retry limit reached".to_string(),
        TransportError::Timeout => "timeout".to_string(),
        TransportError::Network(err) | TransportError::Build(err) => err.to_string(),
    }
}
pub(crate) fn telemetry_api_error_message(error: &ApiError) -> String {
match error {
ApiError::Transport(transport) => telemetry_transport_error_message(transport),
ApiError::Api { status, .. } => format!("api error {}", status.as_u16()),
ApiError::Stream(err) => err.to_string(),
ApiError::ContextWindowExceeded => "context window exceeded".to_string(),
ApiError::QuotaExceeded => "quota exceeded".to_string(),
ApiError::UsageNotIncluded => "usage not included".to_string(),
ApiError::Retryable { .. } => "retryable error".to_string(),
ApiError::RateLimit(_) => "rate limit".to_string(),
ApiError::InvalidRequest { .. } => "invalid request".to_string(),
ApiError::ServerOverloaded => "server overloaded".to_string(),
}
}
// Tests: header extraction/decoding for the debug context, and the privacy
// guarantee that telemetry messages never include HTTP response bodies.
#[cfg(test)]
mod tests {
    use super::ResponseDebugContext;
    use super::extract_response_debug_context;
    use super::telemetry_api_error_message;
    use super::telemetry_transport_error_message;
    use codex_api::TransportError;
    use codex_api::error::ApiError;
    use http::HeaderMap;
    use http::HeaderValue;
    use http::StatusCode;
    use pretty_assertions::assert_eq;
    #[test]
    fn extract_response_debug_context_decodes_identity_headers() {
        let mut headers = HeaderMap::new();
        headers.insert("x-oai-request-id", HeaderValue::from_static("req-auth"));
        headers.insert("cf-ray", HeaderValue::from_static("ray-auth"));
        headers.insert(
            "x-openai-authorization-error",
            HeaderValue::from_static("missing_authorization_header"),
        );
        // base64 of `{"error":{"code":"token_expired"}}`.
        headers.insert(
            "x-error-json",
            HeaderValue::from_static("eyJlcnJvciI6eyJjb2RlIjoidG9rZW5fZXhwaXJlZCJ9fQ=="),
        );
        let context = extract_response_debug_context(&TransportError::Http {
            status: StatusCode::UNAUTHORIZED,
            url: Some("https://chatgpt.com/backend-api/codex/models".to_string()),
            headers: Some(headers),
            body: Some(r#"{"error":{"message":"plain text error"},"status":401}"#.to_string()),
        });
        assert_eq!(
            context,
            ResponseDebugContext {
                request_id: Some("req-auth".to_string()),
                cf_ray: Some("ray-auth".to_string()),
                auth_error: Some("missing_authorization_header".to_string()),
                auth_error_code: Some("token_expired".to_string()),
            }
        );
    }
    #[test]
    fn telemetry_error_messages_omit_http_bodies() {
        let transport = TransportError::Http {
            status: StatusCode::UNAUTHORIZED,
            url: Some("https://chatgpt.com/backend-api/codex/responses".to_string()),
            headers: None,
            body: Some(r#"{"error":{"message":"secret token leaked"}}"#.to_string()),
        };
        // Only the status code may surface — never the body.
        assert_eq!(telemetry_transport_error_message(&transport), "http 401");
        assert_eq!(
            telemetry_api_error_message(&ApiError::Transport(transport)),
            "http 401"
        );
    }
    #[test]
    fn telemetry_error_messages_preserve_non_http_details() {
        let network = TransportError::Network("dns lookup failed".to_string());
        let build = TransportError::Build("invalid header value".to_string());
        let stream = ApiError::Stream("socket closed".to_string());
        assert_eq!(
            telemetry_transport_error_message(&network),
            "dns lookup failed"
        );
        assert_eq!(
            telemetry_transport_error_message(&build),
            "invalid header value"
        );
        assert_eq!(telemetry_api_error_message(&stream), "socket closed");
    }
}

View File

@@ -8,6 +8,10 @@ pub mod remote;
pub mod render;
pub mod system;
pub use codex_extensions::skills::SkillError;
pub use codex_extensions::skills::SkillLoadOutcome;
pub use codex_extensions::skills::SkillMetadata;
pub use codex_extensions::skills::SkillPolicy;
pub(crate) use env_var_dependencies::collect_env_var_dependencies;
pub(crate) use env_var_dependencies::resolve_skill_dependencies_for_turn;
pub(crate) use injection::SkillInjections;
@@ -16,8 +20,4 @@ pub(crate) use injection::collect_explicit_skill_mentions;
pub(crate) use invocation_utils::build_implicit_skill_path_indexes;
pub(crate) use invocation_utils::maybe_emit_implicit_skill_invocation;
pub use manager::SkillsManager;
pub use model::SkillError;
pub use model::SkillLoadOutcome;
pub use model::SkillMetadata;
pub use model::SkillPolicy;
pub use render::render_skills_section;

View File

@@ -1,113 +1 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::SkillScope;
use serde::Deserialize;
/// Optional per-skill override of the managed network domain policy;
/// `None` for a list means "no override for that direction".
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq)]
pub struct SkillManagedNetworkOverride {
    pub allowed_domains: Option<Vec<String>>,
    pub denied_domains: Option<Vec<String>>,
}
impl SkillManagedNetworkOverride {
    /// True when either an allow-list or a deny-list was provided, i.e. the
    /// skill asks to override the managed network domain policy.
    pub fn has_domain_overrides(&self) -> bool {
        !(self.allowed_domains.is_none() && self.denied_domains.is_none())
    }
}
/// Parsed metadata for a single skill, as declared by its `SKILLS.md`.
#[derive(Debug, Clone, PartialEq)]
pub struct SkillMetadata {
    pub name: String,
    pub description: String,
    pub short_description: Option<String>,
    pub interface: Option<SkillInterface>,
    pub dependencies: Option<SkillDependencies>,
    pub policy: Option<SkillPolicy>,
    pub permission_profile: Option<PermissionProfile>,
    pub managed_network_override: Option<SkillManagedNetworkOverride>,
    /// Path to the SKILLS.md file that declares this skill.
    pub path_to_skills_md: PathBuf,
    pub scope: SkillScope,
}
impl SkillMetadata {
fn allow_implicit_invocation(&self) -> bool {
self.policy
.as_ref()
.and_then(|policy| policy.allow_implicit_invocation)
.unwrap_or(true)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct SkillPolicy {
pub allow_implicit_invocation: Option<bool>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillInterface {
pub display_name: Option<String>,
pub short_description: Option<String>,
pub icon_small: Option<PathBuf>,
pub icon_large: Option<PathBuf>,
pub brand_color: Option<String>,
pub default_prompt: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
pub tools: Vec<SkillToolDependency>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
pub r#type: String,
pub value: String,
pub description: Option<String>,
pub transport: Option<String>,
pub command: Option<String>,
pub url: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
pub path: PathBuf,
pub message: String,
}
#[derive(Debug, Clone, Default)]
pub struct SkillLoadOutcome {
pub skills: Vec<SkillMetadata>,
pub errors: Vec<SkillError>,
pub disabled_paths: HashSet<PathBuf>,
pub(crate) implicit_skills_by_scripts_dir: Arc<HashMap<PathBuf, SkillMetadata>>,
pub(crate) implicit_skills_by_doc_path: Arc<HashMap<PathBuf, SkillMetadata>>,
}
impl SkillLoadOutcome {
pub fn is_skill_enabled(&self, skill: &SkillMetadata) -> bool {
!self.disabled_paths.contains(&skill.path_to_skills_md)
}
pub fn is_skill_allowed_for_implicit_invocation(&self, skill: &SkillMetadata) -> bool {
self.is_skill_enabled(skill) && skill.allow_implicit_invocation()
}
pub fn allowed_skills_for_implicit_invocation(&self) -> Vec<SkillMetadata> {
self.skills
.iter()
.filter(|skill| self.is_skill_allowed_for_implicit_invocation(skill))
.cloned()
.collect()
}
pub fn skills_with_enabled(&self) -> impl Iterator<Item = (&SkillMetadata, bool)> {
self.skills
.iter()
.map(|skill| (skill, self.is_skill_enabled(skill)))
}
}
pub use codex_extensions::skills::model::*;

View File

@@ -1,48 +1 @@
use crate::skills::model::SkillMetadata;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_OPEN_TAG;
pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
if skills.is_empty() {
return None;
}
let mut lines: Vec<String> = Vec::new();
lines.push("## Skills".to_string());
lines.push("A skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.".to_string());
lines.push("### Available skills".to_string());
for skill in skills {
let path_str = skill.path_to_skills_md.to_string_lossy().replace('\\', "/");
let name = skill.name.as_str();
let description = skill.description.as_str();
lines.push(format!("- {name}: {description} (file: {path_str})"));
}
lines.push("### How to use skills".to_string());
lines.push(
r###"- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.
3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
5) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Coordination and sequencing:
- If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
- Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.
- Context hygiene:
- Keep context small: summarize long sections instead of pasting them; only load extra files when needed.
- Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.
- When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.
- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue."###
.to_string(),
);
let body = lines.join("\n");
Some(format!(
"{SKILLS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{SKILLS_INSTRUCTIONS_CLOSE_TAG}"
))
}
pub use codex_extensions::skills::render_skills_section;

View File

@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "extensions",
crate_name = "codex_extensions",
)

View File

@@ -0,0 +1,21 @@
[package]
name = "codex-extensions"
version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
doctest = false
name = "codex_extensions"
path = "src/lib.rs"
[lints]
workspace = true
[dependencies]
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }

View File

@@ -0,0 +1,2 @@
pub mod plugins;
pub mod skills;

View File

@@ -0,0 +1,328 @@
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use serde_json::Value as JsonValue;
use std::fs;
use std::path::Component;
use std::path::Path;
/// Location of a plugin's manifest file, relative to the plugin root.
pub const PLUGIN_MANIFEST_PATH: &str = ".codex-plugin/plugin.json";
/// Maximum number of prompts kept from `interface.defaultPrompt`.
const MAX_DEFAULT_PROMPT_COUNT: usize = 3;
/// Maximum length (in `char`s) of one default prompt after normalization.
const MAX_DEFAULT_PROMPT_LEN: usize = 128;
/// Raw, deserialized form of `.codex-plugin/plugin.json`.
///
/// Every field is defaulted so a partial manifest still parses; validation
/// and path resolution happen in the accessor functions in this module.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PluginManifest {
    #[serde(default)]
    pub name: String,
    #[serde(default)]
    pub description: Option<String>,
    // The following are `./`-relative paths to capability directories,
    // resolved and validated by `plugin_manifest_paths`.
    #[serde(default)]
    skills: Option<String>,
    #[serde(default)]
    mcp_servers: Option<String>,
    #[serde(default)]
    apps: Option<String>,
    #[serde(default)]
    interface: Option<PluginManifestInterface>,
}
/// Validated, absolute capability directories derived from a manifest.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginManifestPaths {
    pub skills: Option<AbsolutePathBuf>,
    pub mcp_servers: Option<AbsolutePathBuf>,
    pub apps: Option<AbsolutePathBuf>,
}
/// Cleaned-up `interface` section: asset paths resolved to absolute paths,
/// default prompts normalized and length-limited.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginManifestInterfaceSummary {
    pub display_name: Option<String>,
    pub short_description: Option<String>,
    pub long_description: Option<String>,
    pub developer_name: Option<String>,
    pub category: Option<String>,
    pub capabilities: Vec<String>,
    pub website_url: Option<String>,
    pub privacy_policy_url: Option<String>,
    pub terms_of_service_url: Option<String>,
    pub default_prompt: Option<Vec<String>>,
    pub brand_color: Option<String>,
    pub composer_icon: Option<AbsolutePathBuf>,
    pub logo: Option<AbsolutePathBuf>,
    pub screenshots: Vec<AbsolutePathBuf>,
}
/// Raw `interface` section exactly as it appears in JSON (paths still
/// relative strings). The `alias` attributes accept the `…URL` spellings.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PluginManifestInterface {
    #[serde(default)]
    display_name: Option<String>,
    #[serde(default)]
    short_description: Option<String>,
    #[serde(default)]
    long_description: Option<String>,
    #[serde(default)]
    developer_name: Option<String>,
    #[serde(default)]
    category: Option<String>,
    #[serde(default)]
    capabilities: Vec<String>,
    #[serde(default, alias = "websiteURL")]
    website_url: Option<String>,
    #[serde(default, alias = "privacyPolicyURL")]
    privacy_policy_url: Option<String>,
    #[serde(default, alias = "termsOfServiceURL")]
    terms_of_service_url: Option<String>,
    #[serde(default)]
    default_prompt: Option<PluginManifestDefaultPrompt>,
    #[serde(default)]
    brand_color: Option<String>,
    #[serde(default)]
    composer_icon: Option<String>,
    #[serde(default)]
    logo: Option<String>,
    #[serde(default)]
    screenshots: Vec<String>,
}
/// `interface.defaultPrompt` accepts a single string or a list; any other
/// JSON shape lands in `Invalid` so we can warn instead of failing the whole
/// manifest parse.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PluginManifestDefaultPrompt {
    String(String),
    List(Vec<PluginManifestDefaultPromptEntry>),
    Invalid(JsonValue),
}
/// One list entry of `interface.defaultPrompt`; non-strings are kept as
/// `Invalid` for diagnostics.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PluginManifestDefaultPromptEntry {
    String(String),
    Invalid(JsonValue),
}
/// Reads and parses `.codex-plugin/plugin.json` under `plugin_root`.
///
/// Returns `None` when the manifest file is absent, unreadable, or not valid
/// JSON; a parse failure is logged as a warning but never propagated.
pub fn load_plugin_manifest(plugin_root: &Path) -> Option<PluginManifest> {
    let manifest_path = plugin_root.join(PLUGIN_MANIFEST_PATH);
    if !manifest_path.is_file() {
        return None;
    }
    let raw = fs::read_to_string(&manifest_path).ok()?;
    serde_json::from_str(&raw)
        .inspect_err(|err| {
            tracing::warn!(
                path = %manifest_path.display(),
                "failed to parse plugin manifest: {err}"
            );
        })
        .ok()
}
/// Chooses the effective plugin name: the manifest `name` when it is
/// non-blank, otherwise the plugin root directory's file name (when that is
/// valid UTF-8), otherwise the manifest name verbatim.
pub fn plugin_manifest_name(manifest: &PluginManifest, plugin_root: &Path) -> String {
    if manifest.name.trim().is_empty() {
        if let Some(dir_name) = plugin_root.file_name().and_then(|name| name.to_str()) {
            return dir_name.to_string();
        }
    }
    manifest.name.clone()
}
/// Builds the cleaned-up interface summary from a manifest, resolving asset
/// paths against `plugin_root` and normalizing default prompts.
///
/// Returns `None` when the manifest has no `interface` section, or when the
/// section resolves to no usable fields at all.
pub fn plugin_manifest_interface(
    manifest: &PluginManifest,
    plugin_root: &Path,
) -> Option<PluginManifestInterfaceSummary> {
    let interface = manifest.interface.as_ref()?;
    let interface = PluginManifestInterfaceSummary {
        display_name: interface.display_name.clone(),
        short_description: interface.short_description.clone(),
        long_description: interface.long_description.clone(),
        developer_name: interface.developer_name.clone(),
        category: interface.category.clone(),
        capabilities: interface.capabilities.clone(),
        website_url: interface.website_url.clone(),
        privacy_policy_url: interface.privacy_policy_url.clone(),
        terms_of_service_url: interface.terms_of_service_url.clone(),
        default_prompt: resolve_default_prompts(plugin_root, interface.default_prompt.as_ref()),
        brand_color: interface.brand_color.clone(),
        composer_icon: resolve_interface_asset_path(
            plugin_root,
            "interface.composerIcon",
            interface.composer_icon.as_deref(),
        ),
        logo: resolve_interface_asset_path(
            plugin_root,
            "interface.logo",
            interface.logo.as_deref(),
        ),
        // Unresolvable screenshots are dropped individually rather than
        // invalidating the whole list.
        screenshots: interface
            .screenshots
            .iter()
            .filter_map(|screenshot| {
                resolve_interface_asset_path(plugin_root, "interface.screenshots", Some(screenshot))
            })
            .collect(),
    };
    // Treat an interface section that resolved to nothing as absent.
    let has_fields = interface.display_name.is_some()
        || interface.short_description.is_some()
        || interface.long_description.is_some()
        || interface.developer_name.is_some()
        || interface.category.is_some()
        || !interface.capabilities.is_empty()
        || interface.website_url.is_some()
        || interface.privacy_policy_url.is_some()
        || interface.terms_of_service_url.is_some()
        || interface.default_prompt.is_some()
        || interface.brand_color.is_some()
        || interface.composer_icon.is_some()
        || interface.logo.is_some()
        || !interface.screenshots.is_empty();
    has_fields.then_some(interface)
}
/// Resolves the manifest's `skills`, `mcpServers`, and `apps` path fields
/// against `plugin_root`, dropping any entry that is missing or invalid.
pub fn plugin_manifest_paths(manifest: &PluginManifest, plugin_root: &Path) -> PluginManifestPaths {
    let skills = resolve_manifest_path(plugin_root, "skills", manifest.skills.as_deref());
    let mcp_servers =
        resolve_manifest_path(plugin_root, "mcpServers", manifest.mcp_servers.as_deref());
    let apps = resolve_manifest_path(plugin_root, "apps", manifest.apps.as_deref());
    PluginManifestPaths {
        skills,
        mcp_servers,
        apps,
    }
}
/// Resolves an interface asset path (icon, logo, screenshot) against the
/// plugin root. Currently identical to `resolve_manifest_path`; kept as a
/// separate name so asset handling can diverge later without touching
/// callers. NOTE(review): presumably intentional indirection — confirm.
fn resolve_interface_asset_path(
    plugin_root: &Path,
    field: &'static str,
    path: Option<&str>,
) -> Option<AbsolutePathBuf> {
    resolve_manifest_path(plugin_root, field, path)
}
/// Normalizes `interface.defaultPrompt` into at most
/// `MAX_DEFAULT_PROMPT_COUNT` validated prompt strings.
///
/// A single string becomes a one-element list; a list is filtered entry by
/// entry, warning on (and skipping) invalid entries; any other JSON shape is
/// warned about and dropped. Returns `None` when nothing valid survives.
fn resolve_default_prompts(
    plugin_root: &Path,
    value: Option<&PluginManifestDefaultPrompt>,
) -> Option<Vec<String>> {
    match value? {
        PluginManifestDefaultPrompt::String(prompt) => {
            resolve_default_prompt_str(plugin_root, "interface.defaultPrompt", prompt)
                .map(|prompt| vec![prompt])
        }
        PluginManifestDefaultPrompt::List(values) => {
            let mut prompts = Vec::new();
            for (index, item) in values.iter().enumerate() {
                // Warn once when the cap is hit, then stop consuming entries.
                if prompts.len() >= MAX_DEFAULT_PROMPT_COUNT {
                    warn_invalid_default_prompt(
                        plugin_root,
                        "interface.defaultPrompt",
                        &format!("maximum of {MAX_DEFAULT_PROMPT_COUNT} prompts is supported"),
                    );
                    break;
                }
                match item {
                    PluginManifestDefaultPromptEntry::String(prompt) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        if let Some(prompt) =
                            resolve_default_prompt_str(plugin_root, &field, prompt)
                        {
                            prompts.push(prompt);
                        }
                    }
                    PluginManifestDefaultPromptEntry::Invalid(value) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        warn_invalid_default_prompt(
                            plugin_root,
                            &field,
                            &format!("expected a string, found {}", json_value_type(value)),
                        );
                    }
                }
            }
            (!prompts.is_empty()).then_some(prompts)
        }
        PluginManifestDefaultPrompt::Invalid(value) => {
            warn_invalid_default_prompt(
                plugin_root,
                "interface.defaultPrompt",
                &format!(
                    "expected a string or array of strings, found {}",
                    json_value_type(value)
                ),
            );
            None
        }
    }
}
/// Normalizes one default-prompt string: trims it, collapses every internal
/// whitespace run to a single space, and enforces `MAX_DEFAULT_PROMPT_LEN`.
/// Returns `None` (after logging a warning) for empty or over-long prompts.
fn resolve_default_prompt_str(plugin_root: &Path, field: &str, prompt: &str) -> Option<String> {
    let trimmed = prompt.trim();
    if trimmed.is_empty() {
        warn_invalid_default_prompt(plugin_root, field, "must not be empty");
        return None;
    }
    let mut normalized = String::with_capacity(trimmed.len());
    for word in trimmed.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    if normalized.chars().count() > MAX_DEFAULT_PROMPT_LEN {
        warn_invalid_default_prompt(
            plugin_root,
            field,
            &format!("maximum length is {MAX_DEFAULT_PROMPT_LEN} characters"),
        );
        return None;
    }
    Some(normalized)
}
/// Resolves a manifest-declared relative path against the plugin root.
///
/// The value must start with `./` and must not contain parent-dir, root, or
/// prefix components, so the result always stays inside the plugin root.
/// Invalid values are warned about and dropped (`None`). The final
/// `AbsolutePathBuf` conversion can also fail — presumably only when
/// `plugin_root` itself is relative; confirm against `AbsolutePathBuf`.
fn resolve_manifest_path(
    plugin_root: &Path,
    field: &'static str,
    value: Option<&str>,
) -> Option<AbsolutePathBuf> {
    let value = value?;
    if !value.starts_with("./") {
        tracing::warn!(
            path = %plugin_root.display(),
            "ignoring invalid plugin manifest path for {field}; expected ./relative/path"
        );
        return None;
    }
    let relative = Path::new(value);
    // Reject any component that could escape the plugin root.
    if relative.components().any(|component| {
        matches!(
            component,
            Component::ParentDir | Component::RootDir | Component::Prefix(_)
        )
    }) {
        tracing::warn!(
            path = %plugin_root.display(),
            "ignoring invalid plugin manifest path for {field}; path must stay within plugin root"
        );
        return None;
    }
    let resolved = plugin_root.join(relative);
    AbsolutePathBuf::try_from(resolved).ok()
}
/// Logs a warning about an invalid `interface.defaultPrompt` value,
/// identifying the plugin by its root path and the offending field.
fn warn_invalid_default_prompt(plugin_root: &Path, field: &str, reason: &str) {
    tracing::warn!(
        path = %plugin_root.display(),
        "ignoring invalid plugin manifest {field}: {reason}"
    );
}
/// Human-readable JSON type name, used in manifest validation warnings.
fn json_value_type(value: &JsonValue) -> &'static str {
    match value {
        JsonValue::Null => "null",
        JsonValue::Bool(_) => "boolean",
        JsonValue::Number(_) => "number",
        JsonValue::String(_) => "string",
        JsonValue::Array(_) => "array",
        JsonValue::Object(_) => "object",
    }
}

View File

@@ -0,0 +1,25 @@
mod manifest;
mod render;
mod store;
mod types;
pub use manifest::PLUGIN_MANIFEST_PATH;
pub use manifest::PluginManifest;
pub use manifest::PluginManifestInterfaceSummary;
pub use manifest::PluginManifestPaths;
pub use manifest::load_plugin_manifest;
pub use manifest::plugin_manifest_interface;
pub use manifest::plugin_manifest_name;
pub use manifest::plugin_manifest_paths;
pub use render::render_explicit_plugin_instructions;
pub use render::render_plugins_section;
pub use store::DEFAULT_PLUGIN_VERSION;
pub use store::PLUGINS_CACHE_DIR;
pub use store::PluginId;
pub use store::PluginIdError;
pub use store::PluginInstallResult;
pub use store::PluginStore;
pub use store::PluginStoreError;
pub use types::AppConnectorId;
pub use types::PluginCapabilitySummary;
pub use types::PluginTelemetryMetadata;

View File

@@ -0,0 +1,88 @@
use crate::plugins::PluginCapabilitySummary;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_OPEN_TAG;
/// Renders the "## Plugins" prompt section for the given enabled plugins,
/// wrapped in the protocol's plugin-instruction tags.
///
/// Returns `None` when no plugins are enabled so the section is omitted
/// entirely.
pub fn render_plugins_section(plugins: &[PluginCapabilitySummary]) -> Option<String> {
    if plugins.is_empty() {
        return None;
    }
    let mut lines = vec![
        "## Plugins".to_string(),
        "A plugin is a local bundle of skills, MCP servers, and apps. Below is the list of plugins that are enabled and available in this session.".to_string(),
        "### Available plugins".to_string(),
    ];
    // One bullet per plugin; the description is appended only when present.
    lines.extend(
        plugins
            .iter()
            .map(|plugin| match plugin.description.as_deref() {
                Some(description) => format!("- `{}`: {description}", plugin.display_name),
                None => format!("- `{}`", plugin.display_name),
            }),
    );
    lines.push("### How to use plugins".to_string());
    lines.push(
        r###"- Discovery: The list above is the plugins available in this session.
- Skill naming: If a plugin contributes skills, those skill entries are prefixed with `plugin_name:` in the Skills list.
- Trigger rules: If the user explicitly names a plugin, prefer capabilities associated with that plugin for that turn.
- Relationship to capabilities: Plugins are not invoked directly. Use their underlying skills, MCP tools, and app tools to help solve the task.
- Preference: When a relevant plugin is available, prefer using capabilities associated with that plugin over standalone capabilities that provide similar functionality.
- Missing/blocked: If the user requests a plugin that is not listed above, or the plugin does not have relevant callable capabilities for the task, say so briefly and continue with the best fallback."###
            .to_string(),
    );
    let body = lines.join("\n");
    Some(format!(
        "{PLUGINS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{PLUGINS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}
/// Renders the per-turn instructions for a plugin the user explicitly named.
///
/// Returns `None` when the plugin has nothing callable to point at in this
/// session (no skills, and no available MCP servers or apps).
pub fn render_explicit_plugin_instructions(
    plugin: &PluginCapabilitySummary,
    available_mcp_servers: &[String],
    available_apps: &[String],
) -> Option<String> {
    let mut lines = Vec::new();
    lines.push(format!(
        "Capabilities from the `{}` plugin:",
        plugin.display_name
    ));
    if plugin.has_skills {
        lines.push(format!(
            "- Skills from this plugin are prefixed with `{}:`.",
            plugin.display_name
        ));
    }
    // Render a comma-separated, backtick-quoted name list.
    let backtick_list = |names: &[String]| {
        names
            .iter()
            .map(|name| format!("`{name}`"))
            .collect::<Vec<_>>()
            .join(", ")
    };
    if !available_mcp_servers.is_empty() {
        lines.push(format!(
            "- MCP servers from this plugin available in this session: {}.",
            backtick_list(available_mcp_servers)
        ));
    }
    if !available_apps.is_empty() {
        lines.push(format!(
            "- Apps from this plugin available in this session: {}.",
            backtick_list(available_apps)
        ));
    }
    // Only the header line was produced: nothing callable to mention.
    if lines.len() == 1 {
        return None;
    }
    lines.push("Use these plugin-associated capabilities to help solve the task.".to_string());
    Some(lines.join("\n"))
}

View File

@@ -0,0 +1,306 @@
use crate::plugins::PLUGIN_MANIFEST_PATH;
use crate::plugins::load_plugin_manifest;
use crate::plugins::plugin_manifest_name;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
/// Version directory used when a plugin is installed without an explicit
/// version (e.g. a local, unversioned install).
pub const DEFAULT_PLUGIN_VERSION: &str = "local";
/// Plugin cache directory, relative to the Codex home directory.
pub const PLUGINS_CACHE_DIR: &str = "plugins/cache";
/// Validation failure for a plugin identifier segment or key.
#[derive(Debug, thiserror::Error)]
pub enum PluginIdError {
    #[error("{0}")]
    Invalid(String),
}
/// Identity of a plugin: its name plus the marketplace it came from.
/// Serialized as the key `<plugin>@<marketplace>`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginId {
    pub plugin_name: String,
    pub marketplace_name: String,
}
impl PluginId {
    /// Builds an id, validating both segments (non-empty; ASCII
    /// alphanumerics, `-`, `_` only).
    pub fn new(plugin_name: String, marketplace_name: String) -> Result<Self, PluginIdError> {
        validate_plugin_segment(&plugin_name, "plugin name").map_err(PluginIdError::Invalid)?;
        validate_plugin_segment(&marketplace_name, "marketplace name")
            .map_err(PluginIdError::Invalid)?;
        Ok(Self {
            plugin_name,
            marketplace_name,
        })
    }
    /// Parses a `<plugin>@<marketplace>` key. Splits on the *last* `@` so a
    /// plugin name containing `@` would be rejected by segment validation.
    pub fn parse(plugin_key: &str) -> Result<Self, PluginIdError> {
        let Some((plugin_name, marketplace_name)) = plugin_key.rsplit_once('@') else {
            return Err(PluginIdError::Invalid(format!(
                "invalid plugin key `{plugin_key}`; expected <plugin>@<marketplace>"
            )));
        };
        if plugin_name.is_empty() || marketplace_name.is_empty() {
            return Err(PluginIdError::Invalid(format!(
                "invalid plugin key `{plugin_key}`; expected <plugin>@<marketplace>"
            )));
        }
        // Re-wrap segment errors so the message includes the full key.
        Self::new(plugin_name.to_string(), marketplace_name.to_string()).map_err(|err| match err {
            PluginIdError::Invalid(message) => {
                PluginIdError::Invalid(format!("{message} in `{plugin_key}`"))
            }
        })
    }
    /// Formats the id back into its `<plugin>@<marketplace>` key form.
    pub fn as_key(&self) -> String {
        format!("{}@{}", self.plugin_name, self.marketplace_name)
    }
}
/// Outcome of a successful plugin install: the identity, the version that
/// was activated, and the absolute cache path it now lives at.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginInstallResult {
    pub plugin_id: PluginId,
    pub plugin_version: String,
    pub installed_path: AbsolutePathBuf,
}
/// On-disk cache of installed plugins rooted at `<codex_home>/plugins/cache`.
///
/// Layout: `<root>/<marketplace>/<plugin>/<version>/...`. Exactly one version
/// directory is expected per plugin; see `active_plugin_version`.
#[derive(Debug, Clone)]
pub struct PluginStore {
    root: AbsolutePathBuf,
}
impl PluginStore {
    /// Builds a store under `codex_home`.
    ///
    /// Panics when the joined cache path is not absolute — i.e. when
    /// `codex_home` itself is relative; callers must pass an absolute home.
    pub fn new(codex_home: PathBuf) -> Self {
        Self {
            root: AbsolutePathBuf::try_from(codex_home.join(PLUGINS_CACHE_DIR))
                .unwrap_or_else(|err| panic!("plugin cache root should be absolute: {err}")),
        }
    }
    /// Absolute root of the plugin cache.
    pub fn root(&self) -> &AbsolutePathBuf {
        &self.root
    }
    /// `<root>/<marketplace>/<plugin>` — the directory holding a plugin's
    /// version directories.
    pub fn plugin_base_root(&self, plugin_id: &PluginId) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.root
                .as_path()
                .join(&plugin_id.marketplace_name)
                .join(&plugin_id.plugin_name),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// `<root>/<marketplace>/<plugin>/<version>` — one installed version.
    pub fn plugin_root(&self, plugin_id: &PluginId, plugin_version: &str) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.plugin_base_root(plugin_id)
                .as_path()
                .join(plugin_version),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// Returns the installed version, but only when exactly one valid
    /// version directory exists; zero or multiple directories yield `None`
    /// (ambiguous / not installed).
    pub fn active_plugin_version(&self, plugin_id: &PluginId) -> Option<String> {
        let mut discovered_versions = fs::read_dir(self.plugin_base_root(plugin_id).as_path())
            .ok()?
            .filter_map(Result::ok)
            .filter_map(|entry| {
                // Keep only directories whose names are valid UTF-8.
                entry.file_type().ok().filter(std::fs::FileType::is_dir)?;
                entry.file_name().into_string().ok()
            })
            .filter(|version| validate_plugin_segment(version, "plugin version").is_ok())
            .collect::<Vec<_>>();
        discovered_versions.sort_unstable();
        if discovered_versions.len() == 1 {
            discovered_versions.pop()
        } else {
            None
        }
    }
    /// Absolute path of the single active version, when one exists.
    pub fn active_plugin_root(&self, plugin_id: &PluginId) -> Option<AbsolutePathBuf> {
        self.active_plugin_version(plugin_id)
            .map(|plugin_version| self.plugin_root(plugin_id, &plugin_version))
    }
    /// True when exactly one valid version directory is present.
    pub fn is_installed(&self, plugin_id: &PluginId) -> bool {
        self.active_plugin_version(plugin_id).is_some()
    }
    /// Installs `source_path` under the default (`local`) version.
    pub fn install(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        self.install_with_version(source_path, plugin_id, DEFAULT_PLUGIN_VERSION.to_string())
    }
    /// Copies the plugin at `source_path` into the cache as
    /// `<marketplace>/<plugin>/<version>`, replacing any previously
    /// installed versions of this plugin.
    ///
    /// Validates that the source is a directory, that its manifest name
    /// matches `plugin_id.plugin_name`, and that the version segment is
    /// well-formed, before staging and swapping the cache entry.
    pub fn install_with_version(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
        plugin_version: String,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        if !source_path.as_path().is_dir() {
            return Err(PluginStoreError::Invalid(format!(
                "plugin source path is not a directory: {}",
                source_path.display()
            )));
        }
        let plugin_name = plugin_name_for_source(source_path.as_path())?;
        if plugin_name != plugin_id.plugin_name {
            return Err(PluginStoreError::Invalid(format!(
                "plugin manifest name `{plugin_name}` does not match marketplace plugin name `{}`",
                plugin_id.plugin_name
            )));
        }
        validate_plugin_segment(&plugin_version, "plugin version")
            .map_err(PluginStoreError::Invalid)?;
        let installed_path = self.plugin_root(&plugin_id, &plugin_version);
        replace_plugin_root_atomically(
            source_path.as_path(),
            self.plugin_base_root(&plugin_id).as_path(),
            &plugin_version,
        )?;
        Ok(PluginInstallResult {
            plugin_id,
            plugin_version,
            installed_path,
        })
    }
    /// Removes every cached version of the plugin. Missing entries are not
    /// an error.
    pub fn uninstall(&self, plugin_id: &PluginId) -> Result<(), PluginStoreError> {
        remove_existing_target(self.plugin_base_root(plugin_id).as_path())
    }
}
/// Errors from plugin cache operations: filesystem failures (with a static
/// context string) or validation failures.
#[derive(Debug, thiserror::Error)]
pub enum PluginStoreError {
    #[error("{context}: {source}")]
    Io {
        context: &'static str,
        #[source]
        source: io::Error,
    },
    #[error("{0}")]
    Invalid(String),
}
impl PluginStoreError {
    /// Shorthand for wrapping an `io::Error` with its context string.
    fn io(context: &'static str, source: io::Error) -> Self {
        Self::Io { context, source }
    }
}
/// Determines (and validates) the plugin name for a source directory by
/// reading its manifest; falls back to the directory name when the manifest
/// name is blank (see `plugin_manifest_name`).
fn plugin_name_for_source(source_path: &Path) -> Result<String, PluginStoreError> {
    let manifest_path = source_path.join(PLUGIN_MANIFEST_PATH);
    // Checked separately so a missing manifest gets a distinct message from
    // an unreadable/unparsable one.
    if !manifest_path.is_file() {
        return Err(PluginStoreError::Invalid(format!(
            "missing plugin manifest: {}",
            manifest_path.display()
        )));
    }
    let manifest = load_plugin_manifest(source_path).ok_or_else(|| {
        PluginStoreError::Invalid(format!(
            "missing or invalid plugin manifest: {}",
            manifest_path.display()
        ))
    })?;
    let plugin_name = plugin_manifest_name(&manifest, source_path);
    validate_plugin_segment(&plugin_name, "plugin name")
        .map_err(PluginStoreError::Invalid)
        .map(|_| plugin_name)
}
/// Validates one plugin path segment (plugin name, marketplace name, or
/// version): it must be non-empty and contain only ASCII alphanumerics,
/// `-`, and `_`. `kind` names the segment in the error message.
fn validate_plugin_segment(segment: &str, kind: &str) -> Result<(), String> {
    if segment.is_empty() {
        return Err(format!("invalid {kind}: must not be empty"));
    }
    let allowed = |ch: char| ch.is_ascii_alphanumeric() || ch == '-' || ch == '_';
    if segment.chars().any(|ch| !allowed(ch)) {
        return Err(format!(
            "invalid {kind}: only ASCII letters, digits, `_`, and `-` are allowed"
        ));
    }
    Ok(())
}
/// Deletes `path` whether it is a directory tree or a single file; a path
/// that does not exist is treated as success.
fn remove_existing_target(path: &Path) -> Result<(), PluginStoreError> {
    if !path.exists() {
        return Ok(());
    }
    let removal = if path.is_dir() {
        fs::remove_dir_all(path)
    } else {
        fs::remove_file(path)
    };
    removal
        .map_err(|err| PluginStoreError::io("failed to remove existing plugin cache entry", err))
}
/// Replaces the plugin's cache directory with a fresh copy of `source`.
///
/// Stages a full copy into a hidden `.{name}-{version}.next/{version}`
/// sibling directory, then removes the old tree and renames the staging
/// directory into place. The rename itself is atomic, but there is a window
/// between the removal and the rename where the plugin is absent; a crash
/// there leaves it uninstalled rather than corrupted.
fn replace_plugin_root_atomically(
    source: &Path,
    target_root: &Path,
    plugin_version: &str,
) -> Result<(), PluginStoreError> {
    let Some(parent) = target_root.parent() else {
        return Err(PluginStoreError::Invalid(format!(
            "plugin cache path has no parent: {}",
            target_root.display()
        )));
    };
    fs::create_dir_all(parent)
        .map_err(|err| PluginStoreError::io("failed to create plugin cache parent", err))?;
    // Staging directory lives next to the target so the final rename stays
    // on the same filesystem.
    let target = parent.join(format!(
        ".{}-{}.next",
        target_root
            .file_name()
            .and_then(|file_name| file_name.to_str())
            .ok_or_else(|| PluginStoreError::Invalid(format!(
                "plugin cache path has no terminal directory name: {}",
                target_root.display()
            )))?,
        plugin_version
    ));
    // Files are copied under `<staging>/<version>` so the rename produces
    // the expected `<plugin>/<version>/...` layout.
    let staging = target.join(plugin_version);
    remove_existing_target(target.as_path())?;
    copy_dir_recursive(source, staging.as_path())?;
    // Removing the whole target root drops any previously installed
    // versions, preserving the one-active-version invariant.
    remove_existing_target(target_root)?;
    fs::rename(target.as_path(), target_root)
        .map_err(|err| PluginStoreError::io("failed to activate plugin cache entry", err))
}
/// Recursively copies `source` into `target`, creating directories as
/// needed. Only regular files and directories are copied; anything else
/// (e.g. symlinks, which `DirEntry::file_type` does not follow) is silently
/// skipped.
fn copy_dir_recursive(source: &Path, target: &Path) -> Result<(), PluginStoreError> {
    fs::create_dir_all(target)
        .map_err(|err| PluginStoreError::io("failed to create plugin cache directory", err))?;
    for entry in fs::read_dir(source)
        .map_err(|err| PluginStoreError::io("failed to read plugin source directory", err))?
    {
        let entry =
            entry.map_err(|err| PluginStoreError::io("failed to read plugin source entry", err))?;
        let source_path = entry.path();
        let target_path = target.join(entry.file_name());
        let file_type = entry
            .file_type()
            .map_err(|err| PluginStoreError::io("failed to read plugin source file type", err))?;
        if file_type.is_dir() {
            copy_dir_recursive(source_path.as_path(), target_path.as_path())?;
        } else if file_type.is_file() {
            fs::copy(source_path.as_path(), target_path.as_path())
                .map_err(|err| PluginStoreError::io("failed to copy plugin source file", err))?;
        }
    }
    Ok(())
}

View File

@@ -0,0 +1,40 @@
use crate::plugins::PluginId;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AppConnectorId(pub String);
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginCapabilitySummary {
pub config_name: String,
pub display_name: String,
pub description: Option<String>,
pub has_skills: bool,
pub mcp_server_names: Vec<String>,
pub app_connector_ids: Vec<AppConnectorId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginTelemetryMetadata {
pub plugin_id: PluginId,
pub capability_summary: Option<PluginCapabilitySummary>,
}
impl PluginTelemetryMetadata {
    /// Metadata carrying only the identity, with no capability summary.
    pub fn from_plugin_id(plugin_id: &PluginId) -> Self {
        Self {
            plugin_id: plugin_id.clone(),
            capability_summary: None,
        }
    }
}
impl PluginCapabilitySummary {
    /// Builds telemetry metadata from this summary, parsing `config_name`
    /// as a `<plugin>@<marketplace>` key. Returns `None` when the key does
    /// not parse, so malformed config names are silently untracked.
    pub fn telemetry_metadata(&self) -> Option<PluginTelemetryMetadata> {
        PluginId::parse(&self.config_name)
            .ok()
            .map(|plugin_id| PluginTelemetryMetadata {
                plugin_id,
                capability_summary: Some(self.clone()),
            })
    }
}

View File

@@ -0,0 +1,12 @@
pub mod model;
mod render;
pub use model::SkillDependencies;
pub use model::SkillError;
pub use model::SkillInterface;
pub use model::SkillLoadOutcome;
pub use model::SkillManagedNetworkOverride;
pub use model::SkillMetadata;
pub use model::SkillPolicy;
pub use model::SkillToolDependency;
pub use render::render_skills_section;

View File

@@ -0,0 +1,112 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::SkillScope;
use serde::Deserialize;
/// Optional per-skill overrides for the managed-network domain policy.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq)]
pub struct SkillManagedNetworkOverride {
    pub allowed_domains: Option<Vec<String>>,
    pub denied_domains: Option<Vec<String>>,
}
impl SkillManagedNetworkOverride {
    /// True when either domain list is present (even if the list is empty).
    pub fn has_domain_overrides(&self) -> bool {
        self.allowed_domains.is_some() || self.denied_domains.is_some()
    }
}
/// Metadata describing one discovered skill.
#[derive(Debug, Clone, PartialEq)]
pub struct SkillMetadata {
    pub name: String,
    pub description: String,
    pub short_description: Option<String>,
    pub interface: Option<SkillInterface>,
    pub dependencies: Option<SkillDependencies>,
    pub policy: Option<SkillPolicy>,
    pub permission_profile: Option<PermissionProfile>,
    pub managed_network_override: Option<SkillManagedNetworkOverride>,
    /// Path to the SKILLS.md file that declares this skill.
    pub path_to_skills_md: PathBuf,
    pub scope: SkillScope,
}
impl SkillMetadata {
    /// Implicit invocation is allowed by default; it is disabled only when
    /// the skill's policy explicitly sets `allow_implicit_invocation` to
    /// `false`.
    fn allow_implicit_invocation(&self) -> bool {
        self.policy
            .as_ref()
            .and_then(|policy| policy.allow_implicit_invocation)
            .unwrap_or(true)
    }
}
/// Per-skill policy knobs; `None` means "use the default".
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct SkillPolicy {
    pub allow_implicit_invocation: Option<bool>,
}
/// Presentation metadata for a skill (names, icons, branding).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillInterface {
    pub display_name: Option<String>,
    pub short_description: Option<String>,
    pub icon_small: Option<PathBuf>,
    pub icon_large: Option<PathBuf>,
    pub brand_color: Option<String>,
    pub default_prompt: Option<String>,
}
/// Tools a skill declares it depends on.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}
/// One declared tool dependency; `r#type` and `value` identify it, the
/// remaining fields are optional transport/launch details.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
    pub r#type: String,
    pub value: String,
    pub description: Option<String>,
    pub transport: Option<String>,
    pub command: Option<String>,
    pub url: Option<String>,
}
/// Failure loading a skill, tied to the file that caused it.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
    pub path: PathBuf,
    pub message: String,
}
/// Result of scanning for skills: the loaded metadata, per-file errors, the
/// set of skills the user disabled, and indexes for implicit invocation.
#[derive(Debug, Clone, Default)]
pub struct SkillLoadOutcome {
    pub skills: Vec<SkillMetadata>,
    pub errors: Vec<SkillError>,
    // Keyed by `path_to_skills_md`; membership means "disabled".
    pub disabled_paths: HashSet<PathBuf>,
    // Lookup indexes for implicit skill invocation, shared via `Arc` so
    // clones of the outcome stay cheap.
    pub implicit_skills_by_scripts_dir: Arc<HashMap<PathBuf, SkillMetadata>>,
    pub implicit_skills_by_doc_path: Arc<HashMap<PathBuf, SkillMetadata>>,
}
impl SkillLoadOutcome {
    /// A skill is enabled unless its declaring file is in `disabled_paths`.
    pub fn is_skill_enabled(&self, skill: &SkillMetadata) -> bool {
        !self.disabled_paths.contains(&skill.path_to_skills_md)
    }
    /// Enabled AND not opted out of implicit invocation by its policy.
    pub fn is_skill_allowed_for_implicit_invocation(&self, skill: &SkillMetadata) -> bool {
        self.is_skill_enabled(skill) && skill.allow_implicit_invocation()
    }
    /// Clones of all skills eligible for implicit invocation.
    pub fn allowed_skills_for_implicit_invocation(&self) -> Vec<SkillMetadata> {
        self.skills
            .iter()
            .filter(|skill| self.is_skill_allowed_for_implicit_invocation(skill))
            .cloned()
            .collect()
    }
    /// Every skill paired with its enabled flag, in load order.
    pub fn skills_with_enabled(&self) -> impl Iterator<Item = (&SkillMetadata, bool)> {
        self.skills
            .iter()
            .map(|skill| (skill, self.is_skill_enabled(skill)))
    }
}

View File

@@ -0,0 +1,48 @@
use crate::skills::model::SkillMetadata;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_OPEN_TAG;
/// Render the `## Skills` instructions section for the system prompt, wrapped
/// in the protocol's skills-instructions tags. Returns `None` when there are
/// no skills to list.
pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
    if skills.is_empty() {
        return None;
    }
    // Section header and preamble, then one bullet per skill.
    let mut sections: Vec<String> = vec![
        "## Skills".to_string(),
        "A skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.".to_string(),
        "### Available skills".to_string(),
    ];
    sections.extend(skills.iter().map(|skill| {
        // Normalize Windows separators so paths render uniformly.
        let path_str = skill.path_to_skills_md.to_string_lossy().replace('\\', "/");
        format!(
            "- {name}: {description} (file: {path_str})",
            name = skill.name.as_str(),
            description = skill.description.as_str()
        )
    }));
    sections.push("### How to use skills".to_string());
    sections.push(
        r###"- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
  1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
  2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.
  3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
  4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
  5) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Coordination and sequencing:
  - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
  - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.
- Context hygiene:
  - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.
  - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.
  - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.
- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue."###
            .to_string(),
    );
    let body = sections.join("\n");
    Some(format!(
        "{SKILLS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{SKILLS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}

View File

@@ -1,9 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")
# Bazel target for the codex-models crate.
codex_rust_crate(
    name = "models",
    crate_name = "codex_models",
    # Make templates and the shared models.json available at compile time
    # (presumably consumed via include_str!/include_bytes! — confirm).
    compile_data = glob(["templates/**"]) + [
        "//codex-rs/core:models.json",
    ],
)

View File

@@ -1,31 +0,0 @@
# Manifest for the codex-models crate (model-provider registry and presets).
[package]
name = "codex-models"
version.workspace = true
edition.workspace = true
license.workspace = true
# Library-only crate; doctests are disabled.
[lib]
doctest = false
name = "codex_models"
path = "src/lib.rs"
# Inherit the workspace-wide lint configuration.
[lints]
workspace = true
# All dependency versions are pinned at the workspace level.
[dependencies]
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-api = { workspace = true }
codex-protocol = { workspace = true }
http = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["fs", "sync", "time"] }
tracing = { workspace = true }
[dev-dependencies]
maplit = { workspace = true }
pretty_assertions = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }

View File

@@ -1,18 +0,0 @@
pub mod model_provider_info;
pub mod models_manager;
pub mod response_debug_context;
pub use model_provider_info::DEFAULT_LMSTUDIO_PORT;
pub use model_provider_info::DEFAULT_OLLAMA_PORT;
pub use model_provider_info::DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS;
pub use model_provider_info::EnvVarError;
pub use model_provider_info::LEGACY_OLLAMA_CHAT_PROVIDER_ID;
pub use model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;
pub use model_provider_info::ModelProviderInfo;
pub use model_provider_info::OLLAMA_CHAT_PROVIDER_REMOVED_ERROR;
pub use model_provider_info::OLLAMA_OSS_PROVIDER_ID;
pub use model_provider_info::OPENAI_PROVIDER_ID;
pub use model_provider_info::WireApi;
pub use model_provider_info::built_in_model_providers;
pub use model_provider_info::create_oss_provider;
pub use model_provider_info::create_oss_provider_with_base_url;

View File

@@ -1,329 +0,0 @@
//! Registry of model providers supported by Codex.
//!
//! Providers can be defined in two places:
//! 1. Built-in defaults compiled into the binary so Codex works out-of-the-box.
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
//! key. These override or extend the defaults at runtime.
use codex_api::Provider as ApiProvider;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
use std::time::Duration;
/// Idle timeout for streaming responses when not configured (5 minutes).
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
/// Default number of stream reconnect attempts.
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
/// Default number of HTTP request retries.
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
/// Default cap on how long a websocket connect attempt may take.
pub const DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS: u64 = 15_000;
/// Upper bound applied to user-configured stream retry counts.
const MAX_STREAM_MAX_RETRIES: u64 = 100;
/// Upper bound applied to user-configured request retry counts.
const MAX_REQUEST_MAX_RETRIES: u64 = 100;
/// Display name of the built-in OpenAI provider (also used by `is_openai`).
const OPENAI_PROVIDER_NAME: &str = "OpenAI";
/// Stable id of the built-in OpenAI provider.
pub const OPENAI_PROVIDER_ID: &str = "openai";
/// Error surfaced when a config still uses the removed `chat` wire API.
const CHAT_WIRE_API_REMOVED_ERROR: &str = "`wire_api = \"chat\"` is no longer supported.\nHow to fix: set `wire_api = \"responses\"` in your provider config.\nMore info: https://github.com/openai/codex/discussions/7782";
/// Legacy id of the removed `ollama-chat` provider, kept for error reporting.
pub const LEGACY_OLLAMA_CHAT_PROVIDER_ID: &str = "ollama-chat";
/// Error surfaced when a config still references the removed `ollama-chat` provider.
pub const OLLAMA_CHAT_PROVIDER_REMOVED_ERROR: &str = "`ollama-chat` is no longer supported.\nHow to fix: replace `ollama-chat` with `ollama` in `model_provider`, `oss_provider`, or `--local-provider`.\nMore info: https://github.com/openai/codex/discussions/7782";
/// Raised when a provider's configured `env_key` variable is unset or blank.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnvVarError {
    /// Name of the missing environment variable.
    pub var: String,
    /// Optional provider-supplied guidance on how to set the variable.
    pub instructions: Option<String>,
}
impl fmt::Display for EnvVarError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Append the instructions (when present) after the base message,
        // separated by a single space.
        match &self.instructions {
            Some(instructions) => write!(
                f,
                "Missing environment variable: `{}`. {instructions}",
                self.var
            ),
            None => write!(f, "Missing environment variable: `{}`.", self.var),
        }
    }
}
impl Error for EnvVarError {}
/// Wire protocol that the provider speaks.
///
/// `chat` was removed; only the Responses API remains. The custom
/// `Deserialize` impl below reports a migration error for `"chat"`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum WireApi {
    /// The Responses API exposed by OpenAI at `/v1/responses`.
    #[default]
    Responses,
}
impl fmt::Display for WireApi {
    /// Render the lowercase wire-protocol name, matching the serde encoding.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Responses => f.write_str("responses"),
        }
    }
}
impl<'de> Deserialize<'de> for WireApi {
    /// Manual deserializer so the removed `"chat"` value yields a
    /// migration-specific error instead of a generic unknown-variant error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        match value.as_str() {
            "responses" => Ok(Self::Responses),
            // `chat` is intentionally rejected with upgrade instructions.
            "chat" => Err(serde::de::Error::custom(CHAT_WIRE_API_REMOVED_ERROR)),
            _ => Err(serde::de::Error::unknown_variant(&value, &["responses"])),
        }
    }
}
/// Serializable representation of a provider definition.
///
/// Providers come from the built-in defaults or from user configuration;
/// `None` fields fall back to the defaults applied by the accessor methods
/// on this type (e.g. `request_max_retries`, `stream_idle_timeout`).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ModelProviderInfo {
    /// Friendly display name.
    pub name: String,
    /// Base URL for the provider's OpenAI-compatible API.
    pub base_url: Option<String>,
    /// Environment variable that stores the user's API key for this provider.
    pub env_key: Option<String>,
    /// Optional instructions to help the user get a valid value for the variable and set it.
    pub env_key_instructions: Option<String>,
    /// Value to use with `Authorization: Bearer <token>`.
    pub experimental_bearer_token: Option<String>,
    /// Which wire protocol this provider expects.
    #[serde(default)]
    pub wire_api: WireApi,
    /// Optional query parameters to append to the base URL.
    pub query_params: Option<HashMap<String, String>>,
    /// Additional HTTP headers to include in requests to this provider.
    pub http_headers: Option<HashMap<String, String>>,
    /// Additional HTTP headers whose values come from environment variables.
    pub env_http_headers: Option<HashMap<String, String>>,
    /// Maximum number of times to retry a failed HTTP request to this provider.
    pub request_max_retries: Option<u64>,
    /// Number of times to retry reconnecting a dropped streaming response before failing.
    pub stream_max_retries: Option<u64>,
    /// Idle timeout for streaming responses.
    pub stream_idle_timeout_ms: Option<u64>,
    /// Maximum time to wait for a websocket connection attempt.
    pub websocket_connect_timeout_ms: Option<u64>,
    /// Whether the provider requires OpenAI auth.
    #[serde(default)]
    pub requires_openai_auth: bool,
    /// Whether the provider supports websocket transport.
    #[serde(default)]
    pub supports_websockets: bool,
}
impl ModelProviderInfo {
    /// Collect the static `http_headers` plus any `env_http_headers` whose
    /// environment variable is set to a non-blank value. Entries that fail
    /// header-name/value validation are silently skipped.
    fn build_header_map(&self) -> HeaderMap {
        let capacity = self.http_headers.as_ref().map_or(0, HashMap::len)
            + self.env_http_headers.as_ref().map_or(0, HashMap::len);
        let mut headers = HeaderMap::with_capacity(capacity);
        if let Some(extra) = &self.http_headers {
            for (k, v) in extra {
                if let (Ok(name), Ok(value)) = (HeaderName::try_from(k), HeaderValue::try_from(v)) {
                    headers.insert(name, value);
                }
            }
        }
        if let Some(env_headers) = &self.env_http_headers {
            for (header, env_var) in env_headers {
                if let Ok(val) = std::env::var(env_var)
                    && !val.trim().is_empty()
                    && let (Ok(name), Ok(value)) =
                        (HeaderName::try_from(header), HeaderValue::try_from(val))
                {
                    headers.insert(name, value);
                }
            }
        }
        headers
    }
    /// Convert this configuration into the `codex-api` provider value,
    /// substituting the default OpenAI (or ChatGPT-backend) base URL when
    /// none is configured.
    pub fn to_api_provider(&self, use_chatgpt_base_url: bool) -> ApiProvider {
        let default_base_url = if use_chatgpt_base_url {
            "https://chatgpt.com/backend-api/codex"
        } else {
            "https://api.openai.com/v1"
        };
        let base_url = self
            .base_url
            .clone()
            .unwrap_or_else(|| default_base_url.to_string());
        // 429s are not retried at this layer; 5xx and transport failures are.
        let retry = ApiRetryConfig {
            max_attempts: self.request_max_retries(),
            base_delay: Duration::from_millis(200),
            retry_429: false,
            retry_5xx: true,
            retry_transport: true,
        };
        ApiProvider {
            name: self.name.clone(),
            base_url,
            query_params: self.query_params.clone(),
            headers: self.build_header_map(),
            retry,
            stream_idle_timeout: self.stream_idle_timeout(),
        }
    }
    /// If `env_key` is Some, returns the API key for this provider if present
    /// (and non-empty) in the environment.
    ///
    /// # Errors
    /// Returns [`EnvVarError`] when `env_key` is configured but the variable
    /// is unset or blank.
    pub fn api_key(&self) -> Result<Option<String>, EnvVarError> {
        match &self.env_key {
            Some(env_key) => {
                let api_key = std::env::var(env_key)
                    .ok()
                    .filter(|v| !v.trim().is_empty())
                    .ok_or_else(|| EnvVarError {
                        var: env_key.clone(),
                        instructions: self.env_key_instructions.clone(),
                    })?;
                Ok(Some(api_key))
            }
            None => Ok(None),
        }
    }
    /// Configured request retry count, defaulted and clamped to the cap.
    pub fn request_max_retries(&self) -> u64 {
        self.request_max_retries
            .unwrap_or(DEFAULT_REQUEST_MAX_RETRIES)
            .min(MAX_REQUEST_MAX_RETRIES)
    }
    /// Configured stream reconnect count, defaulted and clamped to the cap.
    pub fn stream_max_retries(&self) -> u64 {
        self.stream_max_retries
            .unwrap_or(DEFAULT_STREAM_MAX_RETRIES)
            .min(MAX_STREAM_MAX_RETRIES)
    }
    /// Idle timeout for streaming responses (default: 5 minutes).
    pub fn stream_idle_timeout(&self) -> Duration {
        self.stream_idle_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
    }
    /// Maximum time to wait for a websocket connection attempt.
    pub fn websocket_connect_timeout(&self) -> Duration {
        self.websocket_connect_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS))
    }
    /// Built-in OpenAI provider definition: requires OpenAI auth, supports
    /// websockets, and sends the CLI version plus org/project headers.
    pub fn create_openai_provider(base_url: Option<String>) -> ModelProviderInfo {
        ModelProviderInfo {
            name: OPENAI_PROVIDER_NAME.into(),
            base_url,
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            // Advertise the client version on every request.
            http_headers: Some(
                [("version".to_string(), env!("CARGO_PKG_VERSION").to_string())]
                    .into_iter()
                    .collect(),
            ),
            // Org/project headers are only sent when the env vars are set.
            env_http_headers: Some(
                [
                    (
                        "OpenAI-Organization".to_string(),
                        "OPENAI_ORGANIZATION".to_string(),
                    ),
                    ("OpenAI-Project".to_string(), "OPENAI_PROJECT".to_string()),
                ]
                .into_iter()
                .collect(),
            ),
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            websocket_connect_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }
    /// Whether this is the built-in OpenAI provider (matched by display name).
    pub fn is_openai(&self) -> bool {
        self.name == OPENAI_PROVIDER_NAME
    }
}
/// Default port of a local LM Studio server.
pub const DEFAULT_LMSTUDIO_PORT: u16 = 1234;
/// Default port of a local Ollama server.
pub const DEFAULT_OLLAMA_PORT: u16 = 11434;
/// Provider id for a local LM Studio instance.
pub const LMSTUDIO_OSS_PROVIDER_ID: &str = "lmstudio";
/// Provider id for a local Ollama instance.
pub const OLLAMA_OSS_PROVIDER_ID: &str = "ollama";
/// Built-in default provider list.
pub fn built_in_model_providers(
openai_base_url: Option<String>,
) -> HashMap<String, ModelProviderInfo> {
use ModelProviderInfo as P;
let openai_provider = P::create_openai_provider(openai_base_url);
[
(OPENAI_PROVIDER_ID, openai_provider),
(
OLLAMA_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_OLLAMA_PORT, WireApi::Responses),
),
(
LMSTUDIO_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_LMSTUDIO_PORT, WireApi::Responses),
),
]
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect()
}
/// Build the local OSS provider, honoring the `CODEX_OSS_BASE_URL` and
/// `CODEX_OSS_PORT` environment overrides; falls back to
/// `http://localhost:<default_provider_port>/v1` otherwise.
pub fn create_oss_provider(default_provider_port: u16, wire_api: WireApi) -> ModelProviderInfo {
    // Port override: non-blank, parseable CODEX_OSS_PORT wins.
    let port = std::env::var("CODEX_OSS_PORT")
        .ok()
        .filter(|value| !value.trim().is_empty())
        .and_then(|value| value.parse::<u16>().ok())
        .unwrap_or(default_provider_port);
    // Full base-URL override: non-blank CODEX_OSS_BASE_URL wins over the port.
    let base_url = match std::env::var("CODEX_OSS_BASE_URL") {
        Ok(value) if !value.trim().is_empty() => value,
        _ => format!("http://localhost:{port}/v1"),
    };
    create_oss_provider_with_base_url(&base_url, wire_api)
}
/// Build the `gpt-oss` provider definition for a local OpenAI-compatible
/// server at `base_url`. No auth is required and websockets are disabled.
pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> ModelProviderInfo {
    ModelProviderInfo {
        name: "gpt-oss".into(),
        base_url: Some(base_url.into()),
        wire_api,
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: None,
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        websocket_connect_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}
#[cfg(test)]
#[path = "model_provider_info_tests.rs"]
mod tests;

View File

@@ -1,13 +0,0 @@
pub mod cache;
pub mod collaboration_mode_presets;
pub mod model_presets;
/// Convert the client version string to a whole version string.
///
/// Joins the crate's major, minor, and patch components (taken from the
/// Cargo build environment) as `MAJOR.MINOR.PATCH`.
pub fn client_version_to_whole() -> String {
    let major = env!("CARGO_PKG_VERSION_MAJOR");
    let minor = env!("CARGO_PKG_VERSION_MINOR");
    let patch = env!("CARGO_PKG_VERSION_PATCH");
    format!("{major}.{minor}.{patch}")
}

View File

@@ -1,165 +0,0 @@
use base64::Engine;
use codex_api::TransportError;
use codex_api::error::ApiError;
/// Header carrying the request id.
const REQUEST_ID_HEADER: &str = "x-request-id";
/// OpenAI-specific request-id header, used as a fallback.
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
/// Cloudflare ray id header.
const CF_RAY_HEADER: &str = "cf-ray";
/// Header describing an authorization failure, when present.
const AUTH_ERROR_HEADER: &str = "x-openai-authorization-error";
/// Header carrying a base64-encoded JSON error payload.
const X_ERROR_JSON_HEADER: &str = "x-error-json";
/// Identifiers extracted from an error response's headers, used to correlate
/// client-side failures with server-side logs.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct ResponseDebugContext {
    /// Request id from `x-request-id`, falling back to `x-oai-request-id`.
    pub request_id: Option<String>,
    /// Cloudflare ray id from the `cf-ray` header.
    pub cf_ray: Option<String>,
    /// Value of the `x-openai-authorization-error` header, if present.
    pub auth_error: Option<String>,
    /// `error.code` decoded from the base64 `x-error-json` header.
    pub auth_error_code: Option<String>,
}
pub fn extract_response_debug_context(transport: &TransportError) -> ResponseDebugContext {
let mut context = ResponseDebugContext::default();
let TransportError::Http {
headers, body: _, ..
} = transport
else {
return context;
};
let extract_header = |name: &str| {
headers
.as_ref()
.and_then(|headers| headers.get(name))
.and_then(|value| value.to_str().ok())
.map(str::to_string)
};
context.request_id =
extract_header(REQUEST_ID_HEADER).or_else(|| extract_header(OAI_REQUEST_ID_HEADER));
context.cf_ray = extract_header(CF_RAY_HEADER);
context.auth_error = extract_header(AUTH_ERROR_HEADER);
context.auth_error_code = extract_header(X_ERROR_JSON_HEADER).and_then(|encoded| {
let decoded = base64::engine::general_purpose::STANDARD
.decode(encoded)
.ok()?;
let parsed = serde_json::from_slice::<serde_json::Value>(&decoded).ok()?;
parsed
.get("error")
.and_then(|error| error.get("code"))
.and_then(serde_json::Value::as_str)
.map(str::to_string)
});
context
}
pub fn extract_response_debug_context_from_api_error(error: &ApiError) -> ResponseDebugContext {
match error {
ApiError::Transport(transport) => extract_response_debug_context(transport),
_ => ResponseDebugContext::default(),
}
}
/// Telemetry-safe description of a transport error. HTTP failures are reduced
/// to their status code so response bodies never reach telemetry.
pub fn telemetry_transport_error_message(error: &TransportError) -> String {
    match error {
        TransportError::Http { status, .. } => {
            let code = status.as_u16();
            format!("http {code}")
        }
        TransportError::RetryLimit => String::from("retry limit reached"),
        TransportError::Timeout => String::from("timeout"),
        TransportError::Network(err) => err.to_string(),
        TransportError::Build(err) => err.to_string(),
    }
}
pub fn telemetry_api_error_message(error: &ApiError) -> String {
match error {
ApiError::Transport(transport) => telemetry_transport_error_message(transport),
ApiError::Api { status, .. } => format!("api error {}", status.as_u16()),
ApiError::Stream(err) => err.to_string(),
ApiError::ContextWindowExceeded => "context window exceeded".to_string(),
ApiError::QuotaExceeded => "quota exceeded".to_string(),
ApiError::UsageNotIncluded => "usage not included".to_string(),
ApiError::Retryable { .. } => "retryable error".to_string(),
ApiError::RateLimit(_) => "rate limit".to_string(),
ApiError::InvalidRequest { .. } => "invalid request".to_string(),
ApiError::ServerOverloaded => "server overloaded".to_string(),
}
}
#[cfg(test)]
mod tests {
    use super::ResponseDebugContext;
    use super::extract_response_debug_context;
    use super::telemetry_api_error_message;
    use super::telemetry_transport_error_message;
    use codex_api::TransportError;
    use codex_api::error::ApiError;
    use http::HeaderMap;
    use http::HeaderValue;
    use http::StatusCode;
    use pretty_assertions::assert_eq;
    // All identity headers — including the base64 `x-error-json` payload —
    // should be decoded into the debug context.
    #[test]
    fn extract_response_debug_context_decodes_identity_headers() {
        let mut headers = HeaderMap::new();
        headers.insert("x-oai-request-id", HeaderValue::from_static("req-auth"));
        headers.insert("cf-ray", HeaderValue::from_static("ray-auth"));
        headers.insert(
            "x-openai-authorization-error",
            HeaderValue::from_static("missing_authorization_header"),
        );
        // base64 of {"error":{"code":"token_expired"}}
        headers.insert(
            "x-error-json",
            HeaderValue::from_static("eyJlcnJvciI6eyJjb2RlIjoidG9rZW5fZXhwaXJlZCJ9fQ=="),
        );
        let context = extract_response_debug_context(&TransportError::Http {
            status: StatusCode::UNAUTHORIZED,
            url: Some("https://chatgpt.com/backend-api/codex/models".to_string()),
            headers: Some(headers),
            body: Some(r#"{"error":{"message":"plain text error"},"status":401}"#.to_string()),
        });
        assert_eq!(
            context,
            ResponseDebugContext {
                request_id: Some("req-auth".to_string()),
                cf_ray: Some("ray-auth".to_string()),
                auth_error: Some("missing_authorization_header".to_string()),
                auth_error_code: Some("token_expired".to_string()),
            }
        );
    }
    // HTTP errors must be reduced to their status code so response bodies
    // (which may contain secrets) never reach telemetry.
    #[test]
    fn telemetry_error_messages_omit_http_bodies() {
        let transport = TransportError::Http {
            status: StatusCode::UNAUTHORIZED,
            url: Some("https://chatgpt.com/backend-api/codex/responses".to_string()),
            headers: None,
            body: Some(r#"{"error":{"message":"secret token leaked"}}"#.to_string()),
        };
        assert_eq!(telemetry_transport_error_message(&transport), "http 401");
        assert_eq!(
            telemetry_api_error_message(&ApiError::Transport(transport)),
            "http 401"
        );
    }
    // Non-HTTP failures carry no response body and keep their descriptive text.
    #[test]
    fn telemetry_error_messages_preserve_non_http_details() {
        let network = TransportError::Network("dns lookup failed".to_string());
        let build = TransportError::Build("invalid header value".to_string());
        let stream = ApiError::Stream("socket closed".to_string());
        assert_eq!(
            telemetry_transport_error_message(&network),
            "dns lookup failed"
        );
        assert_eq!(
            telemetry_transport_error_message(&build),
            "invalid header value"
        );
        assert_eq!(telemetry_api_error_message(&stream), "socket closed");
    }
}

View File

@@ -1,11 +0,0 @@
# Collaboration Mode: Default
You are now in Default mode. Any previous instructions for other modes (e.g. Plan mode) are no longer active.
Your active mode changes only when new developer instructions with a different `<collaboration_mode>...</collaboration_mode>` change it; user requests or tool descriptions do not change mode by themselves. Known mode names are {{KNOWN_MODE_NAMES}}.
## request_user_input availability
{{REQUEST_USER_INPUT_AVAILABILITY}}
{{ASKING_QUESTIONS_GUIDANCE}}

View File

@@ -1,128 +0,0 @@
# Plan Mode (Conversational)
You work in 3 phases, and you should *chat your way* to a great plan before finalizing it. A great plan is very detailed—intent- and implementation-wise—so that it can be handed to another engineer or agent to be implemented right away. It must be **decision complete**, where the implementer does not need to make any decisions.
## Mode rules (strict)
You are in **Plan Mode** until a developer message explicitly ends it.
Plan Mode is not changed by user intent, tone, or imperative language. If a user asks for execution while still in Plan Mode, treat it as a request to **plan the execution**, not perform it.
## Plan Mode vs update_plan tool
Plan Mode is a collaboration mode that can involve requesting user input and eventually issuing a `<proposed_plan>` block.
Separately, `update_plan` is a checklist/progress/TODOs tool; it does not enter or exit Plan Mode. Do not confuse it with Plan mode or try to use it while in Plan mode. If you try to use `update_plan` in Plan mode, it will return an error.
## Execution vs. mutation in Plan Mode
You may explore and execute **non-mutating** actions that improve the plan. You must not perform **mutating** actions.
### Allowed (non-mutating, plan-improving)
Actions that gather truth, reduce ambiguity, or validate feasibility without changing repo-tracked state. Examples:
* Reading or searching files, configs, schemas, types, manifests, and docs
* Static analysis, inspection, and repo exploration
* Dry-run style commands when they do not edit repo-tracked files
* Tests, builds, or checks that may write to caches or build artifacts (for example, `target/`, `.cache/`, or snapshots) so long as they do not edit repo-tracked files
### Not allowed (mutating, plan-executing)
Actions that implement the plan or change repo-tracked state. Examples:
* Editing or writing files
* Running formatters or linters that rewrite files
* Applying patches, migrations, or codegen that updates repo-tracked files
* Side-effectful commands whose purpose is to carry out the plan rather than refine it
When in doubt: if the action would reasonably be described as "doing the work" rather than "planning the work," do not do it.
## PHASE 1 — Ground in the environment (explore first, ask second)
Begin by grounding yourself in the actual environment. Eliminate unknowns in the prompt by discovering facts, not by asking the user. Resolve all questions that can be answered through exploration or inspection. Identify missing or ambiguous details only if they cannot be derived from the environment. Silent exploration between turns is allowed and encouraged.
Before asking the user any question, perform at least one targeted non-mutating exploration pass (for example: search relevant files, inspect likely entrypoints/configs, confirm current implementation shape), unless no local environment/repo is available.
Exception: you may ask clarifying questions about the user's prompt before exploring, ONLY if there are obvious ambiguities or contradictions in the prompt itself. However, if ambiguity might be resolved by exploring, always prefer exploring first.
Do not ask questions that can be answered from the repo or system (for example, "where is this struct?" or "which UI component should we use?" when exploration can make it clear). Only ask once you have exhausted reasonable non-mutating exploration.
## PHASE 2 — Intent chat (what they actually want)
* Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs.
* Bias toward questions over guessing: if any high-impact ambiguity remains, do NOT plan yet—ask.
## PHASE 3 — Implementation chat (what/how we'll build)
* Once intent is stable, keep asking until the spec is decision complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints.
## Asking questions
Critical rules:
* Strongly prefer using the `request_user_input` tool to ask any questions.
* Offer only meaningful multiple-choice options; don't include filler choices that are obviously wrong or irrelevant.
* In rare cases where an unavoidable, important question can't be expressed with reasonable multiple-choice options (due to extreme ambiguity), you may ask it directly without the tool.
You SHOULD ask many questions, but each question must:
* materially change the spec/plan, OR
* confirm/lock an assumption, OR
* choose between meaningful tradeoffs.
* not be answerable by non-mutating commands.
Use the `request_user_input` tool only for decisions that materially change the plan, for confirming important assumptions, or for information that cannot be discovered via non-mutating exploration.
## Two kinds of unknowns (treat differently)
1. **Discoverable facts** (repo/system truth): explore first.
* Before asking, run targeted searches and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants).
* Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent.
* If asking, present concrete candidates (paths/service names) + recommend one.
* Never ask questions you can answer from your environment (e.g., “where is this struct”).
2. **Preferences/tradeoffs** (not discoverable): ask early.
* These are intent or implementation preferences that cannot be derived from exploration.
* Provide 2–4 mutually exclusive options + a recommended default.
* If unanswered, proceed with the recommended option and record it as an assumption in the final plan.
## Finalization rule
Only output the final plan when it is decision complete and leaves no decisions to the implementer.
When you present the official plan, wrap it in a `<proposed_plan>` block so the client can render it specially:
1) The opening tag must be on its own line.
2) Start the plan content on the next line (no text on the same line as the tag).
3) The closing tag must be on its own line.
4) Use Markdown inside the block.
5) Keep the tags exactly as `<proposed_plan>` and `</proposed_plan>` (do not translate or rename them), even if the plan content is in another language.
Example:
<proposed_plan>
plan content
</proposed_plan>
plan content should be human and agent digestible. The final plan must be plan-only, concise by default, and include:
* A clear title
* A brief summary section
* Important changes or additions to public APIs/interfaces/types
* Test cases and scenarios
* Explicit assumptions and defaults chosen where needed
When possible, prefer a compact structure with 3-5 short sections, usually: Summary, Key Changes or Implementation Changes, Test Plan, and Assumptions. Do not include a separate Scope section unless scope boundaries are genuinely important to avoid mistakes.
Prefer grouped implementation bullets by subsystem or behavior over file-by-file inventories. Mention files only when needed to disambiguate a non-obvious change, and avoid naming more than 3 paths unless extra specificity is necessary to prevent mistakes. Prefer behavior-level descriptions over symbol-by-symbol removal lists. For v1 feature-addition plans, do not invent detailed schema, validation, precedence, fallback, or wire-shape policy unless the request establishes it or it is needed to prevent a concrete implementation mistake; prefer the intended capability and minimum interface/behavior changes.
Keep bullets short and avoid explanatory sub-bullets unless they are needed to prevent ambiguity. Prefer the minimum detail needed for implementation safety, not exhaustive coverage. Within each section, compress related changes into a few high-signal bullets and omit branch-by-branch logic, repeated invariants, and long lists of unaffected behavior unless they are necessary to prevent a likely implementation mistake. Avoid repeated repo facts and irrelevant edge-case or rollout detail. For straightforward refactors, keep the plan to a compact summary, key edits, tests, and assumptions. If the user asks for more detail, then expand.
Do not ask "should I proceed?" in the final output. The user can easily switch out of Plan mode and request implementation if you have included a `<proposed_plan>` block in your response. Alternatively, they can decide to stay in Plan mode and continue refining the plan.
Only produce at most one `<proposed_plan>` block per turn, and only when you are presenting a complete spec.
If the user stays in Plan mode and asks for revisions after a prior `<proposed_plan>`, any new `<proposed_plan>` must be a complete replacement.