Compare commits

...

2 Commits

Author SHA1 Message Date
Colin Young
46abb06865 [Codex][Codex CLI] refine auth routing observability follow-up
Use the provider header map when deriving residency telemetry, remove the conversation-start base URL override fallback, and keep request-route telemetry owned by the actual request path.

Co-authored-by: Codex <noreply@openai.com>
2026-03-17 15:20:29 -07:00
Colin Young
d02630eccc [Codex][Codex CLI] Add auth routing observability follow-up
Checkpoint the PR2 remainder before rebasing onto main.

Co-authored-by: Codex <noreply@openai.com>
2026-03-16 15:57:11 -07:00
22 changed files with 2014 additions and 259 deletions

View File

@@ -1240,6 +1240,10 @@ impl AuthManager {
.unwrap_or(false)
}
/// Whether this auth manager honours the Codex API key environment variable
/// (mirrors the `enable_codex_api_key_env` flag captured at construction).
pub(crate) fn codex_api_key_env_enabled(&self) -> bool {
    self.enable_codex_api_key_env
}
pub fn is_external_auth_active(&self) -> bool {
self.auth_cached()
.as_ref()

View File

@@ -0,0 +1,36 @@
use crate::auth::CODEX_API_KEY_ENV_VAR;
use crate::auth::OPENAI_API_KEY_ENV_VAR;
use crate::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use crate::model_provider_info::ModelProviderInfo;
/// Snapshot of auth-related environment-variable state, captured once and
/// attached to telemetry events.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct AuthEnvTelemetry {
    // True when the OpenAI API key env var holds a non-blank value.
    pub(crate) openai_api_key_env_present: bool,
    // True when the Codex API key env var holds a non-blank value.
    pub(crate) codex_api_key_env_present: bool,
    // Whether the auth manager is configured to honour the Codex API key env var.
    pub(crate) codex_api_key_env_enabled: bool,
    // Name of the provider-declared API key env var, if the provider has one.
    pub(crate) provider_env_key_name: Option<String>,
    // Presence of that provider env var; `None` when the provider declares none.
    pub(crate) provider_env_key_present: Option<bool>,
    // True when the refresh-token URL override env var holds a non-blank value.
    pub(crate) refresh_token_url_override_present: bool,
}
/// Builds an [`AuthEnvTelemetry`] snapshot from the current process
/// environment and the provider's declared API key env var.
pub(crate) fn collect_auth_env_telemetry(
    provider: &ModelProviderInfo,
    codex_api_key_env_enabled: bool,
) -> AuthEnvTelemetry {
    // Resolve the provider-specific key details up front so the struct
    // literal below stays flat and readable.
    let provider_env_key_name = provider.env_key.clone();
    let provider_env_key_present = provider.env_key.as_deref().map(env_var_present);

    AuthEnvTelemetry {
        openai_api_key_env_present: env_var_present(OPENAI_API_KEY_ENV_VAR),
        codex_api_key_env_present: env_var_present(CODEX_API_KEY_ENV_VAR),
        codex_api_key_env_enabled,
        provider_env_key_name,
        provider_env_key_present,
        refresh_token_url_override_present: env_var_present(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR),
    }
}
/// Reports whether `name` is set in the environment to something other than
/// pure whitespace. A value that is present but not valid Unicode still
/// counts as present; a missing variable does not.
fn env_var_present(name: &str) -> bool {
    use std::env::VarError;

    std::env::var(name)
        .map(|value| !value.trim().is_empty())
        .unwrap_or_else(|err| matches!(err, VarError::NotUnicode(_)))
}

View File

@@ -34,6 +34,8 @@ use crate::api_bridge::CoreAuthProvider;
use crate::api_bridge::auth_provider_from_auth;
use crate::api_bridge::map_api_error;
use crate::auth::UnauthorizedRecovery;
use crate::auth_env_telemetry::AuthEnvTelemetry;
use crate::auth_env_telemetry::collect_auth_env_telemetry;
use codex_api::CompactClient as ApiCompactClient;
use codex_api::CompactionInput as ApiCompactionInput;
use codex_api::MemoriesClient as ApiMemoriesClient;
@@ -81,7 +83,6 @@ use tokio::sync::oneshot;
use tokio::sync::oneshot::error::TryRecvError;
use tokio_tungstenite::tungstenite::Error;
use tokio_tungstenite::tungstenite::Message;
use tracing::instrument;
use tracing::trace;
use tracing::warn;
@@ -94,6 +95,9 @@ use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::config::Config;
use crate::default_client::build_reqwest_client;
use crate::default_client::residency_header_telemetry_for_provider_headers;
use crate::endpoint_config_telemetry::EndpointConfigTelemetry;
use crate::endpoint_config_telemetry::EndpointConfigTelemetrySource;
use crate::error::CodexErr;
use crate::error::Result;
use crate::flags::CODEX_RS_SSE_FIXTURE;
@@ -135,6 +139,8 @@ struct ModelClientState {
auth_manager: Option<Arc<AuthManager>>,
conversation_id: ThreadId,
provider: ModelProviderInfo,
endpoint_telemetry_source: EndpointConfigTelemetrySource,
auth_env_telemetry: AuthEnvTelemetry,
session_source: SessionSource,
model_verbosity: Option<VerbosityConfig>,
responses_websockets_enabled_by_feature: bool,
@@ -153,16 +159,25 @@ struct CurrentClientSetup {
auth: Option<CodexAuth>,
api_provider: codex_api::Provider,
api_auth: CoreAuthProvider,
endpoint_telemetry: EndpointConfigTelemetry,
provider_header_names: Option<String>,
}
#[derive(Clone, Copy)]
#[derive(Clone)]
struct RequestRouteTelemetry {
endpoint: &'static str,
residency_header_attached: bool,
residency_header_value: Option<String>,
}
impl RequestRouteTelemetry {
fn for_endpoint(endpoint: &'static str) -> Self {
Self { endpoint }
fn for_endpoint(endpoint: &'static str, provider_headers: &ApiHeaderMap) -> Self {
let residency = residency_header_telemetry_for_provider_headers(provider_headers);
Self {
endpoint,
residency_header_attached: residency.attached,
residency_header_value: residency.value,
}
}
}
@@ -264,11 +279,75 @@ impl ModelClient {
include_timing_metrics: bool,
beta_features_header: Option<String>,
) -> Self {
let endpoint_telemetry_source =
EndpointConfigTelemetrySource::for_provider_without_id(&provider);
Self::new_with_endpoint_telemetry_source(
auth_manager,
conversation_id,
provider,
endpoint_telemetry_source,
session_source,
model_verbosity,
responses_websockets_enabled_by_feature,
enable_request_compression,
include_timing_metrics,
beta_features_header,
)
}
#[allow(clippy::too_many_arguments)]
pub fn new_with_provider_id(
auth_manager: Option<Arc<AuthManager>>,
conversation_id: ThreadId,
provider_id: &str,
provider: ModelProviderInfo,
session_source: SessionSource,
model_verbosity: Option<VerbosityConfig>,
responses_websockets_enabled_by_feature: bool,
enable_request_compression: bool,
include_timing_metrics: bool,
beta_features_header: Option<String>,
) -> Self {
let endpoint_telemetry_source =
EndpointConfigTelemetrySource::for_provider(provider_id, &provider);
Self::new_with_endpoint_telemetry_source(
auth_manager,
conversation_id,
provider,
endpoint_telemetry_source,
session_source,
model_verbosity,
responses_websockets_enabled_by_feature,
enable_request_compression,
include_timing_metrics,
beta_features_header,
)
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn new_with_endpoint_telemetry_source(
auth_manager: Option<Arc<AuthManager>>,
conversation_id: ThreadId,
provider: ModelProviderInfo,
endpoint_telemetry_source: EndpointConfigTelemetrySource,
session_source: SessionSource,
model_verbosity: Option<VerbosityConfig>,
responses_websockets_enabled_by_feature: bool,
enable_request_compression: bool,
include_timing_metrics: bool,
beta_features_header: Option<String>,
) -> Self {
let codex_api_key_env_enabled = auth_manager
.as_ref()
.is_some_and(|manager| manager.codex_api_key_env_enabled());
let auth_env_telemetry = collect_auth_env_telemetry(&provider, codex_api_key_env_enabled);
Self {
state: Arc::new(ModelClientState {
auth_manager,
conversation_id,
provider,
endpoint_telemetry_source,
auth_env_telemetry,
session_source,
model_verbosity,
responses_websockets_enabled_by_feature,
@@ -337,7 +416,13 @@ impl ModelClient {
&client_setup.api_auth,
PendingUnauthorizedRetry::default(),
),
RequestRouteTelemetry::for_endpoint(RESPONSES_COMPACT_ENDPOINT),
client_setup.endpoint_telemetry,
self.state.auth_env_telemetry.clone(),
client_setup.provider_header_names.clone(),
RequestRouteTelemetry::for_endpoint(
RESPONSES_COMPACT_ENDPOINT,
&client_setup.api_provider.headers,
),
);
let client =
ApiCompactClient::new(transport, client_setup.api_provider, client_setup.api_auth)
@@ -405,7 +490,13 @@ impl ModelClient {
&client_setup.api_auth,
PendingUnauthorizedRetry::default(),
),
RequestRouteTelemetry::for_endpoint(MEMORIES_SUMMARIZE_ENDPOINT),
client_setup.endpoint_telemetry,
self.state.auth_env_telemetry.clone(),
client_setup.provider_header_names.clone(),
RequestRouteTelemetry::for_endpoint(
MEMORIES_SUMMARIZE_ENDPOINT,
&client_setup.api_provider.headers,
),
);
let client =
ApiMemoriesClient::new(transport, client_setup.api_provider, client_setup.api_auth)
@@ -449,11 +540,17 @@ impl ModelClient {
fn build_request_telemetry(
session_telemetry: &SessionTelemetry,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
auth_env_telemetry: AuthEnvTelemetry,
provider_header_names: Option<String>,
request_route_telemetry: RequestRouteTelemetry,
) -> Arc<dyn RequestTelemetry> {
let telemetry = Arc::new(ApiTelemetry::new(
session_telemetry.clone(),
auth_context,
endpoint_telemetry,
auth_env_telemetry,
provider_header_names,
request_route_telemetry,
));
let request_telemetry: Arc<dyn RequestTelemetry> = telemetry;
@@ -510,10 +607,16 @@ impl ModelClient {
.provider
.to_api_provider(auth.as_ref().map(CodexAuth::auth_mode))?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
let endpoint_telemetry = self
.state
.endpoint_telemetry_source
.classify(api_provider.base_url.as_str());
Ok(CurrentClientSetup {
auth,
api_provider,
api_auth,
endpoint_telemetry,
provider_header_names: self.state.provider.telemetry_header_names(),
})
}
@@ -530,13 +633,18 @@ impl ModelClient {
turn_state: Option<Arc<OnceLock<String>>>,
turn_metadata_header: Option<&str>,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
provider_header_names: Option<&str>,
request_route_telemetry: RequestRouteTelemetry,
) -> std::result::Result<ApiWebSocketConnection, ApiError> {
let headers = self.build_websocket_headers(turn_state.as_ref(), turn_metadata_header);
let websocket_telemetry = ModelClientSession::build_websocket_telemetry(
session_telemetry,
auth_context,
request_route_telemetry,
endpoint_telemetry,
self.state.auth_env_telemetry.clone(),
provider_header_names.map(str::to_owned),
request_route_telemetry.clone(),
);
let start = Instant::now();
let result = ApiWebSocketResponsesClient::new(api_provider, api_auth)
@@ -554,7 +662,7 @@ impl ModelClient {
.map(extract_response_debug_context_from_api_error)
.unwrap_or_default();
let status = result.as_ref().err().and_then(api_error_http_status);
session_telemetry.record_websocket_connect(
session_telemetry.record_websocket_connect_with_endpoint_details(
start.elapsed(),
status,
error_message.as_deref(),
@@ -564,25 +672,71 @@ impl ModelClient {
auth_context.recovery_mode,
auth_context.recovery_phase,
request_route_telemetry.endpoint,
request_route_telemetry.residency_header_attached,
request_route_telemetry.residency_header_value.as_deref(),
provider_header_names,
endpoint_telemetry.base_url_origin,
endpoint_telemetry.host_class,
endpoint_telemetry.base_url_source,
endpoint_telemetry.base_url_is_default,
self.state.auth_env_telemetry.openai_api_key_env_present,
self.state.auth_env_telemetry.codex_api_key_env_present,
self.state.auth_env_telemetry.codex_api_key_env_enabled,
self.state
.auth_env_telemetry
.provider_env_key_name
.as_deref(),
self.state.auth_env_telemetry.provider_env_key_present,
self.state
.auth_env_telemetry
.refresh_token_url_override_present,
false,
response_debug.request_id.as_deref(),
response_debug.cf_ray.as_deref(),
response_debug.auth_error.as_deref(),
response_debug.auth_error_code.as_deref(),
response_debug.error_body_class,
response_debug.safe_error_message,
);
emit_feedback_request_tags(&FeedbackRequestTags {
endpoint: request_route_telemetry.endpoint,
auth_header_attached: auth_context.auth_header_attached,
auth_header_name: auth_context.auth_header_name,
auth_mode: auth_context.auth_mode,
auth_env_openai_api_key_present: self
.state
.auth_env_telemetry
.openai_api_key_env_present,
auth_env_codex_api_key_present: self.state.auth_env_telemetry.codex_api_key_env_present,
auth_env_codex_api_key_enabled: self.state.auth_env_telemetry.codex_api_key_env_enabled,
auth_env_provider_key_name: self
.state
.auth_env_telemetry
.provider_env_key_name
.as_deref(),
auth_env_provider_key_present: self.state.auth_env_telemetry.provider_env_key_present,
auth_env_refresh_token_url_override_present: self
.state
.auth_env_telemetry
.refresh_token_url_override_present,
auth_retry_after_unauthorized: Some(auth_context.retry_after_unauthorized),
auth_recovery_mode: auth_context.recovery_mode,
auth_recovery_phase: auth_context.recovery_phase,
auth_connection_reused: Some(false),
provider_header_names,
base_url_origin: endpoint_telemetry.base_url_origin,
host_class: endpoint_telemetry.host_class,
base_url_source: endpoint_telemetry.base_url_source,
base_url_is_default: endpoint_telemetry.base_url_is_default,
residency_header_attached: Some(request_route_telemetry.residency_header_attached),
residency_header_value: request_route_telemetry.residency_header_value.as_deref(),
auth_request_id: response_debug.request_id.as_deref(),
auth_cf_ray: response_debug.cf_ray.as_deref(),
auth_error: response_debug.auth_error.as_deref(),
auth_error_code: response_debug.auth_error_code.as_deref(),
error_body_class: response_debug.error_body_class,
safe_error_message: response_debug.safe_error_message,
geo_denial_detected: Some(response_debug.geo_denial_detected),
auth_recovery_followup_success: auth_context
.retry_after_unauthorized
.then_some(result.is_ok()),
@@ -591,6 +745,23 @@ impl ModelClient {
.then_some(status)
.flatten(),
});
if status == Some(StatusCode::UNAUTHORIZED.as_u16()) && response_debug.geo_denial_detected {
session_telemetry.record_geo_denial(
request_route_telemetry.endpoint,
auth_context.auth_header_attached,
auth_context.auth_header_name,
request_route_telemetry.residency_header_attached,
request_route_telemetry.residency_header_value.as_deref(),
provider_header_names,
status,
response_debug.request_id.as_deref(),
response_debug.cf_ray.as_deref(),
response_debug.auth_error.as_deref(),
response_debug.auth_error_code.as_deref(),
response_debug.error_body_class.unwrap_or_default(),
response_debug.safe_error_message,
);
}
result
}
@@ -839,6 +1010,11 @@ impl ModelClientSession {
&client_setup.api_auth,
PendingUnauthorizedRetry::default(),
);
let endpoint_telemetry = client_setup.endpoint_telemetry;
let request_route_telemetry = RequestRouteTelemetry::for_endpoint(
RESPONSES_ENDPOINT,
&client_setup.api_provider.headers,
);
let connection = self
.client
.connect_websocket(
@@ -848,7 +1024,9 @@ impl ModelClientSession {
Some(Arc::clone(&self.turn_state)),
None,
auth_context,
RequestRouteTelemetry::for_endpoint(RESPONSES_ENDPOINT),
endpoint_telemetry,
client_setup.provider_header_names.as_deref(),
request_route_telemetry,
)
.await?;
self.websocket_session.connection = Some(connection);
@@ -856,31 +1034,19 @@ impl ModelClientSession {
Ok(())
}
/// Returns a websocket connection for this turn.
#[instrument(
name = "model_client.websocket_connection",
level = "info",
skip_all,
fields(
provider = %self.client.state.provider.name,
wire_api = %self.client.state.provider.wire_api,
transport = "responses_websocket",
api.path = "responses",
turn.has_metadata_header = params.turn_metadata_header.is_some()
)
)]
#[allow(clippy::too_many_arguments)]
async fn websocket_connection(
&mut self,
params: WebsocketConnectParams<'_>,
session_telemetry: &SessionTelemetry,
api_provider: codex_api::Provider,
api_auth: CoreAuthProvider,
turn_metadata_header: Option<&str>,
options: &ApiResponsesOptions,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
provider_header_names: Option<&str>,
request_route_telemetry: RequestRouteTelemetry,
) -> std::result::Result<&ApiWebSocketConnection, ApiError> {
let WebsocketConnectParams {
session_telemetry,
api_provider,
api_auth,
turn_metadata_header,
options,
auth_context,
request_route_telemetry,
} = params;
let needs_new = match self.websocket_session.connection.as_ref() {
Some(conn) => conn.is_closed().await,
None => true,
@@ -902,6 +1068,8 @@ impl ModelClientSession {
Some(turn_state),
turn_metadata_header,
auth_context,
endpoint_telemetry,
provider_header_names,
request_route_telemetry,
)
.await?;
@@ -935,19 +1103,6 @@ impl ModelClientSession {
/// Handles SSE fixtures, reasoning summaries, verbosity, and the
/// `text` controls used for output schemas.
#[allow(clippy::too_many_arguments)]
#[instrument(
name = "model_client.stream_responses_api",
level = "info",
skip_all,
fields(
model = %model_info.slug,
wire_api = %self.client.state.provider.wire_api,
transport = "responses_http",
http.method = "POST",
api.path = "responses",
turn.has_metadata_header = turn_metadata_header.is_some()
)
)]
async fn stream_responses_api(
&self,
prompt: &Prompt,
@@ -985,7 +1140,13 @@ impl ModelClientSession {
let (request_telemetry, sse_telemetry) = Self::build_streaming_telemetry(
session_telemetry,
request_auth_context,
RequestRouteTelemetry::for_endpoint(RESPONSES_ENDPOINT),
client_setup.endpoint_telemetry,
self.client.state.auth_env_telemetry.clone(),
client_setup.provider_header_names.clone(),
RequestRouteTelemetry::for_endpoint(
RESPONSES_ENDPOINT,
&client_setup.api_provider.headers,
),
);
let compression = self.responses_request_compression(client_setup.auth.as_ref());
let options = self.build_responses_options(turn_metadata_header, compression);
@@ -1031,19 +1192,6 @@ impl ModelClientSession {
/// Streams a turn via the Responses API over WebSocket transport.
#[allow(clippy::too_many_arguments)]
#[instrument(
name = "model_client.stream_responses_websocket",
level = "info",
skip_all,
fields(
model = %model_info.slug,
wire_api = %self.client.state.provider.wire_api,
transport = "responses_websocket",
api.path = "responses",
turn.has_metadata_header = turn_metadata_header.is_some(),
websocket.warmup = warmup
)
)]
async fn stream_responses_websocket(
&mut self,
prompt: &Prompt,
@@ -1069,6 +1217,10 @@ impl ModelClientSession {
pending_retry,
);
let compression = self.responses_request_compression(client_setup.auth.as_ref());
let request_route_telemetry = RequestRouteTelemetry::for_endpoint(
RESPONSES_ENDPOINT,
&client_setup.api_provider.headers,
);
let options = self.build_responses_options(turn_metadata_header, compression);
let request = self.build_responses_request(
@@ -1088,17 +1240,17 @@ impl ModelClientSession {
}
match self
.websocket_connection(WebsocketConnectParams {
.websocket_connection(
session_telemetry,
api_provider: client_setup.api_provider,
api_auth: client_setup.api_auth,
client_setup.api_provider,
client_setup.api_auth,
turn_metadata_header,
options: &options,
auth_context: request_auth_context,
request_route_telemetry: RequestRouteTelemetry::for_endpoint(
RESPONSES_ENDPOINT,
),
})
&options,
request_auth_context,
client_setup.endpoint_telemetry,
client_setup.provider_header_names.as_deref(),
request_route_telemetry,
)
.await
{
Ok(_) => {}
@@ -1148,11 +1300,17 @@ impl ModelClientSession {
fn build_streaming_telemetry(
session_telemetry: &SessionTelemetry,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
auth_env_telemetry: AuthEnvTelemetry,
provider_header_names: Option<String>,
request_route_telemetry: RequestRouteTelemetry,
) -> (Arc<dyn RequestTelemetry>, Arc<dyn SseTelemetry>) {
let telemetry = Arc::new(ApiTelemetry::new(
session_telemetry.clone(),
auth_context,
endpoint_telemetry,
auth_env_telemetry,
provider_header_names,
request_route_telemetry,
));
let request_telemetry: Arc<dyn RequestTelemetry> = telemetry.clone();
@@ -1164,11 +1322,17 @@ impl ModelClientSession {
fn build_websocket_telemetry(
session_telemetry: &SessionTelemetry,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
auth_env_telemetry: AuthEnvTelemetry,
provider_header_names: Option<String>,
request_route_telemetry: RequestRouteTelemetry,
) -> Arc<dyn WebsocketTelemetry> {
let telemetry = Arc::new(ApiTelemetry::new(
session_telemetry.clone(),
auth_context,
endpoint_telemetry,
auth_env_telemetry,
provider_header_names,
request_route_telemetry,
));
let websocket_telemetry: Arc<dyn WebsocketTelemetry> = telemetry;
@@ -1496,16 +1660,6 @@ impl AuthRequestTelemetryContext {
}
}
/// Grouped parameters passed to `websocket_connection`, bundling the
/// per-turn connection inputs into a single argument struct.
struct WebsocketConnectParams<'a> {
    session_telemetry: &'a SessionTelemetry,
    api_provider: codex_api::Provider,
    api_auth: CoreAuthProvider,
    turn_metadata_header: Option<&'a str>,
    options: &'a ApiResponsesOptions,
    auth_context: AuthRequestTelemetryContext,
    request_route_telemetry: RequestRouteTelemetry,
}
async fn handle_unauthorized(
transport: TransportError,
auth_recovery: &mut Option<UnauthorizedRecovery>,
@@ -1632,6 +1786,9 @@ fn api_error_http_status(error: &ApiError) -> Option<u16> {
struct ApiTelemetry {
session_telemetry: SessionTelemetry,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
auth_env_telemetry: AuthEnvTelemetry,
provider_header_names: Option<String>,
request_route_telemetry: RequestRouteTelemetry,
}
@@ -1639,11 +1796,17 @@ impl ApiTelemetry {
fn new(
session_telemetry: SessionTelemetry,
auth_context: AuthRequestTelemetryContext,
endpoint_telemetry: EndpointConfigTelemetry,
auth_env_telemetry: AuthEnvTelemetry,
provider_header_names: Option<String>,
request_route_telemetry: RequestRouteTelemetry,
) -> Self {
Self {
session_telemetry,
auth_context,
endpoint_telemetry,
auth_env_telemetry,
provider_header_names,
request_route_telemetry,
}
}
@@ -1662,35 +1825,74 @@ impl RequestTelemetry for ApiTelemetry {
let debug = error
.map(extract_response_debug_context)
.unwrap_or_default();
self.session_telemetry.record_api_request(
attempt,
status,
error_message.as_deref(),
duration,
self.auth_context.auth_header_attached,
self.auth_context.auth_header_name,
self.auth_context.retry_after_unauthorized,
self.auth_context.recovery_mode,
self.auth_context.recovery_phase,
self.request_route_telemetry.endpoint,
debug.request_id.as_deref(),
debug.cf_ray.as_deref(),
debug.auth_error.as_deref(),
debug.auth_error_code.as_deref(),
);
self.session_telemetry
.record_api_request_with_endpoint_details(
attempt,
status,
error_message.as_deref(),
duration,
self.auth_context.auth_header_attached,
self.auth_context.auth_header_name,
self.auth_context.retry_after_unauthorized,
self.auth_context.recovery_mode,
self.auth_context.recovery_phase,
self.request_route_telemetry.endpoint,
self.request_route_telemetry.residency_header_attached,
self.request_route_telemetry
.residency_header_value
.as_deref(),
self.provider_header_names.as_deref(),
self.endpoint_telemetry.base_url_origin,
self.endpoint_telemetry.host_class,
self.endpoint_telemetry.base_url_source,
self.endpoint_telemetry.base_url_is_default,
self.auth_env_telemetry.openai_api_key_env_present,
self.auth_env_telemetry.codex_api_key_env_present,
self.auth_env_telemetry.codex_api_key_env_enabled,
self.auth_env_telemetry.provider_env_key_name.as_deref(),
self.auth_env_telemetry.provider_env_key_present,
self.auth_env_telemetry.refresh_token_url_override_present,
debug.request_id.as_deref(),
debug.cf_ray.as_deref(),
debug.auth_error.as_deref(),
debug.auth_error_code.as_deref(),
debug.error_body_class,
debug.safe_error_message,
);
emit_feedback_request_tags(&FeedbackRequestTags {
endpoint: self.request_route_telemetry.endpoint,
auth_header_attached: self.auth_context.auth_header_attached,
auth_header_name: self.auth_context.auth_header_name,
auth_mode: self.auth_context.auth_mode,
auth_env_openai_api_key_present: self.auth_env_telemetry.openai_api_key_env_present,
auth_env_codex_api_key_present: self.auth_env_telemetry.codex_api_key_env_present,
auth_env_codex_api_key_enabled: self.auth_env_telemetry.codex_api_key_env_enabled,
auth_env_provider_key_name: self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth_env_provider_key_present: self.auth_env_telemetry.provider_env_key_present,
auth_env_refresh_token_url_override_present: self
.auth_env_telemetry
.refresh_token_url_override_present,
auth_retry_after_unauthorized: Some(self.auth_context.retry_after_unauthorized),
auth_recovery_mode: self.auth_context.recovery_mode,
auth_recovery_phase: self.auth_context.recovery_phase,
auth_connection_reused: None,
provider_header_names: self.provider_header_names.as_deref(),
base_url_origin: self.endpoint_telemetry.base_url_origin,
host_class: self.endpoint_telemetry.host_class,
base_url_source: self.endpoint_telemetry.base_url_source,
base_url_is_default: self.endpoint_telemetry.base_url_is_default,
residency_header_attached: Some(self.request_route_telemetry.residency_header_attached),
residency_header_value: self
.request_route_telemetry
.residency_header_value
.as_deref(),
auth_request_id: debug.request_id.as_deref(),
auth_cf_ray: debug.cf_ray.as_deref(),
auth_error: debug.auth_error.as_deref(),
auth_error_code: debug.auth_error_code.as_deref(),
error_body_class: debug.error_body_class,
safe_error_message: debug.safe_error_message,
geo_denial_detected: Some(debug.geo_denial_detected),
auth_recovery_followup_success: self
.auth_context
.retry_after_unauthorized
@@ -1701,6 +1903,25 @@ impl RequestTelemetry for ApiTelemetry {
.then_some(status)
.flatten(),
});
if status == Some(StatusCode::UNAUTHORIZED.as_u16()) && debug.geo_denial_detected {
self.session_telemetry.record_geo_denial(
self.request_route_telemetry.endpoint,
self.auth_context.auth_header_attached,
self.auth_context.auth_header_name,
self.request_route_telemetry.residency_header_attached,
self.request_route_telemetry
.residency_header_value
.as_deref(),
self.provider_header_names.as_deref(),
status,
debug.request_id.as_deref(),
debug.cf_ray.as_deref(),
debug.auth_error.as_deref(),
debug.auth_error_code.as_deref(),
debug.error_body_class.unwrap_or_default(),
debug.safe_error_message,
);
}
}
}
@@ -1734,14 +1955,35 @@ impl WebsocketTelemetry for ApiTelemetry {
auth_header_attached: self.auth_context.auth_header_attached,
auth_header_name: self.auth_context.auth_header_name,
auth_mode: self.auth_context.auth_mode,
auth_env_openai_api_key_present: self.auth_env_telemetry.openai_api_key_env_present,
auth_env_codex_api_key_present: self.auth_env_telemetry.codex_api_key_env_present,
auth_env_codex_api_key_enabled: self.auth_env_telemetry.codex_api_key_env_enabled,
auth_env_provider_key_name: self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth_env_provider_key_present: self.auth_env_telemetry.provider_env_key_present,
auth_env_refresh_token_url_override_present: self
.auth_env_telemetry
.refresh_token_url_override_present,
auth_retry_after_unauthorized: Some(self.auth_context.retry_after_unauthorized),
auth_recovery_mode: self.auth_context.recovery_mode,
auth_recovery_phase: self.auth_context.recovery_phase,
auth_connection_reused: Some(connection_reused),
provider_header_names: self.provider_header_names.as_deref(),
base_url_origin: self.endpoint_telemetry.base_url_origin,
host_class: self.endpoint_telemetry.host_class,
base_url_source: self.endpoint_telemetry.base_url_source,
base_url_is_default: self.endpoint_telemetry.base_url_is_default,
residency_header_attached: Some(self.request_route_telemetry.residency_header_attached),
residency_header_value: self
.request_route_telemetry
.residency_header_value
.as_deref(),
auth_request_id: debug.request_id.as_deref(),
auth_cf_ray: debug.cf_ray.as_deref(),
auth_error: debug.auth_error.as_deref(),
auth_error_code: debug.auth_error_code.as_deref(),
error_body_class: debug.error_body_class,
safe_error_message: debug.safe_error_message,
geo_denial_detected: Some(debug.geo_denial_detected),
auth_recovery_followup_success: self
.auth_context
.retry_after_unauthorized

View File

@@ -25,6 +25,7 @@ use crate::compact::should_use_remote_compact_task;
use crate::compact_remote::run_inline_remote_auto_compact_task;
use crate::config::ManagedFeatures;
use crate::connectors;
use crate::endpoint_config_telemetry::resolve_endpoint_config_telemetry_source;
use crate::exec_policy::ExecPolicyManager;
use crate::features::FEATURES;
use crate::features::Feature;
@@ -153,6 +154,7 @@ use tracing::warn;
use uuid::Uuid;
use crate::ModelProviderInfo;
use crate::auth_env_telemetry::collect_auth_env_telemetry;
use crate::client::ModelClient;
use crate::client::ModelClientSession;
use crate::client_common::Prompt;
@@ -1601,8 +1603,33 @@ impl Session {
)],
);
session_telemetry.conversation_starts(
let auth_env_telemetry = collect_auth_env_telemetry(
&config.model_provider,
auth_manager.codex_api_key_env_enabled(),
);
let conversation_start_base_url = config
.model_provider
.to_api_provider(auth.map(CodexAuth::auth_mode))
.map(|provider| provider.base_url)
.unwrap_or_default();
let endpoint_telemetry = resolve_endpoint_config_telemetry_source(
&config,
session_configuration.session_source.clone(),
)
.classify(&conversation_start_base_url);
session_telemetry.conversation_starts_with_endpoint_details(
config.model_provider.name.as_str(),
endpoint_telemetry.base_url_origin,
endpoint_telemetry.host_class,
endpoint_telemetry.base_url_source,
endpoint_telemetry.base_url_is_default,
auth_env_telemetry.openai_api_key_env_present,
auth_env_telemetry.codex_api_key_env_present,
auth_env_telemetry.codex_api_key_env_enabled,
auth_env_telemetry.provider_env_key_name.as_deref(),
auth_env_telemetry.provider_env_key_present,
auth_env_telemetry.refresh_token_url_override_present,
session_configuration.collaboration_mode.reasoning_effort(),
config
.model_reasoning_summary
@@ -1781,10 +1808,14 @@ impl Session {
network_proxy,
network_approval: Arc::clone(&network_approval),
state_db: state_db_ctx.clone(),
model_client: ModelClient::new(
model_client: ModelClient::new_with_endpoint_telemetry_source(
Some(Arc::clone(&auth_manager)),
conversation_id,
session_configuration.provider.clone(),
resolve_endpoint_config_telemetry_source(
&config,
session_configuration.session_source.clone(),
),
session_configuration.session_source.clone(),
config.model_verbosity,
ws_version_from_features(config.as_ref()),

View File

@@ -4134,7 +4134,7 @@ model_verbosity = "high"
supports_websockets: false,
};
let model_provider_map = {
let mut model_provider_map = built_in_model_providers(/* openai_base_url */ None);
let mut model_provider_map = built_in_model_providers(None);
model_provider_map.insert("openai-custom".to_string(), openai_custom_provider.clone());
model_provider_map
};

View File

@@ -30,6 +30,12 @@ pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
pub const RESIDENCY_HEADER_NAME: &str = "x-openai-internal-codex-residency";
/// Telemetry describing whether the Codex residency header is attached to a
/// request, and with what value.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct ResidencyHeaderTelemetry {
    // True when a residency header is (or would be) attached.
    pub attached: bool,
    // The header value, when known and representable as a UTF-8 string.
    pub value: Option<String>,
}
#[derive(Debug, Clone)]
pub struct Originator {
pub value: String,
@@ -89,6 +95,33 @@ pub fn set_default_client_residency_requirement(enforce_residency: Option<Reside
*guard = enforce_residency;
}
/// Derives residency-header telemetry from the process-wide residency
/// requirement. Yields the default (not attached) when no requirement is
/// configured or when the lock cannot be acquired.
pub fn current_residency_header_telemetry() -> ResidencyHeaderTelemetry {
    let guard = match REQUIREMENTS_RESIDENCY.read() {
        Ok(guard) => guard,
        Err(_) => {
            // A poisoned lock is logged and treated as "no requirement".
            tracing::warn!("Failed to acquire requirements residency lock");
            return ResidencyHeaderTelemetry::default();
        }
    };
    match guard.as_ref() {
        Some(requirement) => ResidencyHeaderTelemetry {
            attached: true,
            value: Some(residency_header_value(*requirement).to_string()),
        },
        None => ResidencyHeaderTelemetry::default(),
    }
}
/// Residency-header telemetry for a request built from `provider_headers`.
///
/// Prefers a residency header already present on the provider header map and
/// only falls back to the process-wide residency requirement when absent.
pub fn residency_header_telemetry_for_provider_headers(
    provider_headers: &HeaderMap,
) -> ResidencyHeaderTelemetry {
    match provider_headers.get(RESIDENCY_HEADER_NAME) {
        Some(value) => ResidencyHeaderTelemetry {
            attached: true,
            // A non-UTF-8 header value is reported as attached with no value.
            value: value.to_str().ok().map(str::to_owned),
        },
        None => current_residency_header_telemetry(),
    }
}
pub fn originator() -> Originator {
if let Ok(guard) = ORIGINATOR.read()
&& let Some(originator) = guard.as_ref()
@@ -184,12 +217,6 @@ pub fn create_client() -> CodexHttpClient {
CodexHttpClient::new(inner)
}
/// Builds the default reqwest client used for ordinary Codex HTTP traffic.
///
/// This starts from the standard Codex user agent, default headers, and sandbox-specific proxy
/// policy, then layers in shared custom CA handling from `CODEX_CA_CERTIFICATE` /
/// `SSL_CERT_FILE`. The function remains infallible for compatibility with existing call sites, so
/// a custom-CA or builder failure is logged and falls back to `reqwest::Client::new()`.
pub fn build_reqwest_client() -> reqwest::Client {
try_build_reqwest_client().unwrap_or_else(|error| {
tracing::warn!(error = %error, "failed to build default reqwest client");
@@ -197,10 +224,6 @@ pub fn build_reqwest_client() -> reqwest::Client {
})
}
/// Tries to build the default reqwest client used for ordinary Codex HTTP traffic.
///
/// Callers that need a structured CA-loading failure instead of the legacy logged fallback can use
/// this method directly.
pub fn try_build_reqwest_client() -> Result<reqwest::Client, BuildCustomCaTransportError> {
let ua = get_codex_user_agent();
@@ -222,14 +245,20 @@ pub fn default_headers() -> HeaderMap {
&& let Some(requirement) = guard.as_ref()
&& !headers.contains_key(RESIDENCY_HEADER_NAME)
{
let value = match requirement {
ResidencyRequirement::Us => HeaderValue::from_static("us"),
};
headers.insert(RESIDENCY_HEADER_NAME, value);
headers.insert(
RESIDENCY_HEADER_NAME,
HeaderValue::from_static(residency_header_value(*requirement)),
);
}
headers
}
/// Maps a residency requirement to the wire value carried in the residency header.
fn residency_header_value(requirement: ResidencyRequirement) -> &'static str {
    match requirement {
        // Only US data residency is currently supported.
        ResidencyRequirement::Us => "us",
    }
}
/// True when running under the seatbelt sandbox, as signaled by the sandbox env var.
fn is_sandboxed() -> bool {
    matches!(
        std::env::var(CODEX_SANDBOX_ENV_VAR).as_deref(),
        Ok("seatbelt")
    )
}

View File

@@ -0,0 +1,336 @@
use crate::config::Config;
use crate::model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::OLLAMA_OSS_PROVIDER_ID;
use crate::model_provider_info::OPENAI_PROVIDER_ID;
use codex_app_server_protocol::ConfigLayerSource;
use codex_protocol::protocol::SessionSource;
use reqwest::Url;
const BASE_URL_ORIGIN_CHATGPT: &str = "chatgpt.com";
const BASE_URL_ORIGIN_OPENAI_API: &str = "api.openai.com";
const BASE_URL_ORIGIN_OPENROUTER: &str = "openrouter.ai";
const BASE_URL_ORIGIN_CUSTOM: &str = "custom";
const HOST_CLASS_OPENAI_CHATGPT: &str = "openai_chatgpt";
const HOST_CLASS_OPENAI_API: &str = "openai_api";
const HOST_CLASS_KNOWN_THIRD_PARTY: &str = "known_third_party";
const HOST_CLASS_CUSTOM_UNKNOWN: &str = "custom_unknown";
const BASE_URL_SOURCE_DEFAULT: &str = "default";
const BASE_URL_SOURCE_ENV: &str = "env";
const BASE_URL_SOURCE_CONFIG_TOML: &str = "config_toml";
const BASE_URL_SOURCE_IDE_SETTINGS: &str = "ide_settings";
const BASE_URL_SOURCE_MANAGED_CONFIG: &str = "managed_config";
const BASE_URL_SOURCE_SESSION_FLAGS: &str = "session_flags";
/// Where the effective base URL came from, captured before the URL itself is known.
///
/// `classify` later combines this with the resolved URL to produce the full
/// endpoint telemetry record.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) struct EndpointConfigTelemetrySource {
    /// Configuration layer that supplied the base URL (e.g. "default", "env", "config_toml").
    pub(crate) base_url_source: &'static str,
    /// True when no explicit base URL override was found anywhere.
    pub(crate) base_url_is_default: bool,
}
impl EndpointConfigTelemetrySource {
    /// Builds a source record from its two components.
    pub(crate) const fn new(base_url_source: &'static str, base_url_is_default: bool) -> Self {
        Self {
            base_url_source,
            base_url_is_default,
        }
    }

    /// Combines this source with a resolved base URL into full endpoint telemetry.
    pub(crate) fn classify(self, base_url: &str) -> EndpointConfigTelemetry {
        let (base_url_origin, host_class) = classify_base_url(base_url);
        let Self {
            base_url_source,
            base_url_is_default,
        } = self;
        EndpointConfigTelemetry {
            base_url_origin,
            host_class,
            base_url_source,
            base_url_is_default,
        }
    }

    /// Resolves the source for a provider whose builtin id is known.
    pub(crate) fn for_provider(
        provider_id: &str,
        provider: &ModelProviderInfo,
    ) -> EndpointConfigTelemetrySource {
        endpoint_source_from_provider_defaults(provider_id, provider)
    }

    /// Resolves the source for a provider without a known id; any explicit base URL
    /// is treated as config-sourced.
    pub(crate) fn for_provider_without_id(provider: &ModelProviderInfo) -> Self {
        if provider.base_url.is_none() {
            Self::new(BASE_URL_SOURCE_DEFAULT, true)
        } else {
            Self::new(BASE_URL_SOURCE_CONFIG_TOML, false)
        }
    }
}
/// Coarse, non-identifying description of the endpoint a request targets.
///
/// All fields are drawn from fixed vocabularies so raw custom URLs are never logged.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) struct EndpointConfigTelemetry {
    /// Known origin bucket ("chatgpt.com", "api.openai.com", "openrouter.ai", or "custom").
    pub(crate) base_url_origin: &'static str,
    /// Host classification bucket (e.g. "openai_api", "known_third_party", "custom_unknown").
    pub(crate) host_class: &'static str,
    /// Configuration layer that supplied the base URL.
    pub(crate) base_url_source: &'static str,
    /// True when no explicit base URL override was configured.
    pub(crate) base_url_is_default: bool,
}
impl Default for EndpointConfigTelemetry {
    /// Conservative fallback used when no classification could be performed.
    fn default() -> Self {
        Self {
            base_url_origin: BASE_URL_ORIGIN_CUSTOM,
            host_class: HOST_CLASS_CUSTOM_UNKNOWN,
            // NOTE(review): source is tagged "default" while base_url_is_default is
            // false — presumably unknown telemetry should not claim a default URL;
            // confirm this pairing is intentional.
            base_url_source: BASE_URL_SOURCE_DEFAULT,
            base_url_is_default: false,
        }
    }
}
/// Resolves the endpoint telemetry source for the session's configured provider.
///
/// Thin wrapper over `resolve_endpoint_config_telemetry_source_for_provider`, using
/// the provider id and provider info already stored on `config`.
pub(crate) fn resolve_endpoint_config_telemetry_source(
    config: &Config,
    session_source: SessionSource,
) -> EndpointConfigTelemetrySource {
    resolve_endpoint_config_telemetry_source_for_provider(
        config,
        config.model_provider_id.as_str(),
        &config.model_provider,
        session_source,
    )
}
/// Resolves where a specific provider's base URL came from, for telemetry purposes.
///
/// Config-layer origins win over provider defaults: the dedicated `openai_base_url`
/// key is consulted first (OpenAI provider only), then the per-provider
/// `model_providers.<id>.base_url` key, and finally env/default heuristics.
pub(crate) fn resolve_endpoint_config_telemetry_source_for_provider(
    config: &Config,
    provider_id: &str,
    provider: &ModelProviderInfo,
    session_source: SessionSource,
) -> EndpointConfigTelemetrySource {
    let origins = config.config_layer_stack.origins();
    let provider_key = format!("model_providers.{provider_id}.base_url");
    let layer_origin = if provider_id == OPENAI_PROVIDER_ID {
        origins
            .get("openai_base_url")
            .or_else(|| origins.get(&provider_key))
    } else {
        origins.get(&provider_key)
    };
    match layer_origin {
        Some(origin) => endpoint_source_from_layer(&origin.name, session_source),
        None => endpoint_source_from_provider_defaults(provider_id, provider),
    }
}
fn endpoint_source_from_layer(
layer: &ConfigLayerSource,
session_source: SessionSource,
) -> EndpointConfigTelemetrySource {
let base_url_source = match layer {
ConfigLayerSource::SessionFlags => match session_source {
SessionSource::VSCode | SessionSource::Mcp => BASE_URL_SOURCE_IDE_SETTINGS,
SessionSource::Cli
| SessionSource::Exec
| SessionSource::SubAgent(_)
| SessionSource::Unknown => BASE_URL_SOURCE_SESSION_FLAGS,
},
ConfigLayerSource::User { .. } | ConfigLayerSource::Project { .. } => {
BASE_URL_SOURCE_CONFIG_TOML
}
ConfigLayerSource::System { .. }
| ConfigLayerSource::Mdm { .. }
| ConfigLayerSource::LegacyManagedConfigTomlFromFile { .. }
| ConfigLayerSource::LegacyManagedConfigTomlFromMdm => BASE_URL_SOURCE_MANAGED_CONFIG,
};
EndpointConfigTelemetrySource::new(base_url_source, false)
}
/// Derives the telemetry source for a provider from environment overrides and
/// built-in defaults, used when no config-layer origin claimed the base URL.
///
/// Precedence: a relevant env override (`OPENAI_BASE_URL` for OpenAI,
/// `CODEX_OSS_BASE_URL`/`CODEX_OSS_PORT` for the bundled OSS providers) wins,
/// then the provider's built-in default, then an explicit configured value.
fn endpoint_source_from_provider_defaults(
    provider_id: &str,
    provider: &ModelProviderInfo,
) -> EndpointConfigTelemetrySource {
    let env_source = match provider_id {
        "openai" => env_var_present("OPENAI_BASE_URL"),
        OLLAMA_OSS_PROVIDER_ID | LMSTUDIO_OSS_PROVIDER_ID => {
            env_var_present("CODEX_OSS_BASE_URL") || env_var_present("CODEX_OSS_PORT")
        }
        _ => false,
    };
    if env_source {
        return EndpointConfigTelemetrySource::new(BASE_URL_SOURCE_ENV, false);
    }
    // The bundled OSS providers always carry a synthesized base URL, so their URL is
    // considered "default" unconditionally; every other provider (including OpenAI)
    // is default only when no explicit base URL is configured. The original wrote
    // the "openai" and fallback arms separately with identical bodies.
    let base_url_is_default = matches!(
        provider_id,
        OLLAMA_OSS_PROVIDER_ID | LMSTUDIO_OSS_PROVIDER_ID
    ) || provider.base_url.is_none();
    if base_url_is_default {
        EndpointConfigTelemetrySource::new(BASE_URL_SOURCE_DEFAULT, true)
    } else {
        EndpointConfigTelemetrySource::new(BASE_URL_SOURCE_CONFIG_TOML, false)
    }
}
/// True when the named environment variable is set to a non-blank value.
fn env_var_present(name: &str) -> bool {
    match std::env::var(name) {
        Ok(value) => !value.trim().is_empty(),
        Err(_) => false,
    }
}
/// Buckets a base URL into a coarse (origin, host class) pair without ever
/// recording the raw URL value.
fn classify_base_url(base_url: &str) -> (&'static str, &'static str) {
    let parsed = Url::parse(base_url).ok();
    let host = parsed
        .as_ref()
        .and_then(|url| url.host_str())
        .map(str::to_ascii_lowercase);
    match (parsed.as_ref(), host.as_deref()) {
        (Some(url), Some("chatgpt.com" | "chat.openai.com")) => {
            // Only the Codex backend path counts as the real ChatGPT endpoint.
            if is_chatgpt_codex_path(url.path()) {
                (BASE_URL_ORIGIN_CHATGPT, HOST_CLASS_OPENAI_CHATGPT)
            } else {
                (BASE_URL_ORIGIN_CHATGPT, HOST_CLASS_CUSTOM_UNKNOWN)
            }
        }
        (_, Some(host)) if host == BASE_URL_ORIGIN_OPENAI_API => {
            (BASE_URL_ORIGIN_OPENAI_API, HOST_CLASS_OPENAI_API)
        }
        (_, Some(host))
            if host == BASE_URL_ORIGIN_OPENROUTER || host.ends_with(".openrouter.ai") =>
        {
            (BASE_URL_ORIGIN_OPENROUTER, HOST_CLASS_KNOWN_THIRD_PARTY)
        }
        // Unparseable URLs, host-less URLs, and unrecognized hosts all collapse
        // into the same opaque bucket.
        _ => (BASE_URL_ORIGIN_CUSTOM, HOST_CLASS_CUSTOM_UNKNOWN),
    }
}
/// True for the ChatGPT Codex backend path itself or any path beneath it.
fn is_chatgpt_codex_path(path: &str) -> bool {
    match path.strip_prefix("/backend-api/codex") {
        // Exact match, or a proper sub-path (rejects e.g. "/backend-api/codexx").
        Some(rest) => rest.is_empty() || rest.starts_with('/'),
        None => false,
    }
}
// Unit tests: verify that base-URL classification and source resolution stay stable
// and never surface raw custom URL values in telemetry.
#[cfg(test)]
mod tests {
    use super::EndpointConfigTelemetry;
    use super::EndpointConfigTelemetrySource;
    use super::endpoint_source_from_layer;
    use super::endpoint_source_from_provider_defaults;
    use crate::model_provider_info::WireApi;
    use crate::model_provider_info::create_oss_provider_with_base_url;
    use codex_app_server_protocol::ConfigLayerSource;
    use codex_protocol::protocol::SessionSource;
    use codex_utils_absolute_path::AbsolutePathBuf;
    use pretty_assertions::assert_eq;

    // Minimal provider fixture; only `base_url` varies across these tests.
    fn provider(base_url: Option<&str>) -> crate::ModelProviderInfo {
        crate::ModelProviderInfo {
            name: "test-provider".to_string(),
            base_url: base_url.map(str::to_string),
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: crate::WireApi::Responses,
            query_params: None,
            http_headers: None,
            env_http_headers: None,
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }

    #[test]
    fn endpoint_config_telemetry_classifies_known_hosts_without_logging_custom_values() {
        let source = EndpointConfigTelemetrySource::new("config_toml", false);
        // ChatGPT host with the Codex backend path classifies as the real ChatGPT endpoint.
        assert_eq!(
            source.classify("https://chatgpt.com/backend-api/codex"),
            EndpointConfigTelemetry {
                base_url_origin: "chatgpt.com",
                host_class: "openai_chatgpt",
                base_url_source: "config_toml",
                base_url_is_default: false,
            }
        );
        assert_eq!(
            source.classify("https://api.openai.com/v1"),
            EndpointConfigTelemetry {
                base_url_origin: "api.openai.com",
                host_class: "openai_api",
                base_url_source: "config_toml",
                base_url_is_default: false,
            }
        );
        assert_eq!(
            source.classify("https://openrouter.ai/api/v1"),
            EndpointConfigTelemetry {
                base_url_origin: "openrouter.ai",
                host_class: "known_third_party",
                base_url_source: "config_toml",
                base_url_is_default: false,
            }
        );
        // Custom hosts collapse to "custom"/"custom_unknown" — the query string
        // (including its secret) is never surfaced in telemetry.
        assert_eq!(
            source.classify("https://private.example.internal/v1?token=secret"),
            EndpointConfigTelemetry {
                base_url_origin: "custom",
                host_class: "custom_unknown",
                base_url_source: "config_toml",
                base_url_is_default: false,
            }
        );
        // ChatGPT host with a non-Codex path is NOT treated as the ChatGPT endpoint.
        assert_eq!(
            source.classify("https://chatgpt.com/api/codex"),
            EndpointConfigTelemetry {
                base_url_origin: "chatgpt.com",
                host_class: "custom_unknown",
                base_url_source: "config_toml",
                base_url_is_default: false,
            }
        );
    }

    #[test]
    fn endpoint_config_telemetry_source_maps_layers_and_defaults() {
        // IDE-driven session flags are tagged as "ide_settings".
        assert_eq!(
            endpoint_source_from_layer(&ConfigLayerSource::SessionFlags, SessionSource::VSCode),
            EndpointConfigTelemetrySource::new("ide_settings", false)
        );
        // Project-layer config maps to "config_toml".
        assert_eq!(
            endpoint_source_from_layer(
                &ConfigLayerSource::Project {
                    dot_codex_folder: AbsolutePathBuf::try_from(std::path::PathBuf::from(
                        "/tmp/project/.codex",
                    ))
                    .expect("absolute path"),
                },
                SessionSource::Cli,
            ),
            EndpointConfigTelemetrySource::new("config_toml", false)
        );
        // No explicit base URL for OpenAI means the built-in default is in effect.
        assert_eq!(
            endpoint_source_from_provider_defaults("openai", &provider(None)),
            EndpointConfigTelemetrySource::new("default", true)
        );
        // An explicit base URL on an unknown provider is attributed to config.toml.
        assert_eq!(
            endpoint_source_from_provider_defaults(
                "custom",
                &provider(Some("https://example.com/v1"))
            ),
            EndpointConfigTelemetrySource::new("config_toml", false)
        );
    }

    #[test]
    fn endpoint_config_telemetry_source_requires_explicit_provider_id_for_builtin_oss_defaults() {
        let provider =
            create_oss_provider_with_base_url("http://localhost:1234/v1", WireApi::Responses);
        // With the builtin OSS id, the synthesized base URL still counts as "default".
        assert_eq!(
            EndpointConfigTelemetrySource::for_provider("lmstudio", &provider),
            EndpointConfigTelemetrySource::new("default", true)
        );
        // Without the id, the same explicit base URL is attributed to config.toml.
        assert_eq!(
            EndpointConfigTelemetrySource::for_provider_without_id(&provider),
            EndpointConfigTelemetrySource::new("config_toml", false)
        );
    }
}

View File

@@ -31,6 +31,7 @@ pub mod connectors;
mod context_manager;
mod contextual_user_message;
pub mod custom_prompts;
mod endpoint_config_telemetry;
pub mod env;
mod environment_context;
pub mod error;
@@ -106,6 +107,7 @@ pub type CodexConversation = CodexThread;
pub use analytics_client::AnalyticsEventsClient;
pub use auth::AuthManager;
pub use auth::CodexAuth;
mod auth_env_telemetry;
pub mod default_client;
pub mod project_doc;
mod rollout;

View File

@@ -15,6 +15,7 @@ use http::header::HeaderValue;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fmt;
use std::time::Duration;
@@ -42,15 +43,6 @@ pub enum WireApi {
Responses,
}
impl fmt::Display for WireApi {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let value = match self {
Self::Responses => "responses",
};
f.write_str(value)
}
}
impl<'de> Deserialize<'de> for WireApi {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@@ -65,6 +57,15 @@ impl<'de> Deserialize<'de> for WireApi {
}
}
/// Human-readable wire API name, matching the serialized config value.
impl fmt::Display for WireApi {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let value = match self {
            // Only the Responses wire API is currently supported.
            Self::Responses => "responses",
        };
        f.write_str(value)
    }
}
/// Serializable representation of a provider definition.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema)]
#[schemars(deny_unknown_fields)]
@@ -125,6 +126,38 @@ pub struct ModelProviderInfo {
}
impl ModelProviderInfo {
pub(crate) fn telemetry_header_names(&self) -> Option<String> {
let mut names = BTreeSet::new();
if let Some(headers) = &self.http_headers {
for (name, value) in headers {
if let (Ok(name), Ok(_value)) =
(HeaderName::try_from(name), HeaderValue::try_from(value))
{
names.insert(name.as_str().to_string());
}
}
}
if let Some(env_headers) = &self.env_http_headers {
for (header, env_var) in env_headers {
if let Ok(value) = std::env::var(env_var)
&& !value.trim().is_empty()
&& let (Ok(name), Ok(_value)) =
(HeaderName::try_from(header), HeaderValue::try_from(value))
{
names.insert(name.as_str().to_string());
}
}
}
if names.is_empty() {
None
} else {
Some(names.into_iter().collect::<Vec<_>>().join(","))
}
}
fn build_header_map(&self) -> crate::error::Result<HeaderMap> {
let capacity = self.http_headers.as_ref().map_or(0, HashMap::len)
+ self.env_http_headers.as_ref().map_or(0, HashMap::len);
@@ -226,7 +259,6 @@ impl ModelProviderInfo {
.map(Duration::from_millis)
.unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
}
pub fn create_openai_provider(base_url: Option<String>) -> ModelProviderInfo {
ModelProviderInfo {
name: OPENAI_PROVIDER_NAME.into(),
@@ -277,14 +309,13 @@ pub fn built_in_model_providers(
openai_base_url: Option<String>,
) -> HashMap<String, ModelProviderInfo> {
use ModelProviderInfo as P;
let openai_provider = P::create_openai_provider(openai_base_url);
// We do not want to be in the business of adjucating which third-party
// providers are bundled with Codex CLI, so we only include the OpenAI and
// open source ("oss") providers by default. Users are encouraged to add to
// `model_providers` in config.toml to add their own providers.
[
(OPENAI_PROVIDER_ID, openai_provider),
("openai", P::create_openai_provider(openai_base_url)),
(
OLLAMA_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_OLLAMA_PORT, WireApi::Responses),

View File

@@ -1,6 +1,15 @@
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn openai_auth_provider_defaults_to_api_before_login() {
    // With no auth mode yet (pre-login), the OpenAI provider must resolve to the
    // public API endpoint rather than the ChatGPT backend.
    let provider = ModelProviderInfo::create_openai_provider(None);
    let api_provider = provider.to_api_provider(None).unwrap();
    assert_eq!(api_provider.base_url, "https://api.openai.com/v1");
}
#[test]
fn test_deserialize_ollama_model_provider_toml() {
let azure_provider_toml = r#"

View File

@@ -3,9 +3,12 @@ use crate::api_bridge::auth_provider_from_auth;
use crate::api_bridge::map_api_error;
use crate::auth::AuthManager;
use crate::auth::AuthMode;
use crate::auth::CodexAuth;
use crate::auth_env_telemetry::AuthEnvTelemetry;
use crate::auth_env_telemetry::collect_auth_env_telemetry;
use crate::config::Config;
use crate::default_client::build_reqwest_client;
use crate::default_client::residency_header_telemetry_for_provider_headers;
use crate::endpoint_config_telemetry::EndpointConfigTelemetrySource;
use crate::error::CodexErr;
use crate::error::Result as CoreResult;
use crate::model_provider_info::ModelProviderInfo;
@@ -26,7 +29,6 @@ use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelsResponse;
use http::HeaderMap;
use std::fmt;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
@@ -35,17 +37,26 @@ use tokio::sync::TryLockError;
use tokio::time::timeout;
use tracing::error;
use tracing::info;
use tracing::instrument;
const MODEL_CACHE_FILE: &str = "models_cache.json";
const DEFAULT_MODEL_CACHE_TTL: Duration = Duration::from_secs(300);
const MODELS_REFRESH_TIMEOUT: Duration = Duration::from_secs(5);
const MODELS_ENDPOINT: &str = "/models";
const OPENAI_PROVIDER_ID: &str = "openai";
/// Per-request telemetry captured for calls to the models endpoint.
#[derive(Clone)]
struct ModelsRequestTelemetry {
    /// Stringified auth mode, if any auth was available for the request.
    auth_mode: Option<String>,
    /// Whether an auth header was attached to the outgoing request.
    auth_header_attached: bool,
    /// Name of the attached auth header, when one was attached.
    auth_header_name: Option<&'static str>,
    /// Snapshot of auth-related environment variable presence.
    auth_env_telemetry: AuthEnvTelemetry,
    /// Whether a data-residency header was attached.
    residency_header_attached: bool,
    /// Value of the residency header, when attached and representable as a string.
    residency_header_value: Option<String>,
    /// Comma-joined names (never values) of custom provider headers.
    provider_header_names: Option<String>,
    /// Coarse origin bucket of the base URL (never the raw URL).
    base_url_origin: &'static str,
    /// Host classification bucket for the base URL.
    host_class: &'static str,
    /// Configuration layer that supplied the base URL.
    base_url_source: &'static str,
    /// True when no explicit base URL override was configured.
    base_url_is_default: bool,
}
impl RequestTelemetry for ModelsRequestTelemetry {
@@ -56,28 +67,44 @@ impl RequestTelemetry for ModelsRequestTelemetry {
error: Option<&TransportError>,
duration: Duration,
) {
let success = status.is_some_and(|code| code.is_success()) && error.is_none();
let error_message = error.map(telemetry_transport_error_message);
let response_debug = error
.map(extract_response_debug_context)
.unwrap_or_default();
let status = status.map(|status| status.as_u16());
let success = status.is_some_and(|code| (200..=299).contains(&code)) && error.is_none();
let success_str = if success { "true" } else { "false" };
tracing::event!(
target: "codex_otel.log_only",
tracing::Level::INFO,
event.name = "codex.api_request",
duration_ms = %duration.as_millis(),
http.response.status_code = status,
success = success,
success = success_str,
error.message = error_message.as_deref(),
attempt = attempt,
endpoint = MODELS_ENDPOINT,
auth.header_attached = self.auth_header_attached,
auth.header_name = self.auth_header_name,
auth.env_openai_api_key_present = self.auth_env_telemetry.openai_api_key_env_present,
auth.env_codex_api_key_present = self.auth_env_telemetry.codex_api_key_env_present,
auth.env_codex_api_key_enabled = self.auth_env_telemetry.codex_api_key_env_enabled,
auth.env_provider_key_name = self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth.env_provider_key_present = self.auth_env_telemetry.provider_env_key_present,
auth.env_refresh_token_url_override_present = self.auth_env_telemetry.refresh_token_url_override_present,
residency_header_attached = self.residency_header_attached,
residency_header_value = self.residency_header_value.as_deref(),
provider_header_names = self.provider_header_names.as_deref(),
base_url_origin = self.base_url_origin,
host_class = self.host_class,
base_url_source = self.base_url_source,
base_url_is_default = self.base_url_is_default,
auth.request_id = response_debug.request_id.as_deref(),
auth.cf_ray = response_debug.cf_ray.as_deref(),
auth.error = response_debug.auth_error.as_deref(),
auth.error_code = response_debug.auth_error_code.as_deref(),
error_body_class = response_debug.error_body_class,
safe_error_message = response_debug.safe_error_message,
auth.mode = self.auth_mode.as_deref(),
);
tracing::event!(
@@ -86,16 +113,31 @@ impl RequestTelemetry for ModelsRequestTelemetry {
event.name = "codex.api_request",
duration_ms = %duration.as_millis(),
http.response.status_code = status,
success = success,
success = success_str,
error.message = error_message.as_deref(),
attempt = attempt,
endpoint = MODELS_ENDPOINT,
auth.header_attached = self.auth_header_attached,
auth.header_name = self.auth_header_name,
auth.env_openai_api_key_present = self.auth_env_telemetry.openai_api_key_env_present,
auth.env_codex_api_key_present = self.auth_env_telemetry.codex_api_key_env_present,
auth.env_codex_api_key_enabled = self.auth_env_telemetry.codex_api_key_env_enabled,
auth.env_provider_key_name = self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth.env_provider_key_present = self.auth_env_telemetry.provider_env_key_present,
auth.env_refresh_token_url_override_present = self.auth_env_telemetry.refresh_token_url_override_present,
residency_header_attached = self.residency_header_attached,
residency_header_value = self.residency_header_value.as_deref(),
provider_header_names = self.provider_header_names.as_deref(),
base_url_origin = self.base_url_origin,
host_class = self.host_class,
base_url_source = self.base_url_source,
base_url_is_default = self.base_url_is_default,
auth.request_id = response_debug.request_id.as_deref(),
auth.cf_ray = response_debug.cf_ray.as_deref(),
auth.error = response_debug.auth_error.as_deref(),
auth.error_code = response_debug.auth_error_code.as_deref(),
error_body_class = response_debug.error_body_class,
safe_error_message = response_debug.safe_error_message,
auth.mode = self.auth_mode.as_deref(),
);
emit_feedback_request_tags(&FeedbackRequestTags {
@@ -103,17 +145,100 @@ impl RequestTelemetry for ModelsRequestTelemetry {
auth_header_attached: self.auth_header_attached,
auth_header_name: self.auth_header_name,
auth_mode: self.auth_mode.as_deref(),
auth_env_openai_api_key_present: self.auth_env_telemetry.openai_api_key_env_present,
auth_env_codex_api_key_present: self.auth_env_telemetry.codex_api_key_env_present,
auth_env_codex_api_key_enabled: self.auth_env_telemetry.codex_api_key_env_enabled,
auth_env_provider_key_name: self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth_env_provider_key_present: self.auth_env_telemetry.provider_env_key_present,
auth_env_refresh_token_url_override_present: self
.auth_env_telemetry
.refresh_token_url_override_present,
auth_retry_after_unauthorized: None,
auth_recovery_mode: None,
auth_recovery_phase: None,
auth_connection_reused: None,
provider_header_names: self.provider_header_names.as_deref(),
base_url_origin: self.base_url_origin,
host_class: self.host_class,
base_url_source: self.base_url_source,
base_url_is_default: self.base_url_is_default,
residency_header_attached: Some(self.residency_header_attached),
residency_header_value: self.residency_header_value.as_deref(),
auth_request_id: response_debug.request_id.as_deref(),
auth_cf_ray: response_debug.cf_ray.as_deref(),
auth_error: response_debug.auth_error.as_deref(),
auth_error_code: response_debug.auth_error_code.as_deref(),
error_body_class: response_debug.error_body_class,
safe_error_message: response_debug.safe_error_message,
geo_denial_detected: Some(response_debug.geo_denial_detected),
auth_recovery_followup_success: None,
auth_recovery_followup_status: None,
});
if status == Some(http::StatusCode::UNAUTHORIZED.as_u16())
&& response_debug.geo_denial_detected
{
tracing::event!(
target: "codex_otel.log_only",
tracing::Level::INFO,
event.name = "codex.geo_denial",
geo_denial_detected = true,
request_id = response_debug.request_id.as_deref(),
cf_ray = response_debug.cf_ray.as_deref(),
endpoint = MODELS_ENDPOINT,
auth.header_attached = self.auth_header_attached,
auth.header_name = self.auth_header_name,
auth.mode = self.auth_mode.as_deref(),
auth.env_openai_api_key_present = self.auth_env_telemetry.openai_api_key_env_present,
auth.env_codex_api_key_present = self.auth_env_telemetry.codex_api_key_env_present,
auth.env_codex_api_key_enabled = self.auth_env_telemetry.codex_api_key_env_enabled,
auth.env_provider_key_name = self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth.env_provider_key_present = self.auth_env_telemetry.provider_env_key_present,
auth.env_refresh_token_url_override_present = self.auth_env_telemetry.refresh_token_url_override_present,
residency_header_attached = self.residency_header_attached,
residency_header_value = self.residency_header_value.as_deref(),
provider_header_names = self.provider_header_names.as_deref(),
base_url_origin = self.base_url_origin,
host_class = self.host_class,
base_url_source = self.base_url_source,
base_url_is_default = self.base_url_is_default,
http_status = status,
auth.error = response_debug.auth_error.as_deref(),
auth.error_code = response_debug.auth_error_code.as_deref(),
error_body_class = response_debug.error_body_class.unwrap_or_default(),
safe_error_message = response_debug.safe_error_message,
);
tracing::event!(
target: "codex_otel.trace_safe",
tracing::Level::INFO,
event.name = "codex.geo_denial",
geo_denial_detected = true,
request_id = response_debug.request_id.as_deref(),
cf_ray = response_debug.cf_ray.as_deref(),
endpoint = MODELS_ENDPOINT,
auth.header_attached = self.auth_header_attached,
auth.header_name = self.auth_header_name,
auth.mode = self.auth_mode.as_deref(),
auth.env_openai_api_key_present = self.auth_env_telemetry.openai_api_key_env_present,
auth.env_codex_api_key_present = self.auth_env_telemetry.codex_api_key_env_present,
auth.env_codex_api_key_enabled = self.auth_env_telemetry.codex_api_key_env_enabled,
auth.env_provider_key_name = self.auth_env_telemetry.provider_env_key_name.as_deref(),
auth.env_provider_key_present = self.auth_env_telemetry.provider_env_key_present,
auth.env_refresh_token_url_override_present = self.auth_env_telemetry.refresh_token_url_override_present,
residency_header_attached = self.residency_header_attached,
residency_header_value = self.residency_header_value.as_deref(),
provider_header_names = self.provider_header_names.as_deref(),
base_url_origin = self.base_url_origin,
host_class = self.host_class,
base_url_source = self.base_url_source,
base_url_is_default = self.base_url_is_default,
http_status = status,
auth.error = response_debug.auth_error.as_deref(),
auth.error_code = response_debug.auth_error_code.as_deref(),
error_body_class = response_debug.error_body_class.unwrap_or_default(),
safe_error_message = response_debug.safe_error_message,
);
}
}
}
@@ -128,22 +253,6 @@ pub enum RefreshStrategy {
OnlineIfUncached,
}
impl RefreshStrategy {
    /// Stable string form of the strategy, used for display/telemetry.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Online => "online",
            Self::Offline => "offline",
            Self::OnlineIfUncached => "online_if_uncached",
        }
    }
}
/// Display delegates to the canonical `as_str` form.
impl fmt::Display for RefreshStrategy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
/// How the manager's base catalog is sourced for the lifetime of the process.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CatalogMode {
@@ -163,6 +272,8 @@ pub struct ModelsManager {
etag: RwLock<Option<String>>,
cache_manager: ModelsCacheManager,
provider: ModelProviderInfo,
endpoint_telemetry_source: EndpointConfigTelemetrySource,
auth_env_telemetry: AuthEnvTelemetry,
}
impl ModelsManager {
@@ -177,16 +288,16 @@ impl ModelsManager {
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
) -> Self {
let provider = ModelProviderInfo::create_openai_provider(None);
Self::new_with_provider(
codex_home,
auth_manager,
model_catalog,
collaboration_modes_config,
ModelProviderInfo::create_openai_provider(/* base_url */ None),
provider,
)
}
/// Construct a manager with an explicit provider used for remote model refreshes.
pub fn new_with_provider(
codex_home: PathBuf,
auth_manager: Arc<AuthManager>,
@@ -194,6 +305,26 @@ impl ModelsManager {
collaboration_modes_config: CollaborationModesConfig,
provider: ModelProviderInfo,
) -> Self {
Self::new_with_provider_and_endpoint_telemetry_source(
codex_home,
auth_manager,
model_catalog,
collaboration_modes_config,
provider.clone(),
EndpointConfigTelemetrySource::for_provider(OPENAI_PROVIDER_ID, &provider),
)
}
pub(crate) fn new_with_provider_and_endpoint_telemetry_source(
codex_home: PathBuf,
auth_manager: Arc<AuthManager>,
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
provider: ModelProviderInfo,
endpoint_telemetry_source: EndpointConfigTelemetrySource,
) -> Self {
let auth_env_telemetry =
collect_auth_env_telemetry(&provider, auth_manager.codex_api_key_env_enabled());
let cache_path = codex_home.join(MODEL_CACHE_FILE);
let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
let catalog_mode = if model_catalog.is_some() {
@@ -215,17 +346,14 @@ impl ModelsManager {
etag: RwLock::new(None),
cache_manager,
provider,
endpoint_telemetry_source,
auth_env_telemetry,
}
}
/// List all available models, refreshing according to the specified strategy.
///
/// Returns model presets sorted by priority and filtered by auth mode and visibility.
#[instrument(
level = "info",
skip(self),
fields(refresh_strategy = %refresh_strategy)
)]
pub async fn list_models(&self, refresh_strategy: RefreshStrategy) -> Vec<ModelPreset> {
if let Err(err) = self.refresh_available_models(refresh_strategy).await {
error!("failed to refresh available models: {err}");
@@ -261,14 +389,6 @@ impl ModelsManager {
///
/// If `model` is provided, returns it directly. Otherwise selects the default based on
/// auth mode and available models.
#[instrument(
level = "info",
skip(self, model),
fields(
model.provided = model.is_some(),
refresh_strategy = %refresh_strategy
)
)]
pub async fn get_default_model(
&self,
model: &Option<String>,
@@ -292,7 +412,6 @@ impl ModelsManager {
// todo(aibrahim): look if we can tighten it to pub(crate)
/// Look up model metadata, applying remote overrides and config adjustments.
#[instrument(level = "info", skip(self, config), fields(model = model))]
pub async fn get_model_info(&self, model: &str, config: &Config) -> ModelInfo {
let remote_models = self.get_remote_models().await;
Self::construct_model_info_from_candidates(model, &remote_models, config)
@@ -414,14 +533,26 @@ impl ModelsManager {
let _timer =
codex_otel::start_global_timer("codex.remote_models.fetch_update.duration_ms", &[]);
let auth = self.auth_manager.auth().await;
let auth_mode = auth.as_ref().map(CodexAuth::auth_mode);
let auth_mode = self.auth_manager.auth_mode();
let api_provider = self.provider.to_api_provider(auth_mode)?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.provider)?;
let transport = ReqwestTransport::new(build_reqwest_client());
let endpoint_telemetry = self
.endpoint_telemetry_source
.classify(api_provider.base_url.as_str());
let residency = residency_header_telemetry_for_provider_headers(&api_provider.headers);
let request_telemetry: Arc<dyn RequestTelemetry> = Arc::new(ModelsRequestTelemetry {
auth_mode: auth_mode.map(|mode| TelemetryAuthMode::from(mode).to_string()),
auth_header_attached: api_auth.auth_header_attached(),
auth_header_name: api_auth.auth_header_name(),
auth_env_telemetry: self.auth_env_telemetry.clone(),
residency_header_attached: residency.attached,
residency_header_value: residency.value,
provider_header_names: self.provider.telemetry_header_names(),
base_url_origin: endpoint_telemetry.base_url_origin,
host_class: endpoint_telemetry.host_class,
base_url_source: endpoint_telemetry.base_url_source,
base_url_is_default: endpoint_telemetry.base_url_is_default,
});
let client = ModelsClient::new(transport, api_provider, api_auth)
.with_telemetry(Some(request_telemetry));
@@ -520,13 +651,28 @@ impl ModelsManager {
auth_manager: Arc<AuthManager>,
provider: ModelProviderInfo,
) -> Self {
Self::new_with_provider(
codex_home,
let cache_path = codex_home.join(MODEL_CACHE_FILE);
let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
let auth_env_telemetry =
collect_auth_env_telemetry(&provider, auth_manager.codex_api_key_env_enabled());
Self {
remote_models: RwLock::new(
Self::load_remote_models_from_file()
.unwrap_or_else(|err| panic!("failed to load bundled models.json: {err}")),
),
catalog_mode: CatalogMode::Default,
collaboration_modes_config: CollaborationModesConfig::default(),
auth_manager,
None,
CollaborationModesConfig::default(),
etag: RwLock::new(None),
cache_manager,
endpoint_telemetry_source: if provider.is_openai() {
EndpointConfigTelemetrySource::for_provider(OPENAI_PROVIDER_ID, &provider)
} else {
EndpointConfigTelemetrySource::for_provider_without_id(&provider)
},
auth_env_telemetry,
provider,
)
}
}
/// Get model identifier without consulting remote state or cache.

View File

@@ -7,6 +7,11 @@ const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
const CF_RAY_HEADER: &str = "cf-ray";
const AUTH_ERROR_HEADER: &str = "x-openai-authorization-error";
const X_ERROR_JSON_HEADER: &str = "x-error-json";
const WORKSPACE_NOT_AUTHORIZED_IN_REGION_MESSAGE: &str =
"Workspace is not authorized in this region.";
pub(crate) const WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS: &str =
"workspace_not_authorized_in_region";
const MAX_ERROR_BODY_BYTES: usize = 1000;
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub(crate) struct ResponseDebugContext {
@@ -14,15 +19,15 @@ pub(crate) struct ResponseDebugContext {
pub(crate) cf_ray: Option<String>,
pub(crate) auth_error: Option<String>,
pub(crate) auth_error_code: Option<String>,
pub(crate) safe_error_message: Option<&'static str>,
pub(crate) error_body_class: Option<&'static str>,
pub(crate) geo_denial_detected: bool,
}
pub(crate) fn extract_response_debug_context(transport: &TransportError) -> ResponseDebugContext {
let mut context = ResponseDebugContext::default();
let TransportError::Http {
headers, body: _, ..
} = transport
else {
let TransportError::Http { headers, body, .. } = transport else {
return context;
};
@@ -49,6 +54,14 @@ pub(crate) fn extract_response_debug_context(transport: &TransportError) -> Resp
.and_then(serde_json::Value::as_str)
.map(str::to_string)
});
let error_body = extract_error_body(body.as_deref());
context.safe_error_message = error_body
.as_deref()
.and_then(allowlisted_error_body_message);
context.error_body_class = error_body.as_deref().and_then(classify_error_body_message);
context.geo_denial_detected = context.error_body_class
== Some(WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS)
|| context.auth_error_code.as_deref() == Some(WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS);
context
}
@@ -87,9 +100,75 @@ pub(crate) fn telemetry_api_error_message(error: &ApiError) -> String {
}
}
/// Derive a telemetry-safe error string from an HTTP response body.
///
/// Prefers the structured JSON `error.message` field when the body parses;
/// otherwise falls back to the raw body, trimmed and capped at
/// `MAX_ERROR_BODY_BYTES`. Returns `None` when there is no body or the body
/// is empty after trimming.
fn extract_error_body(body: Option<&str>) -> Option<String> {
    let raw = body?;
    extract_error_message(raw).or_else(|| {
        let trimmed = raw.trim();
        if trimmed.is_empty() {
            None
        } else {
            Some(truncate_with_ellipsis(trimmed, MAX_ERROR_BODY_BYTES))
        }
    })
}
/// Pull the `error.message` string out of a JSON error body.
///
/// Returns `None` when the body is not valid JSON, the field is missing or
/// not a string, or the message is empty after trimming.
fn extract_error_message(body: &str) -> Option<String> {
    let parsed: serde_json::Value = serde_json::from_str(body).ok()?;
    let message = parsed
        .pointer("/error/message")
        .and_then(serde_json::Value::as_str)
        .map(str::trim)?;
    (!message.is_empty()).then(|| message.to_string())
}
/// Map a known error-body message to its stable telemetry class identifier.
///
/// Only the exact geo-denial message is recognized; everything else yields
/// `None` so unclassified bodies never produce a class tag.
fn classify_error_body_message(message: &str) -> Option<&'static str> {
    (message == WORKSPACE_NOT_AUTHORIZED_IN_REGION_MESSAGE)
        .then_some(WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS)
}
/// Return the message itself only when it is on the telemetry allowlist.
///
/// Gating on an exact allowlisted constant ensures no free-form response
/// text is ever forwarded as a `safe_error_message`.
fn allowlisted_error_body_message(message: &str) -> Option<&'static str> {
    (message == WORKSPACE_NOT_AUTHORIZED_IN_REGION_MESSAGE)
        .then_some(WORKSPACE_NOT_AUTHORIZED_IN_REGION_MESSAGE)
}
/// Truncate `input` to at most `max_bytes` bytes, appending `"..."` when
/// content was dropped.
///
/// Truncation always happens on `char` boundaries so the result is valid
/// UTF-8. Fix over the previous version: when `max_bytes` is smaller than
/// the ellipsis itself, the old code still returned `"..."` (3 bytes) and
/// so exceeded the budget; now the output never exceeds `max_bytes` — the
/// ellipsis is omitted when it cannot fit. Behavior is unchanged for
/// `max_bytes >= 3` (the only call site uses `MAX_ERROR_BODY_BYTES`).
fn truncate_with_ellipsis(input: &str, max_bytes: usize) -> String {
    if input.len() <= max_bytes {
        return input.to_string();
    }
    const ELLIPSIS: &str = "...";
    // Content budget: reserve room for the ellipsis when it fits at all;
    // otherwise spend the whole budget on content.
    let keep = if max_bytes >= ELLIPSIS.len() {
        max_bytes - ELLIPSIS.len()
    } else {
        max_bytes
    };
    // Largest char-boundary prefix that fits within `keep` bytes.
    let mut cut = 0usize;
    for (idx, ch) in input.char_indices() {
        let end = idx + ch.len_utf8();
        if end > keep {
            break;
        }
        cut = end;
    }
    let mut truncated = String::with_capacity(cut + ELLIPSIS.len());
    truncated.push_str(&input[..cut]);
    if max_bytes >= ELLIPSIS.len() {
        truncated.push_str(ELLIPSIS);
    }
    truncated
}
#[cfg(test)]
mod tests {
use super::ResponseDebugContext;
use super::WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS;
use super::extract_response_debug_context;
use super::telemetry_api_error_message;
use super::telemetry_transport_error_message;
@@ -101,33 +180,69 @@ mod tests {
use pretty_assertions::assert_eq;
#[test]
fn extract_response_debug_context_decodes_identity_headers() {
fn extract_response_debug_context_decodes_geo_denial_details() {
let mut headers = HeaderMap::new();
headers.insert("x-oai-request-id", HeaderValue::from_static("req-auth"));
headers.insert("cf-ray", HeaderValue::from_static("ray-auth"));
headers.insert(
"x-openai-authorization-error",
HeaderValue::from_static("missing_authorization_header"),
);
headers.insert("x-oai-request-id", HeaderValue::from_static("req-geo"));
headers.insert("cf-ray", HeaderValue::from_static("ray-geo"));
headers.insert(
"x-error-json",
HeaderValue::from_static("eyJlcnJvciI6eyJjb2RlIjoidG9rZW5fZXhwaXJlZCJ9fQ=="),
HeaderValue::from_static(
"eyJlcnJvciI6eyJjb2RlIjoid29ya3NwYWNlX25vdF9hdXRob3JpemVkX2luX3JlZ2lvbiJ9fQ==",
),
);
let context = extract_response_debug_context(&TransportError::Http {
status: StatusCode::UNAUTHORIZED,
url: Some("https://chatgpt.com/backend-api/codex/models".to_string()),
url: Some("https://chatgpt.com/backend-api/codex/responses".to_string()),
headers: Some(headers),
body: Some(r#"{"error":{"message":"plain text error"},"status":401}"#.to_string()),
body: Some(
r#"{"error":{"message":"Workspace is not authorized in this region."},"status":401}"#
.to_string(),
),
});
assert_eq!(
context,
ResponseDebugContext {
request_id: Some("req-auth".to_string()),
cf_ray: Some("ray-auth".to_string()),
auth_error: Some("missing_authorization_header".to_string()),
auth_error_code: Some("token_expired".to_string()),
request_id: Some("req-geo".to_string()),
cf_ray: Some("ray-geo".to_string()),
auth_error: None,
auth_error_code: Some("workspace_not_authorized_in_region".to_string()),
safe_error_message: Some("Workspace is not authorized in this region."),
error_body_class: Some(WORKSPACE_NOT_AUTHORIZED_IN_REGION_CLASS),
geo_denial_detected: true,
}
);
}
#[test]
fn extract_response_debug_context_detects_geo_denial_from_error_code_without_body_message() {
    // Only the x-error-json header (base64 of
    // {"error":{"code":"workspace_not_authorized_in_region"}}) carries the
    // geo-denial signal here; the response body is intentionally empty.
    let mut response_headers = HeaderMap::new();
    response_headers.insert("x-oai-request-id", HeaderValue::from_static("req-geo-code"));
    response_headers.insert(
        "x-error-json",
        HeaderValue::from_static(
            "eyJlcnJvciI6eyJjb2RlIjoid29ya3NwYWNlX25vdF9hdXRob3JpemVkX2luX3JlZ2lvbiJ9fQ==",
        ),
    );
    let transport_error = TransportError::Http {
        status: StatusCode::UNAUTHORIZED,
        url: Some("https://chatgpt.com/backend-api/codex/responses".to_string()),
        headers: Some(response_headers),
        body: Some(String::new()),
    };

    let expected = ResponseDebugContext {
        request_id: Some("req-geo-code".to_string()),
        cf_ray: None,
        auth_error: None,
        auth_error_code: Some("workspace_not_authorized_in_region".to_string()),
        safe_error_message: None,
        error_body_class: None,
        geo_denial_detected: true,
    };
    assert_eq!(extract_response_debug_context(&transport_error), expected);
}

View File

@@ -1,7 +1,6 @@
use crate::AuthManager;
use crate::CodexAuth;
use crate::ModelProviderInfo;
use crate::OPENAI_PROVIDER_ID;
use crate::agent::AgentControl;
use crate::codex::Codex;
use crate::codex::CodexSpawnArgs;
@@ -9,6 +8,7 @@ use crate::codex::CodexSpawnOk;
use crate::codex::INITIAL_SUBMIT_ID;
use crate::codex_thread::CodexThread;
use crate::config::Config;
use crate::endpoint_config_telemetry::resolve_endpoint_config_telemetry_source_for_provider;
use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::file_watcher::FileWatcher;
@@ -47,6 +47,8 @@ use tokio::sync::RwLock;
use tokio::sync::broadcast;
use tracing::warn;
use crate::model_provider_info::OPENAI_PROVIDER_ID;
const THREAD_CREATED_CHANNEL_CAPACITY: usize = 1024;
/// Test-only override for enabling thread-manager behaviors used by integration
/// tests.
@@ -173,7 +175,14 @@ impl ThreadManager {
.model_providers
.get(OPENAI_PROVIDER_ID)
.cloned()
.unwrap_or_else(|| ModelProviderInfo::create_openai_provider(/* base_url */ None));
.unwrap_or_else(|| ModelProviderInfo::create_openai_provider(None));
let openai_endpoint_telemetry_source =
resolve_endpoint_config_telemetry_source_for_provider(
config,
OPENAI_PROVIDER_ID,
&openai_models_provider,
session_source.clone(),
);
let (thread_created_tx, _) = broadcast::channel(THREAD_CREATED_CHANNEL_CAPACITY);
let plugins_manager = Arc::new(PluginsManager::new(codex_home.clone()));
let mcp_manager = Arc::new(McpManager::new(Arc::clone(&plugins_manager)));
@@ -187,13 +196,16 @@ impl ThreadManager {
state: Arc::new(ThreadManagerState {
threads: Arc::new(RwLock::new(HashMap::new())),
thread_created_tx,
models_manager: Arc::new(ModelsManager::new_with_provider(
codex_home,
auth_manager.clone(),
config.model_catalog.clone(),
collaboration_modes_config,
openai_models_provider,
)),
models_manager: Arc::new(
ModelsManager::new_with_provider_and_endpoint_telemetry_source(
codex_home,
auth_manager.clone(),
config.model_catalog.clone(),
collaboration_modes_config,
openai_models_provider,
openai_endpoint_telemetry_source,
),
),
skills_manager,
plugins_manager,
mcp_manager,

View File

@@ -59,7 +59,7 @@ fn function_payload(args: serde_json::Value) -> ToolPayload {
fn thread_manager() -> ThreadManager {
ThreadManager::with_models_provider_for_tests(
CodexAuth::from_api_key("dummy"),
built_in_model_providers(/* openai_base_url */ None)["openai"].clone(),
built_in_model_providers(None)["openai"].clone(),
)
}
@@ -164,7 +164,7 @@ async fn spawn_agent_uses_explorer_role_and_preserves_approval_policy() {
let manager = thread_manager();
session.services.agent_control = manager.agent_control();
let mut config = (*turn.config).clone();
let provider = built_in_model_providers(/* openai_base_url */ None)["ollama"].clone();
let provider = built_in_model_providers(None)["ollama"].clone();
config.model_provider_id = "ollama".to_string();
config.model_provider = provider.clone();
config

View File

@@ -42,14 +42,30 @@ pub(crate) struct FeedbackRequestTags<'a> {
pub auth_header_attached: bool,
pub auth_header_name: Option<&'a str>,
pub auth_mode: Option<&'a str>,
pub auth_env_openai_api_key_present: bool,
pub auth_env_codex_api_key_present: bool,
pub auth_env_codex_api_key_enabled: bool,
pub auth_env_provider_key_name: Option<&'a str>,
pub auth_env_provider_key_present: Option<bool>,
pub auth_env_refresh_token_url_override_present: bool,
pub auth_retry_after_unauthorized: Option<bool>,
pub auth_recovery_mode: Option<&'a str>,
pub auth_recovery_phase: Option<&'a str>,
pub auth_connection_reused: Option<bool>,
pub provider_header_names: Option<&'a str>,
pub base_url_origin: &'a str,
pub host_class: &'a str,
pub base_url_source: &'a str,
pub base_url_is_default: bool,
pub residency_header_attached: Option<bool>,
pub residency_header_value: Option<&'a str>,
pub auth_request_id: Option<&'a str>,
pub auth_cf_ray: Option<&'a str>,
pub auth_error: Option<&'a str>,
pub auth_error_code: Option<&'a str>,
pub error_body_class: Option<&'a str>,
pub safe_error_message: Option<&'a str>,
pub geo_denial_detected: Option<bool>,
pub auth_recovery_followup_success: Option<bool>,
pub auth_recovery_followup_status: Option<u16>,
}
@@ -80,6 +96,10 @@ impl<'a> Auth401FeedbackSnapshot<'a> {
pub(crate) fn emit_feedback_request_tags(tags: &FeedbackRequestTags<'_>) {
let auth_header_name = tags.auth_header_name.unwrap_or("");
let auth_mode = tags.auth_mode.unwrap_or("");
let auth_env_provider_key_name = tags.auth_env_provider_key_name.unwrap_or("");
let auth_env_provider_key_present = tags
.auth_env_provider_key_present
.map_or_else(String::new, |value| value.to_string());
let auth_retry_after_unauthorized = tags
.auth_retry_after_unauthorized
.map_or_else(String::new, |value| value.to_string());
@@ -88,10 +108,20 @@ pub(crate) fn emit_feedback_request_tags(tags: &FeedbackRequestTags<'_>) {
let auth_connection_reused = tags
.auth_connection_reused
.map_or_else(String::new, |value| value.to_string());
let provider_header_names = tags.provider_header_names.unwrap_or("");
let residency_header_attached = tags
.residency_header_attached
.map_or_else(String::new, |value| value.to_string());
let residency_header_value = tags.residency_header_value.unwrap_or("");
let auth_request_id = tags.auth_request_id.unwrap_or("");
let auth_cf_ray = tags.auth_cf_ray.unwrap_or("");
let auth_error = tags.auth_error.unwrap_or("");
let auth_error_code = tags.auth_error_code.unwrap_or("");
let error_body_class = tags.error_body_class.unwrap_or("");
let safe_error_message = tags.safe_error_message.unwrap_or("");
let geo_denial_detected = tags
.geo_denial_detected
.map_or_else(String::new, |value| value.to_string());
let auth_recovery_followup_success = tags
.auth_recovery_followup_success
.map_or_else(String::new, |value| value.to_string());
@@ -103,14 +133,31 @@ pub(crate) fn emit_feedback_request_tags(tags: &FeedbackRequestTags<'_>) {
auth_header_attached = tags.auth_header_attached,
auth_header_name = auth_header_name,
auth_mode = auth_mode,
auth_env_openai_api_key_present = tags.auth_env_openai_api_key_present,
auth_env_codex_api_key_present = tags.auth_env_codex_api_key_present,
auth_env_codex_api_key_enabled = tags.auth_env_codex_api_key_enabled,
auth_env_provider_key_name = auth_env_provider_key_name,
auth_env_provider_key_present = auth_env_provider_key_present,
auth_env_refresh_token_url_override_present =
tags.auth_env_refresh_token_url_override_present,
auth_retry_after_unauthorized = auth_retry_after_unauthorized,
auth_recovery_mode = auth_recovery_mode,
auth_recovery_phase = auth_recovery_phase,
auth_connection_reused = auth_connection_reused,
provider_header_names = provider_header_names,
base_url_origin = tags.base_url_origin,
host_class = tags.host_class,
base_url_source = tags.base_url_source,
base_url_is_default = tags.base_url_is_default,
residency_header_attached = residency_header_attached,
residency_header_value = residency_header_value,
auth_request_id = auth_request_id,
auth_cf_ray = auth_cf_ray,
auth_error = auth_error,
auth_error_code = auth_error_code,
error_body_class = error_body_class,
safe_error_message = safe_error_message,
geo_denial_detected = geo_denial_detected,
auth_recovery_followup_success = auth_recovery_followup_success,
auth_recovery_followup_status = auth_recovery_followup_status
);

View File

@@ -96,14 +96,30 @@ fn emit_feedback_request_tags_records_sentry_feedback_fields() {
auth_header_attached: true,
auth_header_name: Some("authorization"),
auth_mode: Some("chatgpt"),
auth_env_openai_api_key_present: true,
auth_env_codex_api_key_present: false,
auth_env_codex_api_key_enabled: true,
auth_env_provider_key_name: Some("OPENAI_API_KEY"),
auth_env_provider_key_present: Some(true),
auth_env_refresh_token_url_override_present: true,
auth_retry_after_unauthorized: Some(false),
auth_recovery_mode: Some("managed"),
auth_recovery_phase: Some("refresh_token"),
auth_connection_reused: Some(true),
provider_header_names: Some("openai-project"),
base_url_origin: "chatgpt.com",
host_class: "openai_chatgpt",
base_url_source: "default",
base_url_is_default: true,
residency_header_attached: Some(true),
residency_header_value: Some("us"),
auth_request_id: Some("req-123"),
auth_cf_ray: Some("ray-123"),
auth_error: Some("missing_authorization_header"),
auth_error_code: Some("token_expired"),
error_body_class: Some("workspace_not_authorized_in_region"),
safe_error_message: Some("Workspace is not authorized in this region."),
geo_denial_detected: Some(true),
auth_recovery_followup_success: Some(true),
auth_recovery_followup_status: Some(200),
});
@@ -125,10 +141,23 @@ fn emit_feedback_request_tags_records_sentry_feedback_fields() {
tags.get("auth_request_id").map(String::as_str),
Some("\"req-123\"")
);
assert_eq!(
tags.get("auth_env_provider_key_name").map(String::as_str),
Some("\"OPENAI_API_KEY\"")
);
assert_eq!(
tags.get("auth_env_provider_key_present")
.map(String::as_str),
Some("\"true\"")
);
assert_eq!(
tags.get("auth_error_code").map(String::as_str),
Some("\"token_expired\"")
);
assert_eq!(
tags.get("geo_denial_detected").map(String::as_str),
Some("\"true\"")
);
assert_eq!(
tags.get("auth_recovery_followup_success")
.map(String::as_str),
@@ -219,54 +248,6 @@ fn emit_feedback_auth_recovery_tags_clears_stale_401_fields() {
);
}
#[test]
fn emit_feedback_request_tags_preserves_latest_auth_fields_after_unauthorized() {
let tags = Arc::new(Mutex::new(BTreeMap::new()));
let _guard = tracing_subscriber::registry()
.with(TagCollectorLayer { tags: tags.clone() })
.set_default();
emit_feedback_request_tags(&FeedbackRequestTags {
endpoint: "/responses",
auth_header_attached: true,
auth_header_name: Some("authorization"),
auth_mode: Some("chatgpt"),
auth_retry_after_unauthorized: Some(true),
auth_recovery_mode: Some("managed"),
auth_recovery_phase: Some("refresh_token"),
auth_connection_reused: None,
auth_request_id: Some("req-123"),
auth_cf_ray: Some("ray-123"),
auth_error: Some("missing_authorization_header"),
auth_error_code: Some("token_expired"),
auth_recovery_followup_success: Some(false),
auth_recovery_followup_status: Some(401),
});
let tags = tags.lock().unwrap().clone();
assert_eq!(
tags.get("auth_request_id").map(String::as_str),
Some("\"req-123\"")
);
assert_eq!(
tags.get("auth_cf_ray").map(String::as_str),
Some("\"ray-123\"")
);
assert_eq!(
tags.get("auth_error").map(String::as_str),
Some("\"missing_authorization_header\"")
);
assert_eq!(
tags.get("auth_error_code").map(String::as_str),
Some("\"token_expired\"")
);
assert_eq!(
tags.get("auth_recovery_followup_success")
.map(String::as_str),
Some("\"false\"")
);
}
#[test]
fn emit_feedback_request_tags_clears_stale_latest_auth_fields() {
let tags = Arc::new(Mutex::new(BTreeMap::new()));
@@ -279,14 +260,30 @@ fn emit_feedback_request_tags_clears_stale_latest_auth_fields() {
auth_header_attached: true,
auth_header_name: Some("authorization"),
auth_mode: Some("chatgpt"),
auth_retry_after_unauthorized: Some(false),
auth_env_openai_api_key_present: true,
auth_env_codex_api_key_present: true,
auth_env_codex_api_key_enabled: true,
auth_env_provider_key_name: Some("OPENAI_API_KEY"),
auth_env_provider_key_present: Some(true),
auth_env_refresh_token_url_override_present: true,
auth_retry_after_unauthorized: Some(true),
auth_recovery_mode: Some("managed"),
auth_recovery_phase: Some("refresh_token"),
auth_connection_reused: Some(true),
provider_header_names: Some("openai-project"),
base_url_origin: "chatgpt.com",
host_class: "openai_chatgpt",
base_url_source: "default",
base_url_is_default: true,
residency_header_attached: Some(true),
residency_header_value: Some("us"),
auth_request_id: Some("req-123"),
auth_cf_ray: Some("ray-123"),
auth_error: Some("missing_authorization_header"),
auth_error_code: Some("token_expired"),
error_body_class: Some("workspace_not_authorized_in_region"),
safe_error_message: Some("Workspace is not authorized in this region."),
geo_denial_detected: Some(true),
auth_recovery_followup_success: Some(true),
auth_recovery_followup_status: Some(200),
});
@@ -295,14 +292,30 @@ fn emit_feedback_request_tags_clears_stale_latest_auth_fields() {
auth_header_attached: true,
auth_header_name: None,
auth_mode: None,
auth_env_openai_api_key_present: false,
auth_env_codex_api_key_present: false,
auth_env_codex_api_key_enabled: false,
auth_env_provider_key_name: None,
auth_env_provider_key_present: None,
auth_env_refresh_token_url_override_present: false,
auth_retry_after_unauthorized: None,
auth_recovery_mode: None,
auth_recovery_phase: None,
auth_connection_reused: None,
provider_header_names: None,
base_url_origin: "chatgpt.com",
host_class: "openai_chatgpt",
base_url_source: "default",
base_url_is_default: true,
residency_header_attached: None,
residency_header_value: None,
auth_request_id: None,
auth_cf_ray: None,
auth_error: None,
auth_error_code: None,
error_body_class: None,
safe_error_message: None,
geo_denial_detected: None,
auth_recovery_followup_success: None,
auth_recovery_followup_status: None,
});
@@ -313,6 +326,27 @@ fn emit_feedback_request_tags_clears_stale_latest_auth_fields() {
Some("\"\"")
);
assert_eq!(tags.get("auth_mode").map(String::as_str), Some("\"\""));
assert_eq!(
tags.get("provider_header_names").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("auth_env_provider_key_name").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("auth_env_provider_key_present")
.map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("residency_header_attached").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("residency_header_value").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("auth_request_id").map(String::as_str),
Some("\"\"")
@@ -323,6 +357,18 @@ fn emit_feedback_request_tags_clears_stale_latest_auth_fields() {
tags.get("auth_error_code").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("error_body_class").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("safe_error_message").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("geo_denial_detected").map(String::as_str),
Some("\"\"")
);
assert_eq!(
tags.get("auth_recovery_followup_success")
.map(String::as_str),

View File

@@ -226,7 +226,7 @@ impl TestCodexBuilder {
) -> anyhow::Result<(Config, Arc<TempDir>)> {
let model_provider = ModelProviderInfo {
base_url: Some(base_url),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let cwd = Arc::new(TempDir::new()?);
let mut config = load_default_config_for_test(home).await;

View File

@@ -715,7 +715,7 @@ async fn chatgpt_auth_sends_correct_request() {
)
.await;
let mut model_provider = built_in_model_providers(/* openai_base_url */ None)["openai"].clone();
let mut model_provider = built_in_model_providers(None)["openai"].clone();
model_provider.base_url = Some(format!("{}/api/codex", server.uri()));
let mut builder = test_codex()
.with_auth(create_dummy_codex_auth())
@@ -791,7 +791,7 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
// Init session
@@ -1965,7 +1965,7 @@ async fn token_count_includes_rate_limits_snapshot() {
.mount(&server)
.await;
let mut provider = built_in_model_providers(/* openai_base_url */ None)["openai"].clone();
let mut provider = built_in_model_providers(None)["openai"].clone();
provider.base_url = Some(format!("{}/v1", server.uri()));
let mut builder = test_codex()

View File

@@ -93,7 +93,7 @@ fn json_fragment(text: &str) -> String {
}
fn non_openai_model_provider(server: &MockServer) -> ModelProviderInfo {
let mut provider = built_in_model_providers(/* openai_base_url */ None)["openai"].clone();
let mut provider = built_in_model_providers(None)["openai"].clone();
provider.name = "OpenAI (test)".into();
provider.base_url = Some(format!("{}/v1", server.uri()));
provider

View File

@@ -95,7 +95,7 @@ async fn remote_models_get_model_info_uses_longest_matching_prefix() -> Result<(
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -654,7 +654,7 @@ async fn remote_models_do_not_append_removed_builtin_presets() -> Result<()> {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -709,7 +709,7 @@ async fn remote_models_merge_adds_new_high_priority_first() -> Result<()> {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -756,7 +756,7 @@ async fn remote_models_merge_replaces_overlapping_model() -> Result<()> {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -800,7 +800,7 @@ async fn remote_models_merge_preserves_bundled_models_on_empty_response() -> Res
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -841,7 +841,7 @@ async fn remote_models_request_times_out_after_5s() -> Result<()> {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
@@ -907,7 +907,7 @@ async fn remote_models_hide_picker_only_models() -> Result<()> {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers(/* openai_base_url */ None)["openai"].clone()
..built_in_model_providers(None)["openai"].clone()
};
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),

View File

@@ -303,12 +303,68 @@ impl SessionTelemetry {
sandbox_policy: SandboxPolicy,
mcp_servers: Vec<&str>,
active_profile: Option<String>,
) {
self.conversation_starts_with_endpoint_details(
provider_name,
"unknown",
"unknown",
"unknown",
false,
false,
false,
false,
None,
None,
false,
reasoning_effort,
reasoning_summary,
context_window,
auto_compact_token_limit,
approval_policy,
sandbox_policy,
mcp_servers,
active_profile,
);
}
#[allow(clippy::too_many_arguments)]
pub fn conversation_starts_with_endpoint_details(
&self,
provider_name: &str,
base_url_origin: &str,
host_class: &str,
base_url_source: &str,
base_url_is_default: bool,
auth_env_openai_api_key_present: bool,
auth_env_codex_api_key_present: bool,
auth_env_codex_api_key_enabled: bool,
auth_env_provider_key_name: Option<&str>,
auth_env_provider_key_present: Option<bool>,
auth_env_refresh_token_url_override_present: bool,
reasoning_effort: Option<ReasoningEffort>,
reasoning_summary: ReasoningSummary,
context_window: Option<i64>,
auto_compact_token_limit: Option<i64>,
approval_policy: AskForApproval,
sandbox_policy: SandboxPolicy,
mcp_servers: Vec<&str>,
active_profile: Option<String>,
) {
log_and_trace_event!(
self,
common: {
event.name = "codex.conversation_starts",
provider_name = %provider_name,
base_url_origin = base_url_origin,
host_class = host_class,
base_url_source = base_url_source,
base_url_is_default = base_url_is_default,
auth.env_openai_api_key_present = auth_env_openai_api_key_present,
auth.env_codex_api_key_present = auth_env_codex_api_key_present,
auth.env_codex_api_key_enabled = auth_env_codex_api_key_enabled,
auth.env_provider_key_name = auth_env_provider_key_name,
auth.env_provider_key_present = auth_env_provider_key_present,
auth.env_refresh_token_url_override_present = auth_env_refresh_token_url_override_present,
reasoning_effort = reasoning_effort.map(|e| e.to_string()),
reasoning_summary = %reasoning_summary,
context_window = context_window,
@@ -340,7 +396,7 @@ impl SessionTelemetry {
Ok(response) => (Some(response.status().as_u16()), None),
Err(error) => (error.status().map(|s| s.as_u16()), Some(error.to_string())),
};
self.record_api_request(
self.record_api_request_with_endpoint_details(
attempt,
status,
error.as_deref(),
@@ -351,6 +407,21 @@ impl SessionTelemetry {
None,
None,
"unknown",
false,
None,
None,
"custom",
"custom_unknown",
"default",
false,
false,
false,
false,
None,
None,
false,
None,
None,
None,
None,
None,
@@ -377,6 +448,72 @@ impl SessionTelemetry {
cf_ray: Option<&str>,
auth_error: Option<&str>,
auth_error_code: Option<&str>,
) {
self.record_api_request_with_endpoint_details(
attempt,
status,
error,
duration,
auth_header_attached,
auth_header_name,
retry_after_unauthorized,
recovery_mode,
recovery_phase,
endpoint,
false,
None,
None,
"unknown",
"unknown",
"unknown",
false,
false,
false,
false,
None,
None,
false,
request_id,
cf_ray,
auth_error,
auth_error_code,
None,
None,
);
}
#[allow(clippy::too_many_arguments)]
pub fn record_api_request_with_endpoint_details(
&self,
attempt: u64,
status: Option<u16>,
error: Option<&str>,
duration: Duration,
auth_header_attached: bool,
auth_header_name: Option<&str>,
retry_after_unauthorized: bool,
recovery_mode: Option<&str>,
recovery_phase: Option<&str>,
endpoint: &str,
residency_header_attached: bool,
residency_header_value: Option<&str>,
provider_header_names: Option<&str>,
base_url_origin: &str,
host_class: &str,
base_url_source: &str,
base_url_is_default: bool,
auth_env_openai_api_key_present: bool,
auth_env_codex_api_key_present: bool,
auth_env_codex_api_key_enabled: bool,
auth_env_provider_key_name: Option<&str>,
auth_env_provider_key_present: Option<bool>,
auth_env_refresh_token_url_override_present: bool,
request_id: Option<&str>,
cf_ray: Option<&str>,
auth_error: Option<&str>,
auth_error_code: Option<&str>,
error_body_class: Option<&str>,
safe_error_message: Option<&str>,
) {
let success = status.is_some_and(|code| (200..=299).contains(&code)) && error.is_none();
let success_str = if success { "true" } else { "false" };
@@ -407,10 +544,25 @@ impl SessionTelemetry {
auth.recovery_mode = recovery_mode,
auth.recovery_phase = recovery_phase,
endpoint = endpoint,
residency_header_attached = residency_header_attached,
residency_header_value = residency_header_value,
provider_header_names = provider_header_names,
base_url_origin = base_url_origin,
host_class = host_class,
base_url_source = base_url_source,
base_url_is_default = base_url_is_default,
auth.env_openai_api_key_present = auth_env_openai_api_key_present,
auth.env_codex_api_key_present = auth_env_codex_api_key_present,
auth.env_codex_api_key_enabled = auth_env_codex_api_key_enabled,
auth.env_provider_key_name = auth_env_provider_key_name,
auth.env_provider_key_present = auth_env_provider_key_present,
auth.env_refresh_token_url_override_present = auth_env_refresh_token_url_override_present,
auth.request_id = request_id,
auth.cf_ray = cf_ray,
auth.error = auth_error,
auth.error_code = auth_error_code,
error_body_class = error_body_class,
safe_error_message = safe_error_message,
},
log: {},
trace: {},
@@ -434,6 +586,72 @@ impl SessionTelemetry {
cf_ray: Option<&str>,
auth_error: Option<&str>,
auth_error_code: Option<&str>,
) {
self.record_websocket_connect_with_endpoint_details(
duration,
status,
error,
auth_header_attached,
auth_header_name,
retry_after_unauthorized,
recovery_mode,
recovery_phase,
endpoint,
false,
None,
None,
"unknown",
"unknown",
"unknown",
false,
false,
false,
false,
None,
None,
false,
connection_reused,
request_id,
cf_ray,
auth_error,
auth_error_code,
None,
None,
);
}
#[allow(clippy::too_many_arguments)]
pub fn record_websocket_connect_with_endpoint_details(
&self,
duration: Duration,
status: Option<u16>,
error: Option<&str>,
auth_header_attached: bool,
auth_header_name: Option<&str>,
retry_after_unauthorized: bool,
recovery_mode: Option<&str>,
recovery_phase: Option<&str>,
endpoint: &str,
residency_header_attached: bool,
residency_header_value: Option<&str>,
provider_header_names: Option<&str>,
base_url_origin: &str,
host_class: &str,
base_url_source: &str,
base_url_is_default: bool,
auth_env_openai_api_key_present: bool,
auth_env_codex_api_key_present: bool,
auth_env_codex_api_key_enabled: bool,
auth_env_provider_key_name: Option<&str>,
auth_env_provider_key_present: Option<bool>,
auth_env_refresh_token_url_override_present: bool,
connection_reused: bool,
request_id: Option<&str>,
cf_ray: Option<&str>,
auth_error: Option<&str>,
auth_error_code: Option<&str>,
error_body_class: Option<&str>,
safe_error_message: Option<&str>,
) {
let success = error.is_none()
&& status
@@ -454,11 +672,67 @@ impl SessionTelemetry {
auth.recovery_mode = recovery_mode,
auth.recovery_phase = recovery_phase,
endpoint = endpoint,
residency_header_attached = residency_header_attached,
residency_header_value = residency_header_value,
provider_header_names = provider_header_names,
base_url_origin = base_url_origin,
host_class = host_class,
base_url_source = base_url_source,
base_url_is_default = base_url_is_default,
auth.env_openai_api_key_present = auth_env_openai_api_key_present,
auth.env_codex_api_key_present = auth_env_codex_api_key_present,
auth.env_codex_api_key_enabled = auth_env_codex_api_key_enabled,
auth.env_provider_key_name = auth_env_provider_key_name,
auth.env_provider_key_present = auth_env_provider_key_present,
auth.env_refresh_token_url_override_present = auth_env_refresh_token_url_override_present,
auth.connection_reused = connection_reused,
auth.request_id = request_id,
auth.cf_ray = cf_ray,
auth.error = auth_error,
auth.error_code = auth_error_code,
error_body_class = error_body_class,
safe_error_message = safe_error_message,
},
log: {},
trace: {},
);
}
#[allow(clippy::too_many_arguments)]
pub fn record_geo_denial(
&self,
endpoint: &str,
auth_header_attached: bool,
auth_header_name: Option<&str>,
residency_header_attached: bool,
residency_header_value: Option<&str>,
provider_header_names: Option<&str>,
http_status: Option<u16>,
request_id: Option<&str>,
cf_ray: Option<&str>,
auth_error: Option<&str>,
auth_error_code: Option<&str>,
error_body_class: &str,
safe_error_message: Option<&str>,
) {
log_and_trace_event!(
self,
common: {
event.name = "codex.geo_denial",
geo_denial_detected = true,
request_id = request_id,
cf_ray = cf_ray,
endpoint = endpoint,
auth.header_attached = auth_header_attached,
auth.header_name = auth_header_name,
residency_header_attached = residency_header_attached,
residency_header_value = residency_header_value,
provider_header_names = provider_header_names,
http_status = http_status,
auth.error = auth_error,
auth.error_code = auth_error_code,
error_body_class = error_body_class,
safe_error_message = safe_error_message,
},
log: {},
trace: {},

View File

@@ -485,7 +485,7 @@ fn otel_export_routing_policy_routes_api_request_auth_observability() {
);
let root_span = tracing::info_span!("root");
let _root_guard = root_span.enter();
manager.record_api_request(
manager.record_api_request_with_endpoint_details(
1,
Some(401),
Some("http 401"),
@@ -496,10 +496,25 @@ fn otel_export_routing_policy_routes_api_request_auth_observability() {
Some("managed"),
Some("refresh_token"),
"/responses",
true,
Some("us"),
Some("openai-project,version"),
"chatgpt.com",
"openai_chatgpt",
"ide_settings",
false,
true,
false,
true,
Some("OPENAI_API_KEY"),
Some(true),
true,
Some("req-401"),
Some("ray-401"),
Some("missing_authorization_header"),
Some("token_expired"),
Some("workspace_not_authorized_in_region"),
Some("Workspace is not authorized in this region."),
);
});
@@ -543,10 +558,71 @@ fn otel_export_routing_policy_routes_api_request_auth_observability() {
request_log_attrs.get("endpoint").map(String::as_str),
Some("/responses")
);
assert_eq!(
request_log_attrs
.get("residency_header_attached")
.map(String::as_str),
Some("true")
);
assert_eq!(
request_log_attrs
.get("residency_header_value")
.map(String::as_str),
Some("us")
);
assert_eq!(
request_log_attrs.get("base_url_origin").map(String::as_str),
Some("chatgpt.com")
);
assert_eq!(
request_log_attrs.get("host_class").map(String::as_str),
Some("openai_chatgpt")
);
assert_eq!(
request_log_attrs.get("base_url_source").map(String::as_str),
Some("ide_settings")
);
assert_eq!(
request_log_attrs
.get("auth.env_openai_api_key_present")
.map(String::as_str),
Some("true")
);
assert_eq!(
request_log_attrs
.get("auth.env_provider_key_name")
.map(String::as_str),
Some("OPENAI_API_KEY")
);
assert_eq!(
request_log_attrs
.get("provider_header_names")
.map(String::as_str),
Some("openai-project,version")
);
assert_eq!(
request_log_attrs
.get("base_url_is_default")
.map(String::as_str),
Some("false")
);
assert_eq!(
request_log_attrs.get("auth.error").map(String::as_str),
Some("missing_authorization_header")
);
assert_eq!(
request_log_attrs
.get("error_body_class")
.map(String::as_str),
Some("workspace_not_authorized_in_region")
);
assert_eq!(
request_log_attrs
.get("safe_error_message")
.map(String::as_str),
Some("Workspace is not authorized in this region.")
);
assert!(!request_log_attrs.contains_key("error_body"));
let spans = span_exporter.get_finished_spans().expect("span export");
let request_trace_event =
@@ -570,10 +646,22 @@ fn otel_export_routing_policy_routes_api_request_auth_observability() {
.map(String::as_str),
Some("true")
);
assert_eq!(
request_trace_attrs
.get("base_url_origin")
.map(String::as_str),
Some("chatgpt.com")
);
assert_eq!(
request_trace_attrs.get("endpoint").map(String::as_str),
Some("/responses")
);
assert_eq!(
request_trace_attrs
.get("safe_error_message")
.map(String::as_str),
Some("Workspace is not authorized in this region.")
);
}
#[test]
@@ -617,7 +705,7 @@ fn otel_export_routing_policy_routes_websocket_connect_auth_observability() {
);
let root_span = tracing::info_span!("root");
let _root_guard = root_span.enter();
manager.record_websocket_connect(
manager.record_websocket_connect_with_endpoint_details(
std::time::Duration::from_millis(17),
Some(401),
Some("http 401"),
@@ -627,11 +715,26 @@ fn otel_export_routing_policy_routes_websocket_connect_auth_observability() {
Some("managed"),
Some("reload"),
"/responses",
true,
Some("us"),
Some("x-api-key"),
"openrouter.ai",
"known_third_party",
"config_toml",
false,
false,
true,
false,
Some("OPENROUTER_API_KEY"),
Some(true),
false,
false,
Some("req-ws-401"),
Some("ray-ws-401"),
Some("missing_authorization_header"),
Some("token_expired"),
Some("workspace_not_authorized_in_region"),
Some("Workspace is not authorized in this region."),
);
});
@@ -647,6 +750,12 @@ fn otel_export_routing_policy_routes_websocket_connect_auth_observability() {
.map(String::as_str),
Some("true")
);
assert_eq!(
connect_log_attrs
.get("auth.env_provider_key_name")
.map(String::as_str),
Some("OPENROUTER_API_KEY")
);
assert_eq!(
connect_log_attrs
.get("auth.header_name")
@@ -657,10 +766,36 @@ fn otel_export_routing_policy_routes_websocket_connect_auth_observability() {
connect_log_attrs.get("auth.error").map(String::as_str),
Some("missing_authorization_header")
);
assert_eq!(
connect_log_attrs
.get("provider_header_names")
.map(String::as_str),
Some("x-api-key")
);
assert_eq!(
connect_log_attrs.get("base_url_origin").map(String::as_str),
Some("openrouter.ai")
);
assert_eq!(
connect_log_attrs.get("host_class").map(String::as_str),
Some("known_third_party")
);
assert_eq!(
connect_log_attrs.get("endpoint").map(String::as_str),
Some("/responses")
);
assert_eq!(
connect_log_attrs
.get("residency_header_value")
.map(String::as_str),
Some("us")
);
assert_eq!(
connect_log_attrs
.get("safe_error_message")
.map(String::as_str),
Some("Workspace is not authorized in this region.")
);
assert_eq!(
connect_log_attrs
.get("auth.connection_reused")
@@ -678,6 +813,24 @@ fn otel_export_routing_policy_routes_websocket_connect_auth_observability() {
.map(String::as_str),
Some("reload")
);
assert_eq!(
connect_trace_attrs
.get("base_url_source")
.map(String::as_str),
Some("config_toml")
);
assert_eq!(
connect_trace_attrs
.get("error_body_class")
.map(String::as_str),
Some("workspace_not_authorized_in_region")
);
assert_eq!(
connect_trace_attrs
.get("safe_error_message")
.map(String::as_str),
Some("Workspace is not authorized in this region.")
);
}
#[test]
@@ -756,3 +909,245 @@ fn otel_export_routing_policy_routes_websocket_request_transport_observability()
Some("true")
);
}
#[test]
fn otel_export_routing_policy_routes_geo_denial_log_and_trace_events() {
    // In-memory exporters capture exactly what the export-routing policy sends
    // to the log sink versus the trace sink, so each side can be inspected.
    let log_exporter = InMemoryLogExporter::default();
    let logger_provider = SdkLoggerProvider::builder()
        .with_simple_exporter(log_exporter.clone())
        .build();
    let span_exporter = InMemorySpanExporter::default();
    let tracer_provider = SdkTracerProvider::builder()
        .with_simple_exporter(span_exporter.clone())
        .build();
    let tracer = tracer_provider.tracer("sink-split-test");
    let subscriber = tracing_subscriber::registry()
        .with(
            opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge::new(
                &logger_provider,
            )
            .with_filter(filter_fn(OtelProvider::log_export_filter)),
        )
        .with(
            tracing_opentelemetry::layer()
                .with_tracer(tracer)
                .with_filter(filter_fn(OtelProvider::trace_export_filter)),
        );
    tracing::subscriber::with_default(subscriber, || {
        tracing::callsite::rebuild_interest_cache();
        let telemetry = SessionTelemetry::new(
            ThreadId::new(),
            "gpt-5.1",
            "gpt-5.1",
            Some("account-id".to_string()),
            Some("engineer@example.com".to_string()),
            Some(TelemetryAuthMode::Chatgpt),
            "codex_exec".to_string(),
            true,
            "tty".to_string(),
            SessionSource::Cli,
        );
        let root = tracing::info_span!("root");
        let _enter = root.enter();
        // Emit one geo-denial event with sanitized request/denial context.
        telemetry.record_geo_denial(
            "/responses",
            true,
            Some("authorization"),
            true,
            Some("us"),
            Some("x-api-key"),
            Some(401),
            Some("req-geo"),
            Some("ray-geo"),
            Some("missing_authorization_header"),
            Some("workspace_not_authorized_in_region"),
            "workspace_not_authorized_in_region",
            Some("Workspace is not authorized in this region."),
        );
    });
    logger_provider.force_flush().expect("flush logs");
    tracer_provider.force_flush().expect("flush traces");

    let logs = log_exporter.get_emitted_logs().expect("log export");
    let geo_log = find_log_by_event_name(&logs, "codex.geo_denial");
    let log_attrs = log_attributes(&geo_log.record);
    // The log sink must carry every sanitized denial attribute verbatim.
    for (key, expected) in [
        ("geo_denial_detected", "true"),
        ("request_id", "req-geo"),
        ("auth.header_attached", "true"),
        ("auth.header_name", "authorization"),
        ("endpoint", "/responses"),
        ("provider_header_names", "x-api-key"),
        ("auth.error_code", "workspace_not_authorized_in_region"),
        ("residency_header_value", "us"),
        ("error_body_class", "workspace_not_authorized_in_region"),
        ("safe_error_message", "Workspace is not authorized in this region."),
    ] {
        assert_eq!(
            log_attrs.get(key).map(String::as_str),
            Some(expected),
            "log attribute {key}"
        );
    }
    // The raw response body must never be exported.
    assert!(!log_attrs.contains_key("error_body"));

    let spans = span_exporter.get_finished_spans().expect("span export");
    let trace_event = find_span_event_by_name_attr(&spans[0].events.events, "codex.geo_denial");
    let trace_attrs = span_event_attributes(trace_event);
    // The trace sink mirrors the same denial context on the span event.
    for (key, expected) in [
        ("auth.header_attached", "true"),
        ("cf_ray", "ray-geo"),
        ("auth.error", "missing_authorization_header"),
        ("http_status", "401"),
        ("safe_error_message", "Workspace is not authorized in this region."),
    ] {
        assert_eq!(
            trace_attrs.get(key).map(String::as_str),
            Some(expected),
            "trace attribute {key}"
        );
    }
}
#[test]
fn otel_export_routing_policy_routes_conversation_start_endpoint_config() {
    // In-memory exporters let the test observe the conversation-start event on
    // both the log sink and the trace sink independently.
    let log_exporter = InMemoryLogExporter::default();
    let logger_provider = SdkLoggerProvider::builder()
        .with_simple_exporter(log_exporter.clone())
        .build();
    let span_exporter = InMemorySpanExporter::default();
    let tracer_provider = SdkTracerProvider::builder()
        .with_simple_exporter(span_exporter.clone())
        .build();
    let tracer = tracer_provider.tracer("sink-split-test");
    let subscriber = tracing_subscriber::registry()
        .with(
            opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge::new(
                &logger_provider,
            )
            .with_filter(filter_fn(OtelProvider::log_export_filter)),
        )
        .with(
            tracing_opentelemetry::layer()
                .with_tracer(tracer)
                .with_filter(filter_fn(OtelProvider::trace_export_filter)),
        );
    tracing::subscriber::with_default(subscriber, || {
        tracing::callsite::rebuild_interest_cache();
        let telemetry = SessionTelemetry::new(
            ThreadId::new(),
            "gpt-5.1",
            "gpt-5.1",
            Some("account-id".to_string()),
            Some("engineer@example.com".to_string()),
            Some(TelemetryAuthMode::Chatgpt),
            "codex_exec".to_string(),
            true,
            "tty".to_string(),
            SessionSource::Cli,
        );
        let root = tracing::info_span!("root");
        let _enter = root.enter();
        // Record a conversation start against a custom, non-default endpoint.
        telemetry.conversation_starts_with_endpoint_details(
            "OpenAI",
            "custom",
            "custom_unknown",
            "env",
            false,
            true,
            false,
            true,
            Some("OPENAI_API_KEY"),
            Some(true),
            true,
            None,
            codex_protocol::config_types::ReasoningSummary::Auto,
            None,
            None,
            codex_protocol::protocol::AskForApproval::OnRequest,
            codex_protocol::protocol::SandboxPolicy::new_read_only_policy(),
            Vec::new(),
            None,
        );
    });
    logger_provider.force_flush().expect("flush logs");
    tracer_provider.force_flush().expect("flush traces");

    let logs = log_exporter.get_emitted_logs().expect("log export");
    let start_log = find_log_by_event_name(&logs, "codex.conversation_starts");
    let log_attrs = log_attributes(&start_log.record);
    // Endpoint-configuration attributes must appear on the exported log record.
    for (key, expected) in [
        ("base_url_origin", "custom"),
        ("host_class", "custom_unknown"),
        ("base_url_source", "env"),
        ("auth.env_provider_key_name", "OPENAI_API_KEY"),
    ] {
        assert_eq!(
            log_attrs.get(key).map(String::as_str),
            Some(expected),
            "log attribute {key}"
        );
    }

    let spans = span_exporter.get_finished_spans().expect("span export");
    let trace_event =
        find_span_event_by_name_attr(&spans[0].events.events, "codex.conversation_starts");
    let trace_attrs = span_event_attributes(trace_event);
    // The trace sink must report the same non-default base URL flag.
    assert_eq!(
        trace_attrs.get("base_url_is_default").map(String::as_str),
        Some("false")
    );
}