[codex] implement codex.user_prompt trace events

This commit is contained in:
Anton Panasenko
2025-09-11 15:43:45 -07:00
parent 089e367814
commit d1ea58e87e
7 changed files with 77 additions and 8 deletions

View File

@@ -941,6 +941,7 @@ mod tests {
"test",
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);
@@ -1011,6 +1012,7 @@ mod tests {
"test",
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);
@@ -1054,6 +1056,7 @@ mod tests {
"test",
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);
@@ -1168,6 +1171,7 @@ mod tests {
"test",
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);

View File

@@ -451,6 +451,7 @@ impl Session {
config.model_family.slug.as_str(),
auth_manager.auth().and_then(|a| a.get_account_id()),
auth_manager.preferred_auth_method(),
config.otel.log_user_prompt,
terminal::user_agent(),
);
@@ -1231,10 +1232,10 @@ async fn submission_loop(
updated_config.model_context_window = Some(model_info.context_window);
}
let trace_manager = prev
.client
.get_trace_manager()
.with_model(updated_config.model.as_str(), updated_config.model_family.slug.as_str());
let trace_manager = prev.client.get_trace_manager().with_model(
updated_config.model.as_str(),
updated_config.model_family.slug.as_str(),
);
let client = ModelClient::new(
Arc::new(updated_config),
@@ -1290,6 +1291,7 @@ async fn submission_loop(
}
}
Op::UserInput { items } => {
let _ = turn_context.client.get_trace_manager().user_prompt(&items);
// attempt to inject input into current task
if let Err(items) = sess.inject_input(items).await {
// no current task, spawn a new one
@@ -1307,6 +1309,7 @@ async fn submission_loop(
effort,
summary,
} => {
let _ = turn_context.client.get_trace_manager().user_prompt(&items);
// attempt to inject input into current task
if let Err(items) = sess.inject_input(items).await {
// Derive a fresh TurnContext for this turn using the provided overrides.
@@ -1325,10 +1328,10 @@ async fn submission_loop(
per_turn_config.model_context_window = Some(model_info.context_window);
}
let trace_manager = turn_context
.client
.get_trace_manager()
.with_model(per_turn_config.model.as_str(), per_turn_config.model_family.slug.as_str());
let trace_manager = turn_context.client.get_trace_manager().with_model(
per_turn_config.model.as_str(),
per_turn_config.model_family.slug.as_str(),
);
// Build a new client with per-turn reasoning settings.
// Reuse the same provider and session id; auth defaults to env/API key.

View File

@@ -1068,12 +1068,14 @@ impl Config {
use crate::config_types::OtelSampler;
let t: OtelConfigToml = cfg.otel.unwrap_or_default();
let enabled = t.enabled.unwrap_or(false);
let log_user_prompt = t.log_user_prompt.unwrap_or(false);
let environment = t
.environment
.unwrap_or(DEFAULT_OTEL_ENVIRONMENT.to_string());
if !enabled {
OtelConfig {
enabled,
log_user_prompt,
environment,
sampler: OtelSampler::AlwaysOn,
exporter: OtelExporterKind::None,
@@ -1083,6 +1085,7 @@ impl Config {
let exporter = t.exporter.unwrap_or(OtelExporterKind::OtlpFile);
OtelConfig {
enabled,
log_user_prompt,
environment,
sampler,
exporter,
@@ -1669,6 +1672,7 @@ model_verbosity = "high"
tui_notifications: Default::default(),
otel: crate::config_types::OtelConfig {
enabled: false,
log_user_prompt: false,
environment: DEFAULT_OTEL_ENVIRONMENT.to_string(),
exporter: crate::config_types::OtelExporterKind::None,
sampler: crate::config_types::OtelSampler::AlwaysOn,
@@ -1733,6 +1737,7 @@ model_verbosity = "high"
tui_notifications: Default::default(),
otel: crate::config_types::OtelConfig {
enabled: false,
log_user_prompt: false,
environment: DEFAULT_OTEL_ENVIRONMENT.to_string(),
exporter: crate::config_types::OtelExporterKind::None,
sampler: crate::config_types::OtelSampler::AlwaysOn,
@@ -1812,6 +1817,7 @@ model_verbosity = "high"
tui_notifications: Default::default(),
otel: crate::config_types::OtelConfig {
enabled: false,
log_user_prompt: false,
environment: DEFAULT_OTEL_ENVIRONMENT.to_string(),
exporter: crate::config_types::OtelExporterKind::None,
sampler: crate::config_types::OtelSampler::AlwaysOn,
@@ -1877,6 +1883,7 @@ model_verbosity = "high"
tui_notifications: Default::default(),
otel: crate::config_types::OtelConfig {
enabled: false,
log_user_prompt: false,
environment: DEFAULT_OTEL_ENVIRONMENT.to_string(),
exporter: crate::config_types::OtelExporterKind::None,
sampler: crate::config_types::OtelSampler::AlwaysOn,

View File

@@ -117,6 +117,9 @@ pub struct OtelConfigToml {
/// Enable or disable OTEL entirely. Defaults to false.
pub enabled: Option<bool>,
/// Log the user prompt verbatim in traces. Defaults to false.
pub log_user_prompt: Option<bool>,
/// Mark traces with environment (dev, staging, prod, test). Defaults to dev.
pub environment: Option<String>,
@@ -131,6 +134,7 @@ pub struct OtelConfigToml {
#[derive(Debug, Clone, PartialEq)]
pub struct OtelConfig {
pub enabled: bool,
pub log_user_prompt: bool,
pub environment: String,
pub sampler: OtelSampler,
pub exporter: OtelExporterKind,

View File

@@ -80,6 +80,7 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
config.model_family.slug.as_str(),
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);

View File

@@ -73,6 +73,7 @@ async fn run_stream(sse_body: &str) -> Vec<ResponseEvent> {
config.model_family.slug.as_str(),
None,
AuthMode::ChatGPT,
false,
"test".to_string(),
);

View File

@@ -3,6 +3,7 @@ use chrono::Utc;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::ConversationId;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InputItem;
use opentelemetry::propagation::TextMapPropagator;
use opentelemetry_http::HeaderInjector;
use opentelemetry_sdk::propagation::TraceContextPropagator;
@@ -132,6 +133,39 @@ impl SSESpan {
}
}
/// Wrapper around the tracing span emitted for a single user prompt.
pub struct UserPromptSpan(pub(crate) Span);

impl UserPromptSpan {
    /// Build the `codex.user_prompt` span from session metadata.
    ///
    /// The prompt text is recorded verbatim only when the session opted in
    /// via `log_user_prompts`; otherwise the literal "[REDACTED]" is logged.
    /// The prompt's character count is recorded either way.
    pub fn new(metadata: TraceMetadata, prompt: &str) -> Self {
        let logged_prompt = if metadata.log_user_prompts { prompt } else { "[REDACTED]" };
        let span = info_span!(
            "codex.user_prompt",
            session.id = %metadata.conversation_id,
            app.version = %metadata.app_version,
            user.account_id = tracing::field::Empty,
            terminal.type = %metadata.terminal_type,
            event.timestamp = %timestamp(),
            prompt_length = %prompt.chars().count(),
            prompt = %logged_prompt,
        );
        // The account id is only known for authenticated sessions; fill the
        // pre-declared empty field when present.
        if let Some(account_id) = metadata.account_id.as_ref() {
            span.record("user.account_id", account_id);
        }
        Self(span)
    }

    /// A clone of the underlying span, for attaching further events.
    pub fn span(&self) -> Span {
        self.0.clone()
    }
}
#[derive(Debug, Clone)]
pub struct TraceMetadata {
conversation_id: ConversationId,
@@ -139,6 +173,7 @@ pub struct TraceMetadata {
account_id: Option<String>,
model: String,
slug: String,
log_user_prompts: bool,
app_version: &'static str,
terminal_type: String,
}
@@ -155,6 +190,7 @@ impl TraceManager {
slug: &str,
account_id: Option<String>,
auth_mode: AuthMode,
log_user_prompts: bool,
terminal_type: String,
) -> TraceManager {
Self {
@@ -164,6 +200,7 @@ impl TraceManager {
account_id,
model: model.to_owned(),
slug: slug.to_owned(),
log_user_prompts,
app_version: env!("CARGO_PKG_VERSION"),
terminal_type,
},
@@ -191,6 +228,18 @@ impl TraceManager {
pub fn response(&self) -> SSESpan {
SSESpan::new(self.metadata.clone())
}
/// Emit a `codex.user_prompt` trace span for the given submission items.
///
/// Concatenates the text of all `InputItem::Text` entries (non-text items
/// are ignored) and forwards it to [`UserPromptSpan::new`], which redacts
/// the content unless prompt logging was enabled for this session.
pub fn user_prompt(&self, items: &[InputItem]) -> UserPromptSpan {
    // filter_map is the idiomatic form for an Option-returning closure
    // (clippy::flat_map_option); collecting &str slices builds the String
    // without per-item allocations.
    let prompt: String = items
        .iter()
        .filter_map(|item| match item {
            InputItem::Text { text } => Some(text.as_str()),
            _ => None,
        })
        .collect();
    UserPromptSpan::new(self.metadata.clone(), &prompt)
}
}
fn timestamp() -> String {