Compare commits


1 Commit

Author: Tom Wiltzius
SHA1: 43c6030612
Message: core: add optional datadog trace headers for responses calls
Date: 2026-02-01 16:27:50 -08:00
32 changed files with 421 additions and 553 deletions

View File

@@ -111,41 +111,10 @@ impl CodexRequestBuilder {
}
pub async fn send(self) -> Result<Response, reqwest::Error> {
let residency_header = self
.builder
.try_clone()
.and_then(|builder| builder.build().ok())
.and_then(|request| {
request
.headers()
.get("x-openai-internal-codex-residency")
.and_then(|value| value.to_str().ok())
.map(std::borrow::ToOwned::to_owned)
});
let headers = trace_headers();
match self.builder.headers(headers).send().await {
Ok(response) => {
if is_codex_responses_path(&self.url) && response.status().as_u16() == 401 {
let cf_ray = response
.headers()
.get("cf-ray")
.and_then(|value| value.to_str().ok());
let auth_error = response
.headers()
.get("x-openai-authorization-error")
.and_then(|value| value.to_str().ok());
tracing::info!(
method = %self.method,
url = %self.url,
status = %response.status(),
residency_header_present = residency_header.is_some(),
residency_header_value = ?residency_header,
cf_ray,
auth_error,
"Codex responses request returned unauthorized"
);
}
tracing::debug!(
method = %self.method,
url = %self.url,
@@ -172,11 +141,6 @@ impl CodexRequestBuilder {
}
}
fn is_codex_responses_path(url: &str) -> bool {
url.contains("/backend-api/codex/responses")
|| url.contains("/chat/backend/api/codex/responses")
}
struct HeaderMapInjector<'a>(&'a mut HeaderMap);
impl<'a> Injector for HeaderMapInjector<'a> {

View File

@@ -3,7 +3,6 @@ edition.workspace = true
license.workspace = true
name = "codex-core"
version.workspace = true
build = "build.rs"
[lib]
doctest = false

View File

@@ -1,27 +0,0 @@
use std::fs;
use std::path::Path;
fn main() {
let samples_dir = Path::new("src/skills/assets/samples");
if !samples_dir.exists() {
return;
}
println!("cargo:rerun-if-changed={}", samples_dir.display());
visit_dir(samples_dir);
}
fn visit_dir(dir: &Path) {
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
Err(_) => return,
};
for entry in entries.flatten() {
let path = entry.path();
println!("cargo:rerun-if-changed={}", path.display());
if path.is_dir() {
visit_dir(&path);
}
}
}

View File

@@ -412,6 +412,10 @@
"description": "Value to use with `Authorization: Bearer <token>` header. Use of this config is discouraged in favor of `env_key` for security reasons, but this may be necessary when using this programmatically.",
"type": "string"
},
"force_datadog_tracing": {
"description": "Whether to include Datadog tracing headers on Responses API calls.",
"type": "boolean"
},
"http_headers": {
"additionalProperties": {
"type": "string"

View File

@@ -50,6 +50,7 @@ use tokio::sync::mpsc;
use tokio_tungstenite::tungstenite::Error;
use tokio_tungstenite::tungstenite::Message;
use tracing::warn;
use uuid::Uuid;
use crate::AuthManager;
use crate::auth::CodexAuth;
@@ -72,6 +73,10 @@ use crate::transport_manager::TransportManager;
pub const WEB_SEARCH_ELIGIBLE_HEADER: &str = "x-oai-web-search-eligible";
pub const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
const X_DATADOG_TRACE_ID_HEADER: &str = "x-datadog-trace-id";
const X_DATADOG_PARENT_ID_HEADER: &str = "x-datadog-parent-id";
const X_DATADOG_SAMPLING_PRIORITY_HEADER: &str = "x-datadog-sampling-priority";
const DATADOG_SAMPLING_PRIORITY: i32 = 2;
#[derive(Debug)]
struct ModelClientState {
@@ -380,7 +385,11 @@ impl ModelClientSession {
store_override: None,
conversation_id: Some(conversation_id),
session_source: Some(self.state.session_source.clone()),
extra_headers: build_responses_headers(&self.state.config, Some(&self.turn_state)),
extra_headers: build_responses_headers(
&self.state.config,
&self.state.provider,
Some(&self.turn_state),
),
compression,
turn_state: Some(Arc::clone(&self.turn_state)),
}
@@ -712,6 +721,7 @@ fn experimental_feature_headers(config: &Config) -> ApiHeaderMap {
fn build_responses_headers(
config: &Config,
provider: &ModelProviderInfo,
turn_state: Option<&Arc<OnceLock<String>>>,
) -> ApiHeaderMap {
let mut headers = experimental_feature_headers(config);
@@ -725,6 +735,7 @@ fn build_responses_headers(
},
),
);
apply_responses_trace_headers(&mut headers, provider.force_datadog_tracing);
if let Some(turn_state) = turn_state
&& let Some(state) = turn_state.get()
&& let Ok(header_value) = HeaderValue::from_str(state)
@@ -734,6 +745,38 @@ fn build_responses_headers(
headers
}
fn apply_responses_trace_headers(headers: &mut ApiHeaderMap, force_datadog_tracing: bool) {
if !force_datadog_tracing {
return;
}
apply_datadog_headers(headers);
}
fn apply_datadog_headers(headers: &mut ApiHeaderMap) {
let trace_id = random_datadog_id();
let parent_id = random_datadog_id();
if let Ok(trace_header) = HeaderValue::from_str(&trace_id.to_string()) {
let _ = headers.insert(X_DATADOG_TRACE_ID_HEADER, trace_header);
}
if let Ok(parent_header) = HeaderValue::from_str(&parent_id.to_string()) {
let _ = headers.insert(X_DATADOG_PARENT_ID_HEADER, parent_header);
}
let priority_value = DATADOG_SAMPLING_PRIORITY.to_string();
if let Ok(priority_header) = HeaderValue::from_str(&priority_value) {
let _ = headers.insert(X_DATADOG_SAMPLING_PRIORITY_HEADER, priority_header);
}
}
fn random_datadog_id() -> u64 {
loop {
let value = (Uuid::new_v4().as_u128() & u128::from(u64::MAX)) as u64;
if value != 0 {
return value;
}
}
}
fn map_response_stream<S>(api_stream: S, otel_manager: OtelManager) -> ResponseStream
where
S: futures::Stream<Item = std::result::Result<ResponseEvent, ApiError>>
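
The gate here is the per-provider flag: `apply_responses_trace_headers` returns early unless `force_datadog_tracing` is set, so the three `x-datadog-*` headers are only attached when a provider opts in. A self-contained sketch of the header logic, assuming the `http` and `uuid` crates in place of the production `ApiHeaderMap` alias:

use http::{HeaderMap, HeaderValue};
use uuid::Uuid;

// Datadog expects decimal 64-bit IDs. Take the low 64 bits of a v4 UUID and
// retry on zero, which Datadog would treat as an unset ID.
fn random_datadog_id() -> u64 {
    loop {
        let value = (Uuid::new_v4().as_u128() & u128::from(u64::MAX)) as u64;
        if value != 0 {
            return value;
        }
    }
}

fn apply_datadog_headers(headers: &mut HeaderMap) {
    for (name, value) in [
        ("x-datadog-trace-id", random_datadog_id().to_string()),
        ("x-datadog-parent-id", random_datadog_id().to_string()),
        // Priority 2 ("user keep") asks the Datadog agent to retain the trace.
        ("x-datadog-sampling-priority", "2".to_string()),
    ] {
        if let Ok(value) = HeaderValue::from_str(&value) {
            headers.insert(name, value);
        }
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    apply_datadog_headers(&mut headers);
    assert!(headers.get("x-datadog-trace-id").is_some());
    assert_eq!(
        headers.get("x-datadog-sampling-priority").and_then(|v| v.to_str().ok()),
        Some("2")
    );
}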

View File

@@ -51,7 +51,6 @@ use codex_protocol::items::PlanItem;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::format_allow_prefixes;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::HasLegacyEvent;
@@ -213,6 +212,7 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::render_command_prefix_list;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::user_input::UserInput;
@@ -490,7 +490,7 @@ pub(crate) struct TurnContext {
pub(crate) developer_instructions: Option<String>,
pub(crate) compact_prompt: Option<String>,
pub(crate) user_instructions: Option<String>,
pub(crate) collaboration_mode: CollaborationMode,
pub(crate) collaboration_mode_kind: ModeKind,
pub(crate) personality: Option<Personality>,
pub(crate) approval_policy: AskForApproval,
pub(crate) sandbox_policy: SandboxPolicy,
@@ -692,7 +692,7 @@ impl Session {
developer_instructions: session_configuration.developer_instructions.clone(),
compact_prompt: session_configuration.compact_prompt.clone(),
user_instructions: session_configuration.user_instructions.clone(),
collaboration_mode: session_configuration.collaboration_mode.clone(),
collaboration_mode_kind: session_configuration.collaboration_mode.mode,
personality: session_configuration.personality,
approval_policy: session_configuration.approval_policy.value(),
sandbox_policy: session_configuration.sandbox_policy.get().clone(),
@@ -1356,14 +1356,16 @@ impl Session {
fn build_collaboration_mode_update_item(
&self,
previous: Option<&Arc<TurnContext>>,
next: &TurnContext,
previous_collaboration_mode: &CollaborationMode,
next_collaboration_mode: Option<&CollaborationMode>,
) -> Option<ResponseItem> {
let prev = previous?;
if prev.collaboration_mode != next.collaboration_mode {
if let Some(next_mode) = next_collaboration_mode {
if previous_collaboration_mode == next_mode {
return None;
}
// If the next mode has empty developer instructions, this returns None and we emit no
// update, so prior collaboration instructions remain in the prompt history.
Some(DeveloperInstructions::from_collaboration_mode(&next.collaboration_mode)?.into())
Some(DeveloperInstructions::from_collaboration_mode(next_mode)?.into())
} else {
None
}
@@ -1373,6 +1375,8 @@ impl Session {
&self,
previous_context: Option<&Arc<TurnContext>>,
current_context: &TurnContext,
previous_collaboration_mode: &CollaborationMode,
next_collaboration_mode: Option<&CollaborationMode>,
) -> Vec<ResponseItem> {
let mut update_items = Vec::new();
if let Some(env_item) =
@@ -1385,9 +1389,10 @@ impl Session {
{
update_items.push(permissions_item);
}
if let Some(collaboration_mode_item) =
self.build_collaboration_mode_update_item(previous_context, current_context)
{
if let Some(collaboration_mode_item) = self.build_collaboration_mode_update_item(
previous_collaboration_mode,
next_collaboration_mode,
) {
update_items.push(collaboration_mode_item);
}
if let Some(personality_item) =
@@ -1517,7 +1522,7 @@ impl Session {
sub_id: &str,
amendment: &ExecPolicyAmendment,
) {
let Some(prefixes) = format_allow_prefixes(vec![amendment.command.clone()]) else {
let Some(prefixes) = render_command_prefix_list([amendment.command.as_slice()]) else {
warn!("execpolicy amendment for {sub_id} had no command prefix");
return;
};
@@ -2568,6 +2573,18 @@ mod handlers {
sub_id: String,
updates: SessionSettingsUpdate,
) {
let previous_context = sess
.new_default_turn_with_sub_id(sess.next_internal_sub_id())
.await;
let previous_collaboration_mode = sess
.state
.lock()
.await
.session_configuration
.collaboration_mode
.clone();
let next_collaboration_mode = updates.collaboration_mode.clone();
if let Err(err) = sess.update_settings(updates).await {
sess.send_event_raw(Event {
id: sub_id,
@@ -2577,6 +2594,24 @@ mod handlers {
}),
})
.await;
return;
}
let initial_context_seeded = sess.state.lock().await.initial_context_seeded;
if !initial_context_seeded {
return;
}
let current_context = sess.new_default_turn_with_sub_id(sub_id).await;
let update_items = sess.build_settings_update_items(
Some(&previous_context),
&current_context,
&previous_collaboration_mode,
next_collaboration_mode.as_ref(),
);
if !update_items.is_empty() {
sess.record_conversation_items(&current_context, &update_items)
.await;
}
}
@@ -2636,6 +2671,14 @@ mod handlers {
_ => unreachable!(),
};
let previous_collaboration_mode = sess
.state
.lock()
.await
.session_configuration
.collaboration_mode
.clone();
let next_collaboration_mode = updates.collaboration_mode.clone();
let Ok(current_context) = sess.new_turn_with_sub_id(sub_id, updates).await else {
// new_turn_with_sub_id already emits the error event.
return;
@@ -2648,8 +2691,12 @@ mod handlers {
// Attempt to inject input into current task
if let Err(items) = sess.inject_input(items).await {
sess.seed_initial_context_if_needed(&current_context).await;
let update_items =
sess.build_settings_update_items(previous_context.as_ref(), &current_context);
let update_items = sess.build_settings_update_items(
previous_context.as_ref(),
&current_context,
&previous_collaboration_mode,
next_collaboration_mode.as_ref(),
);
if !update_items.is_empty() {
sess.record_conversation_items(&current_context, &update_items)
.await;
@@ -3164,7 +3211,7 @@ async fn spawn_review_thread(
developer_instructions: None,
user_instructions: None,
compact_prompt: parent_turn_context.compact_prompt.clone(),
collaboration_mode: parent_turn_context.collaboration_mode.clone(),
collaboration_mode_kind: parent_turn_context.collaboration_mode_kind,
personality: parent_turn_context.personality,
approval_policy: parent_turn_context.approval_policy,
sandbox_policy: parent_turn_context.sandbox_policy.clone(),
@@ -3279,7 +3326,7 @@ pub(crate) async fn run_turn(
let total_usage_tokens = sess.get_total_token_usage().await;
let event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode.mode,
collaboration_mode_kind: turn_context.collaboration_mode_kind,
});
sess.send_event(&turn_context, event).await;
if total_usage_tokens >= auto_compact_limit {
@@ -4192,7 +4239,7 @@ async fn try_run_sampling_request(
let mut last_agent_message: Option<String> = None;
let mut active_item: Option<TurnItem> = None;
let mut should_emit_turn_diff = false;
let plan_mode = turn_context.collaboration_mode.mode == ModeKind::Plan;
let plan_mode = turn_context.collaboration_mode_kind == ModeKind::Plan;
let mut plan_mode_state = plan_mode.then(|| PlanModeStreamState::new(&turn_context.sub_id));
let receiving_span = trace_span!("receiving_stream");
let outcome: CodexResult<SamplingRequestResult> = loop {

View File

@@ -61,7 +61,7 @@ pub(crate) async fn run_compact_task(
) {
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode.mode,
collaboration_mode_kind: turn_context.collaboration_mode_kind,
});
sess.send_event(&turn_context, start_event).await;
run_compact_task_inner(sess.clone(), turn_context, input).await;

View File

@@ -22,7 +22,7 @@ pub(crate) async fn run_inline_remote_auto_compact_task(
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode.mode,
collaboration_mode_kind: turn_context.collaboration_mode_kind,
});
sess.send_event(&turn_context, start_event).await;

View File

@@ -3722,6 +3722,7 @@ model_verbosity = "high"
stream_idle_timeout_ms: Some(300_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let model_provider_map = {
let mut model_provider_map = built_in_model_providers();

View File

@@ -84,13 +84,6 @@ pub fn set_default_client_residency_requirement(enforce_residency: Option<Reside
tracing::warn!("Failed to acquire requirements residency lock");
return;
};
if *guard != enforce_residency {
tracing::info!(
previous = ?*guard,
updated = ?enforce_residency,
"Updated Codex residency requirement"
);
}
*guard = enforce_residency;
}
@@ -195,11 +188,6 @@ pub fn build_reqwest_client() -> reqwest::Client {
let value = match requirement {
ResidencyRequirement::Us => HeaderValue::from_static("us"),
};
tracing::debug!(
header_name = RESIDENCY_HEADER_NAME,
header_value = ?value,
"Applying residency header to default HTTP client"
);
headers.insert(RESIDENCY_HEADER_NAME, value);
}
let ua = get_codex_user_agent();

View File

@@ -106,6 +106,10 @@ pub struct ModelProviderInfo {
/// Whether this provider supports the Responses API WebSocket transport.
#[serde(default)]
pub supports_websockets: bool,
/// Whether to include Datadog tracing headers on Responses API calls.
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub force_datadog_tracing: bool,
}
impl ModelProviderInfo {
@@ -264,6 +268,7 @@ impl ModelProviderInfo {
stream_idle_timeout_ms: None,
requires_openai_auth: true,
supports_websockets: true,
force_datadog_tracing: false,
}
}
@@ -343,6 +348,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> M
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
}
}
@@ -372,6 +378,7 @@ base_url = "http://localhost:11434/v1"
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -403,6 +410,7 @@ query_params = { api-version = "2025-04-01-preview" }
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -437,6 +445,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
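
`skip_serializing_if = "std::ops::Not::not"` keeps the default `false` out of serialized provider definitions; it works because `Not` is implemented for `&bool`. A minimal sketch with a hypothetical one-field struct (assumes `serde` and, purely for illustration, `serde_json`):

use serde::Serialize;

#[derive(Serialize)]
struct Flags {
    // Serialized only when true: serde calls std::ops::Not::not(&value) and
    // skips the field when that returns true, i.e. when the value is false.
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    force_datadog_tracing: bool,
}

fn main() {
    let on = serde_json::to_string(&Flags { force_datadog_tracing: true }).unwrap();
    assert_eq!(on, r#"{"force_datadog_tracing":true}"#);

    let off = serde_json::to_string(&Flags { force_datadog_tracing: false }).unwrap();
    assert_eq!(off, "{}");
}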

View File

@@ -437,6 +437,7 @@ mod tests {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
}
}
@@ -497,6 +498,42 @@ mod tests {
);
}
#[tokio::test]
async fn refresh_available_models_never_adds_datadog_trace_headers() {
let server = MockServer::start().await;
let models_mock = mount_models_once(&server, ModelsResponse { models: Vec::new() }).await;
let codex_home = tempdir().expect("temp dir");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load default test config");
config.features.enable(Feature::RemoteModels);
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let mut provider = provider_for(server.uri());
provider.force_datadog_tracing = true;
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
manager
.refresh_available_models(&config, RefreshStrategy::OnlineIfUncached)
.await
.expect("refresh succeeds");
let requests = models_mock.requests();
assert_eq!(requests.len(), 1);
assert!(requests[0].headers.get("x-datadog-trace-id").is_none());
assert!(requests[0].headers.get("x-datadog-parent-id").is_none());
assert!(
requests[0]
.headers
.get("x-datadog-sampling-priority")
.is_none()
);
}
#[tokio::test]
async fn refresh_available_models_uses_cache_when_fresh() {
let server = MockServer::start().await;

View File

@@ -86,8 +86,21 @@ fn read_marker(path: &AbsolutePathBuf) -> Result<String, SystemSkillsError> {
}
fn embedded_system_skills_fingerprint() -> String {
let mut items = Vec::new();
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
let mut items: Vec<(String, Option<u64>)> = SYSTEM_SKILLS_DIR
.entries()
.iter()
.map(|entry| match entry {
include_dir::DirEntry::Dir(dir) => (dir.path().to_string_lossy().to_string(), None),
include_dir::DirEntry::File(file) => {
let mut file_hasher = DefaultHasher::new();
file.contents().hash(&mut file_hasher);
(
file.path().to_string_lossy().to_string(),
Some(file_hasher.finish()),
)
}
})
.collect();
items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
let mut hasher = DefaultHasher::new();
@@ -99,25 +112,6 @@ fn embedded_system_skills_fingerprint() -> String {
format!("{:x}", hasher.finish())
}
fn collect_fingerprint_items(dir: &Dir<'_>, items: &mut Vec<(String, Option<u64>)>) {
for entry in dir.entries() {
match entry {
include_dir::DirEntry::Dir(subdir) => {
items.push((subdir.path().to_string_lossy().to_string(), None));
collect_fingerprint_items(subdir, items);
}
include_dir::DirEntry::File(file) => {
let mut file_hasher = DefaultHasher::new();
file.contents().hash(&mut file_hasher);
items.push((
file.path().to_string_lossy().to_string(),
Some(file_hasher.finish()),
));
}
}
}
}
/// Writes the embedded `include_dir::Dir` to disk under `dest`.
///
/// Preserves the embedded directory structure.
@@ -169,28 +163,3 @@ impl SystemSkillsError {
Self::Io { action, source }
}
}
#[cfg(test)]
mod tests {
use super::SYSTEM_SKILLS_DIR;
use super::collect_fingerprint_items;
#[test]
fn fingerprint_traverses_nested_entries() {
let mut items = Vec::new();
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
let mut paths: Vec<String> = items.into_iter().map(|(path, _)| path).collect();
paths.sort_unstable();
assert!(
paths
.binary_search_by(|probe| probe.as_str().cmp("skill-creator/SKILL.md"))
.is_ok()
);
assert!(
paths
.binary_search_by(|probe| probe.as_str().cmp("skill-creator/scripts/init_skill.py"))
.is_ok()
);
}
}
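
The inlined fingerprint keeps the previous scheme: hash each embedded file's bytes, pair the digest with its path (directories get `None`), sort by path, then hash the sorted sequence, making the result independent of embed order. A minimal sketch of the same idea over plain in-memory data:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Deterministic, order-independent fingerprint of (path, contents) pairs.
fn fingerprint(files: &[(&str, &[u8])]) -> String {
    let mut items: Vec<(String, u64)> = files
        .iter()
        .map(|(path, contents)| {
            let mut file_hasher = DefaultHasher::new();
            contents.hash(&mut file_hasher);
            ((*path).to_string(), file_hasher.finish())
        })
        .collect();
    items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
    let mut hasher = DefaultHasher::new();
    items.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

fn main() {
    let a = fingerprint(&[("x/SKILL.md", b"one".as_slice()), ("y/SKILL.md", b"two".as_slice())]);
    let b = fingerprint(&[("y/SKILL.md", b"two".as_slice()), ("x/SKILL.md", b"one".as_slice())]);
    assert_eq!(a, b); // same files, different order, same fingerprint
}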

View File

@@ -48,7 +48,7 @@ pub(crate) async fn handle_output_item_done(
previously_active_item: Option<TurnItem>,
) -> Result<OutputItemResult> {
let mut output = OutputItemResult::default();
let plan_mode = ctx.turn_context.collaboration_mode.mode == ModeKind::Plan;
let plan_mode = ctx.turn_context.collaboration_mode_kind == ModeKind::Plan;
match ToolRouter::build_tool_call(ctx.sess.as_ref(), item.clone()).await {
// The model emitted a tool call; log it, persist the item immediately, and queue the tool execution.

View File

@@ -67,7 +67,7 @@ impl SessionTask for UserShellCommandTask {
let event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode.mode,
collaboration_mode_kind: turn_context.collaboration_mode_kind,
});
let session = session.clone_session();
session.send_event(turn_context.as_ref(), event).await;

View File

@@ -104,7 +104,7 @@ pub(crate) async fn handle_update_plan(
arguments: String,
_call_id: String,
) -> Result<String, FunctionCallError> {
if turn_context.collaboration_mode.mode == ModeKind::Plan {
if turn_context.collaboration_mode_kind == ModeKind::Plan {
return Err(FunctionCallError::RespondToModel(
"update_plan is a TODO/checklist tool and is not allowed in Plan mode".to_string(),
));

View File

@@ -61,6 +61,7 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = match TempDir::new() {

View File

@@ -60,6 +60,7 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = match TempDir::new() {

View File

@@ -26,6 +26,10 @@ use futures::StreamExt;
use tempfile::TempDir;
use wiremock::matchers::header;
const X_DATADOG_TRACE_ID_HEADER: &str = "x-datadog-trace-id";
const X_DATADOG_PARENT_ID_HEADER: &str = "x-datadog-parent-id";
const X_DATADOG_SAMPLING_PRIORITY_HEADER: &str = "x-datadog-sampling-priority";
#[tokio::test]
async fn responses_stream_includes_subagent_header_on_review() {
core_test_support::skip_if_no_network!();
@@ -58,6 +62,7 @@ async fn responses_stream_includes_subagent_header_on_review() {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = TempDir::new().expect("failed to create TempDir");
@@ -156,6 +161,7 @@ async fn responses_stream_includes_subagent_header_on_other() {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = TempDir::new().expect("failed to create TempDir");
@@ -283,6 +289,110 @@ async fn responses_stream_includes_web_search_eligible_header_false_when_disable
);
}
#[tokio::test]
async fn responses_stream_includes_datadog_trace_headers_when_enabled() {
core_test_support::skip_if_no_network!();
let server = responses::start_mock_server().await;
let response_body = responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_completed("resp-1"),
]);
let request_recorder = responses::mount_sse_once(&server, response_body).await;
let provider = ModelProviderInfo {
name: "mock".into(),
base_url: Some(format!("{}/v1", server.uri())),
env_key: None,
env_key_instructions: None,
experimental_bearer_token: None,
wire_api: WireApi::Responses,
query_params: None,
http_headers: None,
env_http_headers: None,
request_max_retries: Some(0),
stream_max_retries: Some(0),
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: true,
};
let codex_home = TempDir::new().expect("failed to create TempDir");
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
let summary = config.model_reasoning_summary;
let model = ModelsManager::get_model_offline(config.model.as_deref());
config.model = Some(model.clone());
let config = Arc::new(config);
let conversation_id = ThreadId::new();
let auth_mode = AuthMode::Chatgpt;
let session_source = SessionSource::SubAgent(SubAgentSource::Review);
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let otel_manager = OtelManager::new(
conversation_id,
model.as_str(),
model_info.slug.as_str(),
None,
Some("test@test.com".to_string()),
Some(auth_mode),
false,
"test".to_string(),
session_source.clone(),
);
let mut client_session = ModelClient::new(
Arc::clone(&config),
None,
model_info,
otel_manager,
provider,
effort,
summary,
conversation_id,
session_source,
TransportManager::new(),
)
.new_session();
let mut prompt = Prompt::default();
prompt.input = vec![ResponseItem::Message {
id: None,
role: "user".into(),
content: vec![ContentItem::InputText {
text: "hello".into(),
}],
end_turn: None,
}];
let mut stream = client_session.stream(&prompt).await.expect("stream failed");
while let Some(event) = stream.next().await {
if matches!(event, Ok(ResponseEvent::Completed { .. })) {
break;
}
}
let request = request_recorder.single_request();
let trace_id = request
.header(X_DATADOG_TRACE_ID_HEADER)
.expect("trace id header");
let parent_id = request
.header(X_DATADOG_PARENT_ID_HEADER)
.expect("parent id header");
assert!(trace_id.parse::<u64>().is_ok_and(|id| id != 0));
assert!(parent_id.parse::<u64>().is_ok_and(|id| id != 0));
assert_eq!(
request
.header(X_DATADOG_SAMPLING_PRIORITY_HEADER)
.as_deref(),
Some("2")
);
}
#[tokio::test]
async fn responses_respects_model_info_overrides_from_config() {
core_test_support::skip_if_no_network!();
@@ -310,6 +420,7 @@ async fn responses_respects_model_info_overrides_from_config() {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = TempDir::new().expect("failed to create TempDir");

View File

@@ -1152,6 +1152,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let codex_home = TempDir::new().unwrap();
@@ -1674,6 +1675,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
// Init session
@@ -1755,6 +1757,7 @@ async fn env_var_overrides_loaded_auth() {
stream_idle_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
// Init session

View File

@@ -35,6 +35,9 @@ use tempfile::TempDir;
use tracing_test::traced_test;
const MODEL: &str = "gpt-5.2-codex";
const X_DATADOG_TRACE_ID_HEADER: &str = "x-datadog-trace-id";
const X_DATADOG_PARENT_ID_HEADER: &str = "x-datadog-parent-id";
const X_DATADOG_SAMPLING_PRIORITY_HEADER: &str = "x-datadog-sampling-priority";
struct WebsocketTestHarness {
_codex_home: TempDir,
@@ -136,6 +139,42 @@ async fn responses_websocket_emits_reasoning_included_event() {
server.shutdown().await;
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn responses_websocket_includes_datadog_trace_headers_when_enabled() {
skip_if_no_network!();
let server = start_websocket_server(vec![vec![vec![
ev_response_created("resp-1"),
ev_completed("resp-1"),
]]])
.await;
let mut provider = websocket_provider(&server);
provider.force_datadog_tracing = true;
let harness = websocket_harness_with_provider(provider).await;
let mut session = harness.client.new_session();
let prompt = prompt_with_input(vec![message_item("hello")]);
stream_until_complete(&mut session, &prompt).await;
let handshake = server.single_handshake();
let trace_id = handshake
.header(X_DATADOG_TRACE_ID_HEADER)
.expect("trace id header");
let parent_id = handshake
.header(X_DATADOG_PARENT_ID_HEADER)
.expect("parent id header");
assert!(trace_id.parse::<u64>().is_ok_and(|id| id != 0));
assert!(parent_id.parse::<u64>().is_ok_and(|id| id != 0));
assert_eq!(
handshake
.header(X_DATADOG_SAMPLING_PRIORITY_HEADER)
.as_deref(),
Some("2")
);
server.shutdown().await;
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn responses_websocket_appends_on_prefix() {
skip_if_no_network!();
@@ -236,11 +275,16 @@ fn websocket_provider(server: &WebSocketTestServer) -> ModelProviderInfo {
stream_idle_timeout_ms: Some(5_000),
requires_openai_auth: false,
supports_websockets: true,
force_datadog_tracing: false,
}
}
async fn websocket_harness(server: &WebSocketTestServer) -> WebsocketTestHarness {
let provider = websocket_provider(server);
websocket_harness_with_provider(provider).await
}
async fn websocket_harness_with_provider(provider: ModelProviderInfo) -> WebsocketTestHarness {
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home).await;
config.model = Some(MODEL.to_string());

View File

@@ -22,12 +22,9 @@ fn sse_completed(id: &str) -> String {
sse(vec![ev_response_created(id), ev_completed(id)])
}
fn collab_mode_with_mode_and_instructions(
mode: ModeKind,
instructions: Option<&str>,
) -> CollaborationMode {
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
CollaborationMode {
mode,
mode: ModeKind::Custom,
settings: Settings {
model: "gpt-5.1".to_string(),
reasoning_effort: None,
@@ -36,10 +33,6 @@ fn collab_mode_with_mode_and_instructions(
}
}
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
collab_mode_with_mode_and_instructions(ModeKind::Custom, instructions)
}
fn developer_texts(input: &[Value]) -> Vec<String> {
input
.iter()
@@ -178,7 +171,7 @@ async fn collaboration_instructions_added_on_user_turn() -> Result<()> {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_then_next_turn_uses_updated_collaboration_instructions() -> Result<()> {
async fn override_then_user_turn_uses_updated_collaboration_instructions() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -203,12 +196,20 @@ async fn override_then_next_turn_uses_updated_collaboration_instructions() -> Re
.await?;
test.codex
.submit(Op::UserInput {
.submit(Op::UserTurn {
items: vec![UserInput::Text {
text: "hello".into(),
text_elements: Vec::new(),
}],
cwd: test.config.cwd.clone(),
approval_policy: test.config.approval_policy.value(),
sandbox_policy: test.config.sandbox_policy.get().clone(),
model: test.session_configured.model.clone(),
effort: None,
summary: test.config.model_reasoning_summary,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
@@ -271,7 +272,7 @@ async fn user_turn_overrides_collaboration_instructions_after_override() -> Resu
let dev_texts = developer_texts(&input);
let base_text = collab_xml(base_text);
let turn_text = collab_xml(turn_text);
assert_eq!(count_exact(&dev_texts, &base_text), 0);
assert_eq!(count_exact(&dev_texts, &base_text), 1);
assert_eq!(count_exact(&dev_texts, &turn_text), 1);
Ok(())
@@ -418,159 +419,6 @@ async fn collaboration_mode_update_noop_does_not_append() -> Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_emits_new_instruction_message_when_mode_changes() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;
let test = test_codex().build(&server).await?;
let code_text = "code mode instructions";
let plan_text = "plan mode instructions";
test.codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
windows_sandbox_level: None,
model: None,
effort: None,
summary: None,
collaboration_mode: Some(collab_mode_with_mode_and_instructions(
ModeKind::Code,
Some(code_text),
)),
personality: None,
})
.await?;
test.codex
.submit(Op::UserInput {
items: vec![UserInput::Text {
text: "hello 1".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
test.codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
windows_sandbox_level: None,
model: None,
effort: None,
summary: None,
collaboration_mode: Some(collab_mode_with_mode_and_instructions(
ModeKind::Plan,
Some(plan_text),
)),
personality: None,
})
.await?;
test.codex
.submit(Op::UserInput {
items: vec![UserInput::Text {
text: "hello 2".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
let input = req2.single_request().input();
let dev_texts = developer_texts(&input);
let code_text = collab_xml(code_text);
let plan_text = collab_xml(plan_text);
assert_eq!(count_exact(&dev_texts, &code_text), 1);
assert_eq!(count_exact(&dev_texts, &plan_text), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_noop_does_not_append_when_mode_is_unchanged() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;
let test = test_codex().build(&server).await?;
let collab_text = "mode-stable instructions";
test.codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
windows_sandbox_level: None,
model: None,
effort: None,
summary: None,
collaboration_mode: Some(collab_mode_with_mode_and_instructions(
ModeKind::Code,
Some(collab_text),
)),
personality: None,
})
.await?;
test.codex
.submit(Op::UserInput {
items: vec![UserInput::Text {
text: "hello 1".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
test.codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
windows_sandbox_level: None,
model: None,
effort: None,
summary: None,
collaboration_mode: Some(collab_mode_with_mode_and_instructions(
ModeKind::Code,
Some(collab_text),
)),
personality: None,
})
.await?;
test.codex
.submit(Op::UserInput {
items: vec![UserInput::Text {
text: "hello 2".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
let input = req2.single_request().input();
let dev_texts = developer_texts(&input);
let collab_text = collab_xml(collab_text);
assert_eq!(count_exact(&dev_texts, &collab_text), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn resume_replays_collaboration_instructions() -> Result<()> {
skip_if_no_network!(Ok(()));

View File

@@ -34,6 +34,7 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
.with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
.with_config(|config| {
config.features.enable(Feature::RemoteCompaction);
config.model_provider.force_datadog_tracing = true;
}),
)
.await?;
@@ -108,6 +109,9 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
compact_request.header("authorization").as_deref(),
Some("Bearer Access Token")
);
assert_eq!(compact_request.header("x-datadog-trace-id"), None);
assert_eq!(compact_request.header("x-datadog-parent-id"), None);
assert_eq!(compact_request.header("x-datadog-sampling-priority"), None);
let compact_body = compact_request.body_json();
assert_eq!(
compact_body.get("model").and_then(|v| v.as_str()),

View File

@@ -18,6 +18,7 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use std::collections::HashSet;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
@@ -103,7 +104,7 @@ fn rollout_environment_texts(text: &str) -> Vec<String> {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_without_user_turn_does_not_record_permissions_update() -> Result<()> {
async fn override_turn_context_records_permissions_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -137,15 +138,19 @@ async fn override_turn_context_without_user_turn_does_not_record_permissions_upd
.filter(|text| text.contains("`approval_policy`"))
.collect();
assert!(
approval_texts.is_empty(),
"did not expect permissions updates before a new user turn: {approval_texts:?}"
approval_texts
.iter()
.any(|text| text.contains("`approval_policy` is `never`")),
"expected updated approval policy instructions in rollout"
);
let unique: HashSet<&String> = approval_texts.iter().copied().collect();
assert_eq!(unique.len(), 2);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_without_user_turn_does_not_record_environment_update() -> Result<()> {
async fn override_turn_context_records_environment_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -172,16 +177,17 @@ async fn override_turn_context_without_user_turn_does_not_record_environment_upd
let rollout_path = test.codex.rollout_path().expect("rollout path");
let rollout_text = read_rollout_text(&rollout_path).await?;
let env_texts = rollout_environment_texts(&rollout_text);
let new_cwd_text = new_cwd.path().display().to_string();
assert!(
env_texts.is_empty(),
"did not expect environment updates before a new user turn: {env_texts:?}"
env_texts.iter().any(|text| text.contains(&new_cwd_text)),
"expected environment update with new cwd in rollout"
);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_without_user_turn_does_not_record_collaboration_update() -> Result<()> {
async fn override_turn_context_records_collaboration_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -214,7 +220,7 @@ async fn override_turn_context_without_user_turn_does_not_record_collaboration_u
.iter()
.filter(|text| text.as_str() == collab_text.as_str())
.count();
assert_eq!(collab_count, 0);
assert_eq!(collab_count, 1);
Ok(())
}

View File

@@ -136,7 +136,7 @@ async fn permissions_message_added_on_override_change() -> Result<()> {
let permissions_2 = permissions_texts(input2);
assert_eq!(permissions_1.len(), 1);
assert_eq!(permissions_2.len(), 2);
assert_eq!(permissions_2.len(), 3);
let unique = permissions_2.into_iter().collect::<HashSet<String>>();
assert_eq!(unique.len(), 2);
@@ -267,7 +267,7 @@ async fn resume_replays_permissions_messages() -> Result<()> {
let body3 = req3.single_request().body_json();
let input = body3["input"].as_array().expect("input array");
let permissions = permissions_texts(input);
assert_eq!(permissions.len(), 3);
assert_eq!(permissions.len(), 4);
let unique = permissions.into_iter().collect::<HashSet<String>>();
assert_eq!(unique.len(), 2);
@@ -337,7 +337,7 @@ async fn resume_and_fork_append_permissions_messages() -> Result<()> {
let body2 = req2.single_request().body_json();
let input2 = body2["input"].as_array().expect("input array");
let permissions_base = permissions_texts(input2);
assert_eq!(permissions_base.len(), 2);
assert_eq!(permissions_base.len(), 3);
builder = builder.with_config(|config| {
config.approval_policy = Constrained::allow_any(AskForApproval::UnlessTrusted);

View File

@@ -272,97 +272,6 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_turn_personality_same_value_does_not_add_update_message() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
let resp_mock = mount_sse_sequence(
&server,
vec![sse_completed("resp-1"), sse_completed("resp-2")],
)
.await;
let mut builder = test_codex()
.with_model("exp-codex-personality")
.with_config(|config| {
config.features.disable(Feature::RemoteModels);
config.features.enable(Feature::Personality);
config.personality = Some(Personality::Pragmatic);
});
let test = builder.build(&server).await?;
test.codex
.submit(Op::UserTurn {
items: vec![UserInput::Text {
text: "hello".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
cwd: test.cwd_path().to_path_buf(),
approval_policy: test.config.approval_policy.value(),
sandbox_policy: SandboxPolicy::ReadOnly,
model: test.session_configured.model.clone(),
effort: test.config.model_reasoning_effort,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
personality: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
test.codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
windows_sandbox_level: None,
model: None,
effort: None,
summary: None,
collaboration_mode: None,
personality: Some(Personality::Pragmatic),
})
.await?;
test.codex
.submit(Op::UserTurn {
items: vec![UserInput::Text {
text: "hello".into(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
cwd: test.cwd_path().to_path_buf(),
approval_policy: test.config.approval_policy.value(),
sandbox_policy: SandboxPolicy::ReadOnly,
model: test.session_configured.model.clone(),
effort: test.config.model_reasoning_effort,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
personality: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
let requests = resp_mock.requests();
assert_eq!(requests.len(), 2, "expected two requests");
let request = requests
.last()
.expect("expected second request after personality override");
let developer_texts = request.message_input_texts("developer");
let personality_text = developer_texts
.iter()
.find(|text| text.contains("<personality_spec>"));
assert!(
personality_text.is_none(),
"expected no personality preamble for unchanged personality, got {personality_text:?}"
);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
let codex_home = TempDir::new().expect("create temp dir");

View File

@@ -388,14 +388,17 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
});
let expected_permissions_msg = body1["input"][0].clone();
let body1_input = body1["input"].as_array().expect("input array");
// After overriding the turn context, emit one updated permissions message.
// After overriding the turn context, emit two updated permissions messages.
let expected_permissions_msg_2 = body2["input"][body1_input.len()].clone();
let expected_permissions_msg_3 = body2["input"][body1_input.len() + 1].clone();
assert_ne!(
expected_permissions_msg_2, expected_permissions_msg,
"expected updated permissions message after override"
);
assert_eq!(expected_permissions_msg_2, expected_permissions_msg_3);
let mut expected_body2 = body1_input.to_vec();
expected_body2.push(expected_permissions_msg_2);
expected_body2.push(expected_permissions_msg_3);
expected_body2.push(expected_user_message_2);
assert_eq!(body2["input"], serde_json::Value::Array(expected_body2));

View File

@@ -74,6 +74,7 @@ async fn continue_after_stream_error() {
stream_idle_timeout_ms: Some(2_000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let TestCodex { codex, .. } = test_codex()

View File

@@ -82,6 +82,7 @@ async fn retries_on_early_close() {
stream_idle_timeout_ms: Some(2000),
requires_openai_auth: false,
supports_websockets: false,
force_datadog_tracing: false,
};
let TestCodex { codex, .. } = test_codex()

View File

@@ -105,28 +105,35 @@ fn append_locked_line(policy_path: &Path, line: &str) -> Result<(), AmendError>
source,
})?;
file.seek(SeekFrom::Start(0))
.map_err(|source| AmendError::SeekPolicyFile {
let len = file
.metadata()
.map_err(|source| AmendError::PolicyMetadata {
path: policy_path.to_path_buf(),
source,
})?;
let mut contents = String::new();
file.read_to_string(&mut contents)
.map_err(|source| AmendError::ReadPolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
})?
.len();
if contents.lines().any(|existing| existing == line) {
return Ok(());
}
if !contents.is_empty() && !contents.ends_with('\n') {
file.write_all(b"\n")
.map_err(|source| AmendError::WritePolicyFile {
// Ensure file ends in a newline before appending.
if len > 0 {
file.seek(SeekFrom::End(-1))
.map_err(|source| AmendError::SeekPolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
let mut last = [0; 1];
file.read_exact(&mut last)
.map_err(|source| AmendError::ReadPolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
if last[0] != b'\n' {
file.write_all(b"\n")
.map_err(|source| AmendError::WritePolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
}
}
file.write_all(format!("{line}\n").as_bytes())
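
The rewritten helper no longer reads the whole policy file just to decide whether a separating newline is needed: it checks the length and final byte instead. A minimal sketch of that check in isolation, without the real helper's locking and error wrapping:

use std::fs::OpenOptions;
use std::io::{Read, Seek, SeekFrom, Write};
use std::path::Path;

// Append `line`, first adding a newline if the file is non-empty and its
// last byte is not already b'\n'.
fn append_line(path: &Path, line: &str) -> std::io::Result<()> {
    let mut file = OpenOptions::new()
        .create(true)
        .read(true)
        .append(true)
        .open(path)?;
    let len = file.metadata()?.len();
    if len > 0 {
        // Seek only moves the read cursor; append-mode writes always go to EOF.
        file.seek(SeekFrom::End(-1))?;
        let mut last = [0u8; 1];
        file.read_exact(&mut last)?;
        if last[0] != b'\n' {
            file.write_all(b"\n")?;
        }
    }
    file.write_all(format!("{line}\n").as_bytes())
}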

View File

@@ -1,5 +1,4 @@
use std::any::Any;
use std::fs;
use std::sync::Arc;
use anyhow::Context;
@@ -11,12 +10,10 @@ use codex_execpolicy::Policy;
use codex_execpolicy::PolicyParser;
use codex_execpolicy::RuleMatch;
use codex_execpolicy::RuleRef;
use codex_execpolicy::blocking_append_allow_prefix_rule;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
fn tokens(cmd: &[&str]) -> Vec<String> {
cmd.iter().map(std::string::ToString::to_string).collect()
@@ -49,24 +46,6 @@ fn rule_snapshots(rules: &[RuleRef]) -> Vec<RuleSnapshot> {
.collect()
}
#[test]
fn append_allow_prefix_rule_dedupes_existing_rule() -> Result<()> {
let tmp = tempdir().context("create temp dir")?;
let policy_path = tmp.path().join("rules").join("default.rules");
let prefix = tokens(&["python3"]);
blocking_append_allow_prefix_rule(&policy_path, &prefix)?;
blocking_append_allow_prefix_rule(&policy_path, &prefix)?;
let contents = fs::read_to_string(&policy_path).context("read policy")?;
assert_eq!(
contents,
r#"prefix_rule(pattern=["python3"], decision="allow")
"#
);
Ok(())
}
#[test]
fn basic_match() -> Result<()> {
let policy_src = r#"

View File

@@ -233,13 +233,10 @@ impl DeveloperInstructions {
if !request_rule_enabled {
APPROVAL_POLICY_ON_REQUEST.to_string()
} else {
let command_prefixes =
format_allow_prefixes(exec_policy.get_allowed_prefixes());
let command_prefixes = format_allow_prefixes(exec_policy);
match command_prefixes {
Some(prefixes) => {
format!(
"{APPROVAL_POLICY_ON_REQUEST_RULE}\nApproved command prefixes:\n{prefixes}"
)
format!("{APPROVAL_POLICY_ON_REQUEST_RULE}\n{prefixes}")
}
None => APPROVAL_POLICY_ON_REQUEST_RULE.to_string(),
}
@@ -374,51 +371,20 @@ impl DeveloperInstructions {
}
}
const MAX_RENDERED_PREFIXES: usize = 100;
const MAX_ALLOW_PREFIX_TEXT_BYTES: usize = 5000;
const TRUNCATED_MARKER: &str = "...\n[Some commands were truncated]";
pub fn format_allow_prefixes(prefixes: Vec<Vec<String>>) -> Option<String> {
let mut truncated = false;
if prefixes.len() > MAX_RENDERED_PREFIXES {
truncated = true;
}
let mut prefixes = prefixes;
prefixes.sort_by(|a, b| {
a.len()
.cmp(&b.len())
.then_with(|| prefix_combined_str_len(a).cmp(&prefix_combined_str_len(b)))
.then_with(|| a.cmp(b))
});
let full_text = prefixes
pub fn render_command_prefix_list<I, P>(prefixes: I) -> Option<String>
where
I: IntoIterator<Item = P>,
P: AsRef<[String]>,
{
let lines = prefixes
.into_iter()
.take(MAX_RENDERED_PREFIXES)
.map(|prefix| format!("- {}", render_command_prefix(&prefix)))
.collect::<Vec<_>>()
.join("\n");
// truncate to last UTF8 char
let mut output = full_text;
let byte_idx = output
.char_indices()
.nth(MAX_ALLOW_PREFIX_TEXT_BYTES)
.map(|(i, _)| i);
if let Some(byte_idx) = byte_idx {
truncated = true;
output = output[..byte_idx].to_string();
.map(|prefix| format!("- {}", render_command_prefix(prefix.as_ref())))
.collect::<Vec<_>>();
if lines.is_empty() {
return None;
}
if truncated {
Some(format!("{output}{TRUNCATED_MARKER}"))
} else {
Some(output)
}
}
fn prefix_combined_str_len(prefix: &[String]) -> usize {
prefix.iter().map(String::len).sum()
Some(lines.join("\n"))
}
fn render_command_prefix(prefix: &[String]) -> String {
@@ -430,6 +396,12 @@ fn render_command_prefix(prefix: &[String]) -> String {
format!("[{tokens}]")
}
fn format_allow_prefixes(exec_policy: &Policy) -> Option<String> {
let prefixes = exec_policy.get_allowed_prefixes();
let lines = render_command_prefix_list(prefixes)?;
Some(format!("Approved command prefixes:\n{lines}"))
}
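
`render_command_prefix_list` is generic over anything yielding `AsRef<[String]>`, which is what lets the execpolicy amendment site earlier in this diff pass a one-element array of borrowed slices while other callers pass `Vec<Vec<String>>`. A self-contained sketch of both call shapes, with a hypothetical re-creation of `render_command_prefix` (the real one is shown only partially here):

fn render_command_prefix(prefix: &[String]) -> String {
    // Hypothetical re-creation: Debug-quote each token and join, matching the
    // `["git", "pull"]` shape asserted in the tests below.
    let tokens = prefix
        .iter()
        .map(|token| format!("{token:?}"))
        .collect::<Vec<_>>()
        .join(", ");
    format!("[{tokens}]")
}

const MAX_RENDERED_PREFIXES: usize = 100;

fn render_command_prefix_list<I, P>(prefixes: I) -> Option<String>
where
    I: IntoIterator<Item = P>,
    P: AsRef<[String]>,
{
    let lines = prefixes
        .into_iter()
        .take(MAX_RENDERED_PREFIXES)
        .map(|prefix| format!("- {}", render_command_prefix(prefix.as_ref())))
        .collect::<Vec<_>>();
    if lines.is_empty() {
        return None;
    }
    Some(lines.join("\n"))
}

fn main() {
    // Owned prefixes, as a policy's allowed-prefix list might provide.
    let owned: Vec<Vec<String>> = vec![vec!["git".into(), "status".into()]];
    assert_eq!(
        render_command_prefix_list(owned).as_deref(),
        Some(r#"- ["git", "status"]"#)
    );
    // Borrowed one-off prefix, as at the amendment call site.
    let amendment: Vec<String> = vec!["python3".into()];
    assert_eq!(
        render_command_prefix_list([amendment.as_slice()]).as_deref(),
        Some(r#"- ["python3"]"#)
    );
}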
impl From<DeveloperInstructions> for ResponseItem {
fn from(di: DeveloperInstructions) -> Self {
ResponseItem::Message {
@@ -1028,62 +1000,6 @@ mod tests {
assert!(text.contains(r#"["git", "pull"]"#));
}
#[test]
fn render_command_prefix_list_sorts_by_len_then_total_len_then_alphabetical() {
let prefixes = vec![
vec!["b".to_string(), "zz".to_string()],
vec!["aa".to_string()],
vec!["b".to_string()],
vec!["a".to_string(), "b".to_string(), "c".to_string()],
vec!["a".to_string()],
vec!["b".to_string(), "a".to_string()],
];
let output = format_allow_prefixes(prefixes).expect("rendered list");
assert_eq!(
output,
r#"- ["a"]
- ["b"]
- ["aa"]
- ["b", "a"]
- ["b", "zz"]
- ["a", "b", "c"]"#
.to_string(),
);
}
#[test]
fn render_command_prefix_list_limits_output_to_max_prefixes() {
let prefixes = (0..(MAX_RENDERED_PREFIXES + 5))
.map(|i| vec![format!("{i:03}")])
.collect::<Vec<_>>();
let output = format_allow_prefixes(prefixes).expect("rendered list");
assert_eq!(output.ends_with(TRUNCATED_MARKER), true);
eprintln!("output: {output}");
assert_eq!(output.lines().count(), MAX_RENDERED_PREFIXES + 1);
}
#[test]
fn format_allow_prefixes_limits_output() {
let mut exec_policy = Policy::empty();
for i in 0..200 {
exec_policy
.add_prefix_rule(
&[format!("tool-{i:03}"), "x".repeat(500)],
codex_execpolicy::Decision::Allow,
)
.expect("add rule");
}
let output =
format_allow_prefixes(exec_policy.get_allowed_prefixes()).expect("formatted prefixes");
assert!(
output.len() <= MAX_ALLOW_PREFIX_TEXT_BYTES + TRUNCATED_MARKER.len(),
"output length exceeds expected limit: {output}",
);
}
#[test]
fn serializes_success_as_plain_string() -> Result<()> {
let item = ResponseInputItem::FunctionCallOutput {