Compare commits

...

6 Commits

Author SHA1 Message Date
Gav Verma
96e1191d01 chore: Add debug logs when residency routes 401 2026-02-01 21:12:03 -08:00
Dylan Hurd
6c22360bcb fix(core) Deduplicate prefix_rules before appending (#10309)
## Summary
We ideally shouldn't make it to this point in the first place, but if we
do try to append a rule that already exists, we shouldn't append the
same rule twice.

## Testing
- [x] Added unit test for this case
2026-02-01 20:30:38 -08:00
pakrym-oai
03fcd12e77 Do not append items on override turn context (#10354) 2026-02-01 18:51:26 -08:00
Dylan Hurd
8b95d3e082 fix(rules) Limit rules listed in conversation (#10351)
## Summary
We should probably warn users that they have a million rules, and help
clean them up. But for now, we should handle this unbounded case.

Limit rules listed in conversations, with shortest / broadest rules
first.

## Testing
- [x] Updated unit tests
2026-02-02 02:26:15 +00:00
Gav Verma
5fb46187b2 fix: System skills marker includes nested folders recursively (#10350)
Updated system skills bundled with Codex were not correctly replacing
the user's skills in their .system folder.

- Fix `.codex-system-skills.marker` not updating by hashing embedded
system skills recursively (nested dirs + file contents), so updates
trigger a reinstall.
- Added a build Cargo hook to rerun if there are changes in
`src/skills/assets/samples/*`, ensuring embedded skill updates rebuild
correctly under caching.
- Add a small unit test to ensure nested entries are included in the
fingerprint.
2026-02-01 18:17:32 -08:00
Charley Cunningham
d3514bbdd2 Bump thread updated_at on unarchive to refresh sidebar ordering (#10280)
## Summary
- Touch restored rollout files on `thread/unarchive` so `updatedAt`
reflects the unarchive time.
- Add a regression test to ensure unarchiving bumps `updated_at` from an
old mtime.

## Notes
This fixes the UX issue where unarchived old threads don’t reappear near
the top of recent threads.
2026-02-01 12:53:47 -08:00
21 changed files with 599 additions and 162 deletions

View File

@@ -202,6 +202,8 @@ use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::io::Error as IoError;
use std::path::Path;
use std::path::PathBuf;
@@ -209,6 +211,7 @@ use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::SystemTime;
use tokio::sync::Mutex;
use tokio::sync::broadcast;
use tokio::sync::oneshot;
@@ -1978,6 +1981,28 @@ impl CodexMessageProcessor {
message: format!("failed to unarchive thread: {err}"),
data: None,
})?;
tokio::task::spawn_blocking({
let restored_path = restored_path.clone();
move || -> std::io::Result<()> {
let times = FileTimes::new().set_modified(SystemTime::now());
OpenOptions::new()
.append(true)
.open(&restored_path)?
.set_times(times)?;
Ok(())
}
})
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to update unarchived thread timestamp: {err}"),
data: None,
})?
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to update unarchived thread timestamp: {err}"),
data: None,
})?;
if let Some(ctx) = state_db_ctx {
let _ = ctx
.mark_unarchived(thread_id, restored_path.as_path())

View File

@@ -11,7 +11,11 @@ use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::ThreadUnarchiveResponse;
use codex_core::find_archived_thread_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::path::Path;
use std::time::Duration;
use std::time::SystemTime;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -62,6 +66,16 @@ async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result
archived_path.exists(),
"expected {archived_path_display} to exist"
);
let old_time = SystemTime::UNIX_EPOCH + Duration::from_secs(1);
let old_timestamp = old_time
.duration_since(SystemTime::UNIX_EPOCH)
.expect("old timestamp")
.as_secs() as i64;
let times = FileTimes::new().set_modified(old_time);
OpenOptions::new()
.append(true)
.open(&archived_path)?
.set_times(times)?;
let unarchive_id = mcp
.send_thread_unarchive_request(ThreadUnarchiveParams {
@@ -73,7 +87,13 @@ async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result
mcp.read_stream_until_response_message(RequestId::Integer(unarchive_id)),
)
.await??;
let _: ThreadUnarchiveResponse = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;
let ThreadUnarchiveResponse {
thread: unarchived_thread,
} = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;
assert!(
unarchived_thread.updated_at > old_timestamp,
"expected updated_at to be bumped on unarchive"
);
let rollout_path_display = rollout_path.display();
assert!(

View File

@@ -111,10 +111,41 @@ impl CodexRequestBuilder {
}
pub async fn send(self) -> Result<Response, reqwest::Error> {
let residency_header = self
.builder
.try_clone()
.and_then(|builder| builder.build().ok())
.and_then(|request| {
request
.headers()
.get("x-openai-internal-codex-residency")
.and_then(|value| value.to_str().ok())
.map(std::borrow::ToOwned::to_owned)
});
let headers = trace_headers();
match self.builder.headers(headers).send().await {
Ok(response) => {
if is_codex_responses_path(&self.url) && response.status().as_u16() == 401 {
let cf_ray = response
.headers()
.get("cf-ray")
.and_then(|value| value.to_str().ok());
let auth_error = response
.headers()
.get("x-openai-authorization-error")
.and_then(|value| value.to_str().ok());
tracing::info!(
method = %self.method,
url = %self.url,
status = %response.status(),
residency_header_present = residency_header.is_some(),
residency_header_value = ?residency_header,
cf_ray,
auth_error,
"Codex responses request returned unauthorized"
);
}
tracing::debug!(
method = %self.method,
url = %self.url,
@@ -141,6 +172,11 @@ impl CodexRequestBuilder {
}
}
/// Returns `true` when `url` targets a Codex "responses" endpoint, i.e. it
/// contains either of the two known backend path fragments. Used to decide
/// whether a 401 deserves the extra residency-debug logging.
fn is_codex_responses_path(url: &str) -> bool {
    const RESPONSES_PATH_FRAGMENTS: [&str; 2] = [
        "/backend-api/codex/responses",
        "/chat/backend/api/codex/responses",
    ];
    RESPONSES_PATH_FRAGMENTS
        .iter()
        .any(|fragment| url.contains(fragment))
}
struct HeaderMapInjector<'a>(&'a mut HeaderMap);
impl<'a> Injector for HeaderMapInjector<'a> {

View File

@@ -3,6 +3,7 @@ edition.workspace = true
license.workspace = true
name = "codex-core"
version.workspace = true
build = "build.rs"
[lib]
doctest = false

27
codex-rs/core/build.rs Normal file
View File

@@ -0,0 +1,27 @@
use std::fs;
use std::path::Path;
/// Build script entry point: register `cargo:rerun-if-changed` triggers for the
/// embedded system-skill samples so that editing a bundled skill rebuilds the
/// crate (and thus refreshes the embedded `include_dir` contents).
fn main() {
    let samples_dir = Path::new("src/skills/assets/samples");
    // If the samples directory is missing (e.g. a stripped source tree) there is
    // nothing to watch; cargo falls back to its default rerun behavior.
    if samples_dir.exists() {
        println!("cargo:rerun-if-changed={}", samples_dir.display());
        visit_dir(samples_dir);
    }
}
/// Recursively prints a `cargo:rerun-if-changed` directive for every entry under
/// `dir`, so nested skill files also trigger rebuilds.
///
/// Read failures are deliberately ignored: a build script should degrade to
/// coarser change tracking rather than fail the build over an unreadable dir.
fn visit_dir(dir: &Path) {
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let entry_path = entry.path();
        println!("cargo:rerun-if-changed={}", entry_path.display());
        // Descend into subdirectories so deeply nested files are tracked too.
        if entry_path.is_dir() {
            visit_dir(&entry_path);
        }
    }
}

View File

@@ -51,6 +51,7 @@ use codex_protocol::items::PlanItem;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::format_allow_prefixes;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::HasLegacyEvent;
@@ -212,7 +213,6 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::render_command_prefix_list;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::user_input::UserInput;
@@ -490,7 +490,7 @@ pub(crate) struct TurnContext {
pub(crate) developer_instructions: Option<String>,
pub(crate) compact_prompt: Option<String>,
pub(crate) user_instructions: Option<String>,
pub(crate) collaboration_mode_kind: ModeKind,
pub(crate) collaboration_mode: CollaborationMode,
pub(crate) personality: Option<Personality>,
pub(crate) approval_policy: AskForApproval,
pub(crate) sandbox_policy: SandboxPolicy,
@@ -692,7 +692,7 @@ impl Session {
developer_instructions: session_configuration.developer_instructions.clone(),
compact_prompt: session_configuration.compact_prompt.clone(),
user_instructions: session_configuration.user_instructions.clone(),
collaboration_mode_kind: session_configuration.collaboration_mode.mode,
collaboration_mode: session_configuration.collaboration_mode.clone(),
personality: session_configuration.personality,
approval_policy: session_configuration.approval_policy.value(),
sandbox_policy: session_configuration.sandbox_policy.get().clone(),
@@ -1356,16 +1356,14 @@ impl Session {
fn build_collaboration_mode_update_item(
&self,
previous_collaboration_mode: &CollaborationMode,
next_collaboration_mode: Option<&CollaborationMode>,
previous: Option<&Arc<TurnContext>>,
next: &TurnContext,
) -> Option<ResponseItem> {
if let Some(next_mode) = next_collaboration_mode {
if previous_collaboration_mode == next_mode {
return None;
}
let prev = previous?;
if prev.collaboration_mode != next.collaboration_mode {
// If the next mode has empty developer instructions, this returns None and we emit no
// update, so prior collaboration instructions remain in the prompt history.
Some(DeveloperInstructions::from_collaboration_mode(next_mode)?.into())
Some(DeveloperInstructions::from_collaboration_mode(&next.collaboration_mode)?.into())
} else {
None
}
@@ -1375,8 +1373,6 @@ impl Session {
&self,
previous_context: Option<&Arc<TurnContext>>,
current_context: &TurnContext,
previous_collaboration_mode: &CollaborationMode,
next_collaboration_mode: Option<&CollaborationMode>,
) -> Vec<ResponseItem> {
let mut update_items = Vec::new();
if let Some(env_item) =
@@ -1389,10 +1385,9 @@ impl Session {
{
update_items.push(permissions_item);
}
if let Some(collaboration_mode_item) = self.build_collaboration_mode_update_item(
previous_collaboration_mode,
next_collaboration_mode,
) {
if let Some(collaboration_mode_item) =
self.build_collaboration_mode_update_item(previous_context, current_context)
{
update_items.push(collaboration_mode_item);
}
if let Some(personality_item) =
@@ -1522,7 +1517,7 @@ impl Session {
sub_id: &str,
amendment: &ExecPolicyAmendment,
) {
let Some(prefixes) = render_command_prefix_list([amendment.command.as_slice()]) else {
let Some(prefixes) = format_allow_prefixes(vec![amendment.command.clone()]) else {
warn!("execpolicy amendment for {sub_id} had no command prefix");
return;
};
@@ -2573,18 +2568,6 @@ mod handlers {
sub_id: String,
updates: SessionSettingsUpdate,
) {
let previous_context = sess
.new_default_turn_with_sub_id(sess.next_internal_sub_id())
.await;
let previous_collaboration_mode = sess
.state
.lock()
.await
.session_configuration
.collaboration_mode
.clone();
let next_collaboration_mode = updates.collaboration_mode.clone();
if let Err(err) = sess.update_settings(updates).await {
sess.send_event_raw(Event {
id: sub_id,
@@ -2594,24 +2577,6 @@ mod handlers {
}),
})
.await;
return;
}
let initial_context_seeded = sess.state.lock().await.initial_context_seeded;
if !initial_context_seeded {
return;
}
let current_context = sess.new_default_turn_with_sub_id(sub_id).await;
let update_items = sess.build_settings_update_items(
Some(&previous_context),
&current_context,
&previous_collaboration_mode,
next_collaboration_mode.as_ref(),
);
if !update_items.is_empty() {
sess.record_conversation_items(&current_context, &update_items)
.await;
}
}
@@ -2671,14 +2636,6 @@ mod handlers {
_ => unreachable!(),
};
let previous_collaboration_mode = sess
.state
.lock()
.await
.session_configuration
.collaboration_mode
.clone();
let next_collaboration_mode = updates.collaboration_mode.clone();
let Ok(current_context) = sess.new_turn_with_sub_id(sub_id, updates).await else {
// new_turn_with_sub_id already emits the error event.
return;
@@ -2691,12 +2648,8 @@ mod handlers {
// Attempt to inject input into current task
if let Err(items) = sess.inject_input(items).await {
sess.seed_initial_context_if_needed(&current_context).await;
let update_items = sess.build_settings_update_items(
previous_context.as_ref(),
&current_context,
&previous_collaboration_mode,
next_collaboration_mode.as_ref(),
);
let update_items =
sess.build_settings_update_items(previous_context.as_ref(), &current_context);
if !update_items.is_empty() {
sess.record_conversation_items(&current_context, &update_items)
.await;
@@ -3211,7 +3164,7 @@ async fn spawn_review_thread(
developer_instructions: None,
user_instructions: None,
compact_prompt: parent_turn_context.compact_prompt.clone(),
collaboration_mode_kind: parent_turn_context.collaboration_mode_kind,
collaboration_mode: parent_turn_context.collaboration_mode.clone(),
personality: parent_turn_context.personality,
approval_policy: parent_turn_context.approval_policy,
sandbox_policy: parent_turn_context.sandbox_policy.clone(),
@@ -3326,7 +3279,7 @@ pub(crate) async fn run_turn(
let total_usage_tokens = sess.get_total_token_usage().await;
let event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode_kind,
collaboration_mode_kind: turn_context.collaboration_mode.mode,
});
sess.send_event(&turn_context, event).await;
if total_usage_tokens >= auto_compact_limit {
@@ -4239,7 +4192,7 @@ async fn try_run_sampling_request(
let mut last_agent_message: Option<String> = None;
let mut active_item: Option<TurnItem> = None;
let mut should_emit_turn_diff = false;
let plan_mode = turn_context.collaboration_mode_kind == ModeKind::Plan;
let plan_mode = turn_context.collaboration_mode.mode == ModeKind::Plan;
let mut plan_mode_state = plan_mode.then(|| PlanModeStreamState::new(&turn_context.sub_id));
let receiving_span = trace_span!("receiving_stream");
let outcome: CodexResult<SamplingRequestResult> = loop {

View File

@@ -61,7 +61,7 @@ pub(crate) async fn run_compact_task(
) {
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode_kind,
collaboration_mode_kind: turn_context.collaboration_mode.mode,
});
sess.send_event(&turn_context, start_event).await;
run_compact_task_inner(sess.clone(), turn_context, input).await;

View File

@@ -22,7 +22,7 @@ pub(crate) async fn run_inline_remote_auto_compact_task(
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode_kind,
collaboration_mode_kind: turn_context.collaboration_mode.mode,
});
sess.send_event(&turn_context, start_event).await;

View File

@@ -84,6 +84,13 @@ pub fn set_default_client_residency_requirement(enforce_residency: Option<Reside
tracing::warn!("Failed to acquire requirements residency lock");
return;
};
if *guard != enforce_residency {
tracing::info!(
previous = ?*guard,
updated = ?enforce_residency,
"Updated Codex residency requirement"
);
}
*guard = enforce_residency;
}
@@ -188,6 +195,11 @@ pub fn build_reqwest_client() -> reqwest::Client {
let value = match requirement {
ResidencyRequirement::Us => HeaderValue::from_static("us"),
};
tracing::debug!(
header_name = RESIDENCY_HEADER_NAME,
header_value = ?value,
"Applying residency header to default HTTP client"
);
headers.insert(RESIDENCY_HEADER_NAME, value);
}
let ua = get_codex_user_agent();

View File

@@ -86,21 +86,8 @@ fn read_marker(path: &AbsolutePathBuf) -> Result<String, SystemSkillsError> {
}
fn embedded_system_skills_fingerprint() -> String {
let mut items: Vec<(String, Option<u64>)> = SYSTEM_SKILLS_DIR
.entries()
.iter()
.map(|entry| match entry {
include_dir::DirEntry::Dir(dir) => (dir.path().to_string_lossy().to_string(), None),
include_dir::DirEntry::File(file) => {
let mut file_hasher = DefaultHasher::new();
file.contents().hash(&mut file_hasher);
(
file.path().to_string_lossy().to_string(),
Some(file_hasher.finish()),
)
}
})
.collect();
let mut items = Vec::new();
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
let mut hasher = DefaultHasher::new();
@@ -112,6 +99,25 @@ fn embedded_system_skills_fingerprint() -> String {
format!("{:x}", hasher.finish())
}
/// Depth-first walk over an embedded `include_dir` tree, appending one record
/// per entry to `items`:
/// - directories contribute `(path, None)` and are then recursed into, so a
///   parent always precedes its children in the output;
/// - files contribute `(path, Some(hash))` where the hash covers the file's
///   embedded byte contents.
///
/// The caller sorts and folds `items` into the overall fingerprint, so nested
/// additions/edits/removals all change the final hash.
fn collect_fingerprint_items(dir: &Dir<'_>, items: &mut Vec<(String, Option<u64>)>) {
    for entry in dir.entries() {
        match entry {
            include_dir::DirEntry::File(file) => {
                let mut content_hasher = DefaultHasher::new();
                file.contents().hash(&mut content_hasher);
                items.push((
                    file.path().to_string_lossy().into_owned(),
                    Some(content_hasher.finish()),
                ));
            }
            include_dir::DirEntry::Dir(nested) => {
                items.push((nested.path().to_string_lossy().into_owned(), None));
                collect_fingerprint_items(nested, items);
            }
        }
    }
}
/// Writes the embedded `include_dir::Dir` to disk under `dest`.
///
/// Preserves the embedded directory structure.
@@ -163,3 +169,28 @@ impl SystemSkillsError {
Self::Io { action, source }
}
}
#[cfg(test)]
mod tests {
    use super::SYSTEM_SKILLS_DIR;
    use super::collect_fingerprint_items;

    /// The fingerprint walk must include entries nested below the top level,
    /// not just immediate children of the embedded skills directory.
    #[test]
    fn fingerprint_traverses_nested_entries() {
        let mut collected = Vec::new();
        collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut collected);

        let paths: Vec<String> = collected.into_iter().map(|(path, _hash)| path).collect();
        let has_path = |needle: &str| paths.iter().any(|path| path == needle);

        // Top-level skill file and a file two levels deep must both be present.
        assert!(has_path("skill-creator/SKILL.md"));
        assert!(has_path("skill-creator/scripts/init_skill.py"));
    }
}

View File

@@ -48,7 +48,7 @@ pub(crate) async fn handle_output_item_done(
previously_active_item: Option<TurnItem>,
) -> Result<OutputItemResult> {
let mut output = OutputItemResult::default();
let plan_mode = ctx.turn_context.collaboration_mode_kind == ModeKind::Plan;
let plan_mode = ctx.turn_context.collaboration_mode.mode == ModeKind::Plan;
match ToolRouter::build_tool_call(ctx.sess.as_ref(), item.clone()).await {
// The model emitted a tool call; log it, persist the item immediately, and queue the tool execution.

View File

@@ -67,7 +67,7 @@ impl SessionTask for UserShellCommandTask {
let event = EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: turn_context.client.get_model_context_window(),
collaboration_mode_kind: turn_context.collaboration_mode_kind,
collaboration_mode_kind: turn_context.collaboration_mode.mode,
});
let session = session.clone_session();
session.send_event(turn_context.as_ref(), event).await;

View File

@@ -104,7 +104,7 @@ pub(crate) async fn handle_update_plan(
arguments: String,
_call_id: String,
) -> Result<String, FunctionCallError> {
if turn_context.collaboration_mode_kind == ModeKind::Plan {
if turn_context.collaboration_mode.mode == ModeKind::Plan {
return Err(FunctionCallError::RespondToModel(
"update_plan is a TODO/checklist tool and is not allowed in Plan mode".to_string(),
));

View File

@@ -22,9 +22,12 @@ fn sse_completed(id: &str) -> String {
sse(vec![ev_response_created(id), ev_completed(id)])
}
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
fn collab_mode_with_mode_and_instructions(
mode: ModeKind,
instructions: Option<&str>,
) -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Custom,
mode,
settings: Settings {
model: "gpt-5.1".to_string(),
reasoning_effort: None,
@@ -33,6 +36,10 @@ fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMod
}
}
/// Convenience wrapper: builds a `CollaborationMode` with `ModeKind::Custom`
/// and the given optional developer instructions.
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
    collab_mode_with_mode_and_instructions(ModeKind::Custom, instructions)
}
fn developer_texts(input: &[Value]) -> Vec<String> {
input
.iter()
@@ -171,7 +178,7 @@ async fn collaboration_instructions_added_on_user_turn() -> Result<()> {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_then_user_turn_uses_updated_collaboration_instructions() -> Result<()> {
async fn override_then_next_turn_uses_updated_collaboration_instructions() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -196,20 +203,12 @@ async fn override_then_user_turn_uses_updated_collaboration_instructions() -> Re
.await?;
test.codex
.submit(Op::UserTurn {
.submit(Op::UserInput {
items: vec![UserInput::Text {
text: "hello".into(),
text_elements: Vec::new(),
}],
cwd: test.config.cwd.clone(),
approval_policy: test.config.approval_policy.value(),
sandbox_policy: test.config.sandbox_policy.get().clone(),
model: test.session_configured.model.clone(),
effort: None,
summary: test.config.model_reasoning_summary,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
})
.await?;
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
@@ -272,7 +271,7 @@ async fn user_turn_overrides_collaboration_instructions_after_override() -> Resu
let dev_texts = developer_texts(&input);
let base_text = collab_xml(base_text);
let turn_text = collab_xml(turn_text);
assert_eq!(count_exact(&dev_texts, &base_text), 1);
assert_eq!(count_exact(&dev_texts, &base_text), 0);
assert_eq!(count_exact(&dev_texts, &turn_text), 1);
Ok(())
@@ -419,6 +418,159 @@ async fn collaboration_mode_update_noop_does_not_append() -> Result<()> {
Ok(())
}
/// When an `OverrideTurnContext` switches the collaboration mode (Code -> Plan),
/// the next user turn's request should contain exactly one developer
/// instruction message per mode: the earlier Code instructions remain in
/// history and one new Plan message is appended.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_emits_new_instruction_message_when_mode_changes() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
    let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;

    let test = test_codex().build(&server).await?;
    let code_text = "code mode instructions";
    let plan_text = "plan mode instructions";

    // Set Code mode before the first user turn.
    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(code_text),
            )),
            personality: None,
        })
        .await?;

    // First turn: records the Code-mode instructions into the conversation.
    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 1".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // Override to Plan mode with different instructions.
    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Plan,
                Some(plan_text),
            )),
            personality: None,
        })
        .await?;

    // Second turn: should append the Plan-mode instructions.
    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 2".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // Inspect the second request's developer messages: one message per mode,
    // neither duplicated.
    let input = req2.single_request().input();
    let dev_texts = developer_texts(&input);
    let code_text = collab_xml(code_text);
    let plan_text = collab_xml(plan_text);
    assert_eq!(count_exact(&dev_texts, &code_text), 1);
    assert_eq!(count_exact(&dev_texts, &plan_text), 1);

    Ok(())
}
/// When an `OverrideTurnContext` re-submits the same collaboration mode with
/// identical instructions, no duplicate developer instruction message should be
/// appended: the second request must still contain the instructions exactly
/// once.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_noop_does_not_append_when_mode_is_unchanged() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
    let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;

    let test = test_codex().build(&server).await?;
    let collab_text = "mode-stable instructions";

    // Set Code mode with the instructions before the first turn.
    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(collab_text),
            )),
            personality: None,
        })
        .await?;

    // First turn records the instructions.
    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 1".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // Re-submit an identical override: same mode, same instructions.
    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(collab_text),
            )),
            personality: None,
        })
        .await?;

    // Second turn: the unchanged mode must not append another copy.
    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 2".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // The instructions should appear exactly once in the second request.
    let input = req2.single_request().input();
    let dev_texts = developer_texts(&input);
    let collab_text = collab_xml(collab_text);
    assert_eq!(count_exact(&dev_texts, &collab_text), 1);

    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn resume_replays_collaboration_instructions() -> Result<()> {
skip_if_no_network!(Ok(()));

View File

@@ -18,7 +18,6 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use std::collections::HashSet;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
@@ -104,7 +103,7 @@ fn rollout_environment_texts(text: &str) -> Vec<String> {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_permissions_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_permissions_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -138,19 +137,15 @@ async fn override_turn_context_records_permissions_update() -> Result<()> {
.filter(|text| text.contains("`approval_policy`"))
.collect();
assert!(
approval_texts
.iter()
.any(|text| text.contains("`approval_policy` is `never`")),
"expected updated approval policy instructions in rollout"
approval_texts.is_empty(),
"did not expect permissions updates before a new user turn: {approval_texts:?}"
);
let unique: HashSet<&String> = approval_texts.iter().copied().collect();
assert_eq!(unique.len(), 2);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_environment_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_environment_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -177,17 +172,16 @@ async fn override_turn_context_records_environment_update() -> Result<()> {
let rollout_path = test.codex.rollout_path().expect("rollout path");
let rollout_text = read_rollout_text(&rollout_path).await?;
let env_texts = rollout_environment_texts(&rollout_text);
let new_cwd_text = new_cwd.path().display().to_string();
assert!(
env_texts.iter().any(|text| text.contains(&new_cwd_text)),
"expected environment update with new cwd in rollout"
env_texts.is_empty(),
"did not expect environment updates before a new user turn: {env_texts:?}"
);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_collaboration_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_collaboration_update() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -220,7 +214,7 @@ async fn override_turn_context_records_collaboration_update() -> Result<()> {
.iter()
.filter(|text| text.as_str() == collab_text.as_str())
.count();
assert_eq!(collab_count, 1);
assert_eq!(collab_count, 0);
Ok(())
}

View File

@@ -136,7 +136,7 @@ async fn permissions_message_added_on_override_change() -> Result<()> {
let permissions_2 = permissions_texts(input2);
assert_eq!(permissions_1.len(), 1);
assert_eq!(permissions_2.len(), 3);
assert_eq!(permissions_2.len(), 2);
let unique = permissions_2.into_iter().collect::<HashSet<String>>();
assert_eq!(unique.len(), 2);
@@ -267,7 +267,7 @@ async fn resume_replays_permissions_messages() -> Result<()> {
let body3 = req3.single_request().body_json();
let input = body3["input"].as_array().expect("input array");
let permissions = permissions_texts(input);
assert_eq!(permissions.len(), 4);
assert_eq!(permissions.len(), 3);
let unique = permissions.into_iter().collect::<HashSet<String>>();
assert_eq!(unique.len(), 2);
@@ -337,7 +337,7 @@ async fn resume_and_fork_append_permissions_messages() -> Result<()> {
let body2 = req2.single_request().body_json();
let input2 = body2["input"].as_array().expect("input array");
let permissions_base = permissions_texts(input2);
assert_eq!(permissions_base.len(), 3);
assert_eq!(permissions_base.len(), 2);
builder = builder.with_config(|config| {
config.approval_policy = Constrained::allow_any(AskForApproval::UnlessTrusted);

View File

@@ -272,6 +272,97 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
Ok(())
}
/// Overriding the turn context with the personality already in effect
/// (`Pragmatic` set via config, then re-submitted via `OverrideTurnContext`)
/// must not inject a fresh `<personality_spec>` developer message into the
/// following turn's request.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_turn_personality_same_value_does_not_add_update_message() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let resp_mock = mount_sse_sequence(
        &server,
        vec![sse_completed("resp-1"), sse_completed("resp-2")],
    )
    .await;

    // Personality feature enabled and already configured to Pragmatic, so the
    // later override is a no-op value-wise.
    let mut builder = test_codex()
        .with_model("exp-codex-personality")
        .with_config(|config| {
            config.features.disable(Feature::RemoteModels);
            config.features.enable(Feature::Personality);
            config.personality = Some(Personality::Pragmatic);
        });
    let test = builder.build(&server).await?;

    // First user turn with no per-turn personality override.
    test.codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: "hello".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
            cwd: test.cwd_path().to_path_buf(),
            approval_policy: test.config.approval_policy.value(),
            sandbox_policy: SandboxPolicy::ReadOnly,
            model: test.session_configured.model.clone(),
            effort: test.config.model_reasoning_effort,
            summary: ReasoningSummary::Auto,
            collaboration_mode: None,
            personality: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // Override with the same personality value the session already has.
    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: None,
            personality: Some(Personality::Pragmatic),
        })
        .await?;

    // Second user turn after the no-op override.
    test.codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: "hello".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
            cwd: test.cwd_path().to_path_buf(),
            approval_policy: test.config.approval_policy.value(),
            sandbox_policy: SandboxPolicy::ReadOnly,
            model: test.session_configured.model.clone(),
            effort: test.config.model_reasoning_effort,
            summary: ReasoningSummary::Auto,
            collaboration_mode: None,
            personality: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // The second request's developer messages must not contain a personality
    // preamble, since the personality value never actually changed.
    let requests = resp_mock.requests();
    assert_eq!(requests.len(), 2, "expected two requests");
    let request = requests
        .last()
        .expect("expected second request after personality override");
    let developer_texts = request.message_input_texts("developer");
    let personality_text = developer_texts
        .iter()
        .find(|text| text.contains("<personality_spec>"));
    assert!(
        personality_text.is_none(),
        "expected no personality preamble for unchanged personality, got {personality_text:?}"
    );

    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
let codex_home = TempDir::new().expect("create temp dir");

View File

@@ -388,17 +388,14 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
});
let expected_permissions_msg = body1["input"][0].clone();
let body1_input = body1["input"].as_array().expect("input array");
// After overriding the turn context, emit two updated permissions messages.
// After overriding the turn context, emit one updated permissions message.
let expected_permissions_msg_2 = body2["input"][body1_input.len()].clone();
let expected_permissions_msg_3 = body2["input"][body1_input.len() + 1].clone();
assert_ne!(
expected_permissions_msg_2, expected_permissions_msg,
"expected updated permissions message after override"
);
assert_eq!(expected_permissions_msg_2, expected_permissions_msg_3);
let mut expected_body2 = body1_input.to_vec();
expected_body2.push(expected_permissions_msg_2);
expected_body2.push(expected_permissions_msg_3);
expected_body2.push(expected_user_message_2);
assert_eq!(body2["input"], serde_json::Value::Array(expected_body2));

View File

@@ -105,35 +105,28 @@ fn append_locked_line(policy_path: &Path, line: &str) -> Result<(), AmendError>
source,
})?;
let len = file
.metadata()
.map_err(|source| AmendError::PolicyMetadata {
file.seek(SeekFrom::Start(0))
.map_err(|source| AmendError::SeekPolicyFile {
path: policy_path.to_path_buf(),
source,
})?
.len();
})?;
let mut contents = String::new();
file.read_to_string(&mut contents)
.map_err(|source| AmendError::ReadPolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
// Ensure file ends in a newline before appending.
if len > 0 {
file.seek(SeekFrom::End(-1))
.map_err(|source| AmendError::SeekPolicyFile {
if contents.lines().any(|existing| existing == line) {
return Ok(());
}
if !contents.is_empty() && !contents.ends_with('\n') {
file.write_all(b"\n")
.map_err(|source| AmendError::WritePolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
let mut last = [0; 1];
file.read_exact(&mut last)
.map_err(|source| AmendError::ReadPolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
if last[0] != b'\n' {
file.write_all(b"\n")
.map_err(|source| AmendError::WritePolicyFile {
path: policy_path.to_path_buf(),
source,
})?;
}
}
file.write_all(format!("{line}\n").as_bytes())

View File

@@ -1,4 +1,5 @@
use std::any::Any;
use std::fs;
use std::sync::Arc;
use anyhow::Context;
@@ -10,10 +11,12 @@ use codex_execpolicy::Policy;
use codex_execpolicy::PolicyParser;
use codex_execpolicy::RuleMatch;
use codex_execpolicy::RuleRef;
use codex_execpolicy::blocking_append_allow_prefix_rule;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
fn tokens(cmd: &[&str]) -> Vec<String> {
cmd.iter().map(std::string::ToString::to_string).collect()
@@ -46,6 +49,24 @@ fn rule_snapshots(rules: &[RuleRef]) -> Vec<RuleSnapshot> {
.collect()
}
#[test]
fn append_allow_prefix_rule_dedupes_existing_rule() -> Result<()> {
    // Appending the identical allow rule twice must leave exactly one
    // rule line in the policy file.
    let dir = tempdir().context("create temp dir")?;
    let rules_file = dir.path().join("rules").join("default.rules");
    let python_prefix = tokens(&["python3"]);
    for _ in 0..2 {
        blocking_append_allow_prefix_rule(&rules_file, &python_prefix)?;
    }
    let written = fs::read_to_string(&rules_file).context("read policy")?;
    let expected = r#"prefix_rule(pattern=["python3"], decision="allow")
"#;
    assert_eq!(written, expected);
    Ok(())
}
#[test]
fn basic_match() -> Result<()> {
let policy_src = r#"

View File

@@ -233,10 +233,13 @@ impl DeveloperInstructions {
if !request_rule_enabled {
APPROVAL_POLICY_ON_REQUEST.to_string()
} else {
let command_prefixes = format_allow_prefixes(exec_policy);
let command_prefixes =
format_allow_prefixes(exec_policy.get_allowed_prefixes());
match command_prefixes {
Some(prefixes) => {
format!("{APPROVAL_POLICY_ON_REQUEST_RULE}\n{prefixes}")
format!(
"{APPROVAL_POLICY_ON_REQUEST_RULE}\nApproved command prefixes:\n{prefixes}"
)
}
None => APPROVAL_POLICY_ON_REQUEST_RULE.to_string(),
}
@@ -371,20 +374,51 @@ impl DeveloperInstructions {
}
}
pub fn render_command_prefix_list<I, P>(prefixes: I) -> Option<String>
where
I: IntoIterator<Item = P>,
P: AsRef<[String]>,
{
let lines = prefixes
.into_iter()
.map(|prefix| format!("- {}", render_command_prefix(prefix.as_ref())))
.collect::<Vec<_>>();
if lines.is_empty() {
return None;
const MAX_RENDERED_PREFIXES: usize = 100;
const MAX_ALLOW_PREFIX_TEXT_BYTES: usize = 5000;
const TRUNCATED_MARKER: &str = "...\n[Some commands were truncated]";

/// Renders allow-listed command prefixes as a `- [...]` bulleted list,
/// shortest/broadest prefixes first, capped at `MAX_RENDERED_PREFIXES`
/// entries and `MAX_ALLOW_PREFIX_TEXT_BYTES` bytes of text.
///
/// Returns `None` when there are no prefixes to show. When either cap
/// truncates the list, `TRUNCATED_MARKER` is appended.
pub fn format_allow_prefixes(prefixes: Vec<Vec<String>>) -> Option<String> {
    // Preserve the earlier behavior of returning None for an empty list so
    // callers can fall back to the bare approval-policy text instead of
    // emitting an empty "Approved command prefixes" section.
    if prefixes.is_empty() {
        return None;
    }
    let mut truncated = prefixes.len() > MAX_RENDERED_PREFIXES;
    let mut prefixes = prefixes;
    // Shortest first: fewest tokens, then fewest total bytes, then
    // lexicographic for a deterministic order.
    prefixes.sort_by(|a, b| {
        a.len()
            .cmp(&b.len())
            .then_with(|| prefix_combined_str_len(a).cmp(&prefix_combined_str_len(b)))
            .then_with(|| a.cmp(b))
    });
    let mut output = prefixes
        .into_iter()
        .take(MAX_RENDERED_PREFIXES)
        .map(|prefix| format!("- {}", render_command_prefix(&prefix)))
        .collect::<Vec<_>>()
        .join("\n");
    // Enforce the byte budget. The constant is in BYTES, so compare byte
    // lengths (`len()`), not character counts — `char_indices().nth(N)`
    // would allow multi-byte UTF-8 output to exceed the budget. Back up to
    // a char boundary so we never split a multi-byte character.
    if output.len() > MAX_ALLOW_PREFIX_TEXT_BYTES {
        let mut cut = MAX_ALLOW_PREFIX_TEXT_BYTES;
        while !output.is_char_boundary(cut) {
            cut -= 1;
        }
        output.truncate(cut);
        truncated = true;
    }
    if truncated {
        Some(format!("{output}{TRUNCATED_MARKER}"))
    } else {
        Some(output)
    }
}
/// Total number of bytes across all tokens of `prefix`; used as a
/// tie-breaker when ordering prefixes with the same token count.
fn prefix_combined_str_len(prefix: &[String]) -> usize {
    prefix.iter().fold(0, |total, token| total + token.len())
}
fn render_command_prefix(prefix: &[String]) -> String {
@@ -396,12 +430,6 @@ fn render_command_prefix(prefix: &[String]) -> String {
format!("[{tokens}]")
}
// Formats the policy's allow-listed command prefixes as a labelled section
// for the developer instructions. Returns `None` when the policy has no
// allow prefixes (render_command_prefix_list returns None for an empty
// list), letting the caller fall back to the bare approval-policy text.
fn format_allow_prefixes(exec_policy: &Policy) -> Option<String> {
let prefixes = exec_policy.get_allowed_prefixes();
// `?` propagates the empty-list None from render_command_prefix_list.
let lines = render_command_prefix_list(prefixes)?;
Some(format!("Approved command prefixes:\n{lines}"))
}
impl From<DeveloperInstructions> for ResponseItem {
fn from(di: DeveloperInstructions) -> Self {
ResponseItem::Message {
@@ -1000,6 +1028,62 @@ mod tests {
assert!(text.contains(r#"["git", "pull"]"#));
}
#[test]
fn render_command_prefix_list_sorts_by_len_then_total_len_then_alphabetical() {
    // Expected ordering: fewest tokens first, then smallest combined byte
    // length, then lexicographic.
    let prefixes = vec![
        vec!["b".to_string(), "zz".to_string()],
        vec!["aa".to_string()],
        vec!["b".to_string()],
        vec!["a".to_string(), "b".to_string(), "c".to_string()],
        vec!["a".to_string()],
        vec!["b".to_string(), "a".to_string()],
    ];
    let output = format_allow_prefixes(prefixes).expect("rendered list");
    // Compare against the &str literal directly; allocating an owned
    // String for the expected value was a needless `.to_string()`.
    assert_eq!(
        output,
        r#"- ["a"]
- ["b"]
- ["aa"]
- ["b", "a"]
- ["b", "zz"]
- ["a", "b", "c"]"#,
    );
}
#[test]
fn render_command_prefix_list_limits_output_to_max_prefixes() {
    // With more prefixes than the cap, the output keeps exactly
    // MAX_RENDERED_PREFIXES entries plus one extra line contributed by the
    // truncation marker (it contains a newline).
    let prefixes = (0..(MAX_RENDERED_PREFIXES + 5))
        .map(|i| vec![format!("{i:03}")])
        .collect::<Vec<_>>();
    let output = format_allow_prefixes(prefixes).expect("rendered list");
    // assert! over assert_eq!(…, true); also dropped the leftover
    // eprintln! debug output.
    assert!(
        output.ends_with(TRUNCATED_MARKER),
        "expected truncation marker, got: {output}"
    );
    assert_eq!(output.lines().count(), MAX_RENDERED_PREFIXES + 1);
}
#[test]
fn format_allow_prefixes_limits_output() {
    // Fill the policy with 200 very long allow rules so the rendered list
    // would blow well past the byte budget without truncation.
    let mut exec_policy = Policy::empty();
    for i in 0..200 {
        let rule = [format!("tool-{i:03}"), "x".repeat(500)];
        exec_policy
            .add_prefix_rule(&rule, codex_execpolicy::Decision::Allow)
            .expect("add rule");
    }
    let output =
        format_allow_prefixes(exec_policy.get_allowed_prefixes()).expect("formatted prefixes");
    // Budget is the text cap plus the marker appended after truncation.
    let budget = MAX_ALLOW_PREFIX_TEXT_BYTES + TRUNCATED_MARKER.len();
    assert!(
        output.len() <= budget,
        "output length exceeds expected limit: {output}",
    );
}
#[test]
fn serializes_success_as_plain_string() -> Result<()> {
let item = ResponseInputItem::FunctionCallOutput {