Compare commits

...

3 Commits

Author SHA1 Message Date
Ahmed Ibrahim
a4fe5244d3 codex: fix CI failure on PR #13886 2026-03-07 21:47:36 -08:00
Ahmed Ibrahim
638e759f38 codex: stabilize protocol schema fixture generation 2026-03-07 21:47:35 -08:00
Michael Bolin
3b5fe5ca35 protocol: keep root carveouts sandboxed (#13452)
## Why

A restricted filesystem policy that grants `:root` read or write access
but also carries explicit deny entries should still behave like scoped
access with carveouts, not like unrestricted disk access.

Without that distinction, later platform backends cannot preserve
blocked subpaths under root-level permissions because the protocol layer
reports the policy as fully unrestricted.

## What changed

- taught `FileSystemSandboxPolicy` to treat root access plus explicit
deny entries as scoped access rather than full-disk access
- derived readable and writable roots from the filesystem root when root
access is combined with carveouts, while preserving the denied paths as
read-only subpaths
- added protocol coverage for root-write policies with carveouts, and a
core sandboxing regression test confirming that those policies still
require platform sandboxing

## Verification

- added protocol coverage in `protocol/src/permissions.rs` and
`protocol/src/protocol.rs` for root access with explicit carveouts
- added platform-sandbox regression coverage in
`core/src/sandboxing/mod.rs`
- verified the current PR state with `just clippy`




---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/13452).
* #13453
* __->__ #13452
* #13451
* #13449
* #13448
* #13445
* #13440
* #13439

---------

Co-authored-by: viyatb-oai <viyatb@openai.com>
2026-03-07 21:15:47 -08:00
9 changed files with 732 additions and 394 deletions

View File

@@ -2,6 +2,12 @@
# Do not increase, fix your test instead
slow-timeout = { period = "15s", terminate-after = 2 }
[test-groups.app_server_protocol_codegen]
max-threads = 1
[test-groups.app_server_integration]
max-threads = 1
[[profile.default.overrides]]
# Do not add new tests here
@@ -11,3 +17,13 @@ slow-timeout = { period = "1m", terminate-after = 4 }
[[profile.default.overrides]]
filter = 'test(approval_matrix_covers_all_modes)'
slow-timeout = { period = "30s", terminate-after = 2 }
[[profile.default.overrides]]
filter = 'package(codex-app-server-protocol) & (test(typescript_schema_fixtures_match_generated) | test(json_schema_fixtures_match_generated) | test(generate_ts_with_experimental_api_retains_experimental_entries) | test(generated_ts_optional_nullable_fields_only_in_params) | test(generate_json_filters_experimental_fields_and_methods))'
test-group = 'app_server_protocol_codegen'
[[profile.default.overrides]]
# These tests spawn a fresh app-server subprocess per case. Serializing them keeps
# Windows startup deterministic without widening the read timeouts.
filter = 'package(codex-app-server)'
test-group = 'app_server_integration'

View File

@@ -23,6 +23,7 @@ use schemars::schema_for;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
@@ -32,9 +33,10 @@ use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::thread;
use ts_rs::TS;
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
pub(crate) const GENERATED_TS_HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
const JSON_V1_ALLOWLIST: &[&str] = &["InitializeParams", "InitializeResponse"];
const SPECIAL_DEFINITIONS: &[&str] = &[
@@ -137,9 +139,29 @@ pub fn generate_ts_with_options(
}
if options.ensure_headers {
for file in &ts_files {
prepend_header_if_missing(file)?;
}
let worker_count = thread::available_parallelism()
.map_or(1, usize::from)
.min(ts_files.len().max(1));
let chunk_size = ts_files.len().div_ceil(worker_count);
thread::scope(|scope| -> Result<()> {
let mut workers = Vec::new();
for chunk in ts_files.chunks(chunk_size.max(1)) {
workers.push(scope.spawn(move || -> Result<()> {
for file in chunk {
prepend_header_if_missing(file)?;
}
Ok(())
}));
}
for worker in workers {
worker
.join()
.map_err(|_| anyhow!("TypeScript header worker panicked"))??;
}
Ok(())
})?;
}
// Optionally run Prettier on all generated TS files.
@@ -231,6 +253,41 @@ fn filter_experimental_ts(out_dir: &Path) -> Result<()> {
Ok(())
}
pub(crate) fn filter_experimental_ts_tree(tree: &mut BTreeMap<PathBuf, String>) -> Result<()> {
let registered_fields = experimental_fields();
let experimental_method_types = experimental_method_types();
if let Some(content) = tree.get_mut(Path::new("ClientRequest.ts")) {
let filtered =
filter_client_request_ts_contents(std::mem::take(content), EXPERIMENTAL_CLIENT_METHODS);
*content = filtered;
}
let mut fields_by_type_name: HashMap<String, HashSet<String>> = HashMap::new();
for field in registered_fields {
fields_by_type_name
.entry(field.type_name.to_string())
.or_default()
.insert(field.field_name.to_string());
}
for (path, content) in tree.iter_mut() {
let Some(type_name) = path.file_stem().and_then(|stem| stem.to_str()) else {
continue;
};
let Some(experimental_field_names) = fields_by_type_name.get(type_name) else {
continue;
};
let filtered = filter_experimental_type_fields_ts_contents(
std::mem::take(content),
experimental_field_names,
);
*content = filtered;
}
remove_generated_type_entries(tree, &experimental_method_types, "ts");
Ok(())
}
/// Removes union arms from `ClientRequest.ts` for methods marked experimental.
fn filter_client_request_ts(out_dir: &Path, experimental_methods: &[&str]) -> Result<()> {
let path = out_dir.join("ClientRequest.ts");
@@ -239,9 +296,15 @@ fn filter_client_request_ts(out_dir: &Path, experimental_methods: &[&str]) -> Re
}
let mut content =
fs::read_to_string(&path).with_context(|| format!("Failed to read {}", path.display()))?;
content = filter_client_request_ts_contents(content, experimental_methods);
fs::write(&path, content).with_context(|| format!("Failed to write {}", path.display()))?;
Ok(())
}
fn filter_client_request_ts_contents(mut content: String, experimental_methods: &[&str]) -> String {
let Some((prefix, body, suffix)) = split_type_alias(&content) else {
return Ok(());
return content;
};
let experimental_methods: HashSet<&str> = experimental_methods
.iter()
@@ -259,12 +322,9 @@ fn filter_client_request_ts(out_dir: &Path, experimental_methods: &[&str]) -> Re
let new_body = filtered_arms.join(" | ");
content = format!("{prefix}{new_body}{suffix}");
let import_usage_scope = split_type_alias(&content)
.map(|(_, body, _)| body)
.map(|(_, filtered_body, _)| filtered_body)
.unwrap_or_else(|| new_body.clone());
content = prune_unused_type_imports(content, &import_usage_scope);
fs::write(&path, content).with_context(|| format!("Failed to write {}", path.display()))?;
Ok(())
prune_unused_type_imports(content, &import_usage_scope)
}
/// Removes experimental properties from generated TypeScript type files.
@@ -302,8 +362,17 @@ fn filter_experimental_fields_in_ts_file(
) -> Result<()> {
let mut content =
fs::read_to_string(path).with_context(|| format!("Failed to read {}", path.display()))?;
content = filter_experimental_type_fields_ts_contents(content, experimental_field_names);
fs::write(path, content).with_context(|| format!("Failed to write {}", path.display()))?;
Ok(())
}
fn filter_experimental_type_fields_ts_contents(
mut content: String,
experimental_field_names: &HashSet<String>,
) -> String {
let Some((open_brace, close_brace)) = type_body_brace_span(&content) else {
return Ok(());
return content;
};
let inner = &content[open_brace + 1..close_brace];
let fields = split_top_level_multi(inner, &[',', ';']);
@@ -322,9 +391,7 @@ fn filter_experimental_fields_in_ts_file(
let import_usage_scope = split_type_alias(&content)
.map(|(_, body, _)| body)
.unwrap_or_else(|| new_inner.clone());
content = prune_unused_type_imports(content, &import_usage_scope);
fs::write(path, content).with_context(|| format!("Failed to write {}", path.display()))?;
Ok(())
prune_unused_type_imports(content, &import_usage_scope)
}
fn filter_experimental_schema(bundle: &mut Value) -> Result<()> {
@@ -526,6 +593,23 @@ fn remove_generated_type_files(
Ok(())
}
fn remove_generated_type_entries(
tree: &mut BTreeMap<PathBuf, String>,
type_names: &HashSet<String>,
extension: &str,
) {
for type_name in type_names {
for subdir in ["", "v1", "v2"] {
let path = if subdir.is_empty() {
PathBuf::from(format!("{type_name}.{extension}"))
} else {
PathBuf::from(subdir).join(format!("{type_name}.{extension}"))
};
tree.remove(&path);
}
}
}
fn remove_experimental_method_type_definitions(bundle: &mut Value) {
let type_names = experimental_method_types();
let Some(definitions) = bundle.get_mut("definitions").and_then(Value::as_object_mut) else {
@@ -1154,40 +1238,13 @@ fn insert_into_namespace(
.or_insert_with(|| Value::Object(Map::new()));
match entry {
Value::Object(map) => {
insert_definition(map, name, schema, &format!("namespace `{namespace}`"))
map.insert(name, schema);
Ok(())
}
_ => Err(anyhow!("expected namespace {namespace} to be an object")),
}
}
fn insert_definition(
definitions: &mut Map<String, Value>,
name: String,
schema: Value,
location: &str,
) -> Result<()> {
if let Some(existing) = definitions.get(&name) {
if existing == &schema {
return Ok(());
}
let existing_title = existing
.get("title")
.and_then(Value::as_str)
.unwrap_or("<untitled>");
let new_title = schema
.get("title")
.and_then(Value::as_str)
.unwrap_or("<untitled>");
return Err(anyhow!(
"schema definition collision in {location}: {name} (existing title: {existing_title}, new title: {new_title}); use #[schemars(rename = \"...\")] to rename one of the conflicting schema definitions"
));
}
definitions.insert(name, schema);
Ok(())
}
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
T: JsonSchema,
@@ -1807,13 +1864,13 @@ fn prepend_header_if_missing(path: &Path) -> Result<()> {
.with_context(|| format!("Failed to read {}", path.display()))?;
}
if content.starts_with(HEADER) {
if content.starts_with(GENERATED_TS_HEADER) {
return Ok(());
}
let mut f = fs::File::create(path)
.with_context(|| format!("Failed to open {} for writing", path.display()))?;
f.write_all(HEADER.as_bytes())
f.write_all(GENERATED_TS_HEADER.as_bytes())
.with_context(|| format!("Failed to write header to {}", path.display()))?;
f.write_all(content.as_bytes())
.with_context(|| format!("Failed to write content to {}", path.display()))?;
@@ -1858,35 +1915,15 @@ fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
let mut entries: Vec<String> = Vec::new();
let mut stems: Vec<String> = ts_files_in(out_dir)?
.into_iter()
.filter_map(|p| {
let stem = p.file_stem()?.to_string_lossy().into_owned();
if stem == "index" { None } else { Some(stem) }
})
.collect();
stems.sort();
stems.dedup();
for name in stems {
entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
}
// If this is the root out_dir and a ./v2 folder exists with TS files,
// expose it as a namespace to avoid symbol collisions at the root.
let v2_dir = out_dir.join("v2");
let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
if has_v2_ts {
entries.push("export * as v2 from \"./v2\";\n".to_string());
}
let mut content =
String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
content.push_str(HEADER);
for line in &entries {
content.push_str(line);
}
let content = generated_index_ts_with_header(index_ts_entries(
&ts_files_in(out_dir)?
.iter()
.map(PathBuf::as_path)
.collect::<Vec<_>>(),
ts_files_in(&out_dir.join("v2"))
.map(|v| !v.is_empty())
.unwrap_or(false),
));
let index_path = out_dir.join("index.ts");
let mut f = fs::File::create(&index_path)
@@ -1896,244 +1933,278 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
Ok(index_path)
}
pub(crate) fn generate_index_ts_tree(tree: &mut BTreeMap<PathBuf, String>) {
let root_entries = tree
.keys()
.filter(|path| path.components().count() == 1)
.map(PathBuf::as_path)
.collect::<Vec<_>>();
let has_v2_ts = tree.keys().any(|path| {
path.parent()
.is_some_and(|parent| parent == Path::new("v2"))
&& path.extension() == Some(OsStr::new("ts"))
&& path.file_stem().is_some_and(|stem| stem != "index")
});
tree.insert(
PathBuf::from("index.ts"),
index_ts_entries(&root_entries, has_v2_ts),
);
let v2_entries = tree
.keys()
.filter(|path| {
path.parent()
.is_some_and(|parent| parent == Path::new("v2"))
})
.map(PathBuf::as_path)
.collect::<Vec<_>>();
if !v2_entries.is_empty() {
tree.insert(
PathBuf::from("v2").join("index.ts"),
index_ts_entries(&v2_entries, false),
);
}
}
fn generated_index_ts_with_header(content: String) -> String {
let mut with_header = String::with_capacity(GENERATED_TS_HEADER.len() + content.len());
with_header.push_str(GENERATED_TS_HEADER);
with_header.push_str(&content);
with_header
}
fn index_ts_entries(paths: &[&Path], has_v2_ts: bool) -> String {
let mut stems: Vec<String> = paths
.iter()
.filter(|path| path.extension() == Some(OsStr::new("ts")))
.filter_map(|path| {
let stem = path.file_stem()?.to_string_lossy().into_owned();
if stem == "index" { None } else { Some(stem) }
})
.collect();
stems.sort();
stems.dedup();
let mut entries = String::new();
for name in stems {
entries.push_str(&format!("export type {{ {name} }} from \"./{name}\";\n"));
}
if has_v2_ts {
entries.push_str("export * as v2 from \"./v2\";\n");
}
entries
}
#[cfg(test)]
mod tests {
use super::*;
use crate::protocol::v2;
use crate::schema_fixtures::read_schema_fixture_subtree;
use anyhow::Context;
use anyhow::Result;
use pretty_assertions::assert_eq;
use std::collections::BTreeSet;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use uuid::Uuid;
#[test]
fn generated_ts_optional_nullable_fields_only_in_params() -> Result<()> {
// Assert that "?: T | null" only appears in generated *Params types.
let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
fs::create_dir(&output_dir)?;
let fixture_tree = read_schema_fixture_subtree(&schema_root()?, "typescript")?;
struct TempDirGuard(PathBuf);
impl Drop for TempDirGuard {
fn drop(&mut self) {
let _ = fs::remove_dir_all(&self.0);
}
}
let _guard = TempDirGuard(output_dir.clone());
// Avoid doing more work than necessary to keep the test from timing out.
let options = GenerateTsOptions {
generate_indices: false,
ensure_headers: false,
run_prettier: false,
experimental_api: false,
};
generate_ts_with_options(&output_dir, None, options)?;
let client_request_ts = fs::read_to_string(output_dir.join("ClientRequest.ts"))?;
let client_request_ts = std::str::from_utf8(
fixture_tree
.get(Path::new("ClientRequest.ts"))
.ok_or_else(|| anyhow::anyhow!("missing ClientRequest.ts fixture"))?,
)?;
assert_eq!(client_request_ts.contains("mock/experimentalMethod"), false);
assert_eq!(
client_request_ts.contains("MockExperimentalMethodParams"),
false
);
assert_eq!(output_dir.join("EventMsg.ts").exists(), true);
let thread_start_ts =
fs::read_to_string(output_dir.join("v2").join("ThreadStartParams.ts"))?;
assert_eq!(fixture_tree.contains_key(Path::new("EventMsg.ts")), true);
let thread_start_ts = std::str::from_utf8(
fixture_tree
.get(Path::new("v2/ThreadStartParams.ts"))
.ok_or_else(|| anyhow::anyhow!("missing v2/ThreadStartParams.ts fixture"))?,
)?;
assert_eq!(thread_start_ts.contains("mockExperimentalField"), false);
assert_eq!(
output_dir
.join("v2")
.join("MockExperimentalMethodParams.ts")
.exists(),
fixture_tree.contains_key(Path::new("v2/MockExperimentalMethodParams.ts")),
false
);
assert_eq!(
output_dir
.join("v2")
.join("MockExperimentalMethodResponse.ts")
.exists(),
fixture_tree.contains_key(Path::new("v2/MockExperimentalMethodResponse.ts")),
false
);
let mut undefined_offenders = Vec::new();
let mut optional_nullable_offenders = BTreeSet::new();
let mut stack = vec![output_dir];
while let Some(dir) = stack.pop() {
for entry in fs::read_dir(&dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
for (path, contents) in &fixture_tree {
if !matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
continue;
}
// Only allow "?: T | null" in objects representing JSON-RPC requests,
// which we assume are called "*Params".
let allow_optional_nullable = path
.file_stem()
.and_then(|stem| stem.to_str())
.is_some_and(|stem| {
stem.ends_with("Params")
|| stem == "InitializeCapabilities"
|| matches!(
stem,
"CollabAgentRef"
| "CollabAgentStatusEntry"
| "CollabAgentSpawnEndEvent"
| "CollabAgentInteractionEndEvent"
| "CollabCloseEndEvent"
| "CollabResumeBeginEvent"
| "CollabResumeEndEvent"
)
});
let contents = std::str::from_utf8(contents)?;
if contents.contains("| undefined") {
undefined_offenders.push(path.clone());
}
const SKIP_PREFIXES: &[&str] = &[
"const ",
"let ",
"var ",
"export const ",
"export let ",
"export var ",
];
let mut search_start = 0;
while let Some(idx) = contents[search_start..].find("| null") {
let abs_idx = search_start + idx;
// Find the property-colon for this field by scanning forward
// from the start of the segment and ignoring nested braces,
// brackets, and parens. This avoids colons inside nested
// type literals like `{ [k in string]?: string }`.
let line_start_idx = contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);
let mut segment_start_idx = line_start_idx;
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
// Scan forward for the colon that separates the field name from its type.
let mut level_brace = 0_i32;
let mut level_brack = 0_i32;
let mut level_paren = 0_i32;
let mut in_single = false;
let mut in_double = false;
let mut escape = false;
let mut prop_colon_idx = None;
for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
let idx_abs = segment_start_idx + i;
if escape {
escape = false;
continue;
}
match ch {
'\\' => {
if in_single || in_double {
escape = true;
}
}
'\'' => {
if !in_double {
in_single = !in_single;
}
}
'"' => {
if !in_single {
in_double = !in_double;
}
}
'{' if !in_single && !in_double => level_brace += 1,
'}' if !in_single && !in_double => level_brace -= 1,
'[' if !in_single && !in_double => level_brack += 1,
']' if !in_single && !in_double => level_brack -= 1,
'(' if !in_single && !in_double => level_paren += 1,
')' if !in_single && !in_double => level_paren -= 1,
':' if !in_single
&& !in_double
&& level_brace == 0
&& level_brack == 0
&& level_paren == 0 =>
{
prop_colon_idx = Some(idx_abs);
break;
}
_ => {}
}
}
let Some(colon_idx) = prop_colon_idx else {
search_start = abs_idx + 5;
continue;
};
let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
// Only allow "?: T | null" in objects representing JSON-RPC requests,
// which we assume are called "*Params".
let allow_optional_nullable = path
.file_stem()
.and_then(|stem| stem.to_str())
.is_some_and(|stem| {
stem.ends_with("Params")
|| stem == "InitializeCapabilities"
|| matches!(
stem,
"CollabAgentRef"
| "CollabAgentStatusEntry"
| "CollabAgentSpawnEndEvent"
| "CollabAgentInteractionEndEvent"
| "CollabCloseEndEvent"
| "CollabResumeBeginEvent"
| "CollabResumeEndEvent"
)
});
let contents = fs::read_to_string(&path)?;
if contents.contains("| undefined") {
undefined_offenders.push(path.clone());
}
const SKIP_PREFIXES: &[&str] = &[
"const ",
"let ",
"var ",
"export const ",
"export let ",
"export var ",
];
let mut search_start = 0;
while let Some(idx) = contents[search_start..].find("| null") {
let abs_idx = search_start + idx;
// Find the property-colon for this field by scanning forward
// from the start of the segment and ignoring nested braces,
// brackets, and parens. This avoids colons inside nested
// type literals like `{ [k in string]?: string }`.
let line_start_idx =
contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);
let mut segment_start_idx = line_start_idx;
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
// Scan forward for the colon that separates the field name from its type.
let mut level_brace = 0_i32;
let mut level_brack = 0_i32;
let mut level_paren = 0_i32;
let mut in_single = false;
let mut in_double = false;
let mut escape = false;
let mut prop_colon_idx = None;
for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
let idx_abs = segment_start_idx + i;
if escape {
escape = false;
continue;
}
match ch {
'\\' => {
// Only treat as escape when inside a string.
if in_single || in_double {
escape = true;
}
}
'\'' => {
if !in_double {
in_single = !in_single;
}
}
'"' => {
if !in_single {
in_double = !in_double;
}
}
'{' if !in_single && !in_double => level_brace += 1,
'}' if !in_single && !in_double => level_brace -= 1,
'[' if !in_single && !in_double => level_brack += 1,
']' if !in_single && !in_double => level_brack -= 1,
'(' if !in_single && !in_double => level_paren += 1,
')' if !in_single && !in_double => level_paren -= 1,
':' if !in_single
&& !in_double
&& level_brace == 0
&& level_brack == 0
&& level_paren == 0 =>
{
prop_colon_idx = Some(idx_abs);
break;
}
_ => {}
}
}
let Some(colon_idx) = prop_colon_idx else {
search_start = abs_idx + 5;
continue;
};
let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if let Some(comment_idx) = field_prefix.rfind("*/") {
field_prefix = field_prefix[comment_idx + 2..].trim_start();
}
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if SKIP_PREFIXES
.iter()
.any(|prefix| field_prefix.starts_with(prefix))
{
search_start = abs_idx + 5;
continue;
}
if field_prefix.contains('(') {
search_start = abs_idx + 5;
continue;
}
// If the last non-whitespace before ':' is '?', then this is an
// optional field with a nullable type (i.e., "?: T | null").
// These are only allowed in *Params types.
if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?')
&& !allow_optional_nullable
{
let line_number =
contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
let offending_line_end = contents[line_start_idx..]
.find('\n')
.map(|i| line_start_idx + i)
.unwrap_or(contents.len());
let offending_snippet =
contents[line_start_idx..offending_line_end].trim();
optional_nullable_offenders.insert(format!(
"{}:{}: {offending_snippet}",
path.display(),
line_number
));
}
search_start = abs_idx + 5;
}
if let Some(comment_idx) = field_prefix.rfind("*/") {
field_prefix = field_prefix[comment_idx + 2..].trim_start();
}
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if SKIP_PREFIXES
.iter()
.any(|prefix| field_prefix.starts_with(prefix))
{
search_start = abs_idx + 5;
continue;
}
if field_prefix.contains('(') {
search_start = abs_idx + 5;
continue;
}
// If the last non-whitespace before ':' is '?', then this is an
// optional field with a nullable type (i.e., "?: T | null").
// These are only allowed in *Params types.
if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?')
&& !allow_optional_nullable
{
let line_number =
contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
let offending_line_end = contents[line_start_idx..]
.find('\n')
.map(|i| line_start_idx + i)
.unwrap_or(contents.len());
let offending_snippet = contents[line_start_idx..offending_line_end].trim();
optional_nullable_offenders.insert(format!(
"{}:{}: {offending_snippet}",
path.display(),
line_number
));
}
search_start = abs_idx + 5;
}
}
@@ -2153,55 +2224,40 @@ mod tests {
Ok(())
}
fn schema_root() -> Result<PathBuf> {
let typescript_index = codex_utils_cargo_bin::find_resource!("schema/typescript/index.ts")
.context("resolve TypeScript schema index.ts")?;
let schema_root = typescript_index
.parent()
.and_then(|parent| parent.parent())
.context("derive schema root from schema/typescript/index.ts")?
.to_path_buf();
Ok(schema_root)
}
#[test]
fn generate_ts_with_experimental_api_retains_experimental_entries() -> Result<()> {
let output_dir =
std::env::temp_dir().join(format!("codex_ts_types_experimental_{}", Uuid::now_v7()));
fs::create_dir(&output_dir)?;
struct TempDirGuard(PathBuf);
impl Drop for TempDirGuard {
fn drop(&mut self) {
let _ = fs::remove_dir_all(&self.0);
}
}
let _guard = TempDirGuard(output_dir.clone());
let options = GenerateTsOptions {
generate_indices: false,
ensure_headers: false,
run_prettier: false,
experimental_api: true,
};
generate_ts_with_options(&output_dir, None, options)?;
let client_request_ts = fs::read_to_string(output_dir.join("ClientRequest.ts"))?;
let client_request_ts = ClientRequest::export_to_string()?;
assert_eq!(client_request_ts.contains("mock/experimentalMethod"), true);
assert_eq!(
output_dir
.join("v2")
.join("MockExperimentalMethodParams.ts")
.exists(),
client_request_ts.contains("MockExperimentalMethodParams"),
true
);
assert_eq!(
output_dir
.join("v2")
.join("MockExperimentalMethodResponse.ts")
.exists(),
v2::MockExperimentalMethodParams::export_to_string()?
.contains("MockExperimentalMethodParams"),
true
);
assert_eq!(
v2::MockExperimentalMethodResponse::export_to_string()?
.contains("MockExperimentalMethodResponse"),
true
);
let thread_start_ts =
fs::read_to_string(output_dir.join("v2").join("ThreadStartParams.ts"))?;
let thread_start_ts = v2::ThreadStartParams::export_to_string()?;
assert_eq!(thread_start_ts.contains("mockExperimentalField"), true);
let command_execution_request_approval_ts = fs::read_to_string(
output_dir
.join("v2")
.join("CommandExecutionRequestApprovalParams.ts"),
)?;
let command_execution_request_approval_ts =
v2::CommandExecutionRequestApprovalParams::export_to_string()?;
assert_eq!(
command_execution_request_approval_ts.contains("additionalPermissions"),
true
@@ -2318,48 +2374,6 @@ mod tests {
Ok(())
}
#[test]
fn build_schema_bundle_rejects_conflicting_duplicate_definitions() {
let err = build_schema_bundle(vec![
GeneratedSchema {
namespace: Some("v2".to_string()),
logical_name: "First".to_string(),
in_v1_dir: false,
value: serde_json::json!({
"title": "First",
"type": "object",
"definitions": {
"Shared": {
"title": "SharedString",
"type": "string"
}
}
}),
},
GeneratedSchema {
namespace: Some("v2".to_string()),
logical_name: "Second".to_string(),
in_v1_dir: false,
value: serde_json::json!({
"title": "Second",
"type": "object",
"definitions": {
"Shared": {
"title": "SharedInteger",
"type": "integer"
}
}
}),
},
])
.expect_err("conflicting schema definitions should be rejected");
assert_eq!(
err.to_string(),
"schema definition collision in namespace `v2`: Shared (existing title: SharedString, new title: SharedInteger); use #[schemars(rename = \"...\")] to rename one of the conflicting schema definitions"
);
}
#[test]
fn build_flat_v2_schema_keeps_shared_root_schemas_and_dependencies() -> Result<()> {
let bundle = serde_json::json!({

View File

@@ -17,6 +17,9 @@ pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
pub use schema_fixtures::SchemaFixtureOptions;
#[doc(hidden)]
pub use schema_fixtures::generate_typescript_schema_fixture_subtree_for_tests;
pub use schema_fixtures::read_schema_fixture_subtree;
pub use schema_fixtures::read_schema_fixture_tree;
pub use schema_fixtures::write_schema_fixtures;
pub use schema_fixtures::write_schema_fixtures_with_options;

View File

@@ -151,6 +151,12 @@ macro_rules! client_request_definitions {
Ok(())
}
pub(crate) fn visit_client_response_types(v: &mut impl ::ts_rs::TypeVisitor) {
$(
v.visit::<$response>();
)*
}
#[allow(clippy::vec_init_then_push)]
pub fn export_client_response_schemas(
out_dir: &::std::path::Path,
@@ -525,6 +531,12 @@ macro_rules! server_request_definitions {
Ok(())
}
pub(crate) fn visit_server_response_types(v: &mut impl ::ts_rs::TypeVisitor) {
$(
v.visit::<$response>();
)*
}
#[allow(clippy::vec_init_then_push)]
pub fn export_server_response_schemas(
out_dir: &Path,

View File

@@ -1,11 +1,25 @@
use crate::ClientNotification;
use crate::ClientRequest;
use crate::ServerNotification;
use crate::ServerRequest;
use crate::export::GENERATED_TS_HEADER;
use crate::export::filter_experimental_ts_tree;
use crate::export::generate_index_ts_tree;
use crate::protocol::common::visit_client_response_types;
use crate::protocol::common::visit_server_response_types;
use anyhow::Context;
use anyhow::Result;
use codex_protocol::protocol::EventMsg;
use serde_json::Map;
use serde_json::Value;
use std::any::TypeId;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use ts_rs::TS;
use ts_rs::TypeVisitor;
#[derive(Clone, Copy, Debug, Default)]
pub struct SchemaFixtureOptions {
@@ -27,6 +41,42 @@ pub fn read_schema_fixture_tree(schema_root: &Path) -> Result<BTreeMap<PathBuf,
Ok(all)
}
pub fn read_schema_fixture_subtree(
schema_root: &Path,
label: &str,
) -> Result<BTreeMap<PathBuf, Vec<u8>>> {
let subtree_root = schema_root.join(label);
collect_files_recursive(&subtree_root)
.with_context(|| format!("read schema fixture subtree {}", subtree_root.display()))
}
#[doc(hidden)]
pub fn generate_typescript_schema_fixture_subtree_for_tests() -> Result<BTreeMap<PathBuf, Vec<u8>>>
{
let mut files = BTreeMap::new();
let mut seen = HashSet::new();
collect_typescript_fixture_file::<ClientRequest>(&mut files, &mut seen)?;
visit_typescript_fixture_dependencies(&mut files, &mut seen, |visitor| {
visit_client_response_types(visitor);
})?;
collect_typescript_fixture_file::<ClientNotification>(&mut files, &mut seen)?;
collect_typescript_fixture_file::<ServerRequest>(&mut files, &mut seen)?;
visit_typescript_fixture_dependencies(&mut files, &mut seen, |visitor| {
visit_server_response_types(visitor);
})?;
collect_typescript_fixture_file::<ServerNotification>(&mut files, &mut seen)?;
collect_typescript_fixture_file::<EventMsg>(&mut files, &mut seen)?;
filter_experimental_ts_tree(&mut files)?;
generate_index_ts_tree(&mut files);
Ok(files
.into_iter()
.map(|(path, content)| (path, content.into_bytes()))
.collect())
}
/// Regenerates `schema/typescript/` and `schema/json/`.
///
/// This is intended to be used by tooling (e.g., `just write-app-server-schema`).
@@ -86,6 +136,12 @@ fn read_file_bytes(path: &Path) -> Result<Vec<u8>> {
let text = String::from_utf8(bytes)
.with_context(|| format!("expected UTF-8 TypeScript in {}", path.display()))?;
let text = text.replace("\r\n", "\n").replace('\r', "\n");
// Fixture comparisons care about schema content, not whether the generator
// re-prepended the standard banner to every TypeScript file.
let text = text
.strip_prefix(GENERATED_TS_HEADER)
.unwrap_or(&text)
.to_string();
return Ok(text.into_bytes());
}
Ok(bytes)
@@ -209,6 +265,73 @@ fn collect_files_recursive(root: &Path) -> Result<BTreeMap<PathBuf, Vec<u8>>> {
Ok(files)
}
fn collect_typescript_fixture_file<T: TS + 'static + ?Sized>(
files: &mut BTreeMap<PathBuf, String>,
seen: &mut HashSet<TypeId>,
) -> Result<()> {
let Some(output_path) = T::output_path() else {
return Ok(());
};
if !seen.insert(TypeId::of::<T>()) {
return Ok(());
}
let contents = T::export_to_string().context("export TypeScript fixture content")?;
let output_path = normalize_relative_fixture_path(&output_path);
files.insert(
output_path,
contents.replace("\r\n", "\n").replace('\r', "\n"),
);
let mut visitor = TypeScriptFixtureCollector {
files,
seen,
error: None,
};
T::visit_dependencies(&mut visitor);
if let Some(error) = visitor.error {
return Err(error);
}
Ok(())
}
/// Rebuilds `path` from its parsed components, yielding a platform-native
/// relative path regardless of how the exporter spelled its separators.
fn normalize_relative_fixture_path(path: &Path) -> PathBuf {
    let mut normalized = PathBuf::new();
    for component in path.components() {
        normalized.push(component);
    }
    normalized
}
/// Runs `visit` with a collector that exports each visited type into `files`,
/// then surfaces the first error the collector recorded (visitor callbacks
/// themselves cannot return `Result`).
fn visit_typescript_fixture_dependencies(
    files: &mut BTreeMap<PathBuf, String>,
    seen: &mut HashSet<TypeId>,
    visit: impl FnOnce(&mut TypeScriptFixtureCollector<'_>),
) -> Result<()> {
    let mut collector = TypeScriptFixtureCollector {
        files,
        seen,
        error: None,
    };
    visit(&mut collector);
    match collector.error {
        Some(error) => Err(error),
        None => Ok(()),
    }
}
/// `TypeVisitor` implementation that exports each visited type's TypeScript
/// fixture into a shared tree.
///
/// Visitor callbacks cannot return errors, so the first failure is stashed in
/// `error` and checked by the driver after visiting completes.
struct TypeScriptFixtureCollector<'a> {
    // Destination tree: normalized fixture path -> exported TypeScript source.
    files: &'a mut BTreeMap<PathBuf, String>,
    // Type IDs already exported; prevents duplicate work and dependency cycles.
    seen: &'a mut HashSet<TypeId>,
    // First error encountered while visiting, surfaced by the caller.
    error: Option<anyhow::Error>,
}
impl TypeVisitor for TypeScriptFixtureCollector<'_> {
fn visit<T: TS + 'static + ?Sized>(&mut self) {
if self.error.is_some() {
return;
}
self.error = collect_typescript_fixture_file::<T>(self.files, self.seen).err();
}
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -1,19 +1,60 @@
use anyhow::Context;
use anyhow::Result;
use codex_app_server_protocol::read_schema_fixture_tree;
use codex_app_server_protocol::write_schema_fixtures;
use codex_app_server_protocol::generate_json_with_experimental;
use codex_app_server_protocol::generate_typescript_schema_fixture_subtree_for_tests;
use codex_app_server_protocol::read_schema_fixture_subtree;
use similar::TextDiff;
use std::collections::BTreeMap;
use std::path::Path;
use std::path::PathBuf;
#[test]
fn schema_fixtures_match_generated() -> Result<()> {
fn typescript_schema_fixtures_match_generated() -> Result<()> {
let schema_root = schema_root()?;
let fixture_tree = read_tree(&schema_root)?;
let fixture_tree = read_tree(&schema_root, "typescript")?;
let generated_tree = generate_typescript_schema_fixture_subtree_for_tests()
.context("generate in-memory typescript schema fixtures")?;
assert_schema_trees_match("typescript", &fixture_tree, &generated_tree)?;
Ok(())
}
#[test]
fn json_schema_fixtures_match_generated() -> Result<()> {
    // JSON fixture generation excludes experimental surface area.
    fn generate(output_dir: &Path) -> Result<()> {
        generate_json_with_experimental(output_dir, false)
    }
    assert_schema_fixtures_match_generated("json", generate)
}
fn assert_schema_fixtures_match_generated(
label: &'static str,
generate: impl FnOnce(&Path) -> Result<()>,
) -> Result<()> {
let schema_root = schema_root()?;
let fixture_tree = read_tree(&schema_root, label)?;
let temp_dir = tempfile::tempdir().context("create temp dir")?;
write_schema_fixtures(temp_dir.path(), None).context("generate schema fixtures")?;
let generated_tree = read_tree(temp_dir.path())?;
let generated_root = temp_dir.path().join(label);
generate(&generated_root).with_context(|| {
format!(
"generate {label} schema fixtures into {}",
generated_root.display()
)
})?;
let generated_tree = read_tree(temp_dir.path(), label)?;
assert_schema_trees_match(label, &fixture_tree, &generated_tree)?;
Ok(())
}
fn assert_schema_trees_match(
label: &str,
fixture_tree: &BTreeMap<PathBuf, Vec<u8>>,
generated_tree: &BTreeMap<PathBuf, Vec<u8>>,
) -> Result<()> {
let fixture_paths = fixture_tree
.keys()
.map(|p| p.display().to_string())
@@ -32,13 +73,13 @@ fn schema_fixtures_match_generated() -> Result<()> {
.to_string();
panic!(
"Vendored app-server schema fixture file set doesn't match freshly generated output. \
"Vendored {label} app-server schema fixture file set doesn't match freshly generated output. \
Run `just write-app-server-schema` to overwrite with your changes.\n\n{diff}"
);
}
// If the file sets match, diff contents for each file for a nicer error.
for (path, expected) in &fixture_tree {
for (path, expected) in fixture_tree {
let actual = generated_tree
.get(path)
.ok_or_else(|| anyhow::anyhow!("missing generated file: {}", path.display()))?;
@@ -54,7 +95,7 @@ Run `just write-app-server-schema` to overwrite with your changes.\n\n{diff}"
.header("fixture", "generated")
.to_string();
panic!(
"Vendored app-server schema fixture {} differs from generated output. \
"Vendored {label} app-server schema fixture {} differs from generated output. \
Run `just write-app-server-schema` to overwrite with your changes.\n\n{diff}",
path.display()
);
@@ -63,7 +104,7 @@ Run `just write-app-server-schema` to overwrite with your changes.\n\n{diff}",
Ok(())
}
fn schema_root() -> Result<std::path::PathBuf> {
fn schema_root() -> Result<PathBuf> {
// In Bazel runfiles (especially manifest-only mode), resolving directories is not
// reliable. Resolve a known file, then walk up to the schema root.
let typescript_index = codex_utils_cargo_bin::find_resource!("schema/typescript/index.ts")
@@ -92,6 +133,11 @@ fn schema_root() -> Result<std::path::PathBuf> {
Ok(schema_root)
}
fn read_tree(root: &Path) -> Result<std::collections::BTreeMap<std::path::PathBuf, Vec<u8>>> {
read_schema_fixture_tree(root).context("read schema fixture tree")
/// Reads the `label` subtree of the schema fixtures rooted at `root`,
/// attaching the location to any failure for easier diagnosis.
fn read_tree(root: &Path, label: &str) -> Result<BTreeMap<PathBuf, Vec<u8>>> {
    let describe = || {
        format!(
            "read {label} schema fixture subtree from {}",
            root.display()
        )
    };
    read_schema_fixture_subtree(root, label).with_context(describe)
}

View File

@@ -673,6 +673,32 @@ mod tests {
);
}
#[test]
fn root_write_policy_with_carveouts_still_uses_platform_sandbox() {
    // `:root` write access combined with an explicit deny entry is scoped
    // access with carveouts, so the platform sandbox must still be engaged.
    let blocked = AbsolutePathBuf::resolve_path_against_base(
        "blocked",
        std::env::current_dir().expect("current dir"),
    )
    .expect("blocked path");
    let policy = FileSystemSandboxPolicy::restricted(vec![
        FileSystemSandboxEntry {
            path: FileSystemPath::Special {
                value: FileSystemSpecialPath::Root,
            },
            access: FileSystemAccessMode::Write,
        },
        FileSystemSandboxEntry {
            path: FileSystemPath::Path { path: blocked },
            access: FileSystemAccessMode::None,
        },
    ]);
    // `assert!` rather than `assert_eq!(.., true)`: same check, and it is what
    // clippy's `bool_assert_comparison` lint prescribes.
    assert!(should_require_platform_sandbox(
        &policy,
        NetworkSandboxPolicy::Enabled,
        false
    ));
}
#[test]
fn full_access_restricted_policy_still_uses_platform_sandbox_for_restricted_network() {
let policy = FileSystemSandboxPolicy::restricted(vec![FileSystemSandboxEntry {

View File

@@ -123,6 +123,25 @@ impl Default for FileSystemSandboxPolicy {
}
impl FileSystemSandboxPolicy {
/// Whether a restricted policy contains a `:root` entry whose access mode
/// satisfies `predicate` (e.g. `can_read` / `can_write`). Always `false` for
/// non-restricted policy kinds.
fn has_root_access(&self, predicate: impl Fn(FileSystemAccessMode) -> bool) -> bool {
    if !matches!(self.kind, FileSystemSandboxKind::Restricted) {
        return false;
    }
    self.entries.iter().any(|entry| match &entry.path {
        FileSystemPath::Special {
            value: FileSystemSpecialPath::Root,
        } => predicate(entry.access),
        _ => false,
    })
}
/// Whether a restricted policy carries any explicit deny (`None`-access)
/// entries — i.e. carveouts that must remain blocked even under root access.
fn has_explicit_deny_entries(&self) -> bool {
    if !matches!(self.kind, FileSystemSandboxKind::Restricted) {
        return false;
    }
    self.entries
        .iter()
        .any(|entry| matches!(entry.access, FileSystemAccessMode::None))
}
pub fn unrestricted() -> Self {
Self {
kind: FileSystemSandboxKind::Unrestricted,
@@ -148,13 +167,10 @@ impl FileSystemSandboxPolicy {
pub fn has_full_disk_read_access(&self) -> bool {
match self.kind {
FileSystemSandboxKind::Unrestricted | FileSystemSandboxKind::ExternalSandbox => true,
FileSystemSandboxKind::Restricted => self.entries.iter().any(|entry| {
matches!(
&entry.path,
FileSystemPath::Special { value }
if matches!(value, FileSystemSpecialPath::Root) && entry.access.can_read()
)
}),
FileSystemSandboxKind::Restricted => {
self.has_root_access(FileSystemAccessMode::can_read)
&& !self.has_explicit_deny_entries()
}
}
}
@@ -162,14 +178,10 @@ impl FileSystemSandboxPolicy {
pub fn has_full_disk_write_access(&self) -> bool {
match self.kind {
FileSystemSandboxKind::Unrestricted | FileSystemSandboxKind::ExternalSandbox => true,
FileSystemSandboxKind::Restricted => self.entries.iter().any(|entry| {
matches!(
&entry.path,
FileSystemPath::Special { value }
if matches!(value, FileSystemSpecialPath::Root)
&& entry.access.can_write()
)
}),
FileSystemSandboxKind::Restricted => {
self.has_root_access(FileSystemAccessMode::can_write)
&& !self.has_explicit_deny_entries()
}
}
}
@@ -194,11 +206,24 @@ impl FileSystemSandboxPolicy {
}
let cwd_absolute = AbsolutePathBuf::from_absolute_path(cwd).ok();
let mut readable_roots = Vec::new();
if self.has_root_access(FileSystemAccessMode::can_read)
&& let Some(cwd_absolute) = cwd_absolute.as_ref()
{
readable_roots.push(absolute_root_path_for_cwd(cwd_absolute));
}
dedup_absolute_paths(
self.entries
.iter()
.filter(|entry| entry.access.can_read())
.filter_map(|entry| resolve_file_system_path(&entry.path, cwd_absolute.as_ref()))
readable_roots
.into_iter()
.chain(
self.entries
.iter()
.filter(|entry| entry.access.can_read())
.filter_map(|entry| {
resolve_file_system_path(&entry.path, cwd_absolute.as_ref())
}),
)
.collect(),
)
}
@@ -212,11 +237,24 @@ impl FileSystemSandboxPolicy {
let cwd_absolute = AbsolutePathBuf::from_absolute_path(cwd).ok();
let unreadable_roots = self.get_unreadable_roots_with_cwd(cwd);
let mut writable_roots = Vec::new();
if self.has_root_access(FileSystemAccessMode::can_write)
&& let Some(cwd_absolute) = cwd_absolute.as_ref()
{
writable_roots.push(absolute_root_path_for_cwd(cwd_absolute));
}
dedup_absolute_paths(
self.entries
.iter()
.filter(|entry| entry.access.can_write())
.filter_map(|entry| resolve_file_system_path(&entry.path, cwd_absolute.as_ref()))
writable_roots
.into_iter()
.chain(
self.entries
.iter()
.filter(|entry| entry.access.can_write())
.filter_map(|entry| {
resolve_file_system_path(&entry.path, cwd_absolute.as_ref())
}),
)
.collect(),
)
.into_iter()
@@ -543,6 +581,16 @@ fn resolve_file_system_path(
}
}
/// Returns the filesystem root containing `cwd` (e.g. `/` on Unix, the drive
/// or UNC prefix on Windows) as an `AbsolutePathBuf`.
///
/// # Panics
/// Panics if `cwd`'s root cannot be re-wrapped as an absolute path; for an
/// `AbsolutePathBuf` input this indicates a broken invariant, not user error.
fn absolute_root_path_for_cwd(cwd: &AbsolutePathBuf) -> AbsolutePathBuf {
    // `ancestors()` always yields at least the path itself, and an absolute
    // path's ancestor chain terminates in its root, so `last()` is never None.
    // `expect` states the invariant directly instead of `unwrap_or_else`
    // wrapping a constant-message `panic!`.
    let root = cwd
        .as_path()
        .ancestors()
        .last()
        .expect("cwd must have a filesystem root");
    AbsolutePathBuf::from_absolute_path(root)
        .unwrap_or_else(|err| panic!("cwd root must be an absolute path: {err}"))
}
fn resolve_file_system_special_path(
value: &FileSystemSpecialPath,
cwd: Option<&AbsolutePathBuf>,

View File

@@ -3352,6 +3352,56 @@ mod tests {
assert!(writable.has_full_disk_write_access());
}
#[test]
fn restricted_file_system_policy_treats_root_with_carveouts_as_scoped_access() {
    // Root-level write access plus an explicit deny entry must read as scoped
    // access with carveouts, never as full-disk access.
    let tmp = TempDir::new().expect("tempdir");
    let tmp_absolute =
        AbsolutePathBuf::from_absolute_path(tmp.path()).expect("absolute tempdir");
    let fs_root = tmp_absolute
        .as_path()
        .ancestors()
        .last()
        .and_then(|path| AbsolutePathBuf::from_absolute_path(path).ok())
        .expect("filesystem root");
    let denied = AbsolutePathBuf::resolve_path_against_base("blocked", tmp.path())
        .expect("resolve blocked");
    let policy = FileSystemSandboxPolicy::restricted(vec![
        FileSystemSandboxEntry {
            path: FileSystemPath::Special {
                value: FileSystemSpecialPath::Root,
            },
            access: FileSystemAccessMode::Write,
        },
        FileSystemSandboxEntry {
            path: FileSystemPath::Path {
                path: denied.clone(),
            },
            access: FileSystemAccessMode::None,
        },
    ]);
    // The carveout downgrades root access from "full disk" to scoped.
    assert!(!policy.has_full_disk_read_access());
    assert!(!policy.has_full_disk_write_access());
    // The filesystem root stays readable, with the denied path surfaced as an
    // unreadable root.
    assert_eq!(
        policy.get_readable_roots_with_cwd(tmp.path()),
        vec![fs_root.clone()]
    );
    assert_eq!(
        policy.get_unreadable_roots_with_cwd(tmp.path()),
        vec![denied.clone()]
    );
    // The single writable root is the filesystem root, and the denied path is
    // preserved among its read-only subpaths.
    let writable = policy.get_writable_roots_with_cwd(tmp.path());
    assert_eq!(writable.len(), 1);
    assert_eq!(writable[0].root, fs_root);
    assert!(
        writable[0]
            .read_only_subpaths
            .iter()
            .any(|path| path.as_path() == denied.as_path())
    );
}
#[test]
fn restricted_file_system_policy_derives_effective_paths() {
let cwd = TempDir::new().expect("tempdir");