Compare commits

...

9 Commits

Author SHA1 Message Date
starr-openai
150cb18230 Explore executor-bound path API
Adds executor-bound path helpers and threads them through skills, config loading, AGENTS.md loading, view_image, and apply_patch to evaluate the API blast radius. Harness-owned reads use explicit unsandboxed access while tool-call paths carry the sandbox context through the path access wrapper.

Validation: just fmt; git diff --check.

Co-authored-by: Codex <noreply@openai.com>
2026-04-16 18:16:56 -07:00
pakrym-oai
dc930b896c Normalize git trust resolver test paths 2026-04-16 17:12:43 -07:00
pakrym-oai
db6b23627e Merge origin/main into use-filesystem-abstraction 2026-04-16 15:49:21 -07:00
pakrym-oai
7be97a77e8 Restore realtime startup context budget test 2026-04-16 15:47:33 -07:00
pakrym-oai
3216503a13 Simplify realtime context fs-aware paths 2026-04-16 15:36:43 -07:00
pakrym-oai
b67808dd0d Keep model catalog loading local 2026-04-16 15:20:56 -07:00
pakrym-oai
c3fc3f1954 Reduce absolute path churn in fs-aware loading 2026-04-16 15:18:33 -07:00
pakrym-oai
90545d9fc0 Route config loading through the filesystem abstraction 2026-04-16 14:17:34 -07:00
pakrym-oai
90cd4f3557 Pass filesystem abstraction into config loading 2026-04-16 13:08:32 -07:00
44 changed files with 1375 additions and 710 deletions

7
codex-rs/Cargo.lock generated
View File

@@ -1540,7 +1540,6 @@ dependencies = [
"anyhow",
"clap",
"codex-experimental-api-macros",
"codex-git-utils",
"codex-protocol",
"codex-shell-command",
"codex-utils-absolute-path",
@@ -1866,12 +1865,11 @@ dependencies = [
"codex-app-server-protocol",
"codex-execpolicy",
"codex-features",
"codex-git-utils",
"codex-model-provider-info",
"codex-network-proxy",
"codex-protocol",
"codex-utils-absolute-path",
"dunce",
"codex-utils-path",
"futures",
"multimap",
"pretty_assertions",
@@ -2264,6 +2262,8 @@ name = "codex-git-utils"
version = "0.0.0"
dependencies = [
"assert_matches",
"codex-exec-server",
"codex-protocol",
"codex-utils-absolute-path",
"futures",
"once_cell",
@@ -2620,7 +2620,6 @@ dependencies = [
"chrono",
"codex-async-utils",
"codex-execpolicy",
"codex-git-utils",
"codex-network-proxy",
"codex-utils-absolute-path",
"codex-utils-image",

View File

@@ -15,7 +15,6 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-experimental-api-macros = { workspace = true }
codex-git-utils = { workspace = true }
codex-protocol = { workspace = true }
codex-shell-command = { workspace = true }
codex-utils-absolute-path = { workspace = true }

View File

@@ -4,7 +4,6 @@ mod jsonrpc_lite;
mod protocol;
mod schema_fixtures;
pub use codex_git_utils::GitSha;
pub use experimental_api::*;
pub use export::GenerateTsOptions;
pub use export::generate_internal_json_schema;
@@ -30,6 +29,7 @@ pub use protocol::v1::GetConversationSummaryParams;
pub use protocol::v1::GetConversationSummaryResponse;
pub use protocol::v1::GitDiffToRemoteParams;
pub use protocol::v1::GitDiffToRemoteResponse;
pub use protocol::v1::GitSha;
pub use protocol::v1::InitializeCapabilities;
pub use protocol::v1::InitializeParams;
pub use protocol::v1::InitializeResponse;

View File

@@ -1,7 +1,6 @@
use std::collections::HashMap;
use std::path::PathBuf;
use codex_git_utils::GitSha;
use codex_protocol::ThreadId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
@@ -11,6 +10,7 @@ use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::FileChange;
pub use codex_protocol::protocol::GitSha;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;

View File

@@ -221,8 +221,9 @@ use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config_loader::CloudRequirementsLoadError;
use codex_core::config_loader::CloudRequirementsLoadErrorCode;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLoadFileSystems;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::load_config_layers_state;
use codex_core::config_loader::load_config_layers_state_with_file_systems;
use codex_core::config_loader::project_trust_key;
use codex_core::exec::ExecCapturePolicy;
use codex_core::exec::ExecExpiration;
@@ -251,6 +252,7 @@ use codex_core_plugins::loader::load_plugin_mcp_servers;
use codex_core_plugins::manifest::PluginManifestInterface;
use codex_core_plugins::marketplace::MarketplaceError;
use codex_core_plugins::marketplace::MarketplacePluginSource;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::LOCAL_FS;
use codex_features::FEATURES;
use codex_features::Feature;
@@ -621,9 +623,25 @@ pub(crate) struct CodexMessageProcessorArgs {
}
impl CodexMessageProcessor {
async fn instruction_sources_from_config(config: &Config) -> Vec<AbsolutePathBuf> {
async fn thread_filesystem_or_local(
thread_manager: &ThreadManager,
) -> Arc<dyn ExecutorFileSystem> {
match thread_manager.environment_manager().current().await {
Ok(Some(environment)) => environment.get_filesystem(),
Ok(None) => Arc::clone(&LOCAL_FS),
Err(err) => {
warn!("failed to get current environment filesystem: {err}");
Arc::clone(&LOCAL_FS)
}
}
}
async fn instruction_sources_from_config(
config: &Config,
fs: Arc<dyn ExecutorFileSystem>,
) -> Vec<AbsolutePathBuf> {
codex_core::AgentsMdManager::new(config)
.instruction_sources(LOCAL_FS.as_ref())
.instruction_sources(fs.as_ref())
.await
}
@@ -2417,6 +2435,8 @@ impl CodexMessageProcessor {
return;
}
};
let thread_fs =
Self::thread_filesystem_or_local(&listener_task_context.thread_manager).await;
// The user may have requested WorkspaceWrite or DangerFullAccess via
// the command line, though in the process of deriving the Config, it
@@ -2441,9 +2461,9 @@ impl CodexMessageProcessor {
| codex_protocol::protocol::SandboxPolicy::ExternalSandbox { .. }
))
{
let trust_target = resolve_root_git_project_for_trust(config.cwd.as_path())
let trust_target = resolve_root_git_project_for_trust(thread_fs.as_ref(), &config.cwd)
.await
.unwrap_or_else(|| config.cwd.to_path_buf());
.unwrap_or_else(|| config.cwd.clone());
let cli_overrides_with_trust;
let cli_overrides_for_reload = if let Err(err) =
codex_core::config::set_project_trust_level(
@@ -2500,7 +2520,8 @@ impl CodexMessageProcessor {
};
}
let instruction_sources = Self::instruction_sources_from_config(&config).await;
let instruction_sources =
Self::instruction_sources_from_config(&config, thread_fs.clone()).await;
let dynamic_tools = dynamic_tools.unwrap_or_default();
let core_dynamic_tools = if dynamic_tools.is_empty() {
Vec::new()
@@ -4092,7 +4113,9 @@ impl CodexMessageProcessor {
};
let fallback_model_provider = config.model_provider_id.clone();
let instruction_sources = Self::instruction_sources_from_config(&config).await;
let instruction_sources_fs = Self::thread_filesystem_or_local(&self.thread_manager).await;
let instruction_sources =
Self::instruction_sources_from_config(&config, instruction_sources_fs).await;
let response_history = thread_history.clone();
match self
@@ -4357,8 +4380,13 @@ impl CodexMessageProcessor {
}
let mut config_for_instruction_sources = self.config.as_ref().clone();
config_for_instruction_sources.cwd = config_snapshot.cwd.clone();
let instruction_sources =
Self::instruction_sources_from_config(&config_for_instruction_sources).await;
let instruction_sources_fs =
Self::thread_filesystem_or_local(&self.thread_manager).await;
let instruction_sources = Self::instruction_sources_from_config(
&config_for_instruction_sources,
instruction_sources_fs,
)
.await;
let thread_summary = match load_thread_summary_for_rollout(
&self.config,
existing_thread_id,
@@ -4672,7 +4700,9 @@ impl CodexMessageProcessor {
};
let fallback_model_provider = config.model_provider_id.clone();
let instruction_sources = Self::instruction_sources_from_config(&config).await;
let instruction_sources_fs = Self::thread_filesystem_or_local(&self.thread_manager).await;
let instruction_sources =
Self::instruction_sources_from_config(&config, instruction_sources_fs).await;
let NewThread {
thread_id,
@@ -6116,30 +6146,36 @@ impl CodexMessageProcessor {
};
let skills_manager = self.thread_manager.skills_manager();
let plugins_manager = self.thread_manager.plugins_manager();
let fs = match self.thread_manager.environment_manager().current().await {
Ok(Some(environment)) => Some(environment.get_filesystem()),
Ok(None) => None,
Err(err) => {
self.outgoing
.send_error(
request_id,
JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to create environment: {err}"),
data: None,
},
)
.await;
return;
}
};
let (fs, remote_project_fs) =
match self.thread_manager.environment_manager().current().await {
Ok(Some(environment)) => (environment.get_filesystem(), environment.is_remote()),
Ok(None) => (Arc::clone(&LOCAL_FS), false),
Err(err) => {
self.outgoing
.send_error(
request_id,
JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to create environment: {err}"),
data: None,
},
)
.await;
return;
}
};
let cli_overrides = self.current_cli_overrides();
let mut data = Vec::new();
for cwd in cwds {
let extra_roots = extra_roots_by_cwd
.get(&cwd)
.map_or(&[][..], std::vec::Vec::as_slice);
let cwd_abs = match AbsolutePathBuf::relative_to_current_dir(cwd.as_path()) {
let cwd_abs = if remote_project_fs {
AbsolutePathBuf::from_absolute_path_checked(cwd.as_path())
} else {
AbsolutePathBuf::relative_to_current_dir(cwd.as_path())
};
let cwd_abs = match cwd_abs {
Ok(path) => path,
Err(err) => {
let error_path = cwd.clone();
@@ -6154,7 +6190,8 @@ impl CodexMessageProcessor {
continue;
}
};
let config_layer_stack = match load_config_layers_state(
let config_layer_stack = match load_config_layers_state_with_file_systems(
ConfigLoadFileSystems::same(fs.as_ref()),
&self.config.codex_home,
Some(cwd_abs.clone()),
&cli_overrides,
@@ -6194,7 +6231,7 @@ impl CodexMessageProcessor {
&skills_input,
force_reload,
extra_roots,
fs.clone(),
Some(Arc::clone(&fs)),
)
.await;
let errors = errors_to_info(&outcome.errors);

View File

@@ -1,6 +1,7 @@
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::PathBufExt;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
@@ -20,6 +21,7 @@ use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadStatusChangedNotification;
use codex_config::types::AuthCredentialsStoreMode;
use codex_core::config::set_project_trust_level;
use codex_exec_server::LOCAL_FS;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_login::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use codex_protocol::config_types::ServiceTier;
@@ -716,10 +718,11 @@ model_reasoning_effort = "high"
assert_eq!(reasoning_effort, Some(ReasoningEffort::High));
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
let trusted_root = resolve_root_git_project_for_trust(workspace.path())
let workspace_abs = workspace.path().to_path_buf().abs();
let trusted_root = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &workspace_abs)
.await
.unwrap_or_else(|| workspace.path().to_path_buf());
assert!(config_toml.contains(&persisted_trust_path(&trusted_root)));
.unwrap_or(workspace_abs);
assert!(config_toml.contains(&persisted_trust_path(trusted_root.as_path())));
assert!(config_toml.contains("trust_level = \"trusted\""));
Ok(())
@@ -754,10 +757,11 @@ async fn thread_start_with_nested_git_cwd_trusts_repo_root() -> Result<()> {
.await??;
let config_toml = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
let trusted_root = resolve_root_git_project_for_trust(&nested)
let nested_abs = nested.abs();
let trusted_root = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &nested_abs)
.await
.expect("git root should resolve");
assert!(config_toml.contains(&persisted_trust_path(&trusted_root)));
assert!(config_toml.contains(&persisted_trust_path(trusted_root.as_path())));
assert!(!config_toml.contains(&persisted_trust_path(&nested)));
Ok(())

View File

@@ -3,6 +3,7 @@ use std::path::Path;
use std::sync::LazyLock;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_utils_absolute_path::AbsolutePathBuf;
use tree_sitter::Parser;
use tree_sitter::Query;
@@ -20,7 +21,7 @@ use crate::MaybeApplyPatchVerified;
use crate::parser::Hunk;
use crate::parser::ParseError;
use crate::parser::parse_patch;
use crate::unified_diff_from_chunks;
use crate::unified_diff_from_chunks_at;
use std::str::Utf8Error;
use tree_sitter::LanguageError;
@@ -162,16 +163,16 @@ pub async fn maybe_parse_apply_patch_verified(
.unwrap_or_else(|| cwd.clone());
let mut changes = HashMap::new();
for hunk in hunks {
let path = hunk.resolve_path(&effective_cwd);
let path = ExecutorPathRef::new(fs, hunk.resolve_path(&effective_cwd));
match hunk {
Hunk::AddFile { contents, .. } => {
changes.insert(
path.into_path_buf(),
path.to_path_buf(),
ApplyPatchFileChange::Add { content: contents },
);
}
Hunk::DeleteFile { .. } => {
let content = match fs.read_file_text(&path, sandbox).await {
let content = match path.with_sandbox(sandbox).read_file_text().await {
Ok(content) => content,
Err(e) => {
return MaybeApplyPatchVerified::CorrectnessError(
@@ -182,10 +183,8 @@ pub async fn maybe_parse_apply_patch_verified(
);
}
};
changes.insert(
path.into_path_buf(),
ApplyPatchFileChange::Delete { content },
);
changes
.insert(path.to_path_buf(), ApplyPatchFileChange::Delete { content });
}
Hunk::UpdateFile {
move_path, chunks, ..
@@ -193,14 +192,14 @@ pub async fn maybe_parse_apply_patch_verified(
let ApplyPatchFileUpdate {
unified_diff,
content: contents,
} = match unified_diff_from_chunks(&path, &chunks, fs, sandbox).await {
} = match unified_diff_from_chunks_at(&path, &chunks, sandbox).await {
Ok(diff) => diff,
Err(e) => {
return MaybeApplyPatchVerified::CorrectnessError(e);
}
};
changes.insert(
path.into_path_buf(),
path.to_path_buf(),
ApplyPatchFileChange::Update {
unified_diff,
move_path: move_path.map(|p| effective_cwd.join(p).into_path_buf()),

View File

@@ -12,6 +12,7 @@ use anyhow::Context;
use anyhow::Result;
use codex_exec_server::CreateDirectoryOptions;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_exec_server::FileSystemSandboxContext;
use codex_exec_server::RemoveOptions;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -224,7 +225,8 @@ pub async fn apply_hunks(
sandbox: Option<&FileSystemSandboxContext>,
) -> Result<(), ApplyPatchError> {
// Delegate to a helper that applies each hunk to the filesystem.
match apply_hunks_to_files(hunks, cwd, fs, sandbox).await {
let cwd = ExecutorPathRef::new(fs, cwd.clone());
match apply_hunks_to_files(hunks, &cwd, sandbox).await {
Ok(affected) => {
print_summary(&affected, stdout).map_err(ApplyPatchError::from)?;
Ok(())
@@ -258,8 +260,7 @@ pub struct AffectedPaths {
/// Returns an error if the patch could not be applied.
async fn apply_hunks_to_files(
hunks: &[Hunk],
cwd: &AbsolutePathBuf,
fs: &dyn ExecutorFileSystem,
cwd: &ExecutorPathRef<'_>,
sandbox: Option<&FileSystemSandboxContext>,
) -> anyhow::Result<AffectedPaths> {
if hunks.is_empty() {
@@ -271,82 +272,71 @@ async fn apply_hunks_to_files(
let mut deleted: Vec<PathBuf> = Vec::new();
for hunk in hunks {
let affected_path = hunk.path().to_path_buf();
let path_abs = hunk.resolve_path(cwd);
let path = cwd.with_path(hunk.resolve_path(cwd.path()));
match hunk {
Hunk::AddFile { contents, .. } => {
write_file_with_missing_parent_retry(
fs,
&path_abs,
contents.clone().into_bytes(),
sandbox,
)
.await?;
write_file_with_missing_parent_retry(&path, contents.clone().into_bytes(), sandbox)
.await?;
added.push(affected_path);
}
Hunk::DeleteFile { .. } => {
let result: io::Result<()> = async {
let metadata = fs.get_metadata(&path_abs, sandbox).await?;
let path_access = path.with_sandbox(sandbox);
let metadata = path_access.get_metadata().await?;
if metadata.is_directory {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"path is a directory",
));
}
fs.remove(
&path_abs,
RemoveOptions {
path_access
.remove(RemoveOptions {
recursive: false,
force: false,
},
sandbox,
)
.await
})
.await
}
.await;
result.with_context(|| format!("Failed to delete file {}", path_abs.display()))?;
result.with_context(|| format!("Failed to delete file {}", path.display()))?;
deleted.push(affected_path);
}
Hunk::UpdateFile {
move_path, chunks, ..
} => {
let AppliedPatch { new_contents, .. } =
derive_new_contents_from_chunks(&path_abs, chunks, fs, sandbox).await?;
derive_new_contents_from_chunks(&path, chunks, sandbox).await?;
if let Some(dest) = move_path {
let dest_abs = AbsolutePathBuf::resolve_path_against_base(dest, cwd);
write_file_with_missing_parent_retry(
fs,
&dest_abs,
new_contents.into_bytes(),
sandbox,
)
.await?;
let dest = cwd.with_path(AbsolutePathBuf::resolve_path_against_base(
dest,
cwd.path().as_path(),
));
write_file_with_missing_parent_retry(&dest, new_contents.into_bytes(), sandbox)
.await?;
let result: io::Result<()> = async {
let metadata = fs.get_metadata(&path_abs, sandbox).await?;
let path_access = path.with_sandbox(sandbox);
let metadata = path_access.get_metadata().await?;
if metadata.is_directory {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"path is a directory",
));
}
fs.remove(
&path_abs,
RemoveOptions {
path_access
.remove(RemoveOptions {
recursive: false,
force: false,
},
sandbox,
)
.await
})
.await
}
.await;
result.with_context(|| {
format!("Failed to remove original {}", path_abs.display())
})?;
result
.with_context(|| format!("Failed to remove original {}", path.display()))?;
modified.push(affected_path);
} else {
fs.write_file(&path_abs, new_contents.into_bytes(), sandbox)
path.with_sandbox(sandbox)
.write_file(new_contents.into_bytes())
.await
.with_context(|| format!("Failed to write file {}", path_abs.display()))?;
.with_context(|| format!("Failed to write file {}", path.display()))?;
modified.push(affected_path);
}
}
@@ -360,36 +350,33 @@ async fn apply_hunks_to_files(
}
async fn write_file_with_missing_parent_retry(
fs: &dyn ExecutorFileSystem,
path_abs: &AbsolutePathBuf,
path: &ExecutorPathRef<'_>,
contents: Vec<u8>,
sandbox: Option<&FileSystemSandboxContext>,
) -> anyhow::Result<()> {
match fs.write_file(path_abs, contents.clone(), sandbox).await {
match path
.with_sandbox(sandbox)
.write_file(contents.clone())
.await
{
Ok(()) => Ok(()),
Err(err) if err.kind() == io::ErrorKind::NotFound => {
if let Some(parent_abs) = path_abs.parent() {
fs.create_directory(
&parent_abs,
CreateDirectoryOptions { recursive: true },
sandbox,
)
.await
.with_context(|| {
format!(
"Failed to create parent directories for {}",
path_abs.display()
)
})?;
if let Some(parent) = path.parent() {
parent
.with_sandbox(sandbox)
.create_directory(CreateDirectoryOptions { recursive: true })
.await
.with_context(|| {
format!("Failed to create parent directories for {}", path.display())
})?;
}
fs.write_file(path_abs, contents, sandbox)
path.with_sandbox(sandbox)
.write_file(contents)
.await
.with_context(|| format!("Failed to write file {}", path_abs.display()))?;
.with_context(|| format!("Failed to write file {}", path.display()))?;
Ok(())
}
Err(err) => {
Err(err).with_context(|| format!("Failed to write file {}", path_abs.display()))
}
Err(err) => Err(err).with_context(|| format!("Failed to write file {}", path.display())),
}
}
@@ -401,17 +388,20 @@ struct AppliedPatch {
/// Return *only* the new file contents (joined into a single `String`) after
/// applying the chunks to the file at `path`.
async fn derive_new_contents_from_chunks(
path_abs: &AbsolutePathBuf,
path: &ExecutorPathRef<'_>,
chunks: &[UpdateFileChunk],
fs: &dyn ExecutorFileSystem,
sandbox: Option<&FileSystemSandboxContext>,
) -> std::result::Result<AppliedPatch, ApplyPatchError> {
let original_contents = fs.read_file_text(path_abs, sandbox).await.map_err(|err| {
ApplyPatchError::IoError(IoError {
context: format!("Failed to read file to update {}", path_abs.display()),
source: err,
})
})?;
let original_contents = path
.with_sandbox(sandbox)
.read_file_text()
.await
.map_err(|err| {
ApplyPatchError::IoError(IoError {
context: format!("Failed to read file to update {}", path.display()),
source: err,
})
})?;
let mut original_lines: Vec<String> = original_contents.split('\n').map(String::from).collect();
@@ -421,7 +411,7 @@ async fn derive_new_contents_from_chunks(
original_lines.pop();
}
let replacements = compute_replacements(&original_lines, path_abs.as_path(), chunks)?;
let replacements = compute_replacements(&original_lines, path.path().as_path(), chunks)?;
let new_lines = apply_replacements(original_lines, &replacements);
let mut new_lines = new_lines;
if !new_lines.last().is_some_and(String::is_empty) {
@@ -568,7 +558,8 @@ pub async fn unified_diff_from_chunks(
fs: &dyn ExecutorFileSystem,
sandbox: Option<&FileSystemSandboxContext>,
) -> std::result::Result<ApplyPatchFileUpdate, ApplyPatchError> {
unified_diff_from_chunks_with_context(path_abs, chunks, /*context*/ 1, fs, sandbox).await
let path = ExecutorPathRef::new(fs, path_abs.clone());
unified_diff_from_chunks_at(&path, chunks, sandbox).await
}
pub async fn unified_diff_from_chunks_with_context(
@@ -577,11 +568,29 @@ pub async fn unified_diff_from_chunks_with_context(
context: usize,
fs: &dyn ExecutorFileSystem,
sandbox: Option<&FileSystemSandboxContext>,
) -> std::result::Result<ApplyPatchFileUpdate, ApplyPatchError> {
let path = ExecutorPathRef::new(fs, path_abs.clone());
unified_diff_from_chunks_with_context_at(&path, chunks, context, sandbox).await
}
pub async fn unified_diff_from_chunks_at(
path: &ExecutorPathRef<'_>,
chunks: &[UpdateFileChunk],
sandbox: Option<&FileSystemSandboxContext>,
) -> std::result::Result<ApplyPatchFileUpdate, ApplyPatchError> {
unified_diff_from_chunks_with_context_at(path, chunks, /*context*/ 1, sandbox).await
}
pub async fn unified_diff_from_chunks_with_context_at(
path: &ExecutorPathRef<'_>,
chunks: &[UpdateFileChunk],
context: usize,
sandbox: Option<&FileSystemSandboxContext>,
) -> std::result::Result<ApplyPatchFileUpdate, ApplyPatchError> {
let AppliedPatch {
original_contents,
new_contents,
} = derive_new_contents_from_chunks(path_abs, chunks, fs, sandbox).await?;
} = derive_new_contents_from_chunks(path, chunks, sandbox).await?;
let text_diff = TextDiff::from_lines(&original_contents, &new_contents);
let unified_diff = text_diff.unified_diff().context_radius(context).to_string();
Ok(ApplyPatchFileUpdate {

View File

@@ -12,12 +12,11 @@ anyhow = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-execpolicy = { workspace = true }
codex-features = { workspace = true }
codex-git-utils = { workspace = true }
codex-model-provider-info = { workspace = true }
codex-network-proxy = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
dunce = { workspace = true }
codex-utils-path = { workspace = true }
futures = { workspace = true, features = ["alloc", "std"] }
multimap = { workspace = true }
schemars = { workspace = true }

View File

@@ -29,7 +29,6 @@ use crate::types::WindowsToml;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_features::FeaturesToml;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_model_provider_info::LEGACY_OLLAMA_CHAT_PROVIDER_ID;
use codex_model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;
use codex_model_provider_info::ModelProviderInfo;
@@ -51,6 +50,7 @@ use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::ReadOnlyAccess;
use codex_protocol::protocol::SandboxPolicy;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_path::normalize_for_path_comparison;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Deserializer;
@@ -601,7 +601,7 @@ impl ConfigToml {
sandbox_mode_override: Option<SandboxMode>,
profile_sandbox_mode: Option<SandboxMode>,
windows_sandbox_level: WindowsSandboxLevel,
resolved_cwd: &Path,
active_project: Option<&ProjectConfig>,
sandbox_policy_constraint: Option<&crate::Constrained<SandboxPolicy>>,
) -> SandboxPolicy {
let sandbox_mode_was_explicit = sandbox_mode_override.is_some()
@@ -616,7 +616,7 @@ impl ConfigToml {
// If no sandbox_mode is set but this directory has a trust decision,
// default to workspace-write except on unsandboxed Windows where we
// default to read-only.
self.get_active_project(resolved_cwd).await.and_then(|p| {
active_project.and_then(|p| {
if p.is_trusted() || p.is_untrusted() {
if cfg!(target_os = "windows")
&& windows_sandbox_level == WindowsSandboxLevel::Disabled
@@ -677,9 +677,13 @@ impl ConfigToml {
}
/// Resolves the cwd to an existing project, or returns None if ConfigToml
/// does not contain a project corresponding to cwd or a git repo for cwd
pub async fn get_active_project(&self, resolved_cwd: &Path) -> Option<ProjectConfig> {
let repo_root = resolve_root_git_project_for_trust(resolved_cwd).await;
/// does not contain a project corresponding to cwd or the resolved git repo
/// root for cwd.
pub fn get_active_project(
&self,
resolved_cwd: &Path,
repo_root: Option<&Path>,
) -> Option<ProjectConfig> {
let projects = self.projects.clone().unwrap_or_default();
let resolved_cwd_key = project_trust_key(resolved_cwd);
@@ -691,10 +695,7 @@ impl ConfigToml {
return Some(project_config.clone());
}
// If cwd lives inside a git repo/worktree, check whether the root git project
// (the primary repository working directory) is trusted. This lets
// worktrees inherit trust from the main project.
if let Some(repo_root) = repo_root.as_deref() {
if let Some(repo_root) = repo_root {
let repo_root_key = project_trust_key(repo_root);
let repo_root_raw_key = repo_root.to_string_lossy().to_string();
if let Some(project_config_for_root) = projects
@@ -734,7 +735,7 @@ impl ConfigToml {
/// projects trust map. On Windows, strips UNC, when possible, to try to ensure
/// that different paths that point to the same location have the same key.
fn project_trust_key(project_path: &Path) -> String {
dunce::canonicalize(project_path)
normalize_for_path_comparison(project_path)
.unwrap_or_else(|_| project_path.to_path_buf())
.to_string_lossy()
.to_string()

View File

@@ -14,6 +14,7 @@ use codex_core_skills::config_rules::resolve_disabled_skill_paths;
use codex_core_skills::config_rules::skill_config_rules_from_stack;
use codex_core_skills::loader::SkillRoot;
use codex_core_skills::loader::load_skills_from_roots;
use codex_exec_server::ExecutorPath;
use codex_exec_server::LOCAL_FS;
use codex_plugin::AppConnectorId;
use codex_plugin::LoadedPlugin;
@@ -532,9 +533,8 @@ pub async fn load_plugin_skills(
let roots = plugin_skill_roots(plugin_root, manifest_paths)
.into_iter()
.map(|path| SkillRoot {
path,
path: ExecutorPath::new(Arc::clone(&LOCAL_FS), path),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
})
.collect::<Vec<_>>();
let outcome = load_skills_from_roots(roots).await;

View File

@@ -9,7 +9,8 @@ use codex_analytics::AnalyticsEventsClient;
use codex_analytics::InvocationType;
use codex_analytics::SkillInvocation;
use codex_analytics::TrackEventsContext;
use codex_exec_server::LOCAL_FS;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPath;
use codex_instructions::SkillInstructions;
use codex_otel::SessionTelemetry;
use codex_protocol::models::ResponseItem;
@@ -26,6 +27,7 @@ pub struct SkillInjections {
pub async fn build_skill_injections(
mentioned_skills: &[SkillMetadata],
loaded_skills: Option<&SkillLoadOutcome>,
fs: Arc<dyn ExecutorFileSystem>,
otel: Option<&SessionTelemetry>,
analytics_client: &AnalyticsEventsClient,
tracking: TrackEventsContext,
@@ -41,13 +43,10 @@ pub async fn build_skill_injections(
let mut invocations = Vec::new();
for skill in mentioned_skills {
let fs = loaded_skills
.and_then(|outcome| outcome.file_system_for_skill(skill))
.unwrap_or_else(|| Arc::clone(&LOCAL_FS));
match fs
.read_file_text(&skill.path_to_skills_md, /*sandbox*/ None)
.await
{
let source = loaded_skills
.and_then(|outcome| outcome.source_for_skill(skill))
.unwrap_or_else(|| ExecutorPath::new(Arc::clone(&fs), skill.path_to_skills_md.clone()));
match source.unsandboxed().read_file_text().await {
Ok(contents) => {
emit_skill_injected_metric(otel, skill, "ok");
invocations.push(SkillInvocation {

View File

@@ -1,10 +1,10 @@
use crate::model::SkillDependencies;
use crate::model::SkillError;
use crate::model::SkillFileSystemsByPath;
use crate::model::SkillInterface;
use crate::model::SkillLoadOutcome;
use crate::model::SkillMetadata;
use crate::model::SkillPolicy;
use crate::model::SkillSourcesByPath;
use crate::model::SkillToolDependency;
use crate::system::system_cache_root_dir;
use codex_app_server_protocol::ConfigLayerSource;
@@ -14,6 +14,7 @@ use codex_config::default_project_root_markers;
use codex_config::merge_toml_values;
use codex_config::project_root_markers_from_config;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPath;
use codex_exec_server::LOCAL_FS;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
@@ -149,9 +150,8 @@ impl fmt::Display for SkillParseError {
impl Error for SkillParseError {}
pub struct SkillRoot {
pub path: AbsolutePathBuf,
pub path: ExecutorPath,
pub scope: SkillScope,
pub file_system: Arc<dyn ExecutorFileSystem>,
}
pub async fn load_skills_from_roots<I>(roots: I) -> SkillLoadOutcome
@@ -159,16 +159,14 @@ where
I: IntoIterator<Item = SkillRoot>,
{
let mut outcome = SkillLoadOutcome::default();
let mut file_systems_by_skill_path: HashMap<AbsolutePathBuf, Arc<dyn ExecutorFileSystem>> =
HashMap::new();
let mut sources_by_skill_path: HashMap<AbsolutePathBuf, ExecutorPath> = HashMap::new();
for root in roots {
let fs = root.file_system;
let skills_before_root = outcome.skills.len();
discover_skills_under_root(fs.as_ref(), &root.path, root.scope, &mut outcome).await;
discover_skills_under_root(&root.path, root.scope, &mut outcome).await;
for skill in &outcome.skills[skills_before_root..] {
file_systems_by_skill_path
sources_by_skill_path
.entry(skill.path_to_skills_md.clone())
.or_insert_with(|| Arc::clone(&fs));
.or_insert_with(|| root.path.with_path(skill.path_to_skills_md.clone()));
}
}
@@ -181,8 +179,8 @@ where
.iter()
.map(|skill| skill.path_to_skills_md.clone())
.collect();
file_systems_by_skill_path.retain(|path, _| retained_skill_paths.contains(path));
outcome.file_systems_by_skill_path = SkillFileSystemsByPath::new(file_systems_by_skill_path);
sources_by_skill_path.retain(|path, _| retained_skill_paths.contains(path));
outcome.sources_by_skill_path = SkillSourcesByPath::new(sources_by_skill_path);
fn scope_rank(scope: SkillScope) -> u8 {
// Higher-priority scopes first (matches root scan order for dedupe).
@@ -212,10 +210,12 @@ pub(crate) async fn skill_roots(
) -> Vec<SkillRoot> {
let home_dir =
home_dir().and_then(|path| AbsolutePathBuf::from_absolute_path_checked(path).ok());
let fs = fs.unwrap_or_else(|| Arc::clone(&LOCAL_FS));
let cwd = ExecutorPath::new(Arc::clone(&fs), cwd.clone());
skill_roots_with_home_dir(
fs,
&fs,
Some(&cwd),
config_layer_stack,
cwd,
home_dir.as_ref(),
plugin_skill_roots,
)
@@ -223,27 +223,27 @@ pub(crate) async fn skill_roots(
}
async fn skill_roots_with_home_dir(
fs: Option<Arc<dyn ExecutorFileSystem>>,
fs: &Arc<dyn ExecutorFileSystem>,
cwd: Option<&ExecutorPath>,
config_layer_stack: &ConfigLayerStack,
cwd: &AbsolutePathBuf,
home_dir: Option<&AbsolutePathBuf>,
plugin_skill_roots: Vec<AbsolutePathBuf>,
) -> Vec<SkillRoot> {
let mut roots = skill_roots_from_layer_stack_inner(config_layer_stack, home_dir, fs.clone());
let mut roots = skill_roots_from_layer_stack_inner(fs, config_layer_stack, home_dir, cwd);
roots.extend(plugin_skill_roots.into_iter().map(|path| SkillRoot {
path,
path: ExecutorPath::new(Arc::clone(fs), path),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
}));
roots.extend(repo_agents_skill_roots(fs, config_layer_stack, cwd).await);
roots.extend(repo_agents_skill_roots(cwd, config_layer_stack).await);
dedupe_skill_roots_by_path(&mut roots);
roots
}
fn skill_roots_from_layer_stack_inner(
fs: &Arc<dyn ExecutorFileSystem>,
config_layer_stack: &ConfigLayerStack,
home_dir: Option<&AbsolutePathBuf>,
repo_fs: Option<Arc<dyn ExecutorFileSystem>>,
repo_cwd: Option<&ExecutorPath>,
) -> Vec<SkillRoot> {
let mut roots = Vec::new();
@@ -257,11 +257,10 @@ fn skill_roots_from_layer_stack_inner(
match &layer.name {
ConfigLayerSource::Project { .. } => {
if let Some(repo_fs) = &repo_fs {
if let Some(repo_cwd) = repo_cwd {
roots.push(SkillRoot {
path: config_folder.join(SKILLS_DIR_NAME),
path: repo_cwd.with_path(config_folder.join(SKILLS_DIR_NAME)),
scope: SkillScope::Repo,
file_system: Arc::clone(repo_fs),
});
}
}
@@ -269,35 +268,34 @@ fn skill_roots_from_layer_stack_inner(
// Deprecated user skills location (`$CODEX_HOME/skills`), kept for backward
// compatibility.
roots.push(SkillRoot {
path: config_folder.join(SKILLS_DIR_NAME),
path: ExecutorPath::new(Arc::clone(fs), config_folder.join(SKILLS_DIR_NAME)),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
});
// `$HOME/.agents/skills` (user-installed skills).
if let Some(home_dir) = home_dir {
roots.push(SkillRoot {
path: home_dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME),
path: ExecutorPath::new(
Arc::clone(fs),
home_dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME),
),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
});
}
// Embedded system skills are cached under `$CODEX_HOME/skills/.system` and are a
// special case (not a config layer).
roots.push(SkillRoot {
path: system_cache_root_dir(&config_folder),
path: ExecutorPath::new(Arc::clone(fs), system_cache_root_dir(&config_folder)),
scope: SkillScope::System,
file_system: Arc::clone(&LOCAL_FS),
});
}
ConfigLayerSource::System { .. } => {
// The system config layer lives under `/etc/codex/` on Unix, so treat
// `/etc/codex/skills` as admin-scoped skills.
roots.push(SkillRoot {
path: config_folder.join(SKILLS_DIR_NAME),
path: ExecutorPath::new(Arc::clone(fs), config_folder.join(SKILLS_DIR_NAME)),
scope: SkillScope::Admin,
file_system: Arc::clone(&LOCAL_FS),
});
}
ConfigLayerSource::Mdm { .. }
@@ -311,27 +309,24 @@ fn skill_roots_from_layer_stack_inner(
}
async fn repo_agents_skill_roots(
fs: Option<Arc<dyn ExecutorFileSystem>>,
cwd: Option<&ExecutorPath>,
config_layer_stack: &ConfigLayerStack,
cwd: &AbsolutePathBuf,
) -> Vec<SkillRoot> {
let Some(fs) = fs else {
let Some(cwd) = cwd else {
return Vec::new();
};
let project_root_markers = project_root_markers_from_stack(config_layer_stack);
let project_root = find_project_root(fs.as_ref(), cwd, &project_root_markers).await;
let project_root = find_project_root(cwd, &project_root_markers).await;
let dirs = dirs_between_project_root_and_cwd(cwd, &project_root);
let mut roots = Vec::new();
for dir in dirs {
let agents_skills = dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME);
match fs.get_metadata(&agents_skills, /*sandbox*/ None).await {
Ok(metadata) if metadata.is_directory => roots.push(SkillRoot {
match agents_skills.unsandboxed().is_dir().await {
Ok(true) => roots.push(SkillRoot {
path: agents_skills,
scope: SkillScope::Repo,
file_system: Arc::clone(&fs),
}),
Ok(_) => {}
Err(err) if err.kind() == io::ErrorKind::NotFound => {}
Ok(false) => {}
Err(err) => {
tracing::warn!(
"failed to stat repo skills root {}: {err:#}",
@@ -365,21 +360,17 @@ fn project_root_markers_from_stack(config_layer_stack: &ConfigLayerStack) -> Vec
}
}
async fn find_project_root(
fs: &dyn ExecutorFileSystem,
cwd: &AbsolutePathBuf,
project_root_markers: &[String],
) -> AbsolutePathBuf {
async fn find_project_root(cwd: &ExecutorPath, project_root_markers: &[String]) -> AbsolutePathBuf {
if project_root_markers.is_empty() {
return cwd.clone();
return cwd.path().clone();
}
for ancestor in cwd.ancestors() {
for marker in project_root_markers {
let marker_path = ancestor.join(marker);
match fs.get_metadata(&marker_path, /*sandbox*/ None).await {
Ok(_) => return ancestor,
Err(err) if err.kind() == io::ErrorKind::NotFound => {}
match marker_path.unsandboxed().exists().await {
Ok(true) => return ancestor.into_path(),
Ok(false) => {}
Err(err) => {
tracing::warn!(
"failed to stat project root marker {}: {err:#}",
@@ -390,20 +381,20 @@ async fn find_project_root(
}
}
cwd.clone()
cwd.path().clone()
}
fn dirs_between_project_root_and_cwd(
cwd: &AbsolutePathBuf,
cwd: &ExecutorPath,
project_root: &AbsolutePathBuf,
) -> Vec<AbsolutePathBuf> {
) -> Vec<ExecutorPath> {
let mut dirs = cwd
.ancestors()
.scan(false, |done, dir| {
if *done {
None
} else {
if &dir == project_root {
if dir.path() == project_root {
*done = true;
}
Some(dir)
@@ -416,25 +407,31 @@ fn dirs_between_project_root_and_cwd(
fn dedupe_skill_roots_by_path(roots: &mut Vec<SkillRoot>) {
let mut seen: HashSet<AbsolutePathBuf> = HashSet::new();
roots.retain(|root| seen.insert(root.path.clone()));
roots.retain(|root| seen.insert(root.path.path().clone()));
}
fn canonicalize_for_skill_identity(path: &AbsolutePathBuf) -> AbsolutePathBuf {
path.canonicalize().unwrap_or_else(|_| path.clone())
fn canonicalize_for_skill_identity(path: &ExecutorPath) -> ExecutorPath {
if !path.is_same_file_system(&LOCAL_FS) {
return path.clone();
}
path.with_path(
path.path()
.canonicalize()
.unwrap_or_else(|_| path.path().clone()),
)
}
async fn discover_skills_under_root(
fs: &dyn ExecutorFileSystem,
root: &AbsolutePathBuf,
root: &ExecutorPath,
scope: SkillScope,
outcome: &mut SkillLoadOutcome,
) {
let root = canonicalize_for_skill_identity(root);
match fs.get_metadata(&root, /*sandbox*/ None).await {
Ok(metadata) if metadata.is_directory => {}
Ok(_) => return,
Err(err) if err.kind() == io::ErrorKind::NotFound => return,
match root.unsandboxed().is_dir().await {
Ok(true) => {}
Ok(false) => return,
Err(err) => {
error!("failed to stat skills root {}: {err:#}", root.display());
return;
@@ -442,10 +439,10 @@ async fn discover_skills_under_root(
}
fn enqueue_dir(
queue: &mut VecDeque<(AbsolutePathBuf, usize)>,
queue: &mut VecDeque<(ExecutorPath, usize)>,
visited_dirs: &mut HashSet<AbsolutePathBuf>,
truncated_by_dir_limit: &mut bool,
path: AbsolutePathBuf,
path: ExecutorPath,
depth: usize,
) {
if depth > MAX_SCAN_DEPTH {
@@ -455,7 +452,7 @@ async fn discover_skills_under_root(
*truncated_by_dir_limit = true;
return;
}
if visited_dirs.insert(path.clone()) {
if visited_dirs.insert(path.path().clone()) {
queue.push_back((path, depth));
}
}
@@ -467,13 +464,13 @@ async fn discover_skills_under_root(
);
let mut visited_dirs: HashSet<AbsolutePathBuf> = HashSet::new();
visited_dirs.insert(root.clone());
visited_dirs.insert(root.path().clone());
let mut queue: VecDeque<(AbsolutePathBuf, usize)> = VecDeque::from([(root.clone(), 0)]);
let mut queue: VecDeque<(ExecutorPath, usize)> = VecDeque::from([(root.clone(), 0)]);
let mut truncated_by_dir_limit = false;
while let Some((dir, depth)) = queue.pop_front() {
let entries = match fs.read_directory(&dir, /*sandbox*/ None).await {
let entries = match dir.unsandboxed().read_directory().await {
Ok(entries) => entries,
Err(e) => {
error!("failed to read skills dir {}: {e:#}", dir.display());
@@ -488,7 +485,7 @@ async fn discover_skills_under_root(
}
let path = dir.join(&file_name);
let metadata = match fs.get_metadata(&path, /*sandbox*/ None).await {
let metadata = match path.unsandboxed().get_metadata().await {
Ok(metadata) => metadata,
Err(e) => {
error!("failed to stat skills path {}: {e:#}", path.display());
@@ -500,8 +497,8 @@ async fn discover_skills_under_root(
if !follow_symlinks {
continue;
}
match fs.read_directory(&path, /*sandbox*/ None).await {
Ok(_) => {
match path.unsandboxed().is_dir().await {
Ok(true) => {
let resolved_dir = canonicalize_for_skill_identity(&path);
enqueue_dir(
&mut queue,
@@ -511,11 +508,7 @@ async fn discover_skills_under_root(
depth + 1,
);
}
Err(err)
if matches!(
err.kind(),
io::ErrorKind::NotADirectory | io::ErrorKind::NotFound
) => {}
Ok(false) => {}
Err(err) => {
error!(
"failed to read skills symlink dir {}: {err:#}",
@@ -539,14 +532,14 @@ async fn discover_skills_under_root(
}
if metadata.is_file && file_name == SKILLS_FILENAME {
match parse_skill_file(fs, &path, scope).await {
match parse_skill_file(&path, scope).await {
Ok(skill) => {
outcome.skills.push(skill);
}
Err(err) => {
if scope != SkillScope::System {
outcome.errors.push(SkillError {
path: path.clone(),
path: path.path().clone(),
message: err.to_string(),
});
}
@@ -566,12 +559,12 @@ async fn discover_skills_under_root(
}
async fn parse_skill_file(
fs: &dyn ExecutorFileSystem,
path: &AbsolutePathBuf,
path: &ExecutorPath,
scope: SkillScope,
) -> Result<SkillMetadata, SkillParseError> {
let contents = fs
.read_file_text(path, /*sandbox*/ None)
let contents = path
.unsandboxed()
.read_file_text()
.await
.map_err(SkillParseError::Read)?;
@@ -585,8 +578,8 @@ async fn parse_skill_file(
.as_deref()
.map(sanitize_single_line)
.filter(|value| !value.is_empty())
.unwrap_or_else(|| default_skill_name(path));
let name = namespaced_skill_name(fs, path, &base_name).await;
.unwrap_or_else(|| default_skill_name(path.path()));
let name = namespaced_skill_name(path, &base_name).await;
let description = parsed
.description
.as_deref()
@@ -602,7 +595,7 @@ async fn parse_skill_file(
interface,
dependencies,
policy,
} = load_skill_metadata(fs, path).await;
} = load_skill_metadata(path).await;
validate_len(&name, MAX_NAME_LEN, "name")?;
validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
@@ -623,7 +616,7 @@ async fn parse_skill_file(
interface,
dependencies,
policy,
path_to_skills_md: resolved_path,
path_to_skills_md: resolved_path.into_path(),
scope,
})
}
@@ -640,21 +633,14 @@ fn default_skill_name(path: &AbsolutePathBuf) -> String {
.unwrap_or_else(|| "skill".to_string())
}
async fn namespaced_skill_name(
fs: &dyn ExecutorFileSystem,
path: &AbsolutePathBuf,
base_name: &str,
) -> String {
plugin_namespace_for_skill_path(fs, path)
async fn namespaced_skill_name(path: &ExecutorPath, base_name: &str) -> String {
plugin_namespace_for_skill_path(&path.as_ref())
.await
.map(|namespace| format!("{namespace}:{base_name}"))
.unwrap_or_else(|| base_name.to_string())
}
async fn load_skill_metadata(
fs: &dyn ExecutorFileSystem,
skill_path: &AbsolutePathBuf,
) -> LoadedSkillMetadata {
async fn load_skill_metadata(skill_path: &ExecutorPath) -> LoadedSkillMetadata {
// Fail open: optional metadata should not block loading SKILL.md.
let Some(skill_dir) = skill_path.parent() else {
return LoadedSkillMetadata::default();
@@ -662,12 +648,9 @@ async fn load_skill_metadata(
let metadata_path = skill_dir
.join(SKILLS_METADATA_DIR)
.join(SKILLS_METADATA_FILENAME);
match fs.get_metadata(&metadata_path, /*sandbox*/ None).await {
Ok(metadata) if metadata.is_file => {}
Ok(_) => return LoadedSkillMetadata::default(),
Err(error) if error.kind() == io::ErrorKind::NotFound => {
return LoadedSkillMetadata::default();
}
match metadata_path.unsandboxed().is_file().await {
Ok(true) => {}
Ok(false) => return LoadedSkillMetadata::default(),
Err(error) => {
tracing::warn!(
"ignoring {path}: failed to stat {label}: {error}",
@@ -678,7 +661,7 @@ async fn load_skill_metadata(
}
}
let contents = match fs.read_file_text(&metadata_path, /*sandbox*/ None).await {
let contents = match metadata_path.unsandboxed().read_file_text().await {
Ok(contents) => contents,
Err(error) => {
tracing::warn!(
@@ -691,7 +674,7 @@ async fn load_skill_metadata(
};
let parsed: SkillMetadataFile = {
let _guard = AbsolutePathBufGuard::new(skill_dir.as_path());
let _guard = AbsolutePathBufGuard::new(skill_dir.path().as_path());
match serde_yaml::from_str(&contents) {
Ok(parsed) => parsed,
Err(error) => {
@@ -711,7 +694,7 @@ async fn load_skill_metadata(
policy,
} = parsed;
LoadedSkillMetadata {
interface: resolve_interface(interface, &skill_dir),
interface: resolve_interface(interface, skill_dir.path()),
dependencies: resolve_dependencies(dependencies),
policy: resolve_policy(policy),
}
@@ -950,7 +933,8 @@ pub(crate) async fn skill_roots_from_layer_stack(
cwd: &AbsolutePathBuf,
home_dir: Option<&AbsolutePathBuf>,
) -> Vec<SkillRoot> {
skill_roots_with_home_dir(Some(fs), config_layer_stack, cwd, home_dir, Vec::new()).await
let cwd = ExecutorPath::new(fs, cwd.clone());
skill_roots_with_home_dir(Some(&cwd), config_layer_stack, home_dir, Vec::new()).await
}
#[cfg(test)]

View File

@@ -4,6 +4,7 @@ use codex_config::ConfigLayerEntry;
use codex_config::ConfigLayerStack;
use codex_config::ConfigRequirements;
use codex_config::ConfigRequirementsToml;
use codex_exec_server::ExecutorPath;
use codex_exec_server::LOCAL_FS;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
@@ -26,6 +27,13 @@ struct TestConfig {
config_layer_stack: ConfigLayerStack,
}
fn local_skill_root(path: AbsolutePathBuf, scope: SkillScope) -> SkillRoot {
SkillRoot {
path: ExecutorPath::new(Arc::clone(&LOCAL_FS), path),
scope,
}
}
async fn make_config(codex_home: &TempDir) -> TestConfig {
make_config_for_cwd(codex_home, codex_home.path().to_path_buf()).await
}
@@ -932,12 +940,9 @@ async fn loads_skills_via_symlinked_subdir_for_admin_scope() {
fs::create_dir_all(admin_root.path()).unwrap();
symlink_dir(shared.path(), &admin_root.path().join("shared"));
let outcome = load_skills_from_roots([SkillRoot {
path: admin_root.path().abs(),
scope: SkillScope::Admin,
file_system: Arc::clone(&LOCAL_FS),
}])
.await;
let outcome =
load_skills_from_roots([local_skill_root(admin_root.path().abs(), SkillScope::Admin)])
.await;
assert!(
outcome.errors.is_empty(),
@@ -1010,12 +1015,8 @@ async fn system_scope_ignores_symlinked_subdir() {
fs::create_dir_all(&system_root).unwrap();
symlink_dir(shared.path(), &system_root.join("shared"));
let outcome = load_skills_from_roots([SkillRoot {
path: system_root.abs(),
scope: SkillScope::System,
file_system: Arc::clone(&LOCAL_FS),
}])
.await;
let outcome =
load_skills_from_roots([local_skill_root(system_root.abs(), SkillScope::System)]).await;
assert!(
outcome.errors.is_empty(),
"unexpected errors: {:?}",
@@ -1042,12 +1043,8 @@ async fn respects_max_scan_depth_for_user_scope() {
);
let skills_root = codex_home.path().join("skills");
let outcome = load_skills_from_roots([SkillRoot {
path: skills_root.abs(),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
}])
.await;
let outcome =
load_skills_from_roots([local_skill_root(skills_root.abs(), SkillScope::User)]).await;
assert!(
outcome.errors.is_empty(),
@@ -1144,11 +1141,10 @@ async fn namespaces_plugin_skills_using_plugin_name() {
)
.unwrap();
let outcome = load_skills_from_roots([SkillRoot {
path: plugin_root.join("skills").abs(),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
}])
let outcome = load_skills_from_roots([local_skill_root(
plugin_root.join("skills").abs(),
SkillScope::User,
)])
.await;
assert!(
@@ -1458,16 +1454,8 @@ async fn deduplicates_by_path_preferring_first_root() {
let skill_path = write_skill_at(root.path(), "dupe", "dupe-skill", "from repo");
let outcome = load_skills_from_roots([
SkillRoot {
path: root.path().abs(),
scope: SkillScope::Repo,
file_system: Arc::clone(&LOCAL_FS),
},
SkillRoot {
path: root.path().abs(),
scope: SkillScope::User,
file_system: Arc::clone(&LOCAL_FS),
},
local_skill_root(root.path().abs(), SkillScope::Repo),
local_skill_root(root.path().abs(), SkillScope::User),
])
.await;

View File

@@ -5,6 +5,8 @@ use std::sync::RwLock;
use codex_config::ConfigLayerStack;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPath;
use codex_exec_server::LOCAL_FS;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -83,9 +85,9 @@ impl SkillsManager {
/// Load skills for an already-constructed [`Config`], avoiding any additional config-layer
/// loading.
///
/// This path uses a cache keyed by the effective skill-relevant config state rather than just
/// cwd so role-local and session-local skill overrides cannot bleed across sessions that happen
/// to share a directory.
/// This path uses a cache keyed by the effective skill-relevant config state for local
/// filesystem roots. Executor-backed roots skip that cache because an absolute path alone is
/// not a stable executor identity.
pub async fn skills_for_config(
&self,
input: &SkillsLoadInput,
@@ -93,17 +95,27 @@ impl SkillsManager {
) -> SkillLoadOutcome {
let roots = self.skill_roots_for_config(input, fs).await;
let skill_config_rules = skill_config_rules_from_stack(&input.config_layer_stack);
let cache_key = config_skills_cache_key(&roots, &skill_config_rules);
if let Some(outcome) = self.cached_outcome_for_config(&cache_key) {
return outcome;
}
let use_config_cache = roots
.iter()
.all(|root| root.path.is_same_file_system(&LOCAL_FS));
let cache_key = if use_config_cache {
let cache_key = config_skills_cache_key(&roots, &skill_config_rules);
if let Some(outcome) = self.cached_outcome_for_config(&cache_key) {
return outcome;
}
Some(cache_key)
} else {
None
};
let outcome = self.build_skill_outcome(roots, &skill_config_rules).await;
let mut cache = self
.cache_by_config
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
cache.insert(cache_key, outcome.clone());
if let Some(cache_key) = cache_key {
let mut cache = self
.cache_by_config
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
cache.insert(cache_key, outcome.clone());
}
outcome
}
@@ -142,7 +154,9 @@ impl SkillsManager {
extra_user_roots: &[AbsolutePathBuf],
fs: Option<Arc<dyn ExecutorFileSystem>>,
) -> SkillLoadOutcome {
let use_cwd_cache = fs.is_some();
// The cwd cache key is only an absolute path, so it is safe for the
// process-local filesystem but ambiguous across executor filesystems.
let use_cwd_cache = fs.as_ref().is_some_and(|fs| Arc::ptr_eq(fs, &*LOCAL_FS));
if use_cwd_cache
&& !force_reload
&& let Some(outcome) = self.cached_outcome_for_cwd(&input.cwd)
@@ -165,9 +179,8 @@ impl SkillsManager {
normalize_extra_user_roots(extra_user_roots)
.into_iter()
.map(|path| SkillRoot {
path,
path: ExecutorPath::new(Arc::clone(&fs), path),
scope: SkillScope::User,
file_system: Arc::clone(&fs),
}),
);
}
@@ -279,7 +292,7 @@ fn config_skills_cache_key(
SkillScope::System => 2,
SkillScope::Admin => 3,
};
(root.path.clone(), scope_rank)
(root.path.path().clone(), scope_rank)
})
.collect(),
skill_config_rules: skill_config_rules.clone(),

View File

@@ -3,7 +3,7 @@ use std::collections::HashSet;
use std::fmt;
use std::sync::Arc;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPath;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -89,7 +89,7 @@ pub struct SkillLoadOutcome {
pub skills: Vec<SkillMetadata>,
pub errors: Vec<SkillError>,
pub disabled_paths: HashSet<AbsolutePathBuf>,
pub(crate) file_systems_by_skill_path: SkillFileSystemsByPath,
pub(crate) sources_by_skill_path: SkillSourcesByPath,
pub(crate) implicit_skills_by_scripts_dir: Arc<HashMap<AbsolutePathBuf, SkillMetadata>>,
pub(crate) implicit_skills_by_doc_path: Arc<HashMap<AbsolutePathBuf, SkillMetadata>>,
}
@@ -117,29 +117,25 @@ impl SkillLoadOutcome {
.map(|skill| (skill, self.is_skill_enabled(skill)))
}
pub(crate) fn file_system_for_skill(
&self,
skill: &SkillMetadata,
) -> Option<Arc<dyn ExecutorFileSystem>> {
self.file_systems_by_skill_path
.get(&skill.path_to_skills_md)
pub(crate) fn source_for_skill(&self, skill: &SkillMetadata) -> Option<ExecutorPath> {
self.sources_by_skill_path.get(&skill.path_to_skills_md)
}
}
#[derive(Clone, Default)]
pub(crate) struct SkillFileSystemsByPath {
values: Arc<HashMap<AbsolutePathBuf, Arc<dyn ExecutorFileSystem>>>,
pub(crate) struct SkillSourcesByPath {
values: Arc<HashMap<AbsolutePathBuf, ExecutorPath>>,
}
impl SkillFileSystemsByPath {
pub(crate) fn new(values: HashMap<AbsolutePathBuf, Arc<dyn ExecutorFileSystem>>) -> Self {
impl SkillSourcesByPath {
pub(crate) fn new(values: HashMap<AbsolutePathBuf, ExecutorPath>) -> Self {
Self {
values: Arc::new(values),
}
}
fn get(&self, path: &AbsolutePathBuf) -> Option<Arc<dyn ExecutorFileSystem>> {
self.values.get(path).map(Arc::clone)
fn get(&self, path: &AbsolutePathBuf) -> Option<ExecutorPath> {
self.values.get(path).cloned()
}
fn retain_paths(&mut self, paths: &HashSet<AbsolutePathBuf>) {
@@ -147,15 +143,15 @@ impl SkillFileSystemsByPath {
self.values
.iter()
.filter(|(path, _)| paths.contains(*path))
.map(|(path, fs)| (path.clone(), Arc::clone(fs)))
.map(|(path, source)| (path.clone(), source.clone()))
.collect(),
);
}
}
impl fmt::Debug for SkillFileSystemsByPath {
impl fmt::Debug for SkillSourcesByPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SkillFileSystemsByPath")
f.debug_struct("SkillSourcesByPath")
.field("len", &self.values.len())
.finish()
}
@@ -173,9 +169,7 @@ pub fn filter_skill_load_outcome_for_product(
.iter()
.map(|skill| skill.path_to_skills_md.clone())
.collect();
outcome
.file_systems_by_skill_path
.retain_paths(&retained_paths);
outcome.sources_by_skill_path.retain_paths(&retained_paths);
outcome.implicit_skills_by_scripts_dir = Arc::new(
outcome
.implicit_skills_by_scripts_dir

View File

@@ -18,6 +18,7 @@ use crate::config_loader::resolve_relative_paths_in_config_toml;
use anyhow::anyhow;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::config_toml::ConfigToml;
use codex_exec_server::LOCAL_FS;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::Path;
@@ -168,6 +169,7 @@ mod reload {
}
let mut next_config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
merged_config,
reload_overrides(config, preserve_current_provider),
config.codex_home.clone(),

View File

@@ -23,9 +23,9 @@ use crate::config_loader::project_root_markers_from_config;
use codex_app_server_protocol::ConfigLayerSource;
use codex_exec_server::Environment;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_features::Feature;
use codex_utils_absolute_path::AbsolutePathBuf;
use dunce::canonicalize as normalize_path;
use std::io;
use toml::Value as TomlValue;
use tracing::error;
@@ -126,7 +126,8 @@ impl<'a> AgentsMdManager<'a> {
&self,
fs: &dyn ExecutorFileSystem,
) -> Option<String> {
let agents_md_docs = self.read_agents_md(fs).await;
let cwd = self.executor_cwd(fs);
let agents_md_docs = self.read_agents_md(&cwd).await;
let mut output = String::new();
@@ -173,8 +174,11 @@ impl<'a> AgentsMdManager<'a> {
let mut paths = Self::load_global_instructions(Some(&self.config.codex_home))
.map(|loaded| vec![loaded.path])
.unwrap_or_default();
match self.agents_md_paths(fs).await {
Ok(agents_md_paths) => paths.extend(agents_md_paths),
let cwd = self.executor_cwd(fs);
match self.agents_md_paths(&cwd).await {
Ok(agents_md_paths) => {
paths.extend(agents_md_paths.into_iter().map(|path| path.path().clone()));
}
Err(err) => {
tracing::warn!(error = %err, "failed to discover AGENTS.md docs for instruction sources");
}
@@ -188,14 +192,14 @@ impl<'a> AgentsMdManager<'a> {
/// concatenation of all discovered docs. If no documentation file is found
/// the function returns `Ok(None)`. Unexpected I/O failures bubble up as
/// `Err` so callers can decide how to handle them.
async fn read_agents_md(&self, fs: &dyn ExecutorFileSystem) -> io::Result<Option<String>> {
async fn read_agents_md(&self, cwd: &ExecutorPathRef<'_>) -> io::Result<Option<String>> {
let max_total = self.config.project_doc_max_bytes;
if max_total == 0 {
return Ok(None);
}
let paths = self.agents_md_paths(fs).await?;
let paths = self.agents_md_paths(cwd).await?;
if paths.is_empty() {
return Ok(None);
}
@@ -208,14 +212,14 @@ impl<'a> AgentsMdManager<'a> {
break;
}
match fs.get_metadata(&p, /*sandbox*/ None).await {
match p.unsandboxed().get_metadata().await {
Ok(metadata) if !metadata.is_file => continue,
Ok(_) => {}
Err(err) if err.kind() == io::ErrorKind::NotFound => continue,
Err(err) => return Err(err),
}
let mut data = match fs.read_file(&p, /*sandbox*/ None).await {
let mut data = match p.unsandboxed().read_file().await {
Ok(data) => data,
Err(err) if err.kind() == io::ErrorKind::NotFound => continue,
Err(err) => return Err(err),
@@ -252,19 +256,14 @@ impl<'a> AgentsMdManager<'a> {
/// contents. The list is ordered from project root to the current working
/// directory (inclusive). Symlinks are allowed. When `project_doc_max_bytes`
/// is zero, returns an empty list.
async fn agents_md_paths(
async fn agents_md_paths<'fs>(
&self,
fs: &dyn ExecutorFileSystem,
) -> io::Result<Vec<AbsolutePathBuf>> {
cwd: &ExecutorPathRef<'fs>,
) -> io::Result<Vec<ExecutorPathRef<'fs>>> {
if self.config.project_doc_max_bytes == 0 {
return Ok(Vec::new());
}
let mut dir = self.config.cwd.clone();
if let Ok(canon) = normalize_path(&dir) {
dir = AbsolutePathBuf::try_from(canon)?;
}
let mut merged = TomlValue::Table(toml::map::Map::new());
for layer in self.config.config_layer_stack.get_layers(
ConfigLayerStackOrdering::LowestPrecedenceFirst,
@@ -285,11 +284,10 @@ impl<'a> AgentsMdManager<'a> {
};
let mut project_root = None;
if !project_root_markers.is_empty() {
for ancestor in dir.ancestors() {
for ancestor in cwd.ancestors() {
for marker in &project_root_markers {
let marker_path = ancestor.join(marker);
let marker_exists = match fs.get_metadata(&marker_path, /*sandbox*/ None).await
{
let marker_exists = match marker_path.unsandboxed().get_metadata().await {
Ok(_) => true,
Err(err) if err.kind() == io::ErrorKind::NotFound => false,
Err(err) => return Err(err),
@@ -305,12 +303,12 @@ impl<'a> AgentsMdManager<'a> {
}
}
let search_dirs: Vec<AbsolutePathBuf> = if let Some(root) = project_root {
let search_dirs: Vec<ExecutorPathRef<'fs>> = if let Some(root) = project_root {
let mut dirs = Vec::new();
let mut cursor = dir.clone();
let mut cursor = cwd.clone();
loop {
dirs.push(cursor.clone());
if cursor == root {
if cursor.path() == root.path() {
break;
}
let Some(parent) = cursor.parent() else {
@@ -321,15 +319,15 @@ impl<'a> AgentsMdManager<'a> {
dirs.reverse();
dirs
} else {
vec![dir]
vec![cwd.clone()]
};
let mut found: Vec<AbsolutePathBuf> = Vec::new();
let mut found: Vec<ExecutorPathRef<'fs>> = Vec::new();
let candidate_filenames = self.candidate_filenames();
for d in search_dirs {
for name in &candidate_filenames {
let candidate = d.join(name);
match fs.get_metadata(&candidate, /*sandbox*/ None).await {
match candidate.unsandboxed().get_metadata().await {
Ok(md) if md.is_file => {
found.push(candidate);
break;
@@ -344,6 +342,10 @@ impl<'a> AgentsMdManager<'a> {
Ok(found)
}
fn executor_cwd<'fs>(&self, fs: &'fs dyn ExecutorFileSystem) -> ExecutorPathRef<'fs> {
ExecutorPathRef::new(fs, self.config.cwd.clone())
}
fn candidate_filenames(&self) -> Vec<&str> {
let mut names: Vec<&str> =
Vec::with_capacity(2 + self.config.project_doc_fallback_filenames.len());

View File

@@ -16,12 +16,14 @@ use crate::codex::SteerInputError;
use crate::codex::spawn_review_thread;
use crate::config::Config;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLoadFileSystems;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::load_config_layers_state;
use crate::config_loader::load_config_layers_state_with_file_systems;
use crate::realtime_context::REALTIME_TURN_TOKEN_BUDGET;
use crate::realtime_context::truncate_realtime_text_to_token_budget;
use crate::realtime_conversation::REALTIME_USER_TEXT_PREFIX;
use crate::realtime_conversation::prefix_realtime_v2_text;
use codex_exec_server::LOCAL_FS;
use codex_features::Feature;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -500,17 +502,23 @@ pub async fn list_skills(sess: &Session, sub_id: String, cwds: Vec<PathBuf>, for
let skills_manager = &sess.services.skills_manager;
let plugins_manager = &sess.services.plugins_manager;
let fs = sess
let (fs, remote_project_fs) = sess
.services
.environment
.as_ref()
.map(|environment| environment.get_filesystem());
.map(|environment| (environment.get_filesystem(), environment.is_remote()))
.unwrap_or_else(|| (Arc::clone(&LOCAL_FS), false));
let config = sess.get_config().await;
let codex_home = sess.codex_home().await;
let mut skills = Vec::new();
let empty_cli_overrides: &[(String, toml::Value)] = &[];
for cwd in cwds {
let cwd_abs = match AbsolutePathBuf::relative_to_current_dir(cwd.as_path()) {
let cwd_abs = if remote_project_fs {
AbsolutePathBuf::from_absolute_path_checked(cwd.as_path())
} else {
AbsolutePathBuf::relative_to_current_dir(cwd.as_path())
};
let cwd_abs = match cwd_abs {
Ok(path) => path,
Err(err) => {
let error_path = cwd.clone();
@@ -525,7 +533,8 @@ pub async fn list_skills(sess: &Session, sub_id: String, cwds: Vec<PathBuf>, for
continue;
}
};
let config_layer_stack = match load_config_layers_state(
let config_layer_stack = match load_config_layers_state_with_file_systems(
ConfigLoadFileSystems::same(fs.as_ref()),
&codex_home,
Some(cwd_abs.clone()),
empty_cli_overrides,
@@ -561,7 +570,7 @@ pub async fn list_skills(sess: &Session, sub_id: String, cwds: Vec<PathBuf>, for
config.bundled_skills_enabled(),
);
let outcome = skills_manager
.skills_for_cwd(&skills_input, force_reload, fs.clone())
.skills_for_cwd(&skills_input, force_reload, Some(Arc::clone(&fs)))
.await;
let errors = super::errors_to_info(&outcome.errors);
let skills_metadata = super::skills_to_info(&outcome.skills, &outcome.disabled_paths);

View File

@@ -243,6 +243,11 @@ pub(crate) async fn run_turn(
} = build_skill_injections(
&mentioned_skills,
skills_outcome,
turn_context
.environment
.as_ref()
.map(|environment| environment.get_filesystem())
.unwrap_or_else(|| Arc::clone(&codex_exec_server::LOCAL_FS)),
Some(&session_telemetry),
&sess.services.analytics_events_client,
tracking.clone(),

View File

@@ -4,6 +4,8 @@ use crate::config_loader::ConfigLayerStackOrdering;
use codex_config::config_toml::AgentRoleToml;
use codex_config::config_toml::AgentsToml;
use codex_config::config_toml::ConfigToml;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use serde::Deserialize;
@@ -14,7 +16,8 @@ use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;
pub(crate) fn load_agent_roles(
pub(crate) async fn load_agent_roles(
fs: &dyn ExecutorFileSystem,
cfg: &ConfigToml,
config_layer_stack: &ConfigLayerStack,
startup_warnings: &mut Vec<String>,
@@ -24,7 +27,7 @@ pub(crate) fn load_agent_roles(
/*include_disabled*/ false,
);
if layers.is_empty() {
return load_agent_roles_without_layers(cfg);
return load_agent_roles_without_layers(fs, cfg).await;
}
let mut roles: BTreeMap<String, AgentRoleConfig> = BTreeMap::new();
@@ -40,13 +43,14 @@ pub(crate) fn load_agent_roles(
};
if let Some(agents_toml) = agents_toml {
for (declared_role_name, role_toml) in &agents_toml.roles {
let (role_name, role) = match read_declared_role(declared_role_name, role_toml) {
Ok(role) => role,
Err(err) => {
push_agent_role_warning(startup_warnings, err);
continue;
}
};
let (role_name, role) =
match read_declared_role(fs, declared_role_name, role_toml).await {
Ok(role) => role,
Err(err) => {
push_agent_role_warning(startup_warnings, err);
continue;
}
};
if let Some(config_file) = role.config_file.clone() {
declared_role_files.insert(config_file);
}
@@ -67,11 +71,11 @@ pub(crate) fn load_agent_roles(
}
if let Some(config_folder) = layer.config_folder() {
for (role_name, role) in discover_agent_roles_in_dir(
config_folder.as_path().join("agents").as_path(),
&declared_role_files,
startup_warnings,
)? {
let agents_dir = ExecutorPathRef::new(fs, config_folder.join("agents"));
for (role_name, role) in
discover_agent_roles_in_dir(&agents_dir, &declared_role_files, startup_warnings)
.await?
{
if layer_roles.contains_key(&role_name) {
push_agent_role_warning(
startup_warnings,
@@ -113,13 +117,14 @@ fn push_agent_role_warning(startup_warnings: &mut Vec<String>, err: std::io::Err
startup_warnings.push(message);
}
fn load_agent_roles_without_layers(
async fn load_agent_roles_without_layers(
fs: &dyn ExecutorFileSystem,
cfg: &ConfigToml,
) -> std::io::Result<BTreeMap<String, AgentRoleConfig>> {
let mut roles = BTreeMap::new();
if let Some(agents_toml) = cfg.agents.as_ref() {
for (declared_role_name, role_toml) in &agents_toml.roles {
let (role_name, role) = read_declared_role(declared_role_name, role_toml)?;
let (role_name, role) = read_declared_role(fs, declared_role_name, role_toml).await?;
validate_required_agent_role_description(&role_name, role.description.as_deref())?;
if roles.insert(role_name.clone(), role).is_some() {
@@ -134,14 +139,18 @@ fn load_agent_roles_without_layers(
Ok(roles)
}
fn read_declared_role(
async fn read_declared_role(
fs: &dyn ExecutorFileSystem,
declared_role_name: &str,
role_toml: &AgentRoleToml,
) -> std::io::Result<(String, AgentRoleConfig)> {
let mut role = agent_role_config_from_toml(declared_role_name, role_toml)?;
let mut role = agent_role_config_from_toml(fs, declared_role_name, role_toml).await?;
let mut role_name = declared_role_name.to_string();
if let Some(config_file) = role.config_file.as_deref() {
let parsed_file = read_resolved_agent_role_file(config_file, Some(declared_role_name))?;
let config_file = AbsolutePathBuf::from_absolute_path(config_file)?;
let config_file = ExecutorPathRef::new(fs, config_file);
let parsed_file =
read_resolved_agent_role_file(&config_file, Some(declared_role_name)).await?;
role_name = parsed_file.role_name;
role.description = parsed_file.description.or(role.description);
role.nickname_candidates = parsed_file.nickname_candidates.or(role.nickname_candidates);
@@ -171,12 +180,20 @@ fn agents_toml_from_layer(layer_toml: &TomlValue) -> std::io::Result<Option<Agen
.map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))
}
fn agent_role_config_from_toml(
async fn agent_role_config_from_toml(
fs: &dyn ExecutorFileSystem,
role_name: &str,
role: &AgentRoleToml,
) -> std::io::Result<AgentRoleConfig> {
let config_file = role.config_file.as_ref().map(AbsolutePathBuf::to_path_buf);
validate_agent_role_config_file(role_name, config_file.as_deref())?;
let config_file = role
.config_file
.as_ref()
.map(AbsolutePathBuf::from_absolute_path)
.transpose()?;
let config_file_for_validation = config_file
.as_ref()
.map(|config_file| ExecutorPathRef::new(fs, config_file.clone()));
validate_agent_role_config_file(role_name, config_file_for_validation.as_ref()).await?;
let description = normalize_agent_role_description(
&format!("agents.{role_name}.description"),
role.description.as_deref(),
@@ -188,7 +205,7 @@ fn agent_role_config_from_toml(
Ok(AgentRoleConfig {
description,
config_file,
config_file: config_file.map(AbsolutePathBuf::into_path_buf),
nickname_candidates,
})
}
@@ -293,15 +310,16 @@ pub(crate) fn parse_agent_role_file_contents(
})
}
fn read_resolved_agent_role_file(
path: &Path,
async fn read_resolved_agent_role_file(
path: &ExecutorPathRef<'_>,
role_name_hint: Option<&str>,
) -> std::io::Result<ResolvedAgentRoleFile> {
let contents = std::fs::read_to_string(path)?;
let contents = path.unsandboxed().read_file_text().await?;
let config_base_dir = path.parent().unwrap_or_else(|| path.clone());
parse_agent_role_file_contents(
&contents,
path,
path.parent().unwrap_or(path),
path.path().as_path(),
config_base_dir.path().as_path(),
role_name_hint,
)
}
@@ -359,24 +377,28 @@ fn validate_agent_role_file_developer_instructions(
}
}
fn validate_agent_role_config_file(
async fn validate_agent_role_config_file(
role_name: &str,
config_file: Option<&Path>,
config_file: Option<&ExecutorPathRef<'_>>,
) -> std::io::Result<()> {
let Some(config_file) = config_file else {
return Ok(());
};
let metadata = std::fs::metadata(config_file).map_err(|e| {
std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!(
"agents.{role_name}.config_file must point to an existing file at {}: {e}",
config_file.display()
),
)
})?;
if metadata.is_file() {
let metadata = config_file
.unsandboxed()
.get_metadata()
.await
.map_err(|e| {
std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!(
"agents.{role_name}.config_file must point to an existing file at {}: {e}",
config_file.display()
),
)
})?;
if metadata.is_file {
Ok(())
} else {
Err(std::io::Error::new(
@@ -441,19 +463,19 @@ fn normalize_agent_role_nickname_candidates(
Ok(Some(normalized_candidates))
}
fn discover_agent_roles_in_dir(
agents_dir: &Path,
async fn discover_agent_roles_in_dir(
agents_dir: &ExecutorPathRef<'_>,
declared_role_files: &BTreeSet<PathBuf>,
startup_warnings: &mut Vec<String>,
) -> std::io::Result<BTreeMap<String, AgentRoleConfig>> {
let mut roles = BTreeMap::new();
for agent_file in collect_agent_role_files(agents_dir)? {
if declared_role_files.contains(&agent_file) {
for agent_file in collect_agent_role_files(agents_dir).await? {
if declared_role_files.contains(agent_file.path().as_path()) {
continue;
}
let parsed_file =
match read_resolved_agent_role_file(&agent_file, /*role_name_hint*/ None) {
match read_resolved_agent_role_file(&agent_file, /*role_name_hint*/ None).await {
Ok(parsed_file) => parsed_file,
Err(err) => {
push_agent_role_warning(startup_warnings, err);
@@ -478,7 +500,7 @@ fn discover_agent_roles_in_dir(
role_name,
AgentRoleConfig {
description: parsed_file.description,
config_file: Some(agent_file),
config_file: Some(agent_file.to_path_buf()),
nickname_candidates: parsed_file.nickname_candidates,
},
);
@@ -487,36 +509,36 @@ fn discover_agent_roles_in_dir(
Ok(roles)
}
fn collect_agent_role_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
async fn collect_agent_role_files<'fs>(
dir: &ExecutorPathRef<'fs>,
) -> std::io::Result<Vec<ExecutorPathRef<'fs>>> {
let mut files = Vec::new();
collect_agent_role_files_recursive(dir, &mut files)?;
files.sort();
Ok(files)
}
let mut dirs = vec![dir.clone()];
while let Some(dir) = dirs.pop() {
let entries = match dir.unsandboxed().read_directory().await {
Ok(entries) => entries,
Err(err) if err.kind() == ErrorKind::NotFound => continue,
Err(err) => return Err(err),
};
fn collect_agent_role_files_recursive(dir: &Path, files: &mut Vec<PathBuf>) -> std::io::Result<()> {
let read_dir = match std::fs::read_dir(dir) {
Ok(read_dir) => read_dir,
Err(err) if err.kind() == ErrorKind::NotFound => return Ok(()),
Err(err) => return Err(err),
};
for entry in read_dir {
let entry = entry?;
let path = entry.path();
let file_type = entry.file_type()?;
if file_type.is_dir() {
collect_agent_role_files_recursive(&path, files)?;
continue;
}
if file_type.is_file()
&& path
.extension()
.is_some_and(|extension| extension == "toml")
{
files.push(path);
for entry in entries {
let path = dir.join(entry.file_name);
if entry.is_directory {
dirs.push(path);
continue;
}
if entry.is_file
&& path
.path()
.as_path()
.extension()
.is_some_and(|extension| extension == "toml")
{
files.push(path);
}
}
}
Ok(())
files.sort_by(|a, b| a.path().cmp(b.path()));
Ok(files)
}

View File

@@ -44,6 +44,7 @@ use codex_config::types::SkillsConfig;
use codex_config::types::ToolSuggestDiscoverableType;
use codex_config::types::Tui;
use codex_config::types::TuiNotificationSettings;
use codex_exec_server::LOCAL_FS;
use codex_features::Feature;
use codex_features::FeaturesToml;
use codex_model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;
@@ -1174,7 +1175,7 @@ network_access = false # This should be ignored.
sandbox_mode_override,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&PathBuf::from("/tmp/test"),
/*active_project*/ None,
/*sandbox_policy_constraint*/ None,
)
.await;
@@ -1195,7 +1196,7 @@ network_access = true # This should be ignored.
sandbox_mode_override,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&PathBuf::from("/tmp/test"),
/*active_project*/ None,
/*sandbox_policy_constraint*/ None,
)
.await;
@@ -1212,6 +1213,9 @@ writable_roots = [
]
exclude_tmpdir_env_var = true
exclude_slash_tmp = true
[projects."/tmp/test"]
trust_level = "trusted"
"#,
serde_json::json!(writable_root)
);
@@ -1224,7 +1228,7 @@ exclude_slash_tmp = true
sandbox_mode_override,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&PathBuf::from("/tmp/test"),
/*active_project*/ None,
/*sandbox_policy_constraint*/ None,
)
.await;
@@ -1253,9 +1257,6 @@ writable_roots = [
]
exclude_tmpdir_env_var = true
exclude_slash_tmp = true
[projects."/tmp/test"]
trust_level = "trusted"
"#,
serde_json::json!(writable_root)
);
@@ -1268,7 +1269,7 @@ trust_level = "trusted"
sandbox_mode_override,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&PathBuf::from("/tmp/test"),
/*active_project*/ None,
/*sandbox_policy_constraint*/ None,
)
.await;
@@ -2065,6 +2066,7 @@ async fn managed_config_overrides_oauth_store_mode() -> anyhow::Result<()> {
let cwd = codex_home.path().abs();
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
codex_home.path(),
Some(cwd),
&Vec::new(),
@@ -2198,6 +2200,7 @@ async fn managed_config_wins_over_cli_overrides() -> anyhow::Result<()> {
let cwd = codex_home.path().abs();
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
codex_home.path(),
Some(cwd),
&[("model".to_string(), TomlValue::String("cli".to_string()))],
@@ -3466,6 +3469,7 @@ async fn load_config_uses_requirements_guardian_policy_config() -> std::io::Resu
.map_err(std::io::Error::other)?;
let config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
ConfigToml::default(),
ConfigOverrides {
cwd: Some(codex_home.path().to_path_buf()),
@@ -3498,6 +3502,7 @@ async fn load_config_ignores_empty_requirements_guardian_policy_config() -> std:
.map_err(std::io::Error::other)?;
let config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
ConfigToml::default(),
ConfigOverrides {
cwd: Some(codex_home.path().to_path_buf()),
@@ -5310,6 +5315,7 @@ async fn test_requirements_web_search_mode_allowlist_does_not_warn_when_unset()
.expect("config layer stack");
let config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
fixture.cfg.clone(),
ConfigOverrides {
cwd: Some(fixture.cwd_path()),
@@ -5517,13 +5523,16 @@ trust_level = "untrusted"
let cfg = toml::from_str::<ConfigToml>(config_with_untrusted)
.expect("TOML deserialization should succeed");
let active_project = ProjectConfig {
trust_level: Some(TrustLevel::Untrusted),
};
let resolution = cfg
.derive_sandbox_policy(
/*sandbox_mode_override*/ None,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&PathBuf::from("/tmp/test"),
Some(&active_project),
/*sandbox_policy_constraint*/ None,
)
.await;
@@ -5559,6 +5568,9 @@ async fn derive_sandbox_policy_falls_back_to_constraint_value_for_implicit_defau
)])),
..Default::default()
};
let active_project = ProjectConfig {
trust_level: Some(TrustLevel::Trusted),
};
let constrained = Constrained::new(SandboxPolicy::DangerFullAccess, |candidate| {
if matches!(candidate, SandboxPolicy::DangerFullAccess) {
Ok(())
@@ -5577,7 +5589,7 @@ async fn derive_sandbox_policy_falls_back_to_constraint_value_for_implicit_defau
/*sandbox_mode_override*/ None,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&project_path,
Some(&active_project),
Some(&constrained),
)
.await;
@@ -5601,6 +5613,9 @@ async fn derive_sandbox_policy_preserves_windows_downgrade_for_unsupported_fallb
)])),
..Default::default()
};
let active_project = ProjectConfig {
trust_level: Some(TrustLevel::Trusted),
};
let constrained = Constrained::new(SandboxPolicy::new_workspace_write_policy(), |candidate| {
if matches!(candidate, SandboxPolicy::WorkspaceWrite { .. }) {
Ok(())
@@ -5619,7 +5634,7 @@ async fn derive_sandbox_policy_preserves_windows_downgrade_for_unsupported_fallb
/*sandbox_mode_override*/ None,
/*profile_sandbox_mode*/ None,
WindowsSandboxLevel::Disabled,
&project_path,
Some(&active_project),
Some(&constrained),
)
.await;

View File

@@ -47,6 +47,9 @@ use codex_config::types::ToolSuggestDiscoverable;
use codex_config::types::TuiNotificationSettings;
use codex_config::types::UriBasedFileOpener;
use codex_config::types::WindowsSandboxModeToml;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_exec_server::LOCAL_FS;
use codex_features::Feature;
use codex_features::FeatureConfigSource;
use codex_features::FeatureOverrides;
@@ -54,6 +57,7 @@ use codex_features::FeatureToml;
use codex_features::Features;
use codex_features::FeaturesToml;
use codex_features::MultiAgentV2ConfigToml;
use codex_git_utils::resolve_root_git_project_for_trust_at;
use codex_login::AuthManagerConfig;
use codex_mcp::McpConfig;
use codex_model_provider_info::LEGACY_OLLAMA_CHAT_PROVIDER_ID;
@@ -689,6 +693,7 @@ impl ConfigBuilder {
};
harness_overrides.cwd = Some(cwd.to_path_buf());
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&cli_overrides,
@@ -718,6 +723,7 @@ impl ConfigBuilder {
}
};
Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
config_toml,
harness_overrides,
codex_home,
@@ -812,6 +818,7 @@ impl Config {
let codex_home = AbsolutePathBuf::from_absolute_path_checked(codex_home)?;
let config_toml = deserialize_config_toml_with_base(merged, &codex_home)?;
Self::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
config_toml,
ConfigOverrides::default(),
codex_home,
@@ -849,6 +856,7 @@ pub async fn load_config_as_toml_with_cli_overrides(
cli_overrides: Vec<(String, TomlValue)>,
) -> std::io::Result<ConfigToml> {
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
codex_home,
cwd.cloned(),
&cli_overrides,
@@ -1019,6 +1027,7 @@ pub async fn load_global_mcp_servers(
// MCP servers defined in in-repo .codex/ folders.
let cwd: Option<AbsolutePathBuf> = None;
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
codex_home,
cwd,
&cli_overrides,
@@ -1420,10 +1429,18 @@ impl Config {
) -> std::io::Result<Self> {
// Note this ignores requirements.toml enforcement for tests.
let config_layer_stack = ConfigLayerStack::default();
Self::load_config_with_layer_stack(cfg, overrides, codex_home, config_layer_stack).await
Self::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
cfg,
overrides,
codex_home,
config_layer_stack,
)
.await
}
pub(crate) async fn load_config_with_layer_stack(
fs: &dyn ExecutorFileSystem,
cfg: ConfigToml,
overrides: ConfigOverrides,
codex_home: AbsolutePathBuf,
@@ -1545,9 +1562,13 @@ impl Config {
.into_iter()
.map(|path| AbsolutePathBuf::resolve_path_against_base(path, resolved_cwd.as_path()))
.collect();
let resolved_cwd_path = ExecutorPathRef::new(fs, resolved_cwd.clone());
let repo_root = resolve_root_git_project_for_trust_at(&resolved_cwd_path).await;
let active_project = cfg
.get_active_project(resolved_cwd.as_path())
.await
.get_active_project(
resolved_cwd.as_path(),
repo_root.as_ref().map(AbsolutePathBuf::as_path),
)
.unwrap_or(ProjectConfig { trust_level: None });
let permission_config_syntax = resolve_permission_config_syntax(
&config_layer_stack,
@@ -1643,7 +1664,7 @@ impl Config {
sandbox_mode,
config_profile.sandbox_mode,
windows_sandbox_level,
resolved_cwd.as_path(),
Some(&active_project),
Some(&constrained_sandbox_policy),
)
.await;
@@ -1712,7 +1733,8 @@ impl Config {
let multi_agent_v2 = resolve_multi_agent_v2_config(&cfg, &config_profile);
let agent_roles =
agent_roles::load_agent_roles(&cfg, &config_layer_stack, &mut startup_warnings)?;
agent_roles::load_agent_roles(fs, &cfg, &config_layer_stack, &mut startup_warnings)
.await?;
let openai_base_url = cfg
.openai_base_url
@@ -1861,8 +1883,12 @@ impl Config {
.model_instructions_file
.as_ref()
.or(cfg.model_instructions_file.as_ref());
let file_base_instructions =
Self::try_read_non_empty_file(model_instructions_path, "model instructions file")?;
let file_base_instructions = Self::try_read_non_empty_file(
fs,
model_instructions_path,
"model instructions file",
)
.await?;
let base_instructions = base_instructions.or(file_base_instructions);
let developer_instructions = developer_instructions.or(cfg.developer_instructions);
let include_permissions_instructions = config_profile
@@ -1893,9 +1919,11 @@ impl Config {
.as_ref()
.or(cfg.experimental_compact_prompt_file.as_ref());
let file_compact_prompt = Self::try_read_non_empty_file(
fs,
experimental_compact_prompt_path,
"experimental compact prompt file",
)?;
)
.await?;
let compact_prompt = compact_prompt.or(file_compact_prompt);
let js_repl_node_path = js_repl_node_path_override
.or(config_profile.js_repl_node_path.map(Into::into))
@@ -2218,15 +2246,17 @@ impl Config {
/// If `path` is `Some`, attempts to read the file at the given path and
/// returns its contents as a trimmed `String`. If the file is empty, or
/// is `Some` but cannot be read, returns an `Err`.
fn try_read_non_empty_file(
async fn try_read_non_empty_file(
fs: &dyn ExecutorFileSystem,
path: Option<&AbsolutePathBuf>,
context: &str,
) -> std::io::Result<Option<String>> {
let Some(path) = path else {
return Ok(None);
};
let path = ExecutorPathRef::new(fs, path.clone());
let contents = std::fs::read_to_string(path).map_err(|e| {
let contents = path.unsandboxed().read_file_text().await.map_err(|e| {
std::io::Error::new(
e.kind(),
format!("failed to read {context} {}: {e}", path.display()),

View File

@@ -29,6 +29,7 @@ use codex_app_server_protocol::OverriddenMetadata;
use codex_app_server_protocol::WriteStatus;
use codex_config::CONFIG_TOML_FILE;
use codex_config::config_toml::ConfigToml;
use codex_exec_server::LOCAL_FS;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde_json::Value as JsonValue;
use std::borrow::Cow;
@@ -424,6 +425,7 @@ impl ConfigService {
async fn load_thread_agnostic_config(&self) -> std::io::Result<ConfigLayerStack> {
let cwd: Option<AbsolutePathBuf> = None;
load_config_layers_state(
LOCAL_FS.as_ref(),
&self.codex_home,
cwd,
&self.cli_overrides,

View File

@@ -10,7 +10,9 @@ This module is the canonical place to **load and describe Codex configuration la
Exported from `codex_core::config_loader`:
- `load_config_layers_state(codex_home, cwd_opt, cli_overrides, overrides, cloud_requirements) -> ConfigLayerStack`
- `load_config_layers_state(fs, codex_home, cwd_opt, cli_overrides, overrides, cloud_requirements) -> ConfigLayerStack`
- `load_config_layers_state_with_file_systems(file_systems, codex_home, cwd_opt, cli_overrides, overrides, cloud_requirements) -> ConfigLayerStack`
- Use this when local config files and project config files live on different executor filesystems.
- `ConfigLayerStack`
- `effective_config() -> toml::Value`
- `origins() -> HashMap<String, ConfigLayerMetadata>`
@@ -38,18 +40,22 @@ computing the effective config and origins metadata. This is what
Most callers want the effective config plus metadata:
```rust
use codex_core::config_loader::{load_config_layers_state, LoaderOverrides};
use codex_core::config_loader::{
CloudRequirementsLoader, LoaderOverrides, load_config_layers_state,
};
use codex_exec_server::LOCAL_FS;
use codex_utils_absolute_path::AbsolutePathBuf;
use toml::Value as TomlValue;
let cli_overrides: Vec<(String, TomlValue)> = Vec::new();
let cwd = AbsolutePathBuf::current_dir()?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&cli_overrides,
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
).await?;
let effective = layers.effective_config();

View File

@@ -5,11 +5,12 @@ use super::macos::ManagedAdminConfigLayer;
use super::macos::load_managed_admin_config_layer;
use codex_config::config_error_from_toml;
use codex_config::io_error_from_config_error;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use tokio::fs;
use toml::Value as TomlValue;
#[cfg(unix)]
@@ -36,6 +37,7 @@ pub(super) struct LoadedConfigLayers {
}
pub(super) async fn load_config_layers_internal(
fs: &dyn ExecutorFileSystem,
codex_home: &Path,
overrides: LoaderOverrides,
) -> io::Result<LoadedConfigLayers> {
@@ -52,16 +54,19 @@ pub(super) async fn load_config_layers_internal(
..
} = overrides;
let managed_config_path = AbsolutePathBuf::from_absolute_path(
managed_config_path.unwrap_or_else(|| managed_config_default_path(codex_home)),
)?;
let managed_config_path = ExecutorPathRef::new(
fs,
AbsolutePathBuf::from_absolute_path(
managed_config_path.unwrap_or_else(|| managed_config_default_path(codex_home)),
)?,
);
let managed_config =
read_config_from_path(&managed_config_path, /*log_missing_as_info*/ false)
.await?
.map(|managed_config| MangedConfigFromFile {
managed_config,
file: managed_config_path.clone(),
file: managed_config_path.path().clone(),
});
#[cfg(target_os = "macos")]
@@ -89,15 +94,16 @@ fn map_managed_admin_layer(layer: ManagedAdminConfigLayer) -> ManagedConfigFromM
}
pub(super) async fn read_config_from_path(
path: impl AsRef<Path>,
path: &ExecutorPathRef<'_>,
log_missing_as_info: bool,
) -> io::Result<Option<TomlValue>> {
match fs::read_to_string(path.as_ref()).await {
match path.unsandboxed().read_file_text().await {
Ok(contents) => match toml::from_str::<TomlValue>(&contents) {
Ok(value) => Ok(Some(value)),
Err(err) => {
tracing::error!("Failed to parse {}: {err}", path.as_ref().display());
let config_error = config_error_from_toml(path.as_ref(), &contents, err.clone());
tracing::error!("Failed to parse {}: {err}", path.display());
let config_error =
config_error_from_toml(path.path().as_path(), &contents, err.clone());
Err(io_error_from_config_error(
io::ErrorKind::InvalidData,
config_error,
@@ -107,14 +113,14 @@ pub(super) async fn read_config_from_path(
},
Err(err) if err.kind() == io::ErrorKind::NotFound => {
if log_missing_as_info {
tracing::info!("{} not found, using defaults", path.as_ref().display());
tracing::info!("{} not found, using defaults", path.display());
} else {
tracing::debug!("{} not found", path.as_ref().display());
tracing::debug!("{} not found", path.display());
}
Ok(None)
}
Err(err) => {
tracing::error!("Failed to read {}: {err}", path.as_ref().display());
tracing::error!("Failed to read {}: {err}", path.display());
Err(err)
}
}

View File

@@ -11,7 +11,9 @@ use codex_config::CONFIG_TOML_FILE;
use codex_config::ConfigRequirementsWithSources;
use codex_config::config_toml::ConfigToml;
use codex_config::config_toml::ProjectConfig;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_git_utils::resolve_root_git_project_for_trust_at;
use codex_protocol::config_types::ApprovalsReviewer;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::TrustLevel;
@@ -86,6 +88,26 @@ pub(crate) async fn first_layer_config_error_from_entries(
.await
}
/// Filesystems used while loading config layers.
///
/// Local config layers (`/etc`, managed config, and `$CODEX_HOME`) are read
/// from `local`. Project config layers are read from `project` because `cwd`
/// can refer to a different executor filesystem.
#[derive(Clone, Copy)]
pub struct ConfigLoadFileSystems<'a> {
pub local: &'a dyn ExecutorFileSystem,
pub project: &'a dyn ExecutorFileSystem,
}
impl<'a> ConfigLoadFileSystems<'a> {
pub fn same(file_system: &'a dyn ExecutorFileSystem) -> Self {
Self {
local: file_system,
project: file_system,
}
}
}
/// To build up the set of admin-enforced constraints, we build up from multiple
/// configuration layers in the following order, but a constraint defined in an
/// earlier layer cannot be overridden by a later layer:
@@ -118,6 +140,26 @@ pub(crate) async fn first_layer_config_error_from_entries(
/// thread-agnostic config loading (e.g., for the app server's `/config`
/// endpoint) should `cwd` be `None`.
pub async fn load_config_layers_state(
fs: &dyn ExecutorFileSystem,
codex_home: &Path,
cwd: Option<AbsolutePathBuf>,
cli_overrides: &[(String, TomlValue)],
overrides: LoaderOverrides,
cloud_requirements: CloudRequirementsLoader,
) -> io::Result<ConfigLayerStack> {
load_config_layers_state_with_file_systems(
ConfigLoadFileSystems::same(fs),
codex_home,
cwd,
cli_overrides,
overrides,
cloud_requirements,
)
.await
}
pub async fn load_config_layers_state_with_file_systems(
file_systems: ConfigLoadFileSystems<'_>,
codex_home: &Path,
cwd: Option<AbsolutePathBuf>,
cli_overrides: &[(String, TomlValue)],
@@ -141,12 +183,14 @@ pub async fn load_config_layers_state(
.await?;
// Honor the system requirements.toml location.
let requirements_toml_file = system_requirements_toml_file()?;
load_requirements_toml(&mut config_requirements_toml, requirements_toml_file).await?;
let requirements_toml_file =
ExecutorPathRef::new(file_systems.local, system_requirements_toml_file()?);
load_requirements_toml(&mut config_requirements_toml, &requirements_toml_file).await?;
// Make a best-effort to support the legacy `managed_config.toml` as a
// requirements specification.
let loaded_config_layers = layer_io::load_config_layers_internal(codex_home, overrides).await?;
let loaded_config_layers =
layer_io::load_config_layers_internal(file_systems.local, codex_home, overrides).await?;
load_requirements_from_legacy_scheme(
&mut config_requirements_toml,
loaded_config_layers.clone(),
@@ -171,12 +215,13 @@ pub async fn load_config_layers_state(
// Include an entry for the "system" config folder, loading its config.toml,
// if it exists.
let system_config_toml_file = system_config_toml_file()?;
let system_config_toml_file =
ExecutorPathRef::new(file_systems.local, system_config_toml_file()?);
let system_layer =
load_config_toml_for_required_layer(&system_config_toml_file, |config_toml| {
ConfigLayerEntry::new(
ConfigLayerSource::System {
file: system_config_toml_file.clone(),
file: system_config_toml_file.path().clone(),
},
config_toml,
)
@@ -187,11 +232,14 @@ pub async fn load_config_layers_state(
// Add a layer for $CODEX_HOME/config.toml if it exists. Note if the file
// exists, but is malformed, then this error should be propagated to the
// user.
let user_file = AbsolutePathBuf::resolve_path_against_base(CONFIG_TOML_FILE, codex_home);
let user_file = ExecutorPathRef::new(
file_systems.local,
AbsolutePathBuf::resolve_path_against_base(CONFIG_TOML_FILE, codex_home),
);
let user_layer = load_config_toml_for_required_layer(&user_file, |config_toml| {
ConfigLayerEntry::new(
ConfigLayerSource::User {
file: user_file.clone(),
file: user_file.path().clone(),
},
config_toml,
)
@@ -221,12 +269,13 @@ pub async fn load_config_layers_state(
return Err(err);
}
};
let cwd = ExecutorPathRef::new(file_systems.project, cwd);
let project_trust_context = match project_trust_context(
&merged_so_far,
&cwd,
&project_root_markers,
codex_home,
&user_file,
user_file.path(),
)
.await
{
@@ -320,17 +369,17 @@ pub async fn load_config_layers_state(
/// - If there is an error reading the file or parsing the TOML, returns an
/// error.
async fn load_config_toml_for_required_layer(
config_toml: impl AsRef<Path>,
toml_file: &ExecutorPathRef<'_>,
create_entry: impl FnOnce(TomlValue) -> ConfigLayerEntry,
) -> io::Result<ConfigLayerEntry> {
let toml_file = config_toml.as_ref();
let toml_value = match tokio::fs::read_to_string(toml_file).await {
let toml_value = match toml_file.unsandboxed().read_file_text().await {
Ok(contents) => {
let config: TomlValue = toml::from_str(&contents).map_err(|err| {
let config_error = config_error_from_toml(toml_file, &contents, err.clone());
let config_error =
config_error_from_toml(toml_file.path().as_path(), &contents, err.clone());
io_error_from_config_error(io::ErrorKind::InvalidData, config_error, Some(err))
})?;
let config_parent = toml_file.parent().ok_or_else(|| {
let config_parent = toml_file.path().as_path().parent().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
format!(
@@ -361,11 +410,9 @@ async fn load_config_toml_for_required_layer(
/// any unset fields.
async fn load_requirements_toml(
config_requirements_toml: &mut ConfigRequirementsWithSources,
requirements_toml_file: impl AsRef<Path>,
requirements_toml_file: &ExecutorPathRef<'_>,
) -> io::Result<()> {
let requirements_toml_file =
AbsolutePathBuf::from_absolute_path(requirements_toml_file.as_ref())?;
match tokio::fs::read_to_string(&requirements_toml_file).await {
match requirements_toml_file.unsandboxed().read_file_text().await {
Ok(contents) => {
let requirements_config: ConfigRequirementsToml =
toml::from_str(&contents).map_err(|e| {
@@ -373,13 +420,13 @@ async fn load_requirements_toml(
io::ErrorKind::InvalidData,
format!(
"Error parsing requirements file {}: {e}",
requirements_toml_file.as_ref().display(),
requirements_toml_file.display(),
),
)
})?;
config_requirements_toml.merge_unset_fields(
RequirementSource::SystemRequirementsToml {
file: requirements_toml_file.clone(),
file: requirements_toml_file.path().clone(),
},
requirements_config,
);
@@ -390,7 +437,7 @@ async fn load_requirements_toml(
e.kind(),
format!(
"Failed to read requirements file {}: {e}",
requirements_toml_file.as_ref().display(),
requirements_toml_file.display(),
),
));
}
@@ -561,7 +608,7 @@ impl ProjectTrustDecision {
impl ProjectTrustContext {
fn decision_for_dir(&self, dir: &AbsolutePathBuf) -> ProjectTrustDecision {
let dir_key = project_trust_key(dir.as_path());
let dir_key = executor_project_trust_key(dir.as_path());
if let Some(trust_level) = self.projects_trust.get(&dir_key).copied() {
return ProjectTrustDecision {
trust_level: Some(trust_level),
@@ -633,7 +680,7 @@ fn project_layer_entry(
async fn project_trust_context(
merged_config: &TomlValue,
cwd: &AbsolutePathBuf,
cwd: &ExecutorPathRef<'_>,
project_root_markers: &[String],
config_base_dir: &Path,
user_config_file: &AbsolutePathBuf,
@@ -649,16 +696,18 @@ async fn project_trust_context(
let project_root = find_project_root(cwd, project_root_markers).await?;
let projects = project_trust_config.projects.unwrap_or_default();
let project_root_key = project_trust_key(project_root.as_path());
let repo_root = resolve_root_git_project_for_trust(cwd.as_path()).await;
let repo_root_key = repo_root.as_ref().map(|root| project_trust_key(root));
let project_root_key = executor_project_trust_key(project_root.as_path());
let repo_root = resolve_root_git_project_for_trust_at(cwd).await;
let repo_root_key = repo_root
.as_ref()
.map(|root| executor_project_trust_key(root.as_path()));
let projects_trust = projects
.into_iter()
.filter_map(|(key, project)| {
project
.trust_level
.map(|trust_level| (project_trust_key(Path::new(&key)), trust_level))
.map(|trust_level| (executor_project_trust_key(Path::new(&key)), trust_level))
})
.collect();
@@ -681,6 +730,10 @@ pub fn project_trust_key(project_path: &Path) -> String {
.to_string()
}
fn executor_project_trust_key(project_path: &Path) -> String {
project_path.to_string_lossy().to_string()
}
/// Takes a `toml::Value` parsed from a config.toml file and walks through it,
/// resolving any `AbsolutePathBuf` fields against `base_dir`, returning a new
/// `toml::Value` with the same shape but with paths resolved.
@@ -742,22 +795,29 @@ fn copy_shape_from_original(original: &TomlValue, resolved: &TomlValue) -> TomlV
}
async fn find_project_root(
cwd: &AbsolutePathBuf,
cwd: &ExecutorPathRef<'_>,
project_root_markers: &[String],
) -> io::Result<AbsolutePathBuf> {
if project_root_markers.is_empty() {
return Ok(cwd.clone());
return Ok(cwd.path().clone());
}
for ancestor in cwd.as_path().ancestors() {
for ancestor in cwd.ancestors() {
for marker in project_root_markers {
let marker_path = ancestor.join(marker);
if tokio::fs::metadata(&marker_path).await.is_ok() {
return AbsolutePathBuf::from_absolute_path(ancestor);
match marker_path.unsandboxed().exists().await {
Ok(true) => return Ok(ancestor.path().clone()),
Ok(false) => {}
Err(err) => {
tracing::warn!(
"failed to stat project root marker {}: {err:#}",
marker_path.display()
);
}
}
}
}
Ok(cwd.clone())
Ok(cwd.path().clone())
}
/// Return the appropriate list of layers (each with
@@ -766,22 +826,19 @@ async fn find_project_root(
/// starting from folders closest to `project_root` (which is the lowest
/// precedence) to those closest to `cwd` (which is the highest precedence).
async fn load_project_layers(
cwd: &AbsolutePathBuf,
cwd: &ExecutorPathRef<'_>,
project_root: &AbsolutePathBuf,
trust_context: &ProjectTrustContext,
codex_home: &Path,
) -> io::Result<Vec<ConfigLayerEntry>> {
let codex_home_abs = AbsolutePathBuf::from_absolute_path(codex_home)?;
let codex_home_normalized =
normalize_path(codex_home_abs.as_path()).unwrap_or_else(|_| codex_home_abs.to_path_buf());
let mut dirs = cwd
.as_path()
.ancestors()
.scan(false, |done, a| {
if *done {
None
} else {
if a == project_root.as_path() {
if a.path() == project_root {
*done = true;
}
Some(a)
@@ -793,30 +850,30 @@ async fn load_project_layers(
let mut layers = Vec::new();
for dir in dirs {
let dot_codex = dir.join(".codex");
if !tokio::fs::metadata(&dot_codex)
.await
.map(|meta| meta.is_dir())
.unwrap_or(false)
{
continue;
match dot_codex.unsandboxed().is_dir().await {
Ok(true) => {}
Ok(false) => continue,
Err(err) => {
tracing::warn!(
"failed to stat project config dir {}: {err:#}",
dot_codex.display()
);
continue;
}
}
let layer_dir = AbsolutePathBuf::from_absolute_path(dir)?;
let decision = trust_context.decision_for_dir(&layer_dir);
let dot_codex_abs = AbsolutePathBuf::from_absolute_path(&dot_codex)?;
let dot_codex_normalized =
normalize_path(dot_codex_abs.as_path()).unwrap_or_else(|_| dot_codex_abs.to_path_buf());
if dot_codex_abs == codex_home_abs || dot_codex_normalized == codex_home_normalized {
let decision = trust_context.decision_for_dir(dir.path());
if dot_codex.path() == &codex_home_abs {
continue;
}
let config_file = dot_codex_abs.join(CONFIG_TOML_FILE);
match tokio::fs::read_to_string(&config_file).await {
let config_file = dot_codex.join(CONFIG_TOML_FILE);
match config_file.unsandboxed().read_file_text().await {
Ok(contents) => {
let config: TomlValue = match toml::from_str(&contents) {
Ok(config) => config,
Err(e) => {
if decision.is_trusted() {
let config_file_display = config_file.as_path().display();
let config_file_display = config_file.display();
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!(
@@ -826,8 +883,8 @@ async fn load_project_layers(
}
layers.push(project_layer_entry(
trust_context,
&dot_codex_abs,
&layer_dir,
dot_codex.path(),
dir.path(),
TomlValue::Table(toml::map::Map::new()),
/*config_toml_exists*/ true,
));
@@ -835,11 +892,11 @@ async fn load_project_layers(
}
};
let config =
resolve_relative_paths_in_config_toml(config, dot_codex_abs.as_path())?;
resolve_relative_paths_in_config_toml(config, dot_codex.path().as_path())?;
let entry = project_layer_entry(
trust_context,
&dot_codex_abs,
&layer_dir,
dot_codex.path(),
dir.path(),
config,
/*config_toml_exists*/ true,
);
@@ -852,13 +909,13 @@ async fn load_project_layers(
// that are significant in the overall ConfigLayerStack.
layers.push(project_layer_entry(
trust_context,
&dot_codex_abs,
&layer_dir,
dot_codex.path(),
dir.path(),
TomlValue::Table(toml::map::Map::new()),
/*config_toml_exists*/ false,
));
} else {
let config_file_display = config_file.as_path().display();
let config_file_display = config_file.display();
return Err(io::Error::new(
err.kind(),
format!("Failed to read project config file {config_file_display}: {err}"),

View File

@@ -16,6 +16,8 @@ use crate::config_loader::version_for_toml;
use codex_config::CONFIG_TOML_FILE;
use codex_config::config_toml::ConfigToml;
use codex_config::config_toml::ProjectConfig;
use codex_exec_server::ExecutorPathRef;
use codex_exec_server::LOCAL_FS;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::protocol::AskForApproval;
@@ -92,6 +94,7 @@ async fn returns_config_error_for_invalid_user_config_toml() {
let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
let err = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -119,6 +122,7 @@ async fn returns_config_error_for_invalid_managed_config_toml() {
let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
let err = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -203,6 +207,7 @@ extra = true
let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
let state = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -235,6 +240,7 @@ async fn returns_empty_when_all_layers_missing() {
let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -327,6 +333,7 @@ flag = false
let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
let state = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -428,6 +435,7 @@ allowed_sandbox_modes = ["read-only"]
);
let state = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(AbsolutePathBuf::try_from(tmp.path())?),
&[] as &[(String, TomlValue)],
@@ -489,6 +497,7 @@ allowed_approval_policies = ["never"]
);
let state = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(AbsolutePathBuf::try_from(tmp.path())?),
&[] as &[(String, TomlValue)],
@@ -529,6 +538,8 @@ personality = true
)
.await?;
let requirements_file = AbsolutePathBuf::try_from(requirements_file)?;
let requirements_file = ExecutorPathRef::new(LOCAL_FS.as_ref(), requirements_file);
let mut config_requirements_toml = ConfigRequirementsWithSources::default();
load_requirements_toml(&mut config_requirements_toml, &requirements_file).await?;
@@ -620,6 +631,7 @@ allowed_approval_policies = ["on-request"]
),
);
let state = load_config_layers_state(
LOCAL_FS.as_ref(),
tmp.path(),
Some(AbsolutePathBuf::try_from(tmp.path())?),
&[] as &[(String, TomlValue)],
@@ -691,6 +703,10 @@ allowed_approval_policies = ["on-request"]
guardian_policy_config: None,
},
);
let requirements_file = ExecutorPathRef::new(
LOCAL_FS.as_ref(),
AbsolutePathBuf::try_from(requirements_file)?,
);
load_requirements_toml(&mut config_requirements_toml, &requirements_file).await?;
assert_eq!(
@@ -735,6 +751,7 @@ async fn load_config_layers_includes_cloud_requirements() -> anyhow::Result<()>
let cloud_requirements = CloudRequirementsLoader::new(async move { Ok(Some(requirements)) });
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -771,6 +788,7 @@ async fn load_config_layers_fails_when_cloud_requirements_loader_fails() -> anyh
let cwd = AbsolutePathBuf::from_absolute_path(tmp.path())?;
let err = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -823,6 +841,7 @@ async fn project_layers_prefer_closest_cwd() -> std::io::Result<()> {
.await?;
let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -967,6 +986,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
.await?;
let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -1006,6 +1026,7 @@ async fn codex_home_is_not_loaded_as_project_layer_from_home_dir() -> std::io::R
let cwd = AbsolutePathBuf::from_absolute_path(&home_dir)?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -1062,6 +1083,7 @@ async fn codex_home_within_project_tree_is_not_double_loaded() -> std::io::Resul
let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&project_dot_codex,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -1132,6 +1154,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
.await?;
let layers_untrusted = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home_untrusted,
Some(cwd.clone()),
&[] as &[(String, TomlValue)],
@@ -1170,6 +1193,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
.await?;
let layers_unknown = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home_unknown,
Some(cwd),
&[] as &[(String, TomlValue)],
@@ -1328,6 +1352,7 @@ async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::
}
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd.clone()),
&[] as &[(String, TomlValue)],
@@ -1390,6 +1415,7 @@ async fn cli_overrides_with_relative_paths_do_not_break_trust_check() -> std::io
)];
load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&cli_overrides,
@@ -1432,6 +1458,7 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
let layers = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],

View File

@@ -1,3 +1,4 @@
use codex_exec_server::LOCAL_FS;
use codex_git_utils::GitInfo;
use codex_git_utils::GitSha;
use codex_git_utils::collect_git_info;
@@ -5,6 +6,9 @@ use codex_git_utils::get_has_changes;
use codex_git_utils::git_diff_to_remote;
use codex_git_utils::recent_commits;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_utils_path::normalize_for_path_comparison;
use core_test_support::PathBufExt;
use core_test_support::PathExt;
use core_test_support::skip_if_sandbox;
use std::fs;
use std::path::PathBuf;
@@ -430,7 +434,7 @@ async fn test_get_git_working_tree_state_branch_fallback() {
async fn resolve_root_git_project_for_trust_returns_none_outside_repo() {
let tmp = TempDir::new().expect("tempdir");
assert!(
resolve_root_git_project_for_trust(tmp.path())
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &tmp.path().abs())
.await
.is_none()
);
@@ -439,18 +443,17 @@ async fn resolve_root_git_project_for_trust_returns_none_outside_repo() {
#[tokio::test]
async fn resolve_root_git_project_for_trust_regular_repo_returns_repo_root() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let repo_path = create_test_git_repo(&temp_dir).await;
let expected = std::fs::canonicalize(&repo_path).unwrap();
let repo_path = create_test_git_repo(&temp_dir).await.abs();
assert_eq!(
resolve_root_git_project_for_trust(&repo_path).await,
Some(expected.clone())
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &repo_path).await,
Some(repo_path.clone())
);
let nested = repo_path.join("sub/dir");
std::fs::create_dir_all(&nested).unwrap();
std::fs::create_dir_all(nested.as_path()).unwrap();
assert_eq!(
resolve_root_git_project_for_trust(&nested).await,
Some(expected)
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &nested).await,
Some(repo_path)
);
}
@@ -473,17 +476,27 @@ async fn resolve_root_git_project_for_trust_detects_worktree_and_returns_main_ro
.output()
.expect("git worktree add");
let expected = std::fs::canonicalize(&repo_path).ok();
let got = resolve_root_git_project_for_trust(&wt_root)
.await
.and_then(|p| std::fs::canonicalize(p).ok());
assert_eq!(got, expected);
let expected = normalize_for_path_comparison(&repo_path).unwrap();
let wt_root = wt_root.abs();
let got = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &wt_root).await;
assert_eq!(
got.as_ref()
.map(normalize_for_path_comparison)
.transpose()
.unwrap(),
Some(expected.clone())
);
let nested = wt_root.join("nested/sub");
std::fs::create_dir_all(&nested).unwrap();
let got_nested = resolve_root_git_project_for_trust(&nested)
.await
.and_then(|p| std::fs::canonicalize(p).ok());
assert_eq!(got_nested, expected);
std::fs::create_dir_all(nested.as_path()).unwrap();
let got_nested = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &nested).await;
assert_eq!(
got_nested
.as_ref()
.map(normalize_for_path_comparison)
.transpose()
.unwrap(),
Some(expected)
);
}
#[tokio::test]
@@ -502,13 +515,15 @@ async fn resolve_root_git_project_for_trust_detects_worktree_pointer_without_git
)
.unwrap();
let expected = std::fs::canonicalize(&repo_root).unwrap();
let expected = repo_root.abs();
let worktree_root = worktree_root.abs();
assert_eq!(
resolve_root_git_project_for_trust(&worktree_root).await,
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &worktree_root).await,
Some(expected.clone())
);
let nested = worktree_root.join("nested");
assert_eq!(
resolve_root_git_project_for_trust(&worktree_root.join("nested")).await,
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &nested).await,
Some(expected)
);
}
@@ -529,9 +544,15 @@ async fn resolve_root_git_project_for_trust_non_worktrees_gitdir_returns_none()
)
.unwrap();
assert!(resolve_root_git_project_for_trust(&proj).await.is_none());
let proj = proj.abs();
assert!(
resolve_root_git_project_for_trust(&proj.join("nested"))
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &proj)
.await
.is_none()
);
let nested = proj.join("nested");
assert!(
resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &nested)
.await
.is_none()
);

View File

@@ -16,6 +16,7 @@ use crate::config_loader::RequirementSource;
use crate::config_loader::Sourced;
use crate::test_support;
use codex_config::config_toml::ConfigToml;
use codex_exec_server::LOCAL_FS;
use codex_network_proxy::NetworkProxyConfig;
use codex_protocol::ThreadId;
use codex_protocol::approvals::NetworkApprovalProtocol;
@@ -1740,6 +1741,7 @@ async fn guardian_review_session_config_uses_requirements_guardian_policy_config
)
.expect("config layer stack");
let parent_config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
ConfigToml::default(),
ConfigOverrides {
cwd: Some(workspace.path().to_path_buf()),
@@ -1776,6 +1778,7 @@ async fn guardian_review_session_config_uses_default_guardian_policy_without_req
ConfigLayerStack::new(Vec::new(), Default::default(), Default::default())
.expect("config layer stack");
let parent_config = Config::load_config_with_layer_stack(
LOCAL_FS.as_ref(),
ConfigToml::default(),
ConfigOverrides {
cwd: Some(workspace.path().to_path_buf()),

View File

@@ -16,6 +16,7 @@ use codex_config::CONFIG_TOML_FILE;
use codex_config::permissions_toml::NetworkToml;
use codex_config::permissions_toml::PermissionsToml;
use codex_config::permissions_toml::overlay_network_domain_permissions;
use codex_exec_server::LOCAL_FS;
use codex_network_proxy::ConfigReloader;
use codex_network_proxy::ConfigState;
use codex_network_proxy::NetworkProxyConfig;
@@ -46,6 +47,7 @@ async fn build_config_state_with_mtimes() -> Result<(ConfigState, Vec<LayerMtime
let cli_overrides = Vec::new();
let overrides = LoaderOverrides::default();
let config_layer_stack = load_config_layers_state(
LOCAL_FS.as_ref(),
&codex_home,
/*cwd*/ None,
&cli_overrides,

View File

@@ -2,12 +2,14 @@ use crate::codex::Session;
use crate::compact::content_items_to_text;
use crate::event_mapping::is_contextual_user_message_content;
use chrono::Utc;
use codex_exec_server::LOCAL_FS;
use codex_git_utils::resolve_root_git_project_for_trust;
use codex_protocol::models::ResponseItem;
use codex_thread_store::ListThreadsParams;
use codex_thread_store::StoredThread;
use codex_thread_store::ThreadSortKey;
use codex_thread_store::ThreadStore;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_output_truncation::TruncationPolicy;
use codex_utils_output_truncation::truncate_text;
use dirs::home_dir;
@@ -145,18 +147,26 @@ async fn load_recent_threads(sess: &Session) -> Vec<StoredThread> {
}
}
async fn build_recent_work_section(cwd: &Path, recent_threads: &[StoredThread]) -> Option<String> {
async fn build_recent_work_section(
cwd: &AbsolutePathBuf,
recent_threads: &[StoredThread],
) -> Option<String> {
let mut groups: HashMap<PathBuf, Vec<&StoredThread>> = HashMap::new();
for entry in recent_threads {
let group = resolve_root_git_project_for_trust(&entry.cwd)
.await
.unwrap_or_else(|| entry.cwd.clone());
let group = match AbsolutePathBuf::from_absolute_path(entry.cwd.as_path()) {
Ok(entry_cwd) => resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &entry_cwd)
.await
.map(AbsolutePathBuf::into_path_buf)
.unwrap_or_else(|| entry.cwd.clone()),
Err(_) => entry.cwd.clone(),
};
groups.entry(group).or_default().push(entry);
}
let current_group = resolve_root_git_project_for_trust(cwd)
let current_group = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), cwd)
.await
.unwrap_or_else(|| cwd.to_path_buf());
.map(AbsolutePathBuf::into_path_buf)
.unwrap_or_else(|| cwd.clone().into_path_buf());
let mut groups = groups.into_iter().collect::<Vec<_>>();
groups.sort_by(|(left_group, left_entries), (right_group, right_entries)| {
let left_latest = left_entries
@@ -309,18 +319,19 @@ pub(crate) fn truncate_realtime_text_to_token_budget(text: &str, budget_tokens:
}
async fn build_workspace_section_with_user_root(
cwd: &Path,
cwd: &AbsolutePathBuf,
user_root: Option<PathBuf>,
) -> Option<String> {
let git_root = resolve_root_git_project_for_trust(cwd).await;
let cwd_tree = render_tree(cwd);
let cwd_path = cwd.as_path();
let git_root = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), cwd).await;
let cwd_tree = render_tree(cwd_path);
let git_root_tree = git_root
.as_ref()
.filter(|git_root| git_root.as_path() != cwd)
.and_then(|git_root| render_tree(git_root));
.filter(|git_root| git_root.as_path() != cwd_path)
.and_then(|git_root| render_tree(git_root.as_path()));
let user_root_tree = user_root
.as_ref()
.filter(|user_root| user_root.as_path() != cwd)
.filter(|user_root| user_root.as_path() != cwd_path)
.filter(|user_root| {
git_root
.as_ref()
@@ -333,8 +344,8 @@ async fn build_workspace_section_with_user_root(
}
let mut lines = vec![
format!("Current working directory: {}", cwd.display()),
format!("Working directory name: {}", file_name_string(cwd)),
format!("Current working directory: {}", cwd_path.display()),
format!("Working directory name: {}", file_name_string(cwd_path)),
];
if let Some(git_root) = &git_root {
@@ -465,14 +476,19 @@ async fn format_thread_group(
entries: Vec<&StoredThread>,
) -> Option<String> {
let latest = entries.first()?;
let group_label = if resolve_root_git_project_for_trust(latest.cwd.as_path())
.await
.is_some()
{
format!("### Git repo: {}", group.display())
} else {
format!("### Directory: {}", group.display())
};
let group_label =
if let Ok(latest_cwd) = AbsolutePathBuf::from_absolute_path(latest.cwd.as_path()) {
if resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &latest_cwd)
.await
.is_some()
{
format!("### Git repo: {}", group.display())
} else {
format!("### Directory: {}", group.display())
}
} else {
format!("### Directory: {}", group.display())
};
let mut lines = vec![
group_label,
format!("Recent sessions: {}", entries.len()),

View File

@@ -19,6 +19,8 @@ use codex_protocol::protocol::GitInfo;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_thread_store::StoredThread;
use core_test_support::PathBufExt;
use core_test_support::PathExt;
use pretty_assertions::assert_eq;
use std::fs;
use std::path::PathBuf;
@@ -234,7 +236,7 @@ fn fixed_section_budgets_apply_per_section_without_total_blob_truncation() {
async fn workspace_section_requires_meaningful_structure() {
let cwd = TempDir::new().expect("tempdir");
assert_eq!(
build_workspace_section_with_user_root(cwd.path(), /*user_root*/ None).await,
build_workspace_section_with_user_root(&cwd.path().abs(), /*user_root*/ None).await,
None
);
}
@@ -245,9 +247,10 @@ async fn workspace_section_includes_tree_when_entries_exist() {
fs::create_dir(cwd.path().join("docs")).expect("create docs dir");
fs::write(cwd.path().join("README.md"), "hello").expect("write readme");
let section = build_workspace_section_with_user_root(cwd.path(), /*user_root*/ None)
.await
.expect("workspace section");
let section =
build_workspace_section_with_user_root(&cwd.path().abs(), /*user_root*/ None)
.await
.expect("workspace section");
assert!(section.contains("Working directory tree:"));
assert!(section.contains("- docs/"));
assert!(section.contains("- README.md"));
@@ -267,7 +270,7 @@ async fn workspace_section_includes_user_root_tree_when_distinct() {
fs::create_dir_all(user_root.join("code")).expect("create user root child");
fs::write(user_root.join(".zshrc"), "export TEST=1").expect("write home file");
let section = build_workspace_section_with_user_root(cwd.as_path(), Some(user_root))
let section = build_workspace_section_with_user_root(&cwd.abs(), Some(user_root))
.await
.expect("workspace section");
assert!(section.contains("User root tree:"));
@@ -309,9 +312,9 @@ async fn recent_work_section_groups_threads_by_cwd() {
stored_thread(outside.to_string_lossy().as_ref(), "", "Inspect flaky test"),
];
let current_cwd = workspace_a;
let repo = fs::canonicalize(repo).expect("canonicalize repo");
let repo = repo.abs();
let section = build_recent_work_section(current_cwd.as_path(), &recent_threads)
let section = build_recent_work_section(&current_cwd.abs(), &recent_threads)
.await
.expect("recent work section");
assert!(section.contains(&format!("### Git repo: {}", repo.display())));

View File

@@ -1,3 +1,4 @@
use codex_exec_server::ExecutorPath;
use codex_protocol::models::FunctionCallOutputBody;
use codex_protocol::models::FunctionCallOutputContentItem;
use codex_protocol::models::FunctionCallOutputPayload;
@@ -95,35 +96,29 @@ impl ToolHandler for ViewImageHandler {
let sandbox = environment
.is_remote()
.then(|| turn.file_system_sandbox_context(/*additional_permissions*/ None));
let image_path = ExecutorPath::new(environment.get_filesystem(), abs_path);
let image_file = image_path.with_sandbox(sandbox.as_ref());
let metadata = environment
.get_filesystem()
.get_metadata(&abs_path, sandbox.as_ref())
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to locate image at `{}`: {error}",
abs_path.display()
))
})?;
let metadata = image_file.get_metadata().await.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to locate image at `{}`: {error}",
image_path.display()
))
})?;
if !metadata.is_file {
return Err(FunctionCallError::RespondToModel(format!(
"image path `{}` is not a file",
abs_path.display()
image_path.display()
)));
}
let file_bytes = environment
.get_filesystem()
.read_file(&abs_path, sandbox.as_ref())
.await
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to read image at `{}`: {error}",
abs_path.display()
))
})?;
let event_path = abs_path.clone();
let file_bytes = image_file.read_file().await.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to read image at `{}`: {error}",
image_path.display()
))
})?;
let event_path = image_path.path().clone();
let can_request_original_detail = can_request_original_image_detail(&turn.model_info);
let use_original_detail =
@@ -135,11 +130,11 @@ impl ToolHandler for ViewImageHandler {
};
let image_detail = use_original_detail.then_some(ImageDetail::Original);
let image =
load_for_prompt_bytes(abs_path.as_path(), file_bytes, image_mode).map_err(|error| {
let image = load_for_prompt_bytes(image_path.path().as_path(), file_bytes, image_mode)
.map_err(|error| {
FunctionCallError::RespondToModel(format!(
"unable to process image at `{}`: {error}",
abs_path.display()
image_path.display()
))
})?;
let image_url = image.into_data_url();

View File

@@ -3,6 +3,11 @@ use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::SandboxPolicy;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::fmt;
use std::path::Display;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::io;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -70,6 +75,397 @@ impl FileSystemSandboxContext {
/// Result type returned by all executor filesystem operations.
pub type FileSystemResult<T> = io::Result<T>;

/// A single filesystem operation mode for an executor-bound path.
///
/// Construct this from [`ExecutorPath::unsandboxed`],
/// [`ExecutorPath::with_sandbox`], [`ExecutorPathRef::unsandboxed`], or
/// [`ExecutorPathRef::with_sandbox`] so call sites make their sandbox intent
/// visible at the operation boundary.
pub struct ExecutorPathAccess<'a> {
    /// Filesystem that resolves `path` (local or executor-backed).
    file_system: &'a dyn ExecutorFileSystem,
    /// Absolute path every operation below acts on.
    path: &'a AbsolutePathBuf,
    /// Sandbox context forwarded to each operation; `None` means
    /// unsandboxed (harness-owned) access.
    sandbox: Option<&'a FileSystemSandboxContext>,
}

impl<'a> ExecutorPathAccess<'a> {
    /// Binds a filesystem, a path, and an optional sandbox context into one
    /// access handle. Prefer the `unsandboxed`/`sandboxed`/`with_sandbox`
    /// constructors on `ExecutorPath`/`ExecutorPathRef` at call sites.
    pub fn new(
        file_system: &'a dyn ExecutorFileSystem,
        path: &'a AbsolutePathBuf,
        sandbox: Option<&'a FileSystemSandboxContext>,
    ) -> Self {
        Self {
            file_system,
            path,
            sandbox,
        }
    }

    /// Returns the bound absolute path.
    pub fn path(&self) -> &AbsolutePathBuf {
        self.path
    }

    /// Returns a displayable form of the path for messages/logging.
    pub fn display(&self) -> Display<'_> {
        self.path.display()
    }

    /// Reads the file's raw bytes via the bound filesystem.
    pub async fn read_file(&self) -> FileSystemResult<Vec<u8>> {
        self.file_system.read_file(self.path, self.sandbox).await
    }

    /// Reads the file as text via the bound filesystem.
    pub async fn read_file_text(&self) -> FileSystemResult<String> {
        self.file_system
            .read_file_text(self.path, self.sandbox)
            .await
    }

    /// Writes `contents` to the path via the bound filesystem.
    pub async fn write_file(&self, contents: Vec<u8>) -> FileSystemResult<()> {
        self.file_system
            .write_file(self.path, contents, self.sandbox)
            .await
    }

    /// Creates the directory at the path with the given options.
    pub async fn create_directory(
        &self,
        create_directory_options: CreateDirectoryOptions,
    ) -> FileSystemResult<()> {
        self.file_system
            .create_directory(self.path, create_directory_options, self.sandbox)
            .await
    }

    /// Stats the path; errors (including `NotFound`) surface to the caller.
    pub async fn get_metadata(&self) -> FileSystemResult<FileMetadata> {
        self.file_system.get_metadata(self.path, self.sandbox).await
    }

    /// Stats the path, mapping a `NotFound` error to `Ok(None)`.
    pub async fn metadata_if_exists(&self) -> FileSystemResult<Option<FileMetadata>> {
        metadata_if_exists(self.file_system, self.path, self.sandbox).await
    }

    /// Returns whether anything exists at the path (`NotFound` becomes `false`).
    pub async fn exists(&self) -> FileSystemResult<bool> {
        Ok(self.metadata_if_exists().await?.is_some())
    }

    /// Returns whether the path exists and is a directory.
    pub async fn is_dir(&self) -> FileSystemResult<bool> {
        Ok(self
            .metadata_if_exists()
            .await?
            .is_some_and(|metadata| metadata.is_directory))
    }

    /// Returns whether the path exists and is a regular file.
    pub async fn is_file(&self) -> FileSystemResult<bool> {
        Ok(self
            .metadata_if_exists()
            .await?
            .is_some_and(|metadata| metadata.is_file))
    }

    /// Lists the directory entries at the path.
    pub async fn read_directory(&self) -> FileSystemResult<Vec<ReadDirectoryEntry>> {
        self.file_system
            .read_directory(self.path, self.sandbox)
            .await
    }

    /// Removes the file or directory at the path with the given options.
    pub async fn remove(&self, remove_options: RemoveOptions) -> FileSystemResult<()> {
        self.file_system
            .remove(self.path, remove_options, self.sandbox)
            .await
    }
}
/// An absolute path bound to the executor filesystem where it should be
/// resolved.
///
/// Use this when a path names a file on a specific executor. Keeping the
/// filesystem and path together avoids call sites that accidentally read a
/// remote/executor path through local process filesystem APIs.
#[derive(Clone)]
pub struct ExecutorPath {
    /// Shared handle to the filesystem the path resolves against.
    file_system: Arc<dyn ExecutorFileSystem>,
    /// The bound absolute path.
    path: AbsolutePathBuf,
}

impl ExecutorPath {
    /// Binds `path` to `file_system`.
    pub fn new(file_system: Arc<dyn ExecutorFileSystem>, path: AbsolutePathBuf) -> Self {
        Self { file_system, path }
    }

    /// Returns a borrowed-filesystem view of this path.
    ///
    /// Note: the path itself is cloned into the returned `ExecutorPathRef`.
    pub fn as_ref(&self) -> ExecutorPathRef<'_> {
        ExecutorPathRef::new(self.file_system.as_ref(), self.path.clone())
    }

    /// Returns the bound absolute path.
    pub fn path(&self) -> &AbsolutePathBuf {
        &self.path
    }

    /// Copies the path into a plain `PathBuf`.
    pub fn to_path_buf(&self) -> PathBuf {
        self.path.to_path_buf()
    }

    /// Returns a displayable form of the path for messages/logging.
    pub fn display(&self) -> Display<'_> {
        self.path.display()
    }

    /// Returns true when `file_system` is the exact same filesystem instance
    /// (pointer identity on the `Arc`, not structural equality).
    pub fn is_same_file_system(&self, file_system: &Arc<dyn ExecutorFileSystem>) -> bool {
        Arc::ptr_eq(&self.file_system, file_system)
    }

    /// Consumes the binding and returns the owned path.
    pub fn into_path(self) -> AbsolutePathBuf {
        self.path
    }

    /// Returns a new binding to the same filesystem for a different path.
    pub fn with_path(&self, path: AbsolutePathBuf) -> Self {
        Self {
            file_system: Arc::clone(&self.file_system),
            path,
        }
    }

    /// Joins `path` onto this path, keeping the same filesystem binding.
    pub fn join<P: AsRef<Path>>(&self, path: P) -> Self {
        self.with_path(self.path.join(path))
    }

    /// Returns the parent directory (same filesystem), if one exists.
    pub fn parent(&self) -> Option<Self> {
        self.path.parent().map(|path| self.with_path(path))
    }

    /// Iterates over this path and all its ancestors, each bound to the same
    /// filesystem.
    pub fn ancestors(&self) -> impl Iterator<Item = Self> + '_ {
        self.path.ancestors().map(|path| self.with_path(path))
    }

    /// Returns an access handle that performs operations without a sandbox
    /// context (harness-owned access).
    pub fn unsandboxed(&self) -> ExecutorPathAccess<'_> {
        self.with_sandbox(None)
    }

    /// Returns an access handle that performs operations under `sandbox`.
    pub fn sandboxed<'a>(
        &'a self,
        sandbox: &'a FileSystemSandboxContext,
    ) -> ExecutorPathAccess<'a> {
        self.with_sandbox(Some(sandbox))
    }

    /// Returns an access handle with an explicit optional sandbox context.
    pub fn with_sandbox<'a>(
        &'a self,
        sandbox: Option<&'a FileSystemSandboxContext>,
    ) -> ExecutorPathAccess<'a> {
        ExecutorPathAccess::new(self.file_system.as_ref(), &self.path, sandbox)
    }

    /// Reads the file's raw bytes under the given sandbox context.
    pub async fn read_file(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<Vec<u8>> {
        self.with_sandbox(sandbox).read_file().await
    }

    /// Reads the file as text under the given sandbox context.
    pub async fn read_file_text(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<String> {
        self.with_sandbox(sandbox).read_file_text().await
    }

    /// Stats the path; errors (including `NotFound`) surface to the caller.
    pub async fn get_metadata(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<FileMetadata> {
        self.with_sandbox(sandbox).get_metadata().await
    }

    /// Stats the path, mapping a `NotFound` error to `Ok(None)`.
    pub async fn metadata_if_exists(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<Option<FileMetadata>> {
        self.with_sandbox(sandbox).metadata_if_exists().await
    }

    /// Returns whether anything exists at the path (`NotFound` becomes `false`).
    pub async fn exists(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<bool> {
        Ok(self.metadata_if_exists(sandbox).await?.is_some())
    }

    /// Returns whether the path exists and is a directory.
    pub async fn is_dir(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<bool> {
        Ok(self
            .metadata_if_exists(sandbox)
            .await?
            .is_some_and(|metadata| metadata.is_directory))
    }

    /// Returns whether the path exists and is a regular file.
    pub async fn is_file(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<bool> {
        Ok(self
            .metadata_if_exists(sandbox)
            .await?
            .is_some_and(|metadata| metadata.is_file))
    }

    /// Lists the directory entries at the path.
    pub async fn read_directory(
        &self,
        sandbox: Option<&FileSystemSandboxContext>,
    ) -> FileSystemResult<Vec<ReadDirectoryEntry>> {
        self.with_sandbox(sandbox).read_directory().await
    }
}

impl fmt::Debug for ExecutorPath {
    // Only the path is shown; the filesystem handle has no useful Debug form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ExecutorPath")
            .field("path", &self.path)
            .finish_non_exhaustive()
    }
}
/// Borrowed filesystem plus owned absolute path for short-lived executor path
/// operations.
#[derive(Clone)]
pub struct ExecutorPathRef<'a> {
file_system: &'a dyn ExecutorFileSystem,
path: AbsolutePathBuf,
}
impl<'a> ExecutorPathRef<'a> {
pub fn new(file_system: &'a dyn ExecutorFileSystem, path: AbsolutePathBuf) -> Self {
Self { file_system, path }
}
pub fn path(&self) -> &AbsolutePathBuf {
&self.path
}
pub fn to_path_buf(&self) -> PathBuf {
self.path.to_path_buf()
}
pub fn display(&self) -> Display<'_> {
self.path.display()
}
pub fn with_path(&self, path: AbsolutePathBuf) -> Self {
Self {
file_system: self.file_system,
path,
}
}
pub fn join<P: AsRef<Path>>(&self, path: P) -> Self {
self.with_path(self.path.join(path))
}
pub fn parent(&self) -> Option<Self> {
self.path.parent().map(|path| self.with_path(path))
}
pub fn ancestors(&self) -> impl Iterator<Item = Self> + '_ {
self.path.ancestors().map(|path| self.with_path(path))
}
pub fn unsandboxed(&self) -> ExecutorPathAccess<'_> {
self.with_sandbox(None)
}
pub fn sandboxed<'b>(
&'b self,
sandbox: &'b FileSystemSandboxContext,
) -> ExecutorPathAccess<'b> {
self.with_sandbox(Some(sandbox))
}
pub fn with_sandbox<'b>(
&'b self,
sandbox: Option<&'b FileSystemSandboxContext>,
) -> ExecutorPathAccess<'b> {
ExecutorPathAccess::new(self.file_system, &self.path, sandbox)
}
pub async fn read_file(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<Vec<u8>> {
self.with_sandbox(sandbox).read_file().await
}
pub async fn read_file_text(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<String> {
self.with_sandbox(sandbox).read_file_text().await
}
pub async fn get_metadata(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<FileMetadata> {
self.with_sandbox(sandbox).get_metadata().await
}
pub async fn metadata_if_exists(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<Option<FileMetadata>> {
self.with_sandbox(sandbox).metadata_if_exists().await
}
pub async fn exists(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<bool> {
Ok(self.metadata_if_exists(sandbox).await?.is_some())
}
pub async fn is_dir(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<bool> {
Ok(self
.metadata_if_exists(sandbox)
.await?
.is_some_and(|metadata| metadata.is_directory))
}
pub async fn is_file(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<bool> {
Ok(self
.metadata_if_exists(sandbox)
.await?
.is_some_and(|metadata| metadata.is_file))
}
pub async fn read_directory(
&self,
sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<Vec<ReadDirectoryEntry>> {
self.with_sandbox(sandbox).read_directory().await
}
}
impl fmt::Debug for ExecutorPathRef<'_> {
    // Debug output shows only the `path` field; the filesystem handle is a
    // trait object with no useful `Debug` form, so it is elided via
    // `finish_non_exhaustive` (rendered as `..`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("ExecutorPathRef");
        builder.field("path", &self.path);
        builder.finish_non_exhaustive()
    }
}
/// Fetches metadata for `path` via `file_system`, translating a
/// `NotFound` error into `Ok(None)` so callers can distinguish "absent"
/// from genuine I/O failures.
async fn metadata_if_exists(
    file_system: &dyn ExecutorFileSystem,
    path: &AbsolutePathBuf,
    sandbox: Option<&FileSystemSandboxContext>,
) -> FileSystemResult<Option<FileMetadata>> {
    let metadata = match file_system.get_metadata(path, sandbox).await {
        // A missing path is an expected outcome, not an error.
        Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(None),
        other => other?,
    };
    Ok(Some(metadata))
}
#[async_trait]
pub trait ExecutorFileSystem: Send + Sync {
async fn read_file(

View File

@@ -28,6 +28,9 @@ pub use environment::EnvironmentManager;
pub use file_system::CopyOptions;
pub use file_system::CreateDirectoryOptions;
pub use file_system::ExecutorFileSystem;
pub use file_system::ExecutorPath;
pub use file_system::ExecutorPathAccess;
pub use file_system::ExecutorPathRef;
pub use file_system::FileMetadata;
pub use file_system::FileSystemResult;
pub use file_system::FileSystemSandboxContext;

View File

@@ -9,6 +9,8 @@ readme = "README.md"
workspace = true
[dependencies]
codex-exec-server = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
futures = { workspace = true, features = ["alloc"] }
once_cell = { workspace = true }

View File

@@ -4,6 +4,8 @@ use std::ffi::OsStr;
use std::path::Path;
use std::path::PathBuf;
use codex_exec_server::ExecutorFileSystem;
use codex_exec_server::ExecutorPathRef;
use codex_utils_absolute_path::AbsolutePathBuf;
use futures::future::join_all;
use schemars::JsonSchema;
@@ -618,30 +620,42 @@ async fn diff_against_sha(cwd: &Path, sha: &GitSha) -> Option<String> {
/// `[get_git_repo_root]`, but resolves to the root of the main
/// repository. Handles worktrees via filesystem inspection without invoking
/// the `git` executable.
pub async fn resolve_root_git_project_for_trust(cwd: &Path) -> Option<PathBuf> {
let base = if cwd.is_dir() { cwd } else { cwd.parent()? };
let (repo_root, dot_git) = find_ancestor_git_entry(base)?;
if dot_git.is_dir() {
return Some(canonicalize_or_raw(repo_root));
pub async fn resolve_root_git_project_for_trust(
fs: &dyn ExecutorFileSystem,
cwd: &AbsolutePathBuf,
) -> Option<AbsolutePathBuf> {
resolve_root_git_project_for_trust_at(&ExecutorPathRef::new(fs, cwd.clone())).await
}
/// Resolve the git trust root for a cwd that is already bound to an executor
/// filesystem.
pub async fn resolve_root_git_project_for_trust_at(
cwd: &ExecutorPathRef<'_>,
) -> Option<AbsolutePathBuf> {
let base = match cwd.unsandboxed().is_dir().await {
Ok(true) => cwd.clone(),
_ => cwd.parent()?,
};
let (repo_root, dot_git) = find_ancestor_git_entry_with_fs(&base).await?;
if dot_git.unsandboxed().is_dir().await.ok()? {
return Some(repo_root.path().clone());
}
let git_dir_s = std::fs::read_to_string(&dot_git).ok()?;
let git_dir_s = dot_git.unsandboxed().read_file_text().await.ok()?;
let git_dir_rel = git_dir_s.trim().strip_prefix("gitdir:")?.trim();
if git_dir_rel.is_empty() {
return None;
}
let git_dir_path = canonicalize_or_raw(
AbsolutePathBuf::resolve_path_against_base(git_dir_rel, &repo_root).into_path_buf(),
);
let git_dir_path =
AbsolutePathBuf::resolve_path_against_base(git_dir_rel, repo_root.path().as_path());
let worktrees_dir = git_dir_path.parent()?;
if worktrees_dir.file_name() != Some(OsStr::new("worktrees")) {
if worktrees_dir.as_path().file_name() != Some(OsStr::new("worktrees")) {
return None;
}
let common_dir = worktrees_dir.parent()?;
let main_repo_root = common_dir.parent()?;
Some(canonicalize_or_raw(main_repo_root.to_path_buf()))
common_dir.parent()
}
fn find_ancestor_git_entry(base_dir: &Path) -> Option<(PathBuf, PathBuf)> {
@@ -663,8 +677,17 @@ fn find_ancestor_git_entry(base_dir: &Path) -> Option<(PathBuf, PathBuf)> {
None
}
fn canonicalize_or_raw(path: PathBuf) -> PathBuf {
std::fs::canonicalize(&path).unwrap_or(path)
async fn find_ancestor_git_entry_with_fs<'a>(
base_dir: &ExecutorPathRef<'a>,
) -> Option<(ExecutorPathRef<'a>, ExecutorPathRef<'a>)> {
for dir in base_dir.ancestors() {
let dot_git = dir.join(".git");
match dot_git.unsandboxed().exists().await {
Ok(true) => return Some((dir, dot_git)),
Ok(false) | Err(_) => {}
}
}
None
}
/// Returns a list of local git branches.

View File

@@ -1,6 +1,3 @@
use std::fmt;
use std::path::PathBuf;
mod apply;
mod branch;
mod errors;
@@ -16,6 +13,8 @@ pub use apply::extract_paths_from_patch;
pub use apply::parse_git_apply_output;
pub use apply::stage_paths;
pub use branch::merge_base_with_head;
pub use codex_protocol::models::GhostCommit;
pub use codex_protocol::protocol::GitSha;
pub use errors::GitToolingError;
pub use ghost_commits::CreateGhostCommitOptions;
pub use ghost_commits::GhostSnapshotConfig;
@@ -44,73 +43,5 @@ pub use info::git_diff_to_remote;
pub use info::local_git_branches;
pub use info::recent_commits;
pub use info::resolve_root_git_project_for_trust;
pub use info::resolve_root_git_project_for_trust_at;
pub use platform::create_symlink;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
type CommitID = String;
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[serde(transparent)]
#[ts(type = "string")]
pub struct GitSha(pub String);
impl GitSha {
pub fn new(sha: &str) -> Self {
Self(sha.to_string())
}
}
/// Details of a ghost commit created from a repository state.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, TS)]
pub struct GhostCommit {
id: CommitID,
parent: Option<CommitID>,
preexisting_untracked_files: Vec<PathBuf>,
preexisting_untracked_dirs: Vec<PathBuf>,
}
impl GhostCommit {
/// Create a new ghost commit wrapper from a raw commit ID and optional parent.
pub fn new(
id: CommitID,
parent: Option<CommitID>,
preexisting_untracked_files: Vec<PathBuf>,
preexisting_untracked_dirs: Vec<PathBuf>,
) -> Self {
Self {
id,
parent,
preexisting_untracked_files,
preexisting_untracked_dirs,
}
}
/// Commit ID for the snapshot.
pub fn id(&self) -> &str {
&self.id
}
/// Parent commit ID, if the repository had a `HEAD` at creation time.
pub fn parent(&self) -> Option<&str> {
self.parent.as_deref()
}
/// Untracked or ignored files that already existed when the snapshot was captured.
pub fn preexisting_untracked_files(&self) -> &[PathBuf] {
&self.preexisting_untracked_files
}
/// Untracked or ignored directories that already existed when the snapshot was captured.
pub fn preexisting_untracked_dirs(&self) -> &[PathBuf] {
&self.preexisting_untracked_dirs
}
}
impl fmt::Display for GhostCommit {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.id)
}
}

View File

@@ -16,7 +16,6 @@ chardetng = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-async-utils = { workspace = true }
codex-execpolicy = { workspace = true }
codex-git-utils = { workspace = true }
codex-network-proxy = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-image = { workspace = true }

View File

@@ -1,5 +1,7 @@
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::LazyLock;
use codex_utils_image::PromptImageMode;
@@ -25,7 +27,6 @@ use crate::protocol::SandboxPolicy;
use crate::protocol::WritableRoot;
use crate::user_input::UserInput;
use codex_execpolicy::Policy;
use codex_git_utils::GhostCommit;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_image::ImageProcessingError;
use schemars::JsonSchema;
@@ -45,6 +46,60 @@ static SANDBOX_MODE_READ_ONLY_TEMPLATE: LazyLock<Template> = LazyLock::new(|| {
.unwrap_or_else(|err| panic!("read-only sandbox template must parse: {err}"))
});
type CommitID = String;
/// Details of a ghost commit created from a repository state.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, TS)]
pub struct GhostCommit {
id: CommitID,
parent: Option<CommitID>,
preexisting_untracked_files: Vec<PathBuf>,
preexisting_untracked_dirs: Vec<PathBuf>,
}
impl GhostCommit {
/// Create a new ghost commit wrapper from a raw commit ID and optional parent.
pub fn new(
id: CommitID,
parent: Option<CommitID>,
preexisting_untracked_files: Vec<PathBuf>,
preexisting_untracked_dirs: Vec<PathBuf>,
) -> Self {
Self {
id,
parent,
preexisting_untracked_files,
preexisting_untracked_dirs,
}
}
/// Commit ID for the snapshot.
pub fn id(&self) -> &str {
&self.id
}
/// Parent commit ID, if the repository had a `HEAD` at creation time.
pub fn parent(&self) -> Option<&str> {
self.parent.as_deref()
}
/// Untracked or ignored files that already existed when the snapshot was captured.
pub fn preexisting_untracked_files(&self) -> &[PathBuf] {
&self.preexisting_untracked_files
}
/// Untracked or ignored directories that already existed when the snapshot was captured.
pub fn preexisting_untracked_dirs(&self) -> &[PathBuf] {
&self.preexisting_untracked_dirs
}
}
impl fmt::Display for GhostCommit {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.id)
}
}
/// Controls the per-command sandbox override requested by a shell-like tool call.
#[derive(
Debug, Clone, Copy, Default, Eq, Hash, PartialEq, Serialize, Deserialize, JsonSchema, TS,

View File

@@ -49,7 +49,6 @@ use crate::request_permissions::RequestPermissionsEvent;
use crate::request_permissions::RequestPermissionsResponse;
use crate::request_user_input::RequestUserInputResponse;
use crate::user_input::UserInput;
use codex_git_utils::GitSha;
use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
@@ -103,6 +102,17 @@ pub const REALTIME_CONVERSATION_OPEN_TAG: &str = "<realtime_conversation>";
pub const REALTIME_CONVERSATION_CLOSE_TAG: &str = "</realtime_conversation>";
pub const USER_MESSAGE_BEGIN: &str = "## My request for Codex:";
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[serde(transparent)]
#[ts(type = "string")]
pub struct GitSha(pub String);
impl GitSha {
pub fn new(sha: &str) -> Self {
Self(sha.to_string())
}
}
/// Submission Queue Entry - requests from user
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)]
pub struct Submission {

View File

@@ -4,6 +4,7 @@ use crate::legacy_core::windows_sandbox::WindowsSandboxLevelExt;
use codex_app_server_client::AppServerEvent;
use codex_app_server_client::AppServerRequestHandle;
use codex_app_server_protocol::ServerNotification;
use codex_exec_server::LOCAL_FS;
use codex_git_utils::resolve_root_git_project_for_trust;
#[cfg(target_os = "windows")]
use codex_protocol::config_types::WindowsSandboxLevel;
@@ -123,8 +124,9 @@ impl OnboardingScreen {
let show_windows_create_sandbox_hint = false;
let highlighted = TrustDirectorySelection::Trust;
if show_trust_screen {
let trust_target = resolve_root_git_project_for_trust(&cwd)
let trust_target = resolve_root_git_project_for_trust(LOCAL_FS.as_ref(), &config.cwd)
.await
.map(Into::into)
.unwrap_or_else(|| cwd.clone());
steps.push(Step::TrustDirectory(TrustDirectoryWidget {
cwd,

View File

@@ -1,7 +1,6 @@
//! Resolve plugin namespace from skill file paths by walking ancestors for `plugin.json`.
use codex_exec_server::ExecutorFileSystem;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_exec_server::ExecutorPathRef;
/// Relative path from a plugin root to its manifest file.
pub const PLUGIN_MANIFEST_PATH: &str = ".codex-plugin/plugin.json";
@@ -13,22 +12,17 @@ struct RawPluginManifestName {
name: String,
}
async fn plugin_manifest_name(
fs: &dyn ExecutorFileSystem,
plugin_root: &AbsolutePathBuf,
) -> Option<String> {
async fn plugin_manifest_name(plugin_root: &ExecutorPathRef<'_>) -> Option<String> {
let manifest_path = plugin_root.join(PLUGIN_MANIFEST_PATH);
match fs.get_metadata(&manifest_path, /*sandbox*/ None).await {
Ok(metadata) if metadata.is_file => {}
Ok(_) | Err(_) => return None,
match manifest_path.unsandboxed().is_file().await {
Ok(true) => {}
Ok(false) | Err(_) => return None,
}
let contents = fs
.read_file_text(&manifest_path, /*sandbox*/ None)
.await
.ok()?;
let contents = manifest_path.unsandboxed().read_file_text().await.ok()?;
let RawPluginManifestName { name: raw_name } = serde_json::from_str(&contents).ok()?;
Some(
plugin_root
.path()
.file_name()
.and_then(|entry| entry.to_str())
.filter(|_| raw_name.trim().is_empty())
@@ -39,12 +33,9 @@ async fn plugin_manifest_name(
/// Returns the plugin manifest `name` for the nearest ancestor of `path` that contains a valid
/// plugin manifest (same `name` rules as full manifest loading in codex-core).
pub async fn plugin_namespace_for_skill_path(
fs: &dyn ExecutorFileSystem,
path: &AbsolutePathBuf,
) -> Option<String> {
pub async fn plugin_namespace_for_skill_path(path: &ExecutorPathRef<'_>) -> Option<String> {
for ancestor in path.ancestors() {
if let Some(name) = plugin_manifest_name(fs, &ancestor).await {
if let Some(name) = plugin_manifest_name(&ancestor).await {
return Some(name);
}
}
@@ -54,6 +45,7 @@ pub async fn plugin_namespace_for_skill_path(
#[cfg(test)]
mod tests {
use super::plugin_namespace_for_skill_path;
use codex_exec_server::ExecutorPathRef;
use codex_exec_server::LOCAL_FS;
use codex_utils_absolute_path::test_support::PathBufExt;
use std::fs;
@@ -75,7 +67,11 @@ mod tests {
fs::write(&skill_path, "---\ndescription: search\n---\n").expect("write skill");
assert_eq!(
plugin_namespace_for_skill_path(LOCAL_FS.as_ref(), &skill_path.abs()).await,
plugin_namespace_for_skill_path(&ExecutorPathRef::new(
LOCAL_FS.as_ref(),
skill_path.abs(),
))
.await,
Some("sample".to_string())
);
}