Compare commits

...

4 Commits

Author SHA1 Message Date
starr-openai
cab5fd2f11 Add worktree startup helpers 2026-04-03 17:57:59 -07:00
starr-openai
eb462e8f76 Apply worktree MVP clippy fix 2026-04-01 20:43:23 -07:00
starr-openai
182c8c9390 Add startup worktree MVP fixes 2026-04-01 19:48:44 -07:00
starr-openai
bd9c85eedf codex: preserve no-executor mode and gate js_repl 2026-04-01 17:35:14 -07:00
19 changed files with 1676 additions and 107 deletions

2
codex-rs/Cargo.lock generated
View File

@@ -1628,6 +1628,7 @@ dependencies = [
"codex-exec",
"codex-execpolicy",
"codex-features",
"codex-git-utils",
"codex-login",
"codex-mcp-server",
"codex-protocol",
@@ -2144,6 +2145,7 @@ dependencies = [
"regex",
"schemars 0.8.22",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",

View File

@@ -31,6 +31,7 @@ codex-core = { workspace = true }
codex-exec = { workspace = true }
codex-execpolicy = { workspace = true }
codex-features = { workspace = true }
codex-git-utils = { workspace = true }
codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-protocol = { workspace = true }

View File

@@ -39,10 +39,13 @@ mod app_cmd;
#[cfg(target_os = "macos")]
mod desktop_app;
mod mcp_cmd;
mod worktree_cmd;
#[cfg(not(windows))]
mod wsl_paths;
use crate::mcp_cmd::McpCli;
use crate::worktree_cmd::WorktreeCli;
use crate::worktree_cmd::run_worktree_command;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
@@ -150,6 +153,9 @@ enum Subcommand {
/// Inspect feature flags.
Features(FeaturesCli),
/// Manage Codex-created local git worktrees.
Worktree(WorktreeCli),
}
#[derive(Debug, Parser)]
@@ -1028,6 +1034,14 @@ async fn cli_main(arg0_paths: Arg0DispatchPaths) -> anyhow::Result<()> {
disable_feature_in_config(&interactive, &feature).await?;
}
},
Some(Subcommand::Worktree(worktree_cli)) => {
reject_remote_mode_for_subcommand(
root_remote.as_deref(),
root_remote_auth_token_env.as_deref(),
"worktree",
)?;
run_worktree_command(worktree_cli)?;
}
}
Ok(())

View File

@@ -0,0 +1,134 @@
use clap::Parser;
use codex_core::config::find_codex_home;
use codex_git_utils::CodexWorktreePruneCandidate;
use codex_git_utils::CodexWorktreePruneMode;
use codex_git_utils::CodexWorktreePruneOptions;
use codex_git_utils::CodexWorktreePruneSkipReason;
use codex_git_utils::CodexWorktreePruneSkipped;
use codex_git_utils::prune_codex_managed_worktrees;
use std::path::PathBuf;
/// Top-level CLI for `codex worktree`; dispatches to a subcommand.
#[derive(Debug, Parser)]
pub(crate) struct WorktreeCli {
    #[command(subcommand)]
    pub(crate) subcommand: WorktreeSubcommand,
}
/// Subcommands supported by `codex worktree`.
#[derive(Debug, Parser)]
pub(crate) enum WorktreeSubcommand {
    /// Remove old Codex-managed local git worktrees.
    Prune(WorktreePruneCommand),
}
/// Arguments for `codex worktree prune`. Each optional flag narrows the set
/// of Codex-managed worktrees considered for removal.
#[derive(Debug, Parser)]
pub(crate) struct WorktreePruneCommand {
    /// Only prune worktrees created from this source repository.
    #[arg(long = "repo", value_name = "DIR")]
    repo: Option<PathBuf>,
    /// Only prune worktrees created before this Unix timestamp (seconds).
    #[arg(long = "created-before", value_name = "UNIX_SECONDS")]
    created_before: Option<u64>,
    /// Only prune worktrees last used before this Unix timestamp (seconds).
    #[arg(long = "last-used-before", value_name = "UNIX_SECONDS")]
    last_used_before: Option<u64>,
    /// Print matching worktrees without deleting them.
    #[arg(long = "dry-run", default_value_t = false)]
    dry_run: bool,
}
/// Entry point for `codex worktree`: routes the parsed CLI to its handler.
pub(crate) fn run_worktree_command(worktree_cli: WorktreeCli) -> anyhow::Result<()> {
    // `Prune` is currently the only subcommand, so destructure it directly.
    let WorktreeSubcommand::Prune(prune_command) = worktree_cli.subcommand;
    run_worktree_prune_command(prune_command)
}
/// Runs `codex worktree prune`: deletes (or, with `--dry-run`, merely lists)
/// Codex-managed worktrees matching the command's filters, then prints a
/// one-line summary.
fn run_worktree_prune_command(command: WorktreePruneCommand) -> anyhow::Result<()> {
    let options = CodexWorktreePruneOptions {
        codex_home: find_codex_home()?,
        source_repo_root: command.repo,
        created_before: command.created_before,
        last_used_before: command.last_used_before,
        // `--dry-run` selects the report-only mode instead of deletion.
        mode: if command.dry_run {
            CodexWorktreePruneMode::DryRun
        } else {
            CodexWorktreePruneMode::Delete
        },
    };
    let report = prune_codex_managed_worktrees(&options)?;
    // Emit pruned, then skipped, then kept entries — one line per worktree.
    for candidate in &report.pruned {
        print_candidate("pruned", candidate);
    }
    for skipped in &report.skipped {
        print_skipped_candidate(skipped);
    }
    for candidate in &report.kept {
        print_candidate("kept", candidate);
    }
    println!(
        "summary: pruned={} skipped={} kept={}",
        report.pruned.len(),
        report.skipped.len(),
        report.kept.len()
    );
    Ok(())
}
/// Prints one worktree candidate as a single line, prefixed with the action
/// taken ("pruned" or "kept").
fn print_candidate(action: &str, candidate: &CodexWorktreePruneCandidate) {
    println!(
        "{action} {} repo={} created_at={} last_used_at={}",
        candidate.worktree_git_root.display(),
        candidate.metadata.source_repo_root.display(),
        candidate.metadata.created_at,
        candidate.metadata.last_used_at
    );
}
fn print_skipped_candidate(skipped: &CodexWorktreePruneSkipped) {
let reason = match skipped.reason {
CodexWorktreePruneSkipReason::DirtyWorktree => "dirty-worktree",
CodexWorktreePruneSkipReason::LocalCommits => "local-commits",
CodexWorktreePruneSkipReason::MissingStartingRef => "missing-starting-ref",
};
println!(
"skipped {} reason={} repo={} created_at={} last_used_at={}",
skipped.candidate.worktree_git_root.display(),
reason,
skipped.candidate.metadata.source_repo_root.display(),
skipped.candidate.metadata.created_at,
skipped.candidate.metadata.last_used_at
);
}
#[cfg(test)]
mod tests {
    use super::WorktreeCli;
    use super::WorktreeSubcommand;
    use clap::Parser;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    /// All prune filters and flags parse into the expected struct fields.
    #[test]
    fn parse_worktree_prune_filters() {
        let cli = WorktreeCli::parse_from([
            "codex-worktree",
            "prune",
            "--repo",
            "/tmp/repo",
            "--created-before",
            "123",
            "--last-used-before",
            "456",
            "--dry-run",
        ]);
        // Irrefutable: `Prune` is the only subcommand variant.
        let WorktreeSubcommand::Prune(command) = cli.subcommand;
        assert_eq!(command.repo, Some(PathBuf::from("/tmp/repo")));
        assert_eq!(command.created_before, Some(123));
        assert_eq!(command.last_used_before, Some(456));
        assert!(command.dry_run);
    }
}

View File

@@ -941,6 +941,7 @@ impl TurnContext {
sandbox_policy: self.sandbox_policy.get(),
windows_sandbox_level: self.windows_sandbox_level,
})
.with_has_attached_executor(self.environment.has_attached_executor())
.with_unified_exec_shell_mode(self.tools_config.unified_exec_shell_mode.clone())
.with_web_search_config(self.tools_config.web_search_config.clone())
.with_allow_login_shell(self.tools_config.allow_login_shell)
@@ -1395,6 +1396,7 @@ impl Session {
sandbox_policy: session_configuration.sandbox_policy.get(),
windows_sandbox_level: session_configuration.windows_sandbox_level,
})
.with_has_attached_executor(environment.has_attached_executor())
.with_unified_exec_shell_mode_for_session(
user_shell,
shell_zsh_path,
@@ -5472,6 +5474,7 @@ async fn spawn_review_thread(
sess.services.shell_zsh_path.as_ref(),
sess.services.main_execve_wrapper_exe.as_ref(),
)
.with_has_attached_executor(parent_turn_context.environment.has_attached_executor())
.with_web_search_config(/*web_search_config*/ None)
.with_allow_login_shell(config.permissions.allow_login_shell)
.with_agent_roles(config.agent_roles.clone());

View File

@@ -77,8 +77,8 @@ pub(crate) async fn run_codex_thread_interactive(
config,
auth_manager,
models_manager,
environment_manager: Arc::new(EnvironmentManager::new(
parent_ctx.environment.exec_server_url().map(str::to_owned),
environment_manager: Arc::new(EnvironmentManager::from_environment(
parent_ctx.environment.as_ref(),
)),
skills_manager: Arc::clone(&parent_session.services.skills_manager),
plugins_manager: Arc::clone(&parent_session.services.plugins_manager),

View File

@@ -171,6 +171,7 @@ pub(crate) struct ToolsConfig {
pub experimental_supported_tools: Vec<String>,
pub agent_jobs_tools: bool,
pub agent_jobs_worker_tools: bool,
pub has_attached_executor: bool,
}
pub(crate) struct ToolsConfigParams<'a> {
@@ -305,9 +306,15 @@ impl ToolsConfig {
experimental_supported_tools: model_info.experimental_supported_tools.clone(),
agent_jobs_tools: include_agent_jobs,
agent_jobs_worker_tools,
has_attached_executor: true,
}
}
pub fn with_has_attached_executor(mut self, has_attached_executor: bool) -> Self {
self.has_attached_executor = has_attached_executor;
self
}
pub fn with_agent_roles(mut self, agent_roles: BTreeMap<String, AgentRoleConfig>) -> Self {
self.agent_roles = agent_roles;
self
@@ -493,66 +500,68 @@ pub(crate) fn build_specs_with_discoverable_tools(
builder.register_handler(WAIT_TOOL_NAME, code_mode_wait_handler);
}
match &config.shell_type {
ConfigShellToolType::Default => {
push_tool_spec(
&mut builder,
create_shell_tool(ShellToolOptions {
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
if config.has_attached_executor {
match &config.shell_type {
ConfigShellToolType::Default => {
push_tool_spec(
&mut builder,
create_shell_tool(ShellToolOptions {
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
}
ConfigShellToolType::Local => {
push_tool_spec(
&mut builder,
ToolSpec::LocalShell {},
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
}
ConfigShellToolType::UnifiedExec => {
push_tool_spec(
&mut builder,
create_exec_command_tool(CommandToolOptions {
allow_login_shell: config.allow_login_shell,
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
push_tool_spec(
&mut builder,
create_write_stdin_tool(),
/*supports_parallel_tool_calls*/ false,
config.code_mode_enabled,
);
builder.register_handler("exec_command", unified_exec_handler.clone());
builder.register_handler("write_stdin", unified_exec_handler);
}
ConfigShellToolType::Disabled => {
// Do nothing.
}
ConfigShellToolType::ShellCommand => {
push_tool_spec(
&mut builder,
create_shell_command_tool(CommandToolOptions {
allow_login_shell: config.allow_login_shell,
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
}
}
ConfigShellToolType::Local => {
push_tool_spec(
&mut builder,
ToolSpec::LocalShell {},
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
}
ConfigShellToolType::UnifiedExec => {
push_tool_spec(
&mut builder,
create_exec_command_tool(CommandToolOptions {
allow_login_shell: config.allow_login_shell,
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
push_tool_spec(
&mut builder,
create_write_stdin_tool(),
/*supports_parallel_tool_calls*/ false,
config.code_mode_enabled,
);
builder.register_handler("exec_command", unified_exec_handler.clone());
builder.register_handler("write_stdin", unified_exec_handler);
}
ConfigShellToolType::Disabled => {
// Do nothing.
}
ConfigShellToolType::ShellCommand => {
push_tool_spec(
&mut builder,
create_shell_command_tool(CommandToolOptions {
allow_login_shell: config.allow_login_shell,
exec_permission_approvals_enabled,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
}
}
if config.shell_type != ConfigShellToolType::Disabled {
// Always register shell aliases so older prompts remain compatible.
builder.register_handler("shell", shell_handler.clone());
builder.register_handler("container.exec", shell_handler.clone());
builder.register_handler("local_shell", shell_handler);
builder.register_handler("shell_command", shell_command_handler);
if config.shell_type != ConfigShellToolType::Disabled {
// Always register shell aliases so older prompts remain compatible.
builder.register_handler("shell", shell_handler.clone());
builder.register_handler("container.exec", shell_handler.clone());
builder.register_handler("local_shell", shell_handler);
builder.register_handler("shell_command", shell_command_handler);
}
}
if mcp_tools.is_some() {
@@ -587,7 +596,7 @@ pub(crate) fn build_specs_with_discoverable_tools(
);
builder.register_handler("update_plan", plan_handler);
if config.js_repl_enabled {
if config.has_attached_executor && config.js_repl_enabled {
push_tool_spec(
&mut builder,
create_js_repl_tool(),
@@ -661,7 +670,9 @@ pub(crate) fn build_specs_with_discoverable_tools(
builder.register_handler(TOOL_SUGGEST_TOOL_NAME, tool_suggest_handler);
}
if let Some(apply_patch_tool_type) = &config.apply_patch_tool_type {
if config.has_attached_executor
&& let Some(apply_patch_tool_type) = &config.apply_patch_tool_type
{
match apply_patch_tool_type {
ApplyPatchToolType::Freeform => {
push_tool_spec(
@@ -683,10 +694,11 @@ pub(crate) fn build_specs_with_discoverable_tools(
builder.register_handler("apply_patch", apply_patch_handler);
}
if config
.experimental_supported_tools
.iter()
.any(|tool| tool == "list_dir")
if config.has_attached_executor
&& config
.experimental_supported_tools
.iter()
.any(|tool| tool == "list_dir")
{
let list_dir_handler = Arc::new(ListDirHandler);
push_tool_spec(
@@ -763,15 +775,17 @@ pub(crate) fn build_specs_with_discoverable_tools(
);
}
push_tool_spec(
&mut builder,
create_view_image_tool(ViewImageToolOptions {
can_request_original_image_detail: config.can_request_original_image_detail,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
builder.register_handler("view_image", view_image_handler);
if config.has_attached_executor {
push_tool_spec(
&mut builder,
create_view_image_tool(ViewImageToolOptions {
can_request_original_image_detail: config.can_request_original_image_detail,
}),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
builder.register_handler("view_image", view_image_handler);
}
if config.collab_tools {
if config.multi_agent_v2 {

View File

@@ -987,6 +987,53 @@ fn js_repl_enabled_adds_tools() {
assert_contains_tool_names(&tools, &["js_repl", "js_repl_reset"]);
}
/// With `with_has_attached_executor(false)`, every executor-backed tool
/// (shell/exec variants, apply_patch, list_dir, view_image, js_repl) must be
/// absent, while executor-independent tools remain registered.
#[test]
fn no_attached_executor_hides_executor_backed_tools() {
    let model_info = model_info_from_models_json("gpt-5-codex");
    let mut features = Features::with_defaults();
    // Enable features whose tools would normally appear, to prove the
    // executor gate overrides feature flags.
    features.enable(Feature::UnifiedExec);
    features.enable(Feature::JsRepl);
    let available_models = Vec::new();
    let mut tools_config = ToolsConfig::new(&ToolsConfigParams {
        model_info: &model_info,
        available_models: &available_models,
        features: &features,
        web_search_mode: Some(WebSearchMode::Live),
        session_source: SessionSource::Cli,
        sandbox_policy: &SandboxPolicy::DangerFullAccess,
        windows_sandbox_level: WindowsSandboxLevel::Disabled,
    })
    .with_has_attached_executor(false);
    tools_config
        .experimental_supported_tools
        .push("list_dir".to_string());
    let (tools, _) = build_specs(
        &tools_config,
        Some(std::collections::HashMap::new()),
        /*app_tools*/ None,
        &[],
    )
    .build();
    // Whatever shell tool this config would normally expose must be gone.
    if let Some(shell_tool) = shell_tool_name(&tools_config) {
        assert_lacks_tool_name(&tools, shell_tool);
    }
    for absent in [
        "exec_command",
        "write_stdin",
        "apply_patch",
        "list_dir",
        VIEW_IMAGE_TOOL_NAME,
        "js_repl",
        "js_repl_reset",
    ] {
        assert_lacks_tool_name(&tools, absent);
    }
    // Tools that do not need an executor stay available.
    assert_contains_tool_names(&tools, &["update_plan", "request_user_input", "web_search"]);
}
#[test]
fn image_generation_tools_require_feature_and_supported_model() {
let config = test_config();

View File

@@ -478,9 +478,9 @@ impl TestCodexBuilder {
test_env: TestEnv,
) -> anyhow::Result<TestCodex> {
let auth = self.auth.clone();
let environment_manager = Arc::new(codex_exec_server::EnvironmentManager::new(
test_env.exec_server_url().map(str::to_owned),
));
let environment_manager = Arc::new(
codex_exec_server::EnvironmentManager::from_environment(test_env.environment()),
);
let thread_manager = if config.model_catalog.is_some() {
ThreadManager::new(
&config,

View File

@@ -1,5 +1,6 @@
use std::sync::Arc;
use async_trait::async_trait;
use tokio::sync::OnceCell;
use crate::ExecServerClient;
@@ -9,6 +10,8 @@ use crate::file_system::ExecutorFileSystem;
use crate::local_file_system::LocalFileSystem;
use crate::local_process::LocalProcess;
use crate::process::ExecBackend;
use crate::process::StartedExecProcess;
use crate::protocol::ExecParams;
use crate::remote_file_system::RemoteFileSystem;
use crate::remote_process::RemoteProcess;
@@ -20,14 +23,21 @@ pub trait ExecutorEnvironment: Send + Sync {
#[derive(Debug, Default)]
pub struct EnvironmentManager {
exec_server_url: Option<String>,
executor_mode: ExecutorMode,
current_environment: OnceCell<Arc<Environment>>,
}
impl EnvironmentManager {
pub fn new(exec_server_url: Option<String>) -> Self {
Self {
exec_server_url: normalize_exec_server_url(exec_server_url),
executor_mode: parse_executor_mode(exec_server_url),
current_environment: OnceCell::new(),
}
}
pub fn from_environment(environment: &Environment) -> Self {
Self {
executor_mode: environment.executor_mode.clone(),
current_environment: OnceCell::new(),
}
}
@@ -37,14 +47,14 @@ impl EnvironmentManager {
}
pub fn exec_server_url(&self) -> Option<&str> {
self.exec_server_url.as_deref()
self.executor_mode.remote_exec_server_url()
}
pub async fn current(&self) -> Result<Arc<Environment>, ExecServerError> {
self.current_environment
.get_or_try_init(|| async {
Ok(Arc::new(
Environment::create(self.exec_server_url.clone()).await?,
Environment::create_with_mode(self.executor_mode.clone()).await?,
))
})
.await
@@ -52,9 +62,32 @@ impl EnvironmentManager {
}
}
/// How this session executes commands: on the local machine, through a remote
/// exec server, or not at all.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
enum ExecutorMode {
    /// Spawn processes locally (the default).
    #[default]
    LocalExecutor,
    /// Forward execution to a remote exec server at `url`.
    RemoteExecutor {
        url: String,
    },
    /// No executor is attached to the session.
    NoExecutor,
}

impl ExecutorMode {
    /// The remote exec-server URL, or `None` for local/no-executor modes.
    fn remote_exec_server_url(&self) -> Option<&str> {
        if let Self::RemoteExecutor { url } = self {
            Some(url.as_str())
        } else {
            None
        }
    }

    /// True for every mode except `NoExecutor`.
    fn has_attached_executor(&self) -> bool {
        match self {
            Self::NoExecutor => false,
            Self::LocalExecutor | Self::RemoteExecutor { .. } => true,
        }
    }
}
#[derive(Clone)]
pub struct Environment {
exec_server_url: Option<String>,
executor_mode: ExecutorMode,
remote_exec_server_client: Option<ExecServerClient>,
exec_backend: Arc<dyn ExecBackend>,
}
@@ -70,7 +103,7 @@ impl Default for Environment {
}
Self {
exec_server_url: None,
executor_mode: ExecutorMode::LocalExecutor,
remote_exec_server_client: None,
exec_backend: Arc::new(local_process),
}
@@ -80,18 +113,21 @@ impl Default for Environment {
impl std::fmt::Debug for Environment {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Environment")
.field("exec_server_url", &self.exec_server_url)
.field("executor_mode", &self.executor_mode)
.finish_non_exhaustive()
}
}
impl Environment {
pub async fn create(exec_server_url: Option<String>) -> Result<Self, ExecServerError> {
let exec_server_url = normalize_exec_server_url(exec_server_url);
let remote_exec_server_client = if let Some(url) = &exec_server_url {
Self::create_with_mode(parse_executor_mode(exec_server_url)).await
}
async fn create_with_mode(executor_mode: ExecutorMode) -> Result<Self, ExecServerError> {
let remote_exec_server_client = if let Some(url) = executor_mode.remote_exec_server_url() {
Some(
ExecServerClient::connect_websocket(RemoteExecServerConnectArgs {
websocket_url: url.clone(),
websocket_url: url.to_string(),
client_name: "codex-environment".to_string(),
connect_timeout: std::time::Duration::from_secs(5),
initialize_timeout: std::time::Duration::from_secs(5),
@@ -105,6 +141,8 @@ impl Environment {
let exec_backend: Arc<dyn ExecBackend> =
if let Some(client) = remote_exec_server_client.clone() {
Arc::new(RemoteProcess::new(client))
} else if matches!(executor_mode, ExecutorMode::NoExecutor) {
Arc::new(NoAttachedExecutorBackend)
} else {
let local_process = LocalProcess::default();
local_process
@@ -117,14 +155,18 @@ impl Environment {
};
Ok(Self {
exec_server_url,
executor_mode,
remote_exec_server_client,
exec_backend,
})
}
pub fn exec_server_url(&self) -> Option<&str> {
self.exec_server_url.as_deref()
self.executor_mode.remote_exec_server_url()
}
pub fn has_attached_executor(&self) -> bool {
self.executor_mode.has_attached_executor()
}
pub fn get_exec_backend(&self) -> Arc<dyn ExecBackend> {
@@ -140,11 +182,26 @@ impl Environment {
}
}
fn normalize_exec_server_url(exec_server_url: Option<String>) -> Option<String> {
exec_server_url.and_then(|url| {
let url = url.trim();
(!url.is_empty()).then(|| url.to_string())
})
/// Exec backend used when no executor is attached: every attempt to start a
/// process fails with a protocol error instead of spawning anything.
#[derive(Clone, Default)]
struct NoAttachedExecutorBackend;

#[async_trait]
impl ExecBackend for NoAttachedExecutorBackend {
    async fn start(&self, _params: ExecParams) -> Result<StartedExecProcess, ExecServerError> {
        Err(ExecServerError::Protocol(
            "no attached executor is configured for this session".to_string(),
        ))
    }
}
/// Maps a raw exec-server URL setting to an `ExecutorMode`.
///
/// Missing or blank values select the local executor; the case-insensitive
/// sentinel "none" disables the executor; any other (trimmed) value is
/// treated as a remote exec-server URL.
fn parse_executor_mode(exec_server_url: Option<String>) -> ExecutorMode {
    let trimmed = exec_server_url.as_deref().map_or("", str::trim);
    if trimmed.is_empty() {
        ExecutorMode::LocalExecutor
    } else if trimmed.eq_ignore_ascii_case("none") {
        ExecutorMode::NoExecutor
    } else {
        ExecutorMode::RemoteExecutor {
            url: trimmed.to_string(),
        }
    }
}
impl ExecutorEnvironment for Environment {
@@ -169,6 +226,8 @@ mod tests {
.expect("create environment");
assert_eq!(environment.exec_server_url(), None);
assert!(environment.has_attached_executor());
assert_eq!(environment.executor_mode, ExecutorMode::LocalExecutor);
assert!(environment.remote_exec_server_client.is_none());
}
@@ -176,7 +235,37 @@ mod tests {
fn environment_manager_normalizes_empty_url() {
let manager = EnvironmentManager::new(Some(String::new()));
assert_eq!(manager.exec_server_url(), None);
assert_eq!(manager.executor_mode, ExecutorMode::LocalExecutor);
}
/// The "none" sentinel survives through `EnvironmentManager::new`.
#[test]
fn environment_manager_preserves_no_executor_setting() {
    let manager = EnvironmentManager::new(Some("none".to_string()));
    assert_eq!(manager.executor_mode, ExecutorMode::NoExecutor);
}
/// `parse_executor_mode` distinguishes local (absent/blank), no-executor
/// (case-insensitive "none"), and remote (any other URL) settings.
#[test]
fn parse_executor_mode_preserves_no_executor_semantics() {
    assert_eq!(parse_executor_mode(None), ExecutorMode::LocalExecutor);
    assert_eq!(
        parse_executor_mode(Some(String::new())),
        ExecutorMode::LocalExecutor
    );
    assert_eq!(
        parse_executor_mode(Some("none".to_string())),
        ExecutorMode::NoExecutor
    );
    // Sentinel matching is case-insensitive.
    assert_eq!(
        parse_executor_mode(Some("NONE".to_string())),
        ExecutorMode::NoExecutor
    );
    assert_eq!(
        parse_executor_mode(Some("ws://localhost:1234".to_string())),
        ExecutorMode::RemoteExecutor {
            url: "ws://localhost:1234".to_string(),
        }
    );
}
#[tokio::test]
@@ -208,4 +297,41 @@ mod tests {
assert_eq!(response.process.process_id().as_str(), "default-env-proc");
}
/// Creating an environment with the "none" sentinel yields `NoExecutor`:
/// no URL, no remote client, and no attached executor.
#[tokio::test]
async fn no_executor_environment_disables_attached_executor() {
    let environment = Environment::create(Some("none".to_string()))
        .await
        .expect("create environment");
    assert_eq!(environment.exec_server_url(), None);
    assert!(!environment.has_attached_executor());
    assert_eq!(environment.executor_mode, ExecutorMode::NoExecutor);
    assert!(environment.remote_exec_server_client.is_none());
}
/// The no-executor backend rejects process starts with a protocol error
/// carrying a stable, user-visible message.
#[tokio::test]
async fn no_executor_environment_rejects_exec_start() {
    let environment = Environment::create(Some("none".to_string()))
        .await
        .expect("create environment");
    let err = environment
        .get_exec_backend()
        .start(crate::ExecParams {
            process_id: ProcessId::from("no-executor-proc"),
            argv: vec!["true".to_string()],
            cwd: std::env::current_dir().expect("read current dir"),
            env: Default::default(),
            tty: false,
            arg0: None,
        })
        .await
        .expect_err("no-executor backend should reject starts");
    assert_eq!(
        err.to_string(),
        "exec-server protocol error: no attached executor is configured for this session"
    );
}
}

View File

@@ -63,6 +63,10 @@ pub struct Cli {
#[clap(long = "cd", short = 'C', value_name = "DIR")]
pub cwd: Option<PathBuf>,
/// Create a new local git worktree and start the session from that checkout.
#[arg(long = "worktree", default_value_t = false, global = true)]
pub worktree: bool,
/// Allow running Codex outside a Git repository.
#[arg(long = "skip-git-repo-check", global = true, default_value_t = false)]
pub skip_git_repo_check: bool,
@@ -312,4 +316,13 @@ mod tests {
assert_eq!(args.session_id.as_deref(), Some("session-123"));
assert_eq!(args.prompt.as_deref(), Some(PROMPT));
}
/// `--worktree` parses as a boolean flag and composes with `--cd` and a
/// positional prompt.
#[test]
fn parse_worktree_flag() {
    let cli = Cli::parse_from(["codex-exec", "--worktree", "--cd", "/tmp/repo", "hello"]);
    assert!(cli.worktree);
    assert_eq!(cli.cwd, Some(PathBuf::from("/tmp/repo")));
    assert_eq!(cli.prompt.as_deref(), Some("hello"));
}
}

View File

@@ -66,7 +66,11 @@ use codex_core::config_loader::format_config_error_with_source;
use codex_core::format_exec_policy_error_with_source;
use codex_core::path_utils;
use codex_feedback::CodexFeedback;
use codex_git_utils::CodexManagedWorktree;
use codex_git_utils::GitToolingError;
use codex_git_utils::create_codex_managed_worktree;
use codex_git_utils::get_git_repo_root;
use codex_git_utils::touch_codex_managed_worktree_metadata;
use codex_otel::set_parent_from_context;
use codex_otel::traceparent_context_from_env;
use codex_protocol::config_types::SandboxMode;
@@ -165,6 +169,14 @@ struct ExecRunArgs {
stderr_with_ansi: bool,
}
/// Working directories resolved during startup.
#[derive(Debug, PartialEq, Eq)]
struct StartupCwd {
    // Cwd the session runs in; `None` keeps the process's current directory.
    resolved_cwd: Option<PathBuf>,
    // Canonicalized absolute cwd used for config/project-state lookup.
    config_cwd: AbsolutePathBuf,
}

// Creates a Codex-managed worktree from (source cwd, codex home). A plain fn
// pointer so tests can inject a fake creator without touching git.
type WorktreeCreator = fn(&Path, &Path) -> Result<CodexManagedWorktree, GitToolingError>;
fn exec_root_span() -> tracing::Span {
info_span!(
"codex.exec",
@@ -174,6 +186,36 @@ fn exec_root_span() -> tracing::Span {
)
}
/// Resolves the startup working directory, optionally redirecting the session
/// into a freshly created Codex-managed worktree.
///
/// Without a `worktree_creator`, the requested cwd is returned unchanged
/// (canonicalized only for config lookup) and any Codex-managed worktree
/// metadata at that path is refreshed on a best-effort basis. With a creator,
/// both cwds point at the new worktree's workspace root.
fn resolve_startup_cwd(
    requested_cwd: Option<PathBuf>,
    codex_home: &Path,
    worktree_creator: Option<WorktreeCreator>,
) -> anyhow::Result<StartupCwd> {
    let config_cwd = match requested_cwd.as_deref() {
        Some(path) => AbsolutePathBuf::from_absolute_path(path.canonicalize()?)?,
        None => AbsolutePathBuf::current_dir()?,
    };
    let Some(worktree_creator) = worktree_creator else {
        // Best-effort: a failed metadata refresh must not abort startup.
        if let Err(err) = touch_codex_managed_worktree_metadata(config_cwd.as_path()) {
            warn!(?err, "failed to refresh Codex-managed worktree metadata");
        }
        return Ok(StartupCwd {
            // Deliberately the original (non-canonicalized) requested path.
            resolved_cwd: requested_cwd,
            config_cwd,
        });
    };
    let worktree = worktree_creator(config_cwd.as_path(), codex_home)
        .map_err(|err| anyhow::anyhow!("failed to create worktree: {err}"))?;
    // Re-point the config cwd into the worktree so config/project state is
    // loaded relative to the checkout the session actually runs in.
    let config_cwd = AbsolutePathBuf::from_absolute_path(&worktree.worktree_workspace_root)?;
    Ok(StartupCwd {
        resolved_cwd: Some(worktree.worktree_workspace_root),
        config_cwd,
    })
}
pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result<()> {
if let Err(err) = set_default_originator("codex_exec".to_string()) {
tracing::warn!(?err, "Failed to set codex exec originator override {err:?}");
@@ -189,6 +231,7 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
full_auto,
dangerously_bypass_approvals_and_sandbox,
cwd,
worktree,
skip_git_repo_check,
add_dir,
ephemeral,
@@ -201,6 +244,14 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
config_overrides,
} = cli;
// Fail no-prompt stdin reads before creating a detached worktree so
// `codex-exec --worktree </dev/null` does not leak a checkout.
let prompt = if worktree && command.is_none() && prompt.is_none() {
Some(resolve_root_prompt(None))
} else {
prompt
};
let (_stdout_with_ansi, stderr_with_ansi) = match color {
cli::Color::Always => (true, true),
cli::Color::Never => (false, false),
@@ -240,12 +291,6 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
}
};
let resolved_cwd = cwd.clone();
let config_cwd = match resolved_cwd.as_deref() {
Some(path) => AbsolutePathBuf::from_absolute_path(path.canonicalize()?)?,
None => AbsolutePathBuf::current_dir()?,
};
// we load config.toml here to determine project state.
#[allow(clippy::print_stderr)]
let codex_home = match find_codex_home() {
@@ -256,6 +301,15 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
}
};
let StartupCwd {
resolved_cwd,
config_cwd,
} = resolve_startup_cwd(
cwd,
codex_home.as_path(),
worktree.then_some(create_codex_managed_worktree),
)?;
#[allow(clippy::print_stderr)]
let config_toml = match load_config_as_toml_with_cli_overrides(
&codex_home,
@@ -1659,6 +1713,7 @@ fn build_review_request(args: &ReviewArgs) -> anyhow::Result<ReviewRequest> {
#[cfg(test)]
mod tests {
use super::*;
use codex_git_utils::CodexManagedWorktreeMetadata;
use codex_otel::set_parent_from_w3c_trace_context;
use codex_protocol::config_types::ApprovalsReviewer;
use opentelemetry::trace::TraceContextExt;
@@ -1699,6 +1754,77 @@ mod tests {
);
}
/// Without a worktree creator, the requested cwd is returned unchanged and
/// the config cwd is its canonicalized absolute form.
#[test]
fn resolve_startup_cwd_uses_requested_cwd_without_worktree() {
    let codex_home = tempdir().expect("create temp codex home");
    let cwd = tempdir().expect("create temp cwd");
    let startup_cwd = resolve_startup_cwd(
        Some(cwd.path().to_path_buf()),
        codex_home.path(),
        /*worktree_creator*/ None,
    )
    .expect("resolve startup cwd");
    assert_eq!(
        startup_cwd,
        StartupCwd {
            resolved_cwd: Some(cwd.path().to_path_buf()),
            config_cwd: AbsolutePathBuf::from_absolute_path(
                cwd.path().canonicalize().expect("canonicalize cwd")
            )
            .expect("absolute cwd"),
        }
    );
}
/// With a worktree creator, both resolved and config cwds point at the
/// worktree's workspace root (which may be nested below its git root).
#[test]
fn resolve_startup_cwd_uses_worktree_workspace_root_when_enabled() {
    let codex_home = tempdir().expect("create temp codex home");
    let cwd = tempdir().expect("create temp cwd");
    let startup_cwd = resolve_startup_cwd(
        Some(cwd.path().to_path_buf()),
        codex_home.path(),
        // Fake creator: fabricates a worktree without touching git.
        Some(|source_cwd, codex_home| {
            let worktree_git_root = codex_home.join("worktrees/fake/project");
            let worktree_git_dir = worktree_git_root.join(".git");
            let marker_path = worktree_git_dir.join("codex-managed");
            let metadata_path = worktree_git_dir.join("codex-worktree.json");
            Ok(CodexManagedWorktree {
                source_cwd: source_cwd.to_path_buf(),
                source_repo_root: source_cwd.to_path_buf(),
                worktree_git_root: worktree_git_root.clone(),
                worktree_git_dir,
                // Nested on purpose: proves the workspace root, not the git
                // root, becomes the cwd.
                worktree_workspace_root: worktree_git_root.join("nested/path"),
                starting_ref: "main".to_string(),
                marker_path,
                metadata_path,
                metadata: CodexManagedWorktreeMetadata {
                    version: 1,
                    source_repo_root: source_cwd.to_path_buf(),
                    worktree_git_root,
                    starting_ref: "main".to_string(),
                    created_at: 1,
                    last_used_at: 1,
                },
            })
        }),
    )
    .expect("resolve startup cwd");
    let expected_worktree_workspace_root =
        codex_home.path().join("worktrees/fake/project/nested/path");
    assert_eq!(
        startup_cwd,
        StartupCwd {
            resolved_cwd: Some(expected_worktree_workspace_root.clone()),
            config_cwd: AbsolutePathBuf::from_absolute_path(&expected_worktree_workspace_root)
                .expect("absolute worktree cwd"),
        }
    );
}
#[test]
fn builds_uncommitted_review_request() {
let args = ReviewArgs {

View File

@@ -15,6 +15,7 @@ once_cell = { workspace = true }
regex = "1"
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "process", "rt", "time"] }

View File

@@ -29,6 +29,8 @@ pub enum GitToolingError {
#[error("failed to process path inside worktree")]
PathPrefix(#[from] std::path::StripPrefixError),
#[error(transparent)]
Json(#[from] serde_json::Error),
#[error(transparent)]
Walkdir(#[from] WalkdirError),
#[error(transparent)]
Io(#[from] std::io::Error),

View File

@@ -8,6 +8,8 @@ mod ghost_commits;
mod info;
mod operations;
mod platform;
mod worktree;
mod worktree_prune;
pub use apply::ApplyGitRequest;
pub use apply::ApplyGitResult;
@@ -49,6 +51,18 @@ use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
pub use worktree::CODEX_MANAGED_WORKTREE_METADATA_FILE;
pub use worktree::CodexManagedWorktree;
pub use worktree::CodexManagedWorktreeMetadata;
pub use worktree::create_codex_managed_worktree;
pub use worktree::touch_codex_managed_worktree_metadata;
pub use worktree_prune::CodexWorktreePruneCandidate;
pub use worktree_prune::CodexWorktreePruneMode;
pub use worktree_prune::CodexWorktreePruneOptions;
pub use worktree_prune::CodexWorktreePruneReport;
pub use worktree_prune::CodexWorktreePruneSkipReason;
pub use worktree_prune::CodexWorktreePruneSkipped;
pub use worktree_prune::prune_codex_managed_worktrees;
type CommitID = String;

View File

@@ -0,0 +1,538 @@
use std::collections::hash_map::DefaultHasher;
use std::ffi::OsString;
use std::fs;
use std::hash::Hash;
use std::hash::Hasher;
use std::io::ErrorKind;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use std::time::SystemTime;
use crate::GitToolingError;
use crate::operations::ensure_git_repository;
use crate::operations::repo_subdir;
use crate::operations::resolve_head;
use crate::operations::resolve_repository_root;
use crate::operations::run_git_for_status;
use crate::operations::run_git_for_stdout;
use crate::resolve_root_git_project_for_trust;
/// Per-process counter mixed into bucket-name hashing so repeated allocations
/// within one process land in different buckets.
static WORKTREE_BUCKET_COUNTER: AtomicU64 = AtomicU64::new(0);
/// Marker file written into the worktree's git dir to tag it as Codex-managed.
pub const CODEX_MANAGED_WORKTREE_MARKER_FILE: &str = "codex-managed";
/// JSON sidecar (inside the worktree's git dir) holding the persisted metadata.
pub const CODEX_MANAGED_WORKTREE_METADATA_FILE: &str = "codex-worktree.json";
/// Current schema version stamped into the metadata sidecar.
const CODEX_MANAGED_WORKTREE_METADATA_VERSION: u64 = 1;
/// Metadata for a detached worktree created under `$CODEX_HOME/worktrees`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CodexManagedWorktree {
    /// Directory the caller launched from (possibly nested inside the repo).
    pub source_cwd: PathBuf,
    /// Root of the original repository the worktree was created from.
    pub source_repo_root: PathBuf,
    /// Root directory of the new detached checkout.
    pub worktree_git_root: PathBuf,
    /// The checkout's git administrative directory (per-worktree git dir).
    pub worktree_git_dir: PathBuf,
    /// `source_cwd` re-mapped to the same relative location inside the checkout.
    pub worktree_workspace_root: PathBuf,
    /// Branch name or commit the checkout was created from.
    pub starting_ref: String,
    /// Path of the `codex-managed` marker file inside the git dir.
    pub marker_path: PathBuf,
    /// Path of the JSON metadata sidecar inside the git dir.
    pub metadata_path: PathBuf,
    /// Persisted metadata describing this worktree.
    pub metadata: CodexManagedWorktreeMetadata,
}
/// Persisted metadata for a Codex-managed worktree checkout.
#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub struct CodexManagedWorktreeMetadata {
    /// Schema version of this sidecar (currently 1).
    pub version: u64,
    /// Root of the repository the worktree was created from.
    pub source_repo_root: PathBuf,
    /// Root directory of the detached checkout.
    pub worktree_git_root: PathBuf,
    /// Ref the checkout started from; empty when backfilled for legacy
    /// marker-only worktrees.
    pub starting_ref: String,
    /// Unix timestamp (seconds) when the worktree was created.
    pub created_at: u64,
    /// Unix timestamp (seconds) of the most recent recorded use.
    pub last_used_at: u64,
}
/// Creates a detached worktree for `source_cwd` and returns the mapped cwd
/// inside the new checkout.
///
/// If marker/metadata setup fails after the checkout exists, the checkout is
/// torn down again so git's worktree registry stays consistent.
pub fn create_codex_managed_worktree(
    source_cwd: &Path,
    codex_home: &Path,
) -> Result<CodexManagedWorktree, GitToolingError> {
    ensure_git_repository(source_cwd)?;
    let source_repo_root = resolve_repository_root(source_cwd)?;
    let source_cwd = source_cwd.to_path_buf();
    // Where the caller's cwd sits relative to the repo root (None at the root).
    let relative_cwd = repo_subdir(&source_repo_root, &source_cwd);
    let starting_ref = starting_ref_for_repo(source_repo_root.as_path())?;
    let worktree_git_root = allocate_worktree_root(codex_home, source_repo_root.as_path())?;
    // Build metadata before the checkout so created_at reflects creation time.
    let metadata = build_worktree_metadata(&source_repo_root, &worktree_git_root, &starting_ref);
    create_worktree_checkout(
        source_repo_root.as_path(),
        &worktree_git_root,
        &starting_ref,
    )?;
    // Marker + sidecar are written after the checkout; roll the checkout back
    // on failure rather than leaving a half-initialized worktree behind.
    let setup_result = setup_worktree_checkout(&worktree_git_root, &metadata);
    let (worktree_git_dir, marker_path, metadata_path) = match setup_result {
        Ok(setup) => setup,
        Err(err) => {
            cleanup_worktree_checkout(source_repo_root.as_path(), &worktree_git_root);
            return Err(err);
        }
    };
    // Preserve the caller's position within the repo inside the new checkout.
    let worktree_workspace_root = match relative_cwd {
        Some(relative) => worktree_git_root.join(relative),
        None => worktree_git_root.clone(),
    };
    Ok(CodexManagedWorktree {
        source_cwd,
        source_repo_root,
        worktree_git_root,
        worktree_git_dir,
        worktree_workspace_root,
        starting_ref,
        marker_path,
        metadata_path,
        metadata,
    })
}
/// Updates `last_used_at` when `cwd` belongs to a Codex-managed worktree.
///
/// Returns `Ok(None)` when `cwd` is not inside a Codex-managed checkout;
/// otherwise returns the refreshed metadata after persisting it.
pub fn touch_codex_managed_worktree_metadata(
    cwd: &Path,
) -> Result<Option<CodexManagedWorktreeMetadata>, GitToolingError> {
    // Not inside a git checkout at all (or git failed): nothing to touch.
    let git_dir = match worktree_git_dir(cwd) {
        Ok(dir) => dir,
        Err(_) => return Ok(None),
    };
    // Only checkouts carrying the Codex marker file are managed by us.
    if !git_dir.join(CODEX_MANAGED_WORKTREE_MARKER_FILE).exists() {
        return Ok(None);
    }
    let sidecar = git_dir.join(CODEX_MANAGED_WORKTREE_METADATA_FILE);
    let mut refreshed = read_or_backfill_worktree_metadata(&sidecar, cwd)?;
    refreshed.last_used_at = unix_timestamp_secs();
    write_worktree_metadata(&sidecar, &refreshed)?;
    Ok(Some(refreshed))
}
/// Best-effort description of what the repo has checked out: the current
/// branch name, else the resolved HEAD, else the literal string "HEAD".
fn starting_ref_for_repo(repo_root: &Path) -> Result<String, GitToolingError> {
    let current_branch = run_git_for_stdout(
        repo_root,
        vec![OsString::from("branch"), OsString::from("--show-current")],
        None,
    )?;
    if current_branch.is_empty() {
        // Detached HEAD (or similar): fall back to the resolved head commit.
        Ok(resolve_head(repo_root)?.unwrap_or_else(|| String::from("HEAD")))
    } else {
        Ok(current_branch)
    }
}
/// Picks an unused `$CODEX_HOME/worktrees/<bucket>/<repo-name>` directory and
/// creates its bucket parent; errors if 64 attempts all collide.
fn allocate_worktree_root(
    codex_home: &Path,
    source_repo_root: &Path,
) -> Result<PathBuf, GitToolingError> {
    let repo_name = match source_repo_root.file_name().and_then(|name| name.to_str()) {
        Some(name) if !name.is_empty() => name,
        _ => "repo",
    };
    let worktrees_root = codex_home.join("worktrees");
    fs::create_dir_all(&worktrees_root)?;
    // Bucket names are pseudo-random; a bounded retry keeps a pathological
    // collision streak from looping forever.
    let mut attempts_left = 64;
    while attempts_left > 0 {
        attempts_left -= 1;
        let candidate = worktrees_root
            .join(next_worktree_bucket(source_repo_root))
            .join(repo_name);
        if candidate.exists() {
            continue;
        }
        if let Some(bucket_dir) = candidate.parent() {
            fs::create_dir_all(bucket_dir)?;
        }
        return Ok(candidate);
    }
    Err(GitToolingError::Io(std::io::Error::new(
        std::io::ErrorKind::AlreadyExists,
        "unable to allocate a unique codex worktree path",
    )))
}
/// Derives a short hex bucket name from the repo path, process id, current
/// time, and a per-process counter so concurrent allocations rarely collide.
fn next_worktree_bucket(source_repo_root: &Path) -> String {
    let mut hasher = DefaultHasher::new();
    source_repo_root.hash(&mut hasher);
    std::process::id().hash(&mut hasher);
    let now_nanos = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap_or_default()
        .as_nanos();
    now_nanos.hash(&mut hasher);
    let sequence = WORKTREE_BUCKET_COUNTER.fetch_add(1, Ordering::Relaxed);
    sequence.hash(&mut hasher);
    // Keep only 16 bits: four hex digits is plenty for a retry-guarded namespace.
    format!("{:04x}", (hasher.finish() & 0xffff) as u16)
}
/// Runs `git worktree add --detach <root> <ref>`; on failure any partially
/// created directory is removed before the error is returned.
fn create_worktree_checkout(
    source_repo_root: &Path,
    worktree_git_root: &Path,
    starting_ref: &str,
) -> Result<(), GitToolingError> {
    let add_args = vec![
        OsString::from("worktree"),
        OsString::from("add"),
        OsString::from("--detach"),
        OsString::from(worktree_git_root.as_os_str()),
        OsString::from(starting_ref),
    ];
    match run_git_for_status(source_repo_root, add_args, None) {
        Ok(_) => Ok(()),
        Err(err) => {
            // Best-effort cleanup of whatever git left behind.
            let _ = fs::remove_dir_all(worktree_git_root);
            Err(err)
        }
    }
}
/// Writes the Codex marker and metadata sidecar into the new checkout's git
/// dir, returning `(git_dir, marker_path, metadata_path)`.
fn setup_worktree_checkout(
    worktree_git_root: &Path,
    metadata: &CodexManagedWorktreeMetadata,
) -> Result<(PathBuf, PathBuf, PathBuf), GitToolingError> {
    let git_dir = worktree_git_dir(worktree_git_root)?;
    let marker = write_codex_managed_marker(&git_dir)?;
    let sidecar = write_worktree_metadata_in_git_dir(&git_dir, metadata)?;
    Ok((git_dir, marker, sidecar))
}
/// Best-effort teardown: unregister the worktree with git, then delete any
/// leftover directory. Failures are deliberately ignored.
fn cleanup_worktree_checkout(source_repo_root: &Path, worktree_git_root: &Path) {
    let removal_args = vec![
        OsString::from("worktree"),
        OsString::from("remove"),
        OsString::from("--force"),
        OsString::from(worktree_git_root.as_os_str()),
    ];
    let _ = run_git_for_status(source_repo_root, removal_args, /*env*/ None);
    let _ = fs::remove_dir_all(worktree_git_root);
}
/// Writes the `codex-managed` marker file into the worktree's git dir and
/// returns its path.
fn write_codex_managed_marker(worktree_git_dir: &Path) -> Result<PathBuf, GitToolingError> {
    let marker_path = worktree_git_dir.join(CODEX_MANAGED_WORKTREE_MARKER_FILE);
    // fs::write is create + write_all in one call; the handle is closed (and
    // any write error surfaced) before this returns.
    fs::write(&marker_path, b"codex-managed\n")?;
    Ok(marker_path)
}
/// Serializes `metadata` into the metadata sidecar inside `worktree_git_dir`
/// and returns the sidecar path.
fn write_worktree_metadata_in_git_dir(
    worktree_git_dir: &Path,
    metadata: &CodexManagedWorktreeMetadata,
) -> Result<PathBuf, GitToolingError> {
    let sidecar = worktree_git_dir.join(CODEX_MANAGED_WORKTREE_METADATA_FILE);
    write_worktree_metadata(sidecar.as_path(), metadata)?;
    Ok(sidecar)
}
/// Persists `metadata` as pretty-printed JSON at `metadata_path`.
fn write_worktree_metadata(
    metadata_path: &Path,
    metadata: &CodexManagedWorktreeMetadata,
) -> Result<(), GitToolingError> {
    fs::write(metadata_path, serde_json::to_vec_pretty(metadata)?)?;
    Ok(())
}
/// Loads the metadata sidecar at `metadata_path`; a missing file is treated as
/// a legacy (marker-only) worktree and its metadata is reconstructed from the
/// checkout itself. Other I/O errors propagate.
pub(crate) fn read_or_backfill_worktree_metadata(
    metadata_path: &Path,
    cwd: &Path,
) -> Result<CodexManagedWorktreeMetadata, GitToolingError> {
    match fs::read(metadata_path) {
        Ok(bytes) => {
            let parsed = serde_json::from_slice(&bytes)?;
            Ok(parsed)
        }
        Err(err) if err.kind() == ErrorKind::NotFound => backfill_worktree_metadata(cwd),
        Err(err) => Err(err.into()),
    }
}
/// Reconstructs metadata for a pre-sidecar (marker-only) worktree checkout.
fn backfill_worktree_metadata(cwd: &Path) -> Result<CodexManagedWorktreeMetadata, GitToolingError> {
    let worktree_git_root = resolve_repository_root(cwd)?;
    let source_repo_root = match resolve_root_git_project_for_trust(worktree_git_root.as_path()) {
        Some(root) => root,
        None => worktree_git_root.clone(),
    };
    // Legacy marker-only worktrees do not record the original checkout ref, so
    // fail closed by leaving `starting_ref` unknown instead of inferring HEAD.
    Ok(build_worktree_metadata(
        &source_repo_root,
        &worktree_git_root,
        "",
    ))
}
/// Builds fresh metadata with both timestamps stamped to the current time.
fn build_worktree_metadata(
    source_repo_root: &Path,
    worktree_git_root: &Path,
    starting_ref: &str,
) -> CodexManagedWorktreeMetadata {
    let now = unix_timestamp_secs();
    CodexManagedWorktreeMetadata {
        version: CODEX_MANAGED_WORKTREE_METADATA_VERSION,
        source_repo_root: source_repo_root.to_owned(),
        worktree_git_root: worktree_git_root.to_owned(),
        starting_ref: starting_ref.to_owned(),
        created_at: now,
        last_used_at: now,
    }
}
/// Current time as whole seconds since the Unix epoch; a clock set before the
/// epoch yields 0 rather than an error.
fn unix_timestamp_secs() -> u64 {
    match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_secs(),
        Err(_) => 0,
    }
}
/// Resolves the (possibly per-worktree) git directory for a checkout,
/// absolutizing git's answer against the checkout root when it is relative.
pub(crate) fn worktree_git_dir(worktree_git_root: &Path) -> Result<PathBuf, GitToolingError> {
    let reported = PathBuf::from(run_git_for_stdout(
        worktree_git_root,
        vec![OsString::from("rev-parse"), OsString::from("--git-dir")],
        None,
    )?);
    // `git rev-parse --git-dir` may print a path relative to where it ran.
    if reported.is_absolute() {
        Ok(reported)
    } else {
        Ok(worktree_git_root.join(reported))
    }
}
#[cfg(test)]
mod tests {
    use super::CODEX_MANAGED_WORKTREE_MARKER_FILE;
    use super::CODEX_MANAGED_WORKTREE_METADATA_FILE;
    use super::CodexManagedWorktree;
    use super::CodexManagedWorktreeMetadata;
    use super::allocate_worktree_root;
    use super::cleanup_worktree_checkout;
    use super::create_codex_managed_worktree;
    use super::create_worktree_checkout;
    use super::starting_ref_for_repo;
    use super::touch_codex_managed_worktree_metadata;
    use crate::GitToolingError;
    #[cfg(unix)]
    use crate::platform::create_symlink;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::path::Path;
    use std::path::PathBuf;
    use std::process::Command;
    use tempfile::tempdir;

    /// Runs `git args...` in `repo_path`, panicking if the command fails.
    fn run_git_in(repo_path: &Path, args: &[&str]) {
        let status = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .status()
            .expect("git command");
        assert!(status.success(), "git command failed: {args:?}");
    }

    /// Runs `git args...` in `repo_path` and returns its stdout as UTF-8.
    fn git_stdout_in(repo_path: &Path, args: &[&str]) -> String {
        let output = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .output()
            .expect("git command");
        assert!(output.status.success(), "git command failed: {args:?}");
        String::from_utf8(output.stdout).expect("git stdout utf8")
    }

    /// Initializes a repo on branch `main` with deterministic config/identity.
    fn init_test_repo(repo_path: &Path) {
        run_git_in(repo_path, &["init", "--initial-branch=main"]);
        run_git_in(repo_path, &["config", "core.autocrlf", "false"]);
        run_git_in(repo_path, &["config", "user.name", "Tester"]);
        run_git_in(repo_path, &["config", "user.email", "test@example.com"]);
    }

    /// Stages everything and commits with `message`.
    fn commit(repo_path: &Path, message: &str) {
        run_git_in(repo_path, &["add", "."]);
        run_git_in(
            repo_path,
            &[
                "-c",
                "user.name=Tester",
                "-c",
                "user.email=test@example.com",
                "commit",
                "-m",
                message,
            ],
        );
    }

    /// Builds a committed repo containing a `nested/path` subdirectory;
    /// returns (tempdir guard, repo root, nested cwd).
    fn create_repo_with_nested_cwd() -> (tempfile::TempDir, PathBuf, PathBuf) {
        let temp = tempdir().expect("tempdir");
        let repo = temp.path().join("repo");
        let nested = repo.join("nested").join("path");
        fs::create_dir_all(&nested).expect("nested dir");
        init_test_repo(&repo);
        fs::write(repo.join("README.md"), "hello\n").expect("write file");
        fs::write(nested.join("marker.txt"), "nested\n").expect("write nested file");
        commit(&repo, "initial");
        (temp, repo, nested)
    }

    /// Asserts the invariants of a freshly created managed worktree: root
    /// mapping, marker/metadata paths, and the persisted metadata contents.
    fn assert_worktree_result(
        result: &CodexManagedWorktree,
        codex_home: &Path,
        repo: &Path,
        nested: &Path,
    ) {
        let expected_repo_root = repo.canonicalize().expect("repo canonicalized");
        assert_eq!(result.source_repo_root, expected_repo_root);
        // The caller's nested cwd must be re-mapped inside the checkout.
        assert_eq!(
            result.worktree_workspace_root,
            result.worktree_git_root.join("nested/path")
        );
        assert_eq!(result.source_cwd, nested);
        assert!(
            result
                .worktree_git_root
                .starts_with(codex_home.join("worktrees"))
        );
        assert!(result.worktree_git_dir.exists());
        assert_eq!(
            result.marker_path,
            result
                .worktree_git_dir
                .join(CODEX_MANAGED_WORKTREE_MARKER_FILE)
        );
        assert_eq!(
            result.metadata_path,
            result
                .worktree_git_dir
                .join(CODEX_MANAGED_WORKTREE_METADATA_FILE)
        );
        assert_eq!(
            result.metadata,
            CodexManagedWorktreeMetadata {
                version: 1,
                source_repo_root: expected_repo_root,
                worktree_git_root: result.worktree_git_root.clone(),
                starting_ref: "main".to_string(),
                created_at: result.metadata.created_at,
                last_used_at: result.metadata.created_at,
            }
        );
    }

    #[test]
    fn create_codex_managed_worktree_preserves_nested_cwd_mapping() -> Result<(), GitToolingError> {
        let (_temp, repo, nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let result = create_codex_managed_worktree(&nested, codex_home.path())?;
        assert_worktree_result(&result, codex_home.path(), &repo, &nested);
        assert!(result.worktree_workspace_root.exists());
        Ok(())
    }

    #[test]
    #[cfg(unix)]
    fn create_codex_managed_worktree_preserves_nested_cwd_mapping_from_symlink()
    -> Result<(), GitToolingError> {
        let (temp, repo, _nested) = create_repo_with_nested_cwd();
        let repo_symlink = temp.path().join("repo-symlink");
        create_symlink(&repo, &repo, &repo_symlink)?;
        let symlinked_nested = repo_symlink.join("nested/path");
        let codex_home = tempdir().expect("codex home");
        let result = create_codex_managed_worktree(&symlinked_nested, codex_home.path())?;
        // The nested mapping must survive entering the repo via a symlink.
        assert_eq!(
            result.worktree_workspace_root,
            result.worktree_git_root.join("nested/path")
        );
        assert_eq!(result.source_cwd, symlinked_nested);
        Ok(())
    }

    #[test]
    fn create_codex_managed_worktree_writes_marker_file() -> Result<(), GitToolingError> {
        let (_temp, repo, nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let result = create_codex_managed_worktree(&nested, codex_home.path())?;
        let marker = fs::read_to_string(&result.marker_path)?;
        assert_eq!(marker, "codex-managed\n");
        assert_eq!(
            result.marker_path,
            result
                .worktree_git_dir
                .join(CODEX_MANAGED_WORKTREE_MARKER_FILE)
        );
        // Creating the worktree must not disturb the source repo.
        assert!(repo.exists());
        Ok(())
    }

    #[test]
    fn create_codex_managed_worktree_writes_and_touches_metadata() -> Result<(), GitToolingError> {
        let (_temp, _repo, nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let result = create_codex_managed_worktree(&nested, codex_home.path())?;
        let mut metadata: CodexManagedWorktreeMetadata =
            serde_json::from_slice(&fs::read(&result.metadata_path)?)?;
        assert_eq!(metadata, result.metadata);
        // Backdate last_used_at on disk, then verify touching refreshes it.
        metadata.last_used_at = 1;
        fs::write(&result.metadata_path, serde_json::to_vec_pretty(&metadata)?)?;
        let touched = touch_codex_managed_worktree_metadata(&result.worktree_workspace_root)?
            .expect("managed worktree metadata");
        assert_eq!(touched.created_at, result.metadata.created_at);
        assert!(touched.last_used_at > 1);
        // The refreshed value must be persisted back to the sidecar.
        assert_eq!(
            serde_json::from_slice::<CodexManagedWorktreeMetadata>(&fs::read(
                &result.metadata_path
            )?)?,
            touched
        );
        Ok(())
    }

    #[test]
    fn cleanup_worktree_checkout_removes_worktree_registration() -> Result<(), GitToolingError> {
        let (_temp, repo, _nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let starting_ref = starting_ref_for_repo(&repo)?;
        let worktree_git_root = allocate_worktree_root(codex_home.path(), &repo)?;
        create_worktree_checkout(&repo, &worktree_git_root, &starting_ref)?;
        assert!(worktree_git_root.exists());
        assert!(
            git_stdout_in(&repo, &["worktree", "list", "--porcelain"])
                .contains(&worktree_git_root.to_string_lossy().to_string())
        );
        cleanup_worktree_checkout(&repo, &worktree_git_root);
        // Both the directory and git's registration entry must be gone.
        assert!(!worktree_git_root.exists());
        assert!(
            !git_stdout_in(&repo, &["worktree", "list", "--porcelain"])
                .contains(&worktree_git_root.to_string_lossy().to_string())
        );
        Ok(())
    }
}

View File

@@ -0,0 +1,396 @@
use std::ffi::OsString;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use crate::GitToolingError;
use crate::operations::run_git_for_status;
use crate::operations::run_git_for_stdout;
use crate::worktree::CODEX_MANAGED_WORKTREE_MARKER_FILE;
use crate::worktree::CODEX_MANAGED_WORKTREE_METADATA_FILE;
use crate::worktree::CodexManagedWorktreeMetadata;
use crate::worktree::read_or_backfill_worktree_metadata;
use crate::worktree::worktree_git_dir;
/// Filters and deletion behavior for pruning Codex-managed worktrees.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CodexWorktreePruneOptions {
    /// Codex home whose `worktrees/` directory is scanned.
    pub codex_home: PathBuf,
    /// When set, only worktrees created from this repository root match.
    pub source_repo_root: Option<PathBuf>,
    /// When set, only worktrees with `created_at` strictly before this Unix
    /// timestamp (seconds) match.
    pub created_before: Option<u64>,
    /// When set, only worktrees with `last_used_at` strictly before this Unix
    /// timestamp (seconds) match.
    pub last_used_before: Option<u64>,
    /// Whether matching worktrees are deleted or only reported.
    pub mode: CodexWorktreePruneMode,
}
/// Controls whether prune removes matching worktrees or only reports them.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CodexWorktreePruneMode {
    /// Report what would be pruned without deleting anything.
    DryRun,
    /// Actually delete matching worktrees.
    Delete,
}
/// One Codex-managed worktree selected by the prune scanner.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CodexWorktreePruneCandidate {
    /// Root directory of the detached checkout.
    pub worktree_git_root: PathBuf,
    /// Path of the metadata sidecar inside the checkout's git dir.
    pub metadata_path: PathBuf,
    /// Metadata loaded from the sidecar (or backfilled for legacy worktrees).
    pub metadata: CodexManagedWorktreeMetadata,
}
/// Result of a prune scan over `$CODEX_HOME/worktrees`.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct CodexWorktreePruneReport {
    /// Worktrees deleted (or, in dry-run mode, that would have been deleted).
    pub pruned: Vec<CodexWorktreePruneCandidate>,
    /// Worktrees that did not match the prune filters.
    pub kept: Vec<CodexWorktreePruneCandidate>,
    /// Matching worktrees that were not safe to delete.
    pub skipped: Vec<CodexWorktreePruneSkipped>,
}
/// A managed worktree was discovered but not deleted.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CodexWorktreePruneSkipped {
    /// The worktree that was left in place.
    pub candidate: CodexWorktreePruneCandidate,
    /// Why deletion was not safe.
    pub reason: CodexWorktreePruneSkipReason,
}
/// Why a managed worktree was not safe to prune.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CodexWorktreePruneSkipReason {
    /// `git status --porcelain` reported modified, staged, or untracked files.
    DirtyWorktree,
    /// HEAD has commits that are not reachable from the recorded starting ref.
    LocalCommits,
    /// Metadata has no starting ref (legacy backfill), so safety is unprovable.
    MissingStartingRef,
}
/// Scans and optionally removes Codex-managed worktrees that match `options`.
///
/// Non-matching worktrees are reported as `kept`; matching-but-unsafe ones as
/// `skipped`; in `DryRun` mode `pruned` lists what *would* be deleted.
pub fn prune_codex_managed_worktrees(
    options: &CodexWorktreePruneOptions,
) -> Result<CodexWorktreePruneReport, GitToolingError> {
    let mut report = CodexWorktreePruneReport::default();
    let candidates = discover_codex_managed_worktrees(options.codex_home.as_path())?;
    for candidate in candidates {
        // Candidates outside the repo/timestamp filters are left untouched.
        if !worktree_matches_prune_filters(&candidate, options)? {
            report.kept.push(candidate);
            continue;
        }
        match classify_prune_candidate(&candidate) {
            // `None` means safe to delete: clean tree and no local commits.
            Ok(None) => {
                if options.mode == CodexWorktreePruneMode::Delete {
                    delete_prune_candidate(&candidate)?;
                }
                report.pruned.push(candidate);
            }
            Ok(Some(reason)) => {
                report
                    .skipped
                    .push(CodexWorktreePruneSkipped { candidate, reason });
            }
            // NOTE(review): any classification failure (e.g. a git error) is
            // reported as `MissingStartingRef`, which can mislabel the skip
            // reason — confirm this conflation is intentional.
            Err(_) => {
                report.skipped.push(CodexWorktreePruneSkipped {
                    candidate,
                    reason: CodexWorktreePruneSkipReason::MissingStartingRef,
                });
            }
        }
    }
    Ok(report)
}
/// Walks `$CODEX_HOME/worktrees/<bucket>/<worktree>` and collects every
/// directory carrying the Codex marker, loading (or backfilling) its metadata.
fn discover_codex_managed_worktrees(
    codex_home: &Path,
) -> Result<Vec<CodexWorktreePruneCandidate>, GitToolingError> {
    let worktrees_root = codex_home.join("worktrees");
    let mut candidates = Vec::new();
    // A missing or unreadable worktrees dir simply means nothing to prune.
    let Ok(bucket_entries) = fs::read_dir(&worktrees_root) else {
        return Ok(candidates);
    };
    for bucket_entry in bucket_entries {
        let bucket_entry = bucket_entry?;
        if !bucket_entry.file_type()?.is_dir() {
            continue;
        }
        for worktree_entry in fs::read_dir(bucket_entry.path())? {
            let worktree_entry = worktree_entry?;
            if !worktree_entry.file_type()?.is_dir() {
                continue;
            }
            let worktree_git_root = worktree_entry.path();
            // Not a usable git checkout (e.g. already torn down): skip quietly.
            let Ok(worktree_git_dir) = worktree_git_dir(&worktree_git_root) else {
                continue;
            };
            // Only directories we explicitly marked are ever considered.
            if !worktree_git_dir
                .join(CODEX_MANAGED_WORKTREE_MARKER_FILE)
                .exists()
            {
                continue;
            }
            let metadata_path = worktree_git_dir.join(CODEX_MANAGED_WORKTREE_METADATA_FILE);
            let metadata = read_or_backfill_worktree_metadata(&metadata_path, &worktree_git_root)?;
            candidates.push(CodexWorktreePruneCandidate {
                worktree_git_root,
                metadata_path,
                metadata,
            });
        }
    }
    Ok(candidates)
}
fn worktree_matches_prune_filters(
candidate: &CodexWorktreePruneCandidate,
options: &CodexWorktreePruneOptions,
) -> Result<bool, GitToolingError> {
if let Some(source_repo_root) = options.source_repo_root.as_deref()
&& candidate.metadata.source_repo_root != source_repo_root.canonicalize()?
{
return Ok(false);
}
if let Some(created_before) = options.created_before
&& candidate.metadata.created_at >= created_before
{
return Ok(false);
}
if let Some(last_used_before) = options.last_used_before
&& candidate.metadata.last_used_at >= last_used_before
{
return Ok(false);
}
Ok(true)
}
/// Decides whether `candidate` is safe to delete.
///
/// Returns `Ok(None)` when deletion is safe, or the reason the worktree must
/// be kept: unknown starting ref, dirty tree, or commits local to the checkout.
fn classify_prune_candidate(
    candidate: &CodexWorktreePruneCandidate,
) -> Result<Option<CodexWorktreePruneSkipReason>, GitToolingError> {
    // Backfilled legacy metadata stores an empty ref; without it we cannot
    // prove the checkout carries no local commits, so fail closed.
    if candidate.metadata.starting_ref.is_empty() {
        return Ok(Some(CodexWorktreePruneSkipReason::MissingStartingRef));
    }
    let status = run_git_for_stdout(
        candidate.worktree_git_root.as_path(),
        vec![
            OsString::from("status"),
            OsString::from("--porcelain"),
            OsString::from("--untracked-files=all"),
        ],
        /*env*/ None,
    )?;
    // Any porcelain output means modified, staged, or untracked files exist.
    if !status.is_empty() {
        return Ok(Some(CodexWorktreePruneSkipReason::DirtyWorktree));
    }
    // A single commit in `<starting_ref>..HEAD` proves local work exists.
    let local_commits = run_git_for_stdout(
        candidate.worktree_git_root.as_path(),
        vec![
            OsString::from("rev-list"),
            OsString::from("--max-count=1"),
            OsString::from(format!("{}..HEAD", candidate.metadata.starting_ref)),
        ],
        /*env*/ None,
    )?;
    if !local_commits.is_empty() {
        return Ok(Some(CodexWorktreePruneSkipReason::LocalCommits));
    }
    Ok(None)
}
/// Removes `candidate` via git, then clears any leftover directory and its
/// (possibly now empty) bucket directory.
fn delete_prune_candidate(candidate: &CodexWorktreePruneCandidate) -> Result<(), GitToolingError> {
    let source_root = candidate.metadata.source_repo_root.as_path();
    run_git_for_status(
        source_root,
        vec![
            OsString::from("worktree"),
            OsString::from("remove"),
            OsString::from("--force"),
            OsString::from(candidate.worktree_git_root.as_os_str()),
        ],
        /*env*/ None,
    )?;
    // Drop any stale administrative entries left behind by the removal.
    run_git_for_status(
        source_root,
        vec![OsString::from("worktree"), OsString::from("prune")],
        /*env*/ None,
    )?;
    // Filesystem cleanup is best effort; git's bookkeeping above is what counts.
    let _ = fs::remove_dir_all(&candidate.worktree_git_root);
    if let Some(bucket_dir) = candidate.worktree_git_root.parent() {
        // remove_dir only succeeds when the bucket is empty — intentional.
        let _ = fs::remove_dir(bucket_dir);
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::CodexWorktreePruneCandidate;
    use super::CodexWorktreePruneMode;
    use super::CodexWorktreePruneOptions;
    use super::CodexWorktreePruneSkipReason;
    use super::prune_codex_managed_worktrees;
    use crate::CodexManagedWorktreeMetadata;
    use crate::create_codex_managed_worktree;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::path::Path;
    use std::path::PathBuf;
    use std::process::Command;
    use tempfile::tempdir;

    #[test]
    fn prune_codex_managed_worktrees_filters_by_repo_and_timestamps_and_skips_dirty_worktrees() {
        let (_first_temp, first_repo, first_nested) = create_repo_with_nested_cwd();
        let (_second_temp, _second_repo, second_nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let old_clean = create_codex_managed_worktree(&first_nested, codex_home.path())
            .expect("old clean worktree");
        let old_dirty = create_codex_managed_worktree(&first_nested, codex_home.path())
            .expect("old dirty worktree");
        let new_clean = create_codex_managed_worktree(&first_nested, codex_home.path())
            .expect("new clean worktree");
        let other_repo =
            create_codex_managed_worktree(&second_nested, codex_home.path()).expect("other repo");
        // Backdate three worktrees below the cutoffs; `new_clean` stays newer.
        rewrite_metadata_timestamp(&old_clean.metadata_path, 10, 20);
        rewrite_metadata_timestamp(&old_dirty.metadata_path, 10, 20);
        rewrite_metadata_timestamp(&new_clean.metadata_path, 100, 200);
        rewrite_metadata_timestamp(&other_repo.metadata_path, 10, 20);
        fs::write(old_dirty.worktree_git_root.join("dirty.txt"), "dirty\n")
            .expect("dirty worktree");
        let report = prune_codex_managed_worktrees(&CodexWorktreePruneOptions {
            codex_home: codex_home.path().to_path_buf(),
            source_repo_root: Some(first_repo),
            created_before: Some(50),
            last_used_before: Some(50),
            mode: CodexWorktreePruneMode::Delete,
        })
        .expect("prune worktrees");
        // Only the old, clean, same-repo worktree is deleted.
        assert_eq!(
            sorted_worktree_paths(&report.pruned),
            vec![old_clean.worktree_git_root.clone()]
        );
        // The dirty worktree matched the filters but is skipped as unsafe.
        assert_eq!(
            report
                .skipped
                .iter()
                .map(|skipped| (skipped.candidate.worktree_git_root.clone(), skipped.reason))
                .collect::<Vec<_>>(),
            vec![(
                old_dirty.worktree_git_root.clone(),
                CodexWorktreePruneSkipReason::DirtyWorktree
            )]
        );
        // Too-new and other-repo worktrees fall outside the filters entirely.
        assert_eq!(
            sorted_worktree_paths(&report.kept),
            sorted_paths(vec![
                new_clean.worktree_git_root.clone(),
                other_repo.worktree_git_root.clone()
            ])
        );
        assert!(!old_clean.worktree_git_root.exists());
        assert!(old_dirty.worktree_git_root.exists());
        assert!(new_clean.worktree_git_root.exists());
        assert!(other_repo.worktree_git_root.exists());
    }

    #[test]
    fn prune_codex_managed_worktrees_skips_marker_only_legacy_worktrees() {
        let (_temp, _repo, nested) = create_repo_with_nested_cwd();
        let codex_home = tempdir().expect("codex home");
        let legacy =
            create_codex_managed_worktree(&nested, codex_home.path()).expect("legacy worktree");
        // Simulate a pre-metadata worktree by deleting its sidecar.
        fs::remove_file(&legacy.metadata_path).expect("remove metadata sidecar");
        let report = prune_codex_managed_worktrees(&CodexWorktreePruneOptions {
            codex_home: codex_home.path().to_path_buf(),
            source_repo_root: None,
            created_before: None,
            last_used_before: None,
            mode: CodexWorktreePruneMode::Delete,
        })
        .expect("prune worktrees");
        assert_eq!(report.pruned, Vec::new());
        assert_eq!(report.kept, Vec::new());
        assert_eq!(report.skipped.len(), 1);
        assert_eq!(
            report.skipped[0].candidate.worktree_git_root,
            legacy.worktree_git_root
        );
        assert_eq!(
            report.skipped[0].reason,
            CodexWorktreePruneSkipReason::MissingStartingRef
        );
        assert!(legacy.worktree_git_root.exists());
    }

    /// Builds a committed repo containing a `nested/path` subdirectory.
    fn create_repo_with_nested_cwd() -> (tempfile::TempDir, PathBuf, PathBuf) {
        let temp = tempdir().expect("tempdir");
        let repo = temp.path().join("repo");
        let nested = repo.join("nested").join("path");
        fs::create_dir_all(&nested).expect("nested dir");
        init_test_repo(&repo);
        fs::write(repo.join("README.md"), "hello\n").expect("write file");
        fs::write(nested.join("marker.txt"), "nested\n").expect("write nested file");
        commit(&repo, "initial");
        (temp, repo, nested)
    }

    /// Initializes a repo on branch `main` with deterministic config/identity.
    fn init_test_repo(repo_path: &Path) {
        run_git_in(repo_path, &["init", "--initial-branch=main"]);
        run_git_in(repo_path, &["config", "core.autocrlf", "false"]);
        run_git_in(repo_path, &["config", "user.name", "Tester"]);
        run_git_in(repo_path, &["config", "user.email", "test@example.com"]);
    }

    /// Stages everything and commits with `message`.
    fn commit(repo_path: &Path, message: &str) {
        run_git_in(repo_path, &["add", "."]);
        run_git_in(
            repo_path,
            &[
                "-c",
                "user.name=Tester",
                "-c",
                "user.email=test@example.com",
                "commit",
                "-m",
                message,
            ],
        );
    }

    /// Runs `git args...` in `repo_path`, panicking if the command fails.
    fn run_git_in(repo_path: &Path, args: &[&str]) {
        let status = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .status()
            .expect("git command");
        assert!(status.success(), "git command failed: {args:?}");
    }

    /// Overwrites the created/last-used timestamps in the metadata sidecar.
    fn rewrite_metadata_timestamp(metadata_path: &Path, created_at: u64, last_used_at: u64) {
        let mut metadata: CodexManagedWorktreeMetadata =
            serde_json::from_slice(&fs::read(metadata_path).expect("read metadata"))
                .expect("parse metadata");
        metadata.created_at = created_at;
        metadata.last_used_at = last_used_at;
        fs::write(
            metadata_path,
            serde_json::to_vec_pretty(&metadata).expect("serialize metadata"),
        )
        .expect("write metadata");
    }

    fn sorted_worktree_paths(candidates: &[CodexWorktreePruneCandidate]) -> Vec<PathBuf> {
        sorted_paths(
            candidates
                .iter()
                .map(|candidate| candidate.worktree_git_root.clone())
                .collect(),
        )
    }

    fn sorted_paths(mut paths: Vec<PathBuf>) -> Vec<PathBuf> {
        paths.sort();
        paths
    }
}

View File

@@ -98,6 +98,10 @@ pub struct Cli {
#[clap(long = "cd", short = 'C', value_name = "DIR")]
pub cwd: Option<PathBuf>,
/// Create a new local git worktree and start the session from that checkout.
#[arg(long = "worktree", default_value_t = false)]
pub worktree: bool,
/// Enable live web search. When enabled, the native Responses `web_search` tool is available to the model (no percall approval).
#[arg(long = "search", default_value_t = false)]
pub web_search: bool,
@@ -117,3 +121,18 @@ pub struct Cli {
#[clap(skip)]
pub config_overrides: CliConfigOverrides,
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    /// `--worktree` must parse alongside `--cd` and a positional prompt.
    #[test]
    fn parse_worktree_flag() {
        let cli = Cli::parse_from(["codex-tui", "--worktree", "--cd", "/tmp/repo", "hello"]);
        assert!(cli.worktree);
        assert_eq!(cli.cwd, Some(PathBuf::from("/tmp/repo")));
        assert_eq!(cli.prompt.as_deref(), Some("hello"));
    }
}

View File

@@ -40,6 +40,10 @@ use codex_core::path_utils;
use codex_core::read_session_meta_line;
use codex_core::state_db::get_state_db;
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
use codex_git_utils::CodexManagedWorktree;
use codex_git_utils::GitToolingError;
use codex_git_utils::create_codex_managed_worktree;
use codex_git_utils::touch_codex_managed_worktree_metadata;
use codex_protocol::ThreadId;
use codex_protocol::config_types::AltScreenMode;
use codex_protocol::config_types::SandboxMode;
@@ -219,6 +223,14 @@ use crate::onboarding::onboarding_screen::run_onboarding_app;
use crate::tui::Tui;
pub use cli::Cli;
use codex_arg0::Arg0DispatchPaths;
/// Result of startup cwd resolution: the cwd handed to the session (if any)
/// and the absolute path used for config loading.
#[derive(Debug, PartialEq, Eq)]
struct StartupCwd {
    // `None` means no explicit cwd was requested or created.
    cwd: Option<PathBuf>,
    config_cwd: AbsolutePathBuf,
}
/// Injection point so tests can substitute a fake worktree creator.
type WorktreeCreator = fn(&Path, &Path) -> Result<CodexManagedWorktree, GitToolingError>;
pub use markdown_render::render_markdown_text;
pub use public_widgets::composer_input::ComposerAction;
pub use public_widgets::composer_input::ComposerInput;
@@ -584,6 +596,36 @@ fn latest_session_lookup_params(
}
}
/// Determines the session cwd and config cwd, optionally routing the session
/// through a freshly created Codex-managed worktree.
fn resolve_startup_cwd(
    requested_cwd: Option<PathBuf>,
    codex_home: &Path,
    worktree_creator: Option<WorktreeCreator>,
) -> std::io::Result<StartupCwd> {
    let config_cwd = if let Some(path) = requested_cwd.as_deref() {
        AbsolutePathBuf::from_absolute_path(path.canonicalize()?)?
    } else {
        AbsolutePathBuf::current_dir()?
    };
    match worktree_creator {
        None => {
            // Refresh last-used bookkeeping if we already sit inside a managed
            // worktree; failures are logged, never fatal.
            if let Err(err) = touch_codex_managed_worktree_metadata(config_cwd.as_path()) {
                warn!(?err, "failed to refresh Codex-managed worktree metadata");
            }
            Ok(StartupCwd {
                cwd: requested_cwd,
                config_cwd,
            })
        }
        Some(create_worktree) => {
            let worktree = create_worktree(config_cwd.as_path(), codex_home)
                .map_err(|err| std::io::Error::other(format!("Error creating worktree: {err}")))?;
            // Both cwds point into the new checkout from here on.
            let workspace_root = worktree.worktree_workspace_root;
            let config_cwd = AbsolutePathBuf::from_absolute_path(&workspace_root)?;
            Ok(StartupCwd {
                cwd: Some(workspace_root),
                config_cwd,
            })
        }
    }
}
pub async fn run_main(
mut cli: Cli,
arg0_paths: Arg0DispatchPaths,
@@ -602,6 +644,11 @@ pub async fn run_main(
auth_token: remote_auth_token.clone(),
})
.unwrap_or(AppServerTarget::Embedded);
if cli.worktree && matches!(app_server_target, AppServerTarget::Remote { .. }) {
return Err(std::io::Error::other(
"--worktree is only supported for local Codex sessions",
));
}
let (sandbox_mode, approval_policy) = if cli.full_auto {
(
Some(SandboxMode::WorkspaceWrite),
@@ -651,11 +698,11 @@ pub async fn run_main(
}
};
let cwd = cli.cwd.clone();
let config_cwd = match cwd.as_deref() {
Some(path) => AbsolutePathBuf::from_absolute_path(path.canonicalize()?)?,
None => AbsolutePathBuf::current_dir()?,
};
let StartupCwd { cwd, config_cwd } = resolve_startup_cwd(
cli.cwd.clone(),
codex_home.as_path(),
cli.worktree.then_some(create_codex_managed_worktree),
)?;
#[allow(clippy::print_stderr)]
let config_toml = match load_config_as_toml_with_cli_overrides(
@@ -1649,6 +1696,7 @@ mod tests {
use codex_core::config::ConfigOverrides;
use codex_core::config::ProjectConfig;
use codex_features::Feature;
use codex_git_utils::CodexManagedWorktreeMetadata;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
@@ -1699,6 +1747,77 @@ mod tests {
);
}
#[test]
fn resolve_startup_cwd_uses_requested_cwd_without_worktree() {
    let codex_home = TempDir::new().expect("create temp codex home");
    let cwd = TempDir::new().expect("create temp cwd");
    // With no worktree creator, the requested cwd passes through unchanged
    // and config_cwd is its canonicalized form.
    let startup_cwd = resolve_startup_cwd(
        Some(cwd.path().to_path_buf()),
        codex_home.path(),
        /*worktree_creator*/ None,
    )
    .expect("resolve startup cwd");
    assert_eq!(
        startup_cwd,
        StartupCwd {
            cwd: Some(cwd.path().to_path_buf()),
            config_cwd: AbsolutePathBuf::from_absolute_path(
                cwd.path().canonicalize().expect("canonicalize cwd")
            )
            .expect("absolute cwd"),
        }
    );
}
#[test]
fn resolve_startup_cwd_uses_worktree_workspace_root_when_enabled() {
    let codex_home = TempDir::new().expect("create temp codex home");
    let cwd = TempDir::new().expect("create temp cwd");
    // Stub creator returns a fixed fake worktree; no git is involved.
    let startup_cwd = resolve_startup_cwd(
        Some(cwd.path().to_path_buf()),
        codex_home.path(),
        Some(|source_cwd, codex_home| {
            let worktree_git_root = codex_home.join("worktrees/fake/project");
            let worktree_git_dir = worktree_git_root.join(".git");
            let marker_path = worktree_git_dir.join("codex-managed");
            let metadata_path = worktree_git_dir.join("codex-worktree.json");
            Ok(CodexManagedWorktree {
                source_cwd: source_cwd.to_path_buf(),
                source_repo_root: source_cwd.to_path_buf(),
                worktree_git_root: worktree_git_root.clone(),
                worktree_git_dir,
                worktree_workspace_root: worktree_git_root.join("nested/path"),
                starting_ref: "main".to_string(),
                marker_path,
                metadata_path,
                metadata: CodexManagedWorktreeMetadata {
                    version: 1,
                    source_repo_root: source_cwd.to_path_buf(),
                    worktree_git_root,
                    starting_ref: "main".to_string(),
                    created_at: 1,
                    last_used_at: 1,
                },
            })
        }),
    )
    .expect("resolve startup cwd");
    // Both cwd and config_cwd must point at the mapped workspace root.
    let expected_worktree_workspace_root =
        codex_home.path().join("worktrees/fake/project/nested/path");
    assert_eq!(
        startup_cwd,
        StartupCwd {
            cwd: Some(expected_worktree_workspace_root.clone()),
            config_cwd: AbsolutePathBuf::from_absolute_path(&expected_worktree_workspace_root)
                .expect("absolute worktree cwd"),
        }
    );
}
#[test]
fn normalize_remote_addr_accepts_secure_websocket_url() {
assert_eq!(