Windows Sandbox: treat <workspace_root>/.git as read-only in workspace-write mode (#7142)

This functionality is
[supported](https://github.com/openai/codex/blob/main/codex-rs/protocol/src/protocol.rs#L421-L422)
in the macOS sandbox as well. Adding it to Windows for parity.

This PR also changes `rust-ci.yaml` to work around a GitHub `hashFiles`
issue. Others have done something
[similar](https://github.com/openai/superassistant/pull/32156) today.
This commit is contained in:
iceweasel-oai
2025-11-24 12:41:21 -08:00
committed by GitHub
parent ec49b56874
commit 99e5340c54
5 changed files with 160 additions and 49 deletions

View File

@@ -153,6 +153,15 @@ jobs:
targets: ${{ matrix.target }}
components: clippy
- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
shell: bash
run: |
set -euo pipefail
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
# Explicit cache restore: split cargo home vs target, so we can
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
@@ -164,7 +173,7 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -201,9 +210,9 @@ jobs:
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
@@ -278,7 +287,7 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
@@ -286,7 +295,7 @@ jobs:
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
@@ -364,6 +373,15 @@ jobs:
with:
targets: ${{ matrix.target }}
- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
shell: bash
run: |
set -euo pipefail
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@v4
@@ -373,7 +391,7 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -409,9 +427,9 @@ jobs:
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -436,7 +454,7 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
@@ -444,7 +462,7 @@ jobs:
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'

1
codex-rs/Cargo.lock generated
View File

@@ -1646,6 +1646,7 @@ dependencies = [
"rand 0.8.5",
"serde",
"serde_json",
"tempfile",
"windows-sys 0.52.0",
]

View File

@@ -46,3 +46,5 @@ features = [
"Win32_Security_Authentication_Identity",
]
version = "0.52"
[dev-dependencies]
tempfile = "3"

View File

@@ -1,21 +1,33 @@
use crate::policy::SandboxPolicy;
use dunce::canonicalize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
/// Write-access rules computed for the Windows sandbox: `allow` holds
/// paths granted write access, `deny` holds paths explicitly denied
/// write access even though they sit under an allowed root (e.g. a
/// `.git` directory inside a writable workspace root).
#[derive(Debug, Default, PartialEq, Eq)]
pub struct AllowDenyPaths {
/// Paths to receive an allow-write ACE.
pub allow: HashSet<PathBuf>,
/// Paths to receive a deny-write ACE; takes precedence over `allow`
/// for nested paths (deny ACEs are evaluated first on Windows).
pub deny: HashSet<PathBuf>,
}
pub fn compute_allow_paths(
policy: &SandboxPolicy,
policy_cwd: &Path,
command_cwd: &Path,
env_map: &HashMap<String, String>,
) -> Vec<PathBuf> {
let mut allow: Vec<PathBuf> = Vec::new();
let mut seen = std::collections::HashSet::new();
) -> AllowDenyPaths {
let mut allow: HashSet<PathBuf> = HashSet::new();
let mut deny: HashSet<PathBuf> = HashSet::new();
let mut add_path = |p: PathBuf| {
if seen.insert(p.to_string_lossy().to_string()) && p.exists() {
allow.push(p);
let mut add_allow_path = |p: PathBuf| {
if p.exists() {
allow.insert(p);
}
};
let mut add_deny_path = |p: PathBuf| {
if p.exists() {
deny.insert(p);
}
};
let include_tmp_env_vars = matches!(
@@ -27,16 +39,40 @@ pub fn compute_allow_paths(
);
if matches!(policy, SandboxPolicy::WorkspaceWrite { .. }) {
add_path(command_cwd.to_path_buf());
if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = policy {
for root in writable_roots {
let add_writable_root =
|root: PathBuf,
policy_cwd: &Path,
add_allow: &mut dyn FnMut(PathBuf),
add_deny: &mut dyn FnMut(PathBuf)| {
let candidate = if root.is_absolute() {
root.clone()
root
} else {
policy_cwd.join(root)
};
let canonical = canonicalize(&candidate).unwrap_or(candidate);
add_path(canonical);
add_allow(canonical.clone());
let git_dir = canonical.join(".git");
if git_dir.is_dir() {
add_deny(git_dir);
}
};
add_writable_root(
command_cwd.to_path_buf(),
policy_cwd,
&mut add_allow_path,
&mut add_deny_path,
);
if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = policy {
for root in writable_roots {
add_writable_root(
root.clone(),
policy_cwd,
&mut add_allow_path,
&mut add_deny_path,
);
}
}
}
@@ -44,14 +80,14 @@ pub fn compute_allow_paths(
for key in ["TEMP", "TMP"] {
if let Some(v) = env_map.get(key) {
let abs = PathBuf::from(v);
add_path(abs);
add_allow_path(abs);
} else if let Ok(v) = std::env::var(key) {
let abs = PathBuf::from(v);
add_path(abs);
add_allow_path(abs);
}
}
}
allow
AllowDenyPaths { allow, deny }
}
#[cfg(test)]
@@ -59,13 +95,16 @@ mod tests {
use super::compute_allow_paths;
use codex_protocol::protocol::SandboxPolicy;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;
use tempfile::TempDir;
#[test]
fn includes_additional_writable_roots() {
let command_cwd = PathBuf::from(r"C:\Workspace");
let extra_root = PathBuf::from(r"C:\AdditionalWritableRoot");
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let extra_root = tmp.path().join("extra");
let _ = fs::create_dir_all(&command_cwd);
let _ = fs::create_dir_all(&extra_root);
@@ -76,22 +115,22 @@ mod tests {
exclude_slash_tmp: false,
};
let allow = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
assert!(
allow.iter().any(|p| p == &command_cwd),
"command cwd should be allowed"
);
assert!(
allow.iter().any(|p| p == &extra_root),
"additional writable root should be allowed"
);
assert!(paths
.allow
.contains(&dunce::canonicalize(&command_cwd).unwrap()));
assert!(paths
.allow
.contains(&dunce::canonicalize(&extra_root).unwrap()));
assert!(paths.deny.is_empty(), "no deny paths expected");
}
#[test]
fn excludes_tmp_env_vars_when_requested() {
let command_cwd = PathBuf::from(r"C:\Workspace");
let temp_dir = PathBuf::from(r"C:\TempDir");
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let temp_dir = tmp.path().join("temp");
let _ = fs::create_dir_all(&command_cwd);
let _ = fs::create_dir_all(&temp_dir);
@@ -104,15 +143,58 @@ mod tests {
let mut env_map = HashMap::new();
env_map.insert("TEMP".into(), temp_dir.to_string_lossy().to_string());
let allow = compute_allow_paths(&policy, &command_cwd, &command_cwd, &env_map);
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &env_map);
assert!(
allow.iter().any(|p| p == &command_cwd),
"command cwd should be allowed"
);
assert!(
!allow.iter().any(|p| p == &temp_dir),
"TEMP should be excluded when exclude_tmpdir_env_var is true"
);
assert!(paths
.allow
.contains(&dunce::canonicalize(&command_cwd).unwrap()));
assert!(!paths
.allow
.contains(&dunce::canonicalize(&temp_dir).unwrap()));
assert!(paths.deny.is_empty(), "no deny paths expected");
}
#[test]
fn denies_git_dir_inside_writable_root() {
// An existing `.git` directory under the workspace root must end up
// in `deny`, while the workspace root itself is allowed.
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let git_dir = command_cwd.join(".git");
let _ = fs::create_dir_all(&git_dir);
let policy = SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
network_access: false,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: false,
};
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
// compute_allow_paths canonicalizes roots before storing them, so the
// expected values are canonicalized the same way (dunce avoids the
// `\\?\` verbatim prefix on Windows).
let expected_allow: HashSet<PathBuf> = [dunce::canonicalize(&command_cwd).unwrap()]
.into_iter()
.collect();
let expected_deny: HashSet<PathBuf> = [dunce::canonicalize(&git_dir).unwrap()]
.into_iter()
.collect();
assert_eq!(expected_allow, paths.allow);
assert_eq!(expected_deny, paths.deny);
}
#[test]
fn skips_git_dir_when_missing() {
// Without a `.git` directory on disk, only the workspace root is
// allowed and no deny entry is produced (the `.git` check requires
// the directory to actually exist).
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let _ = fs::create_dir_all(&command_cwd);
let policy = SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
network_access: false,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: false,
};
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
// Exactly one allow entry: the canonicalized workspace root.
assert_eq!(paths.allow.len(), 1);
assert!(paths.deny.is_empty(), "no deny when .git is absent");
}
}

View File

@@ -23,9 +23,11 @@ pub use stub::CaptureResult;
#[cfg(target_os = "windows")]
mod windows_impl {
use super::acl::add_allow_ace;
use super::acl::add_deny_write_ace;
use super::acl::allow_null_device;
use super::acl::revoke_ace;
use super::allow::compute_allow_paths;
use super::allow::AllowDenyPaths;
use super::cap::cap_sid_file;
use super::cap::load_or_create_cap_sids;
use super::env::apply_no_network_to_env;
@@ -195,8 +197,6 @@ mod windows_impl {
ensure_codex_home_exists(codex_home)?;
let current_dir = cwd.to_path_buf();
// for now, don't fail if we detect world-writable directories
// audit::audit_everyone_writable(&current_dir, &env_map)?;
let logs_base_dir = Some(codex_home);
log_start(&command, logs_base_dir);
let cap_sid_path = cap_sid_file(codex_home);
@@ -238,7 +238,8 @@ mod windows_impl {
}
let persist_aces = is_workspace_write;
let allow = compute_allow_paths(&policy, sandbox_policy_cwd, &current_dir, &env_map);
let AllowDenyPaths { allow, deny } =
compute_allow_paths(&policy, sandbox_policy_cwd, &current_dir, &env_map);
let mut guards: Vec<(PathBuf, *mut c_void)> = Vec::new();
unsafe {
for p in &allow {
@@ -254,6 +255,13 @@ mod windows_impl {
}
}
}
for p in &deny {
if let Ok(added) = add_deny_write_ace(p, psid_to_use) {
if added && !persist_aces {
guards.push((p.clone(), psid_to_use));
}
}
}
allow_null_device(psid_to_use);
}