Mirror of https://github.com/openai/codex.git
Synced 2026-02-02 06:57:03 +00:00

Compare commits: 2 commits, fix-cmd-ex...system-dat

| Author | SHA1 | Date |
|---|---|---|
| | 34809c9880 | |
| | 325e35388c | |

4 .github/workflows/issue-deduplicator.yml (vendored)
@@ -16,7 +16,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Prepare Codex inputs
env:
@@ -87,7 +87,7 @@ jobs:
issues: write
steps:
- name: Comment on issue
uses: actions/github-script@v8
uses: actions/github-script@v7
env:
CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
with:
2 .github/workflows/issue-labeler.yml (vendored)

@@ -16,7 +16,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- id: codex
uses: openai/codex-action@main
@@ -33,7 +33,7 @@ Then simply run `codex` to get started:
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).
If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-update-codex-isnt-upgrading-me).

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -79,7 +79,7 @@ Codex CLI supports a rich set of configuration options, with preferences stored
- [Example prompts](./docs/getting-started.md#example-prompts)
- [Custom prompts](./docs/prompts.md)
- [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [**Configuration**](./docs/config.md)
- [Configuration](./docs/config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Authentication**](./docs/authentication.md)
- [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
@@ -1,5 +0,0 @@
[target.'cfg(all(windows, target_env = "msvc"))']
rustflags = ["-C", "link-arg=/STACK:8388608"]

[target.'cfg(all(windows, target_env = "gnu"))']
rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
95 codex-rs/Cargo.lock (generated)

@@ -172,9 +172,9 @@ dependencies = [

[[package]]
name = "anyhow"
version = "1.0.100"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"

[[package]]
name = "app_test_support"
@@ -891,7 +891,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"similar",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tree-sitter",
|
||||
"tree-sitter-bash",
|
||||
]
|
||||
@@ -983,7 +983,6 @@ dependencies = [
|
||||
"codex-rmcp-client",
|
||||
"codex-stdio-to-uds",
|
||||
"codex-tui",
|
||||
"codex-windows-sandbox",
|
||||
"ctor 0.5.0",
|
||||
"owo-colors",
|
||||
"predicates",
|
||||
@@ -1032,7 +1031,7 @@ dependencies = [
|
||||
"diffy",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1073,7 +1072,6 @@ dependencies = [
|
||||
"codex-utils-readiness",
|
||||
"codex-utils-string",
|
||||
"codex-utils-tokenizer",
|
||||
"codex-windows-sandbox",
|
||||
"core-foundation 0.9.4",
|
||||
"core_test_support",
|
||||
"dirs",
|
||||
@@ -1084,7 +1082,7 @@ dependencies = [
|
||||
"futures",
|
||||
"http",
|
||||
"image",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"keyring",
|
||||
"landlock",
|
||||
"libc",
|
||||
@@ -1108,7 +1106,7 @@ dependencies = [
|
||||
"strum_macros 0.27.2",
|
||||
"tempfile",
|
||||
"test-log",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-test",
|
||||
@@ -1214,7 +1212,7 @@ dependencies = [
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"ts-rs",
|
||||
"walkdir",
|
||||
]
|
||||
@@ -1506,7 +1504,7 @@ dependencies = [
|
||||
"codex-utils-cache",
|
||||
"image",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
@@ -1534,7 +1532,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"assert_matches",
|
||||
"async-trait",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"time",
|
||||
"tokio",
|
||||
]
|
||||
@@ -1549,22 +1547,10 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"pretty_assertions",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tiktoken-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-windows-sandbox"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"dirs-next",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color-eyre"
|
||||
version = "0.6.5"
|
||||
@@ -2722,7 +2708,7 @@ dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"http",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"slab",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
@@ -2766,12 +2752,6 @@ dependencies = [
|
||||
"foldhash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
@@ -3207,14 +3187,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.12.0"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
|
||||
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.16.0",
|
||||
"hashbrown 0.15.4",
|
||||
"serde",
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3502,7 +3481,7 @@ checksum = "b3d2ef408b88e913bfc6594f5e693d57676f6463ded7d8bf994175364320c706"
|
||||
dependencies = [
|
||||
"enumflags2",
|
||||
"libc",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4182,7 +4161,7 @@ dependencies = [
|
||||
"futures-sink",
|
||||
"js-sys",
|
||||
"pin-project-lite",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -4225,7 +4204,7 @@ dependencies = [
|
||||
"prost",
|
||||
"reqwest",
|
||||
"serde_json",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
"tonic",
|
||||
"tracing",
|
||||
@@ -4265,7 +4244,7 @@ dependencies = [
|
||||
"percent-encoding",
|
||||
"rand 0.9.2",
|
||||
"serde_json",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
]
|
||||
@@ -4376,7 +4355,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
|
||||
dependencies = [
|
||||
"fixedbitset",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4444,7 +4423,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"quick-xml",
|
||||
"serde",
|
||||
"time",
|
||||
@@ -4610,7 +4589,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1"
|
||||
dependencies = [
|
||||
"futures",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"nix 0.30.1",
|
||||
"tokio",
|
||||
"tracing",
|
||||
@@ -4697,7 +4676,7 @@ dependencies = [
|
||||
"rustc-hash 2.1.1",
|
||||
"rustls",
|
||||
"socket2 0.6.0",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"web-time",
|
||||
@@ -4718,7 +4697,7 @@ dependencies = [
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
"slab",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tinyvec",
|
||||
"tracing",
|
||||
"web-time",
|
||||
@@ -4879,7 +4858,7 @@ checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
|
||||
dependencies = [
|
||||
"getrandom 0.2.16",
|
||||
"libredox",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5030,7 +5009,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sse-stream",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
@@ -5555,7 +5534,7 @@ version = "1.0.145"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
|
||||
dependencies = [
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"itoa",
|
||||
"memchr",
|
||||
"ryu",
|
||||
@@ -5616,7 +5595,7 @@ dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"indexmap 1.9.3",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"schemars 0.9.0",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
@@ -6193,11 +6172,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "2.0.17"
|
||||
version = "2.0.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
|
||||
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
|
||||
dependencies = [
|
||||
"thiserror-impl 2.0.17",
|
||||
"thiserror-impl 2.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6213,9 +6192,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "2.0.17"
|
||||
version = "2.0.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
|
||||
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -6434,7 +6413,7 @@ version = "0.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8"
|
||||
dependencies = [
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
@@ -6458,7 +6437,7 @@ version = "0.23.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7211ff1b8f0d3adae1663b7da9ffe396eabe1ca25f0b0bee42b0da29a9ddce93"
|
||||
dependencies = [
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"toml_datetime",
|
||||
"toml_parser",
|
||||
"toml_writer",
|
||||
@@ -6517,7 +6496,7 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"indexmap 2.12.0",
|
||||
"indexmap 2.10.0",
|
||||
"pin-project-lite",
|
||||
"slab",
|
||||
"sync_wrapper",
|
||||
@@ -6695,7 +6674,7 @@ checksum = "adc5f880ad8d8f94e88cb81c3557024cf1a8b75e3b504c50481ed4f5a6006ff3"
|
||||
dependencies = [
|
||||
"regex",
|
||||
"streaming-iterator",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
@@ -6718,7 +6697,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ef1b7a6d914a34127ed8e1fa927eb7088903787bcded4fa3eef8f85ee1568be"
|
||||
dependencies = [
|
||||
"serde_json",
|
||||
"thiserror 2.0.17",
|
||||
"thiserror 2.0.16",
|
||||
"ts-rs-macros",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -87,7 +87,6 @@ codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
codex-utils-tokenizer = { path = "utils/tokenizer" }
codex-windows-sandbox = { path = "windows-sandbox" }
core_test_support = { path = "core/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }
@@ -128,7 +127,7 @@ icu_provider = { version = "2.1", features = ["sync"] }
icu_locale_core = "2.1"
ignore = "0.4.23"
image = { version = "^0.25.8", default-features = false }
indexmap = "2.12.0"
indexmap = "2.6.0"
insta = "1.43.2"
itertools = "0.14.0"
keyring = "3.6"
@@ -182,7 +181,7 @@ sys-locale = "0.3.2"
tempfile = "3.23.0"
test-log = "0.2.18"
textwrap = "0.16.2"
thiserror = "2.0.17"
thiserror = "2.0.16"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
@@ -211,7 +210,6 @@ walkdir = "2.5.0"
webbrowser = "1.0"
which = "6"
wildmatch = "2.5.0"

wiremock = "0.6"
zeroize = "1.8.1"
@@ -63,9 +63,6 @@ codex sandbox macos [--full-auto] [COMMAND]...

# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Windows
codex sandbox windows [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...
@@ -6,6 +6,4 @@ pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
pub use protocol::*;

1289 codex-rs/app-server-protocol/src/protocol.rs (normal file)
File diff suppressed because it is too large. Load Diff
@@ -1,685 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;

use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use crate::protocol::v1;
use crate::protocol::v2;
use codex_protocol::ConversationId;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxCommandAssessment;
use paste::paste;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[ts(type = "string")]
pub struct GitSha(pub String);

impl GitSha {
    pub fn new(sha: &str) -> Self {
        Self(sha.to_string())
    }
}
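For context (not part of the diff): because `GitSha` is a newtype over `String` and is exported to TypeScript as a plain `string`, it serializes as a bare JSON string. A minimal sketch in the style of the unit tests further down; the SHA literal is an arbitrary example value.

```rust
#[test]
fn git_sha_serializes_as_plain_string() {
    // GitSha is a newtype over String, so serde emits just the inner value.
    let sha = GitSha::new("34809c9880");
    assert_eq!(
        serde_json::json!("34809c9880"),
        serde_json::to_value(&sha).expect("serialize GitSha"),
    );
}
```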

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
/// client can send to the server. Each variant has associated `params` and
|
||||
/// `response` types. Also generates a `export_client_responses()` function to
|
||||
/// export all response types to TypeScript.
|
||||
macro_rules! client_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident {
|
||||
params: $(#[$params_meta:meta])* $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
$(#[$params_meta])*
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<()> {
|
||||
$(
|
||||
crate::export::write_json_schema::<$response>(out_dir, stringify!($response))?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
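For context (not part of the diff): a hand-expanded sketch of roughly what `client_request_definitions!` generates for a single entry such as the `ListModels` request declared in the invocation below. This only illustrates the shape of the generated enum, not the exact macro output.

```rust
// Rough hand expansion for one variant (illustrative only):
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "method", rename_all = "camelCase")]
pub enum ClientRequest {
    #[serde(rename = "model/list")]
    ListModels {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: v2::ListModelsParams,
    },
    // ... one variant per request listed in the macro invocation
}
```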
|
||||
|
||||
client_request_definitions! {
|
||||
/// NEW APIs
|
||||
#[serde(rename = "model/list")]
|
||||
#[ts(rename = "model/list")]
|
||||
ListModels {
|
||||
params: v2::ListModelsParams,
|
||||
response: v2::ListModelsResponse,
|
||||
},
|
||||
|
||||
#[serde(rename = "account/login")]
|
||||
#[ts(rename = "account/login")]
|
||||
LoginAccount {
|
||||
params: v2::LoginAccountParams,
|
||||
response: v2::LoginAccountResponse,
|
||||
},
|
||||
|
||||
#[serde(rename = "account/logout")]
|
||||
#[ts(rename = "account/logout")]
|
||||
LogoutAccount {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::LogoutAccountResponse,
|
||||
},
|
||||
|
||||
#[serde(rename = "account/rateLimits/read")]
|
||||
#[ts(rename = "account/rateLimits/read")]
|
||||
GetAccountRateLimits {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::GetAccountRateLimitsResponse,
|
||||
},
|
||||
|
||||
#[serde(rename = "feedback/upload")]
|
||||
#[ts(rename = "feedback/upload")]
|
||||
UploadFeedback {
|
||||
params: v2::UploadFeedbackParams,
|
||||
response: v2::UploadFeedbackResponse,
|
||||
},
|
||||
|
||||
#[serde(rename = "account/read")]
|
||||
#[ts(rename = "account/read")]
|
||||
GetAccount {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::GetAccountResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
Initialize {
|
||||
params: v1::InitializeParams,
|
||||
response: v1::InitializeResponse,
|
||||
},
|
||||
NewConversation {
|
||||
params: v1::NewConversationParams,
|
||||
response: v1::NewConversationResponse,
|
||||
},
|
||||
GetConversationSummary {
|
||||
params: v1::GetConversationSummaryParams,
|
||||
response: v1::GetConversationSummaryResponse,
|
||||
},
|
||||
/// List recorded Codex conversations (rollouts) with optional pagination and search.
|
||||
ListConversations {
|
||||
params: v1::ListConversationsParams,
|
||||
response: v1::ListConversationsResponse,
|
||||
},
|
||||
/// Resume a recorded Codex conversation from a rollout file.
|
||||
ResumeConversation {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
},
|
||||
SendUserMessage {
|
||||
params: v1::SendUserMessageParams,
|
||||
response: v1::SendUserMessageResponse,
|
||||
},
|
||||
SendUserTurn {
|
||||
params: v1::SendUserTurnParams,
|
||||
response: v1::SendUserTurnResponse,
|
||||
},
|
||||
InterruptConversation {
|
||||
params: v1::InterruptConversationParams,
|
||||
response: v1::InterruptConversationResponse,
|
||||
},
|
||||
AddConversationListener {
|
||||
params: v1::AddConversationListenerParams,
|
||||
response: v1::AddConversationSubscriptionResponse,
|
||||
},
|
||||
RemoveConversationListener {
|
||||
params: v1::RemoveConversationListenerParams,
|
||||
response: v1::RemoveConversationSubscriptionResponse,
|
||||
},
|
||||
GitDiffToRemote {
|
||||
params: v1::GitDiffToRemoteParams,
|
||||
response: v1::GitDiffToRemoteResponse,
|
||||
},
|
||||
LoginApiKey {
|
||||
params: v1::LoginApiKeyParams,
|
||||
response: v1::LoginApiKeyResponse,
|
||||
},
|
||||
LoginChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LoginChatGptResponse,
|
||||
},
|
||||
CancelLoginChatGpt {
|
||||
params: v1::CancelLoginChatGptParams,
|
||||
response: v1::CancelLoginChatGptResponse,
|
||||
},
|
||||
LogoutChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LogoutChatGptResponse,
|
||||
},
|
||||
GetAuthStatus {
|
||||
params: v1::GetAuthStatusParams,
|
||||
response: v1::GetAuthStatusResponse,
|
||||
},
|
||||
GetUserSavedConfig {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserSavedConfigResponse,
|
||||
},
|
||||
SetDefaultModel {
|
||||
params: v1::SetDefaultModelParams,
|
||||
response: v1::SetDefaultModelResponse,
|
||||
},
|
||||
GetUserAgent {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserAgentResponse,
|
||||
},
|
||||
UserInfo {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::UserInfoResponse,
|
||||
},
|
||||
FuzzyFileSearch {
|
||||
params: FuzzyFileSearchParams,
|
||||
response: FuzzyFileSearchResponse,
|
||||
},
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
ExecOneOffCommand {
|
||||
params: v1::ExecOneOffCommandParams,
|
||||
response: v1::ExecOneOffCommandResponse,
|
||||
},
|
||||
}
|
||||
|
||||
/// Generates an `enum ServerRequest` where each variant is a request that the
|
||||
/// server can send to the client along with the corresponding params and
|
||||
/// response types. It also generates helper types used by the app/server
|
||||
/// infrastructure (payload enum, request constructor, and export helpers).
|
||||
macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident
|
||||
),* $(,)?
|
||||
) => {
|
||||
paste! {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: [<$variant Params>],
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant([<$variant Params>]), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
paste! {
|
||||
$(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn export_server_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<()> {
|
||||
paste! {
|
||||
$(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCRequest> for ServerRequest {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
server_request_definitions! {
|
||||
/// Request to approve a patch.
|
||||
ApplyPatchApproval,
|
||||
/// Request to exec a command.
|
||||
ExecCommandApproval,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
|
||||
/// and [codex_core::protocol::PatchApplyEndEvent].
|
||||
pub call_id: String,
|
||||
pub file_changes: HashMap<PathBuf, FileChange>,
|
||||
/// Optional explanatory reason (e.g. request for extra write access).
|
||||
pub reason: Option<String>,
|
||||
/// When set, the agent is asking the user to allow writes under this root
|
||||
/// for the remainder of the session (unclear if this is honored today).
|
||||
pub grant_root: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecCommandApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
|
||||
/// and [codex_core::protocol::ExecCommandEndEvent].
|
||||
pub call_id: String,
|
||||
pub command: Vec<String>,
|
||||
pub cwd: PathBuf,
|
||||
pub reason: Option<String>,
|
||||
pub risk: Option<SandboxCommandAssessment>,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
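For context (not part of the diff): the `TryFrom<JSONRPCRequest>` impl above round-trips through `serde_json`, so an incoming request whose camelCase `method` tag matches a variant decodes straight into `ServerRequest`. A hedged sketch with made-up values:

```rust
// Wire shape the serde round-trip accepts; all values are illustrative only.
let request: ServerRequest = serde_json::from_value(serde_json::json!({
    "method": "execCommandApproval",
    "id": 7,
    "params": {
        "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
        "callId": "call-42",
        "command": ["echo", "hello"],
        "cwd": "/tmp",
        "reason": null,
        "risk": null,
        "parsedCmd": []
    }
}))
.expect("decode server request");
assert!(matches!(request, ServerRequest::ExecCommandApproval { .. }));
```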
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ExecCommandApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ApplyPatchApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
pub struct FuzzyFileSearchParams {
|
||||
pub query: String,
|
||||
pub roots: Vec<String>,
|
||||
// if provided, will cancel any previous request that used the same value
|
||||
pub cancellation_token: Option<String>,
|
||||
}
|
||||
|
||||
/// Superset of [`codex_file_search::FileMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResult {
|
||||
pub root: String,
|
||||
pub path: String,
|
||||
pub file_name: String,
|
||||
pub score: u32,
|
||||
pub indices: Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResponse {
|
||||
pub files: Vec<FuzzyFileSearchResult>,
|
||||
}
|
||||
|
||||
/// Notification sent from the server to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ServerNotification {
|
||||
/// NEW NOTIFICATIONS
|
||||
#[serde(rename = "account/rateLimits/updated")]
|
||||
#[ts(rename = "account/rateLimits/updated")]
|
||||
#[strum(serialize = "account/rateLimits/updated")]
|
||||
AccountRateLimitsUpdated(RateLimitSnapshot),
|
||||
|
||||
/// DEPRECATED NOTIFICATIONS below
|
||||
/// Authentication status changed
|
||||
AuthStatusChange(v1::AuthStatusChangeNotification),
|
||||
|
||||
/// ChatGPT login flow completed
|
||||
LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
|
||||
|
||||
/// The special session configured event for a new or resumed conversation.
|
||||
SessionConfigured(v1::SessionConfiguredNotification),
|
||||
}
|
||||
|
||||
impl ServerNotification {
|
||||
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
|
||||
match self {
|
||||
ServerNotification::AccountRateLimitsUpdated(params) => serde_json::to_value(params),
|
||||
ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
|
||||
ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
|
||||
ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Notification sent from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ClientNotification {
|
||||
Initialized,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn serialize_new_conversation() -> Result<()> {
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: RequestId::Integer(42),
|
||||
params: v1::NewConversationParams {
|
||||
model: Some("gpt-5-codex".to_string()),
|
||||
model_provider: None,
|
||||
profile: None,
|
||||
cwd: None,
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
compact_prompt: None,
|
||||
include_apply_patch_tool: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "newConversation",
|
||||
"id": 42,
|
||||
"params": {
|
||||
"model": "gpt-5-codex",
|
||||
"modelProvider": null,
|
||||
"profile": null,
|
||||
"cwd": null,
|
||||
"approvalPolicy": "on-request",
|
||||
"sandbox": null,
|
||||
"config": null,
|
||||
"baseInstructions": null,
|
||||
"includeApplyPatchTool": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
serde_json::to_value(id)?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ConversationId =
|
||||
serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_client_notification() -> Result<()> {
|
||||
let notification = ClientNotification::Initialized;
|
||||
// Note there is no "params" field for this notification.
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "initialized",
|
||||
}),
|
||||
serde_json::to_value(¬ification)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
risk: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: params.clone(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "execCommandApproval",
|
||||
"id": 7,
|
||||
"params": {
|
||||
"conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
|
||||
"callId": "call-42",
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"risk": null,
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::ExecCommandApproval(params);
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account_rate_limits() -> Result<()> {
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_api_key() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(2),
|
||||
params: v2::LoginAccountParams::ApiKey {
|
||||
api_key: "secret".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login",
|
||||
"id": 2,
|
||||
"params": {
|
||||
"type": "apiKey",
|
||||
"apiKey": "secret"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_chatgpt() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(3),
|
||||
params: v2::LoginAccountParams::ChatGpt,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login",
|
||||
"id": 3,
|
||||
"params": {
|
||||
"type": "chatgpt"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_logout() -> Result<()> {
|
||||
let request = ClientRequest::LogoutAccount {
|
||||
request_id: RequestId::Integer(4),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/logout",
|
||||
"id": 4,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account() -> Result<()> {
|
||||
let request = ClientRequest::GetAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/read",
|
||||
"id": 5,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn account_serializes_fields_in_camel_case() -> Result<()> {
|
||||
let api_key = v2::Account::ApiKey {
|
||||
api_key: "secret".to_string(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "apiKey",
|
||||
"apiKey": "secret",
|
||||
}),
|
||||
serde_json::to_value(&api_key)?,
|
||||
);
|
||||
|
||||
let chatgpt = v2::Account::ChatGpt {
|
||||
email: Some("user@example.com".to_string()),
|
||||
plan_type: PlanType::Plus,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "chatgpt",
|
||||
"email": "user@example.com",
|
||||
"planType": "plus",
|
||||
}),
|
||||
serde_json::to_value(&chatgpt)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_list_models() -> Result<()> {
|
||||
let request = ClientRequest::ListModels {
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::ListModelsParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "model/list",
|
||||
"id": 6,
|
||||
"params": {
|
||||
"pageSize": null,
|
||||
"cursor": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
// Module declarations for the app-server protocol namespace.
|
||||
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
|
||||
|
||||
pub mod common;
|
||||
pub mod v1;
|
||||
pub mod v2;
|
||||
@@ -1,405 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
// Reuse shared types defined in `common.rs`.
|
||||
use crate::protocol::common::AuthMode;
|
||||
use crate::protocol::common::GitSha;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeParams {
|
||||
pub client_info: ClientInfo,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ClientInfo {
|
||||
pub name: String,
|
||||
pub title: Option<String>,
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationParams {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub developer_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub compact_prompt: Option<String>,
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(untagged)]
|
||||
pub enum GetConversationSummaryParams {
|
||||
RolloutPath {
|
||||
#[serde(rename = "rolloutPath")]
|
||||
rollout_path: PathBuf,
|
||||
},
|
||||
ConversationId {
|
||||
#[serde(rename = "conversationId")]
|
||||
conversation_id: ConversationId,
|
||||
},
|
||||
}
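For context (not part of the diff): because this enum is `#[serde(untagged)]`, either JSON shape is accepted without a discriminator field. A sketch with illustrative values:

```rust
// Both shapes deserialize into GetConversationSummaryParams (untagged enum).
let by_path: GetConversationSummaryParams =
    serde_json::from_value(serde_json::json!({ "rolloutPath": "/tmp/rollout.jsonl" }))
        .expect("rolloutPath variant");
let by_id: GetConversationSummaryParams = serde_json::from_value(serde_json::json!({
    "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8"
}))
.expect("conversationId variant");
```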
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetConversationSummaryResponse {
|
||||
pub summary: ConversationSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsParams {
|
||||
pub page_size: Option<usize>,
|
||||
pub cursor: Option<String>,
|
||||
pub model_providers: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ConversationSummary {
|
||||
pub conversation_id: ConversationId,
|
||||
pub path: PathBuf,
|
||||
pub preview: String,
|
||||
pub timestamp: Option<String>,
|
||||
pub model_provider: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsResponse {
|
||||
pub items: Vec<ConversationSummary>,
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
pub history: Option<Vec<ResponseItem>>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationSubscriptionResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationSubscriptionResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyParams {
|
||||
pub api_key: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub auth_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteResponse {
|
||||
pub sha: GitSha,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptParams {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteParams {
|
||||
pub cwd: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusParams {
|
||||
pub include_token: Option<bool>,
|
||||
pub refresh_token: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandParams {
|
||||
pub command: Vec<String>,
|
||||
pub timeout_ms: Option<u64>,
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandResponse {
|
||||
pub exit_code: i32,
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
}
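For context (not part of the diff): a sketch of the camelCase request body `ExecOneOffCommandParams` accepts; the command and timeout are made-up values and `sandboxPolicy` is left null.

```rust
// Illustrative one-off exec request body.
let params: ExecOneOffCommandParams = serde_json::from_value(serde_json::json!({
    "command": ["ls", "-la"],
    "timeoutMs": 10_000,
    "cwd": "/tmp",
    "sandboxPolicy": null
}))
.expect("exec params");
assert_eq!(vec!["ls".to_string(), "-la".to_string()], params.command);
```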
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusResponse {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
pub auth_token: Option<String>,
|
||||
pub requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserAgentResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserInfoResponse {
|
||||
pub alleged_user_email: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserSavedConfigResponse {
|
||||
pub config: UserSavedConfig,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelParams {
|
||||
pub model: Option<String>,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelResponse {}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserSavedConfig {
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox_mode: Option<SandboxMode>,
|
||||
pub sandbox_settings: Option<SandboxSettings>,
|
||||
pub forced_chatgpt_workspace_id: Option<String>,
|
||||
pub forced_login_method: Option<ForcedLoginMethod>,
|
||||
pub model: Option<String>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub tools: Option<Tools>,
|
||||
pub profile: Option<String>,
|
||||
pub profiles: HashMap<String, Profile>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Profile {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Tools {
|
||||
pub web_search: Option<bool>,
|
||||
pub view_image: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SandboxSettings {
|
||||
#[serde(default)]
|
||||
pub writable_roots: Vec<PathBuf>,
|
||||
pub network_access: Option<bool>,
|
||||
pub exclude_tmpdir_env_var: Option<bool>,
|
||||
pub exclude_slash_tmp: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
pub sandbox_policy: SandboxPolicy,
|
||||
pub model: String,
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
pub summary: ReasoningSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationResponse {
|
||||
pub abort_reason: TurnAbortReason,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationListenerParams {
|
||||
pub conversation_id: ConversationId,
|
||||
#[serde(default)]
|
||||
pub experimental_raw_events: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationListenerParams {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
pub enum InputItem {
|
||||
Text { text: String },
|
||||
Image { image_url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
}
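For context (not part of the diff): `InputItem` is adjacently tagged, so the payload sits under `data` while `type` carries the camelCased variant name. A sketch:

```rust
// Adjacent tagging: {"type": ..., "data": ...}.
let item: InputItem = serde_json::from_value(serde_json::json!({
    "type": "text",
    "data": { "text": "hello" }
}))
.expect("text item");
assert_eq!(InputItem::Text { text: "hello".to_string() }, item);
```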
|
||||
|
||||
// Deprecated notifications (v1)
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptCompleteNotification {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SessionConfiguredNotification {
|
||||
pub session_id: ConversationId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub history_log_id: u64,
|
||||
#[ts(type = "number")]
|
||||
pub history_entry_count: usize,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AuthStatusChangeNotification {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
}
|
||||
@@ -1,122 +0,0 @@
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
pub enum Account {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey { api_key: String },
|
||||
|
||||
#[serde(rename = "chatgpt", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgpt", rename_all = "camelCase")]
|
||||
ChatGpt {
|
||||
email: Option<String>,
|
||||
plan_type: PlanType,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type")]
|
||||
#[ts(tag = "type")]
|
||||
pub enum LoginAccountParams {
|
||||
#[serde(rename = "apiKey")]
|
||||
#[ts(rename = "apiKey")]
|
||||
ApiKey {
|
||||
#[serde(rename = "apiKey")]
|
||||
#[ts(rename = "apiKey")]
|
||||
api_key: String,
|
||||
},
|
||||
#[serde(rename = "chatgpt")]
|
||||
#[ts(rename = "chatgpt")]
|
||||
ChatGpt,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginAccountResponse {
|
||||
/// Only set if the login method is ChatGPT.
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Option<Uuid>,
|
||||
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
/// Only set if the login method is ChatGPT.
|
||||
pub auth_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAccountRateLimitsResponse {
|
||||
pub rate_limits: RateLimitSnapshot,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAccountResponse {
|
||||
pub account: Account,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListModelsParams {
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub page_size: Option<usize>,
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Model {
|
||||
pub id: String,
|
||||
pub model: String,
|
||||
pub display_name: String,
|
||||
pub description: String,
|
||||
pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
|
||||
pub default_reasoning_effort: ReasoningEffort,
|
||||
// Only one model should be marked as default.
|
||||
pub is_default: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ReasoningEffortOption {
|
||||
pub reasoning_effort: ReasoningEffort,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListModelsResponse {
|
||||
pub items: Vec<Model>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// if None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
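For context (not part of the diff): a hedged sketch of cursor-based paging against `model/list`; a `next_cursor` of `None` means the listing is exhausted. `send_list_models` is a hypothetical helper standing in for whatever transport actually issues the request.

```rust
// Hypothetical helper assumed:
//   fn send_list_models(params: ListModelsParams) -> anyhow::Result<ListModelsResponse>
fn collect_all_models() -> anyhow::Result<Vec<Model>> {
    let mut all_models = Vec::new();
    let mut cursor: Option<String> = None;
    loop {
        let response = send_list_models(ListModelsParams {
            page_size: Some(50),
            cursor: cursor.clone(),
        })?;
        all_models.extend(response.items);
        match response.next_cursor {
            Some(next) => cursor = Some(next), // continue after the last item
            None => break,                     // no more pages
        }
    }
    Ok(all_models)
}
```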
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UploadFeedbackParams {
|
||||
pub classification: String,
|
||||
pub reason: Option<String>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
pub include_logs: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UploadFeedbackResponse {
|
||||
pub thread_id: String,
|
||||
}
|
||||
@@ -313,11 +313,10 @@ fn assert_instructions_message(item: &ResponseItem) {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "user");
|
||||
let texts = content_texts(content);
|
||||
let is_instructions = texts
|
||||
.iter()
|
||||
.any(|text| text.starts_with("# AGENTS.md instructions for "));
|
||||
assert!(
|
||||
is_instructions,
|
||||
texts
|
||||
.iter()
|
||||
.any(|text| text.contains("<user_instructions>")),
|
||||
"expected instructions message, got {texts:?}"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -47,9 +47,6 @@ tokio = { workspace = true, features = [
|
||||
"signal",
|
||||
] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = { workspace = true }
|
||||
assert_matches = { workspace = true }
|
||||
|
||||
@@ -11,7 +11,6 @@ use codex_protocol::config_types::SandboxMode;
|
||||
|
||||
use crate::LandlockCommand;
|
||||
use crate::SeatbeltCommand;
|
||||
use crate::WindowsCommand;
|
||||
use crate::exit_status::handle_exit_status;
|
||||
|
||||
pub async fn run_command_under_seatbelt(
|
||||
@@ -52,29 +51,9 @@ pub async fn run_command_under_landlock(
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn run_command_under_windows(
|
||||
command: WindowsCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let WindowsCommand {
|
||||
full_auto,
|
||||
config_overrides,
|
||||
command,
|
||||
} = command;
|
||||
run_command_under_sandbox(
|
||||
full_auto,
|
||||
command,
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Windows,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
enum SandboxType {
|
||||
Seatbelt,
|
||||
Landlock,
|
||||
Windows,
|
||||
}
|
||||
|
||||
async fn run_command_under_sandbox(
|
||||
@@ -108,63 +87,6 @@ async fn run_command_under_sandbox(
|
||||
let stdio_policy = StdioPolicy::Inherit;
|
||||
let env = create_env(&config.shell_environment_policy);
|
||||
|
||||
// Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
|
||||
if let SandboxType::Windows = sandbox_type {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
|
||||
let policy_str = match &config.sandbox_policy {
|
||||
codex_core::protocol::SandboxPolicy::DangerFullAccess => "workspace-write",
|
||||
codex_core::protocol::SandboxPolicy::ReadOnly => "read-only",
|
||||
codex_core::protocol::SandboxPolicy::WorkspaceWrite { .. } => "workspace-write",
|
||||
};
|
||||
|
||||
let sandbox_cwd = sandbox_policy_cwd.clone();
|
||||
let cwd_clone = cwd.clone();
|
||||
let env_map = env.clone();
|
||||
let command_vec = command.clone();
|
||||
let res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(
|
||||
policy_str,
|
||||
&sandbox_cwd,
|
||||
command_vec,
|
||||
&cwd_clone,
|
||||
env_map,
|
||||
None,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
let capture = match res {
|
||||
Ok(Ok(v)) => v,
|
||||
Ok(Err(err)) => {
|
||||
eprintln!("windows sandbox failed: {err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
Err(join_err) => {
|
||||
eprintln!("windows sandbox join error: {join_err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
if !capture.stdout.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stdout().write_all(&capture.stdout);
|
||||
}
|
||||
if !capture.stderr.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stderr().write_all(&capture.stderr);
|
||||
}
|
||||
|
||||
std::process::exit(capture.exit_code);
|
||||
}
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
anyhow::bail!("Windows sandbox is only available on Windows");
|
||||
}
|
||||
}
|
||||
|
||||
let mut child = match sandbox_type {
|
||||
SandboxType::Seatbelt => {
|
||||
spawn_command_under_seatbelt(
|
||||
@@ -193,9 +115,6 @@ async fn run_command_under_sandbox(
|
||||
)
|
||||
.await?
|
||||
}
|
||||
SandboxType::Windows => {
|
||||
unreachable!("Windows sandbox should have been handled above");
|
||||
}
|
||||
};
|
||||
let status = child.wait().await?;
|
||||
|
||||
|
||||
@@ -32,17 +32,3 @@ pub struct LandlockCommand {
|
||||
#[arg(trailing_var_arg = true)]
|
||||
pub command: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub struct WindowsCommand {
|
||||
/// Convenience alias for low-friction sandboxed automatic execution (network-disabled sandbox that can write to cwd and TMPDIR)
|
||||
#[arg(long = "full-auto", default_value_t = false)]
|
||||
pub full_auto: bool,
|
||||
|
||||
#[clap(skip)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
/// Full command args to run under Windows restricted token sandbox.
|
||||
#[arg(trailing_var_arg = true)]
|
||||
pub command: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ use codex_chatgpt::apply_command::ApplyCommand;
|
||||
use codex_chatgpt::apply_command::run_apply_command;
|
||||
use codex_cli::LandlockCommand;
|
||||
use codex_cli::SeatbeltCommand;
|
||||
use codex_cli::WindowsCommand;
|
||||
use codex_cli::login::read_api_key_from_stdin;
|
||||
use codex_cli::login::run_login_status;
|
||||
use codex_cli::login::run_login_with_api_key;
|
||||
@@ -152,9 +151,6 @@ enum SandboxCommand {
|
||||
/// Run a command under Landlock+seccomp (Linux only).
|
||||
#[clap(visible_alias = "landlock")]
|
||||
Linux(LandlockCommand),
|
||||
|
||||
/// Run a command under Windows restricted token (Windows only).
|
||||
Windows(WindowsCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -476,17 +472,6 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
SandboxCommand::Windows(mut windows_cli) => {
|
||||
prepend_config_flags(
|
||||
&mut windows_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_windows(
|
||||
windows_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(
|
||||
@@ -512,7 +497,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
.map_err(|e| anyhow::anyhow!(e))?;
|
||||
|
||||
// Thread through relevant top-level flags (at minimum, `--profile`).
|
||||
// Also honor `--search` since it maps to a feature toggle.
|
||||
|
||||
@@ -196,9 +196,7 @@ impl McpCli {
|
||||
|
||||
async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
|
||||
// Validate any provided overrides even though they are not currently applied.
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -312,9 +310,7 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
}
|
||||
|
||||
async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
|
||||
config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
|
||||
let RemoveArgs { name } = remove_args;
|
||||
|
||||
@@ -345,9 +341,7 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
|
||||
}
|
||||
|
||||
async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs) -> Result<()> {
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -386,9 +380,7 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
|
||||
}
|
||||
|
||||
async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutArgs) -> Result<()> {
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -415,9 +407,7 @@ async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutAr
|
||||
}
|
||||
|
||||
async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -672,9 +662,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
}
|
||||
|
||||
async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
@@ -22,6 +22,6 @@ chrono = { version = "0.4", features = ["serde"] }
|
||||
diffy = "0.4.2"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
thiserror = "2.0.17"
|
||||
thiserror = "2.0.12"
|
||||
codex-backend-client = { path = "../backend-client", optional = true }
|
||||
codex-git = { workspace = true }
|
||||
|
||||
@@ -83,7 +83,6 @@ tree-sitter-bash = { workspace = true }
|
||||
uuid = { workspace = true, features = ["serde", "v4"] }
|
||||
which = { workspace = true }
|
||||
wildmatch = { workspace = true }
|
||||
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
|
||||
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
|
||||
@@ -82,6 +82,6 @@ OUTPUT FORMAT:
|
||||
|
||||
* **Do not** wrap the JSON in markdown fences or extra prose.
|
||||
* The code_location field is required and must include absolute_file_path and line_range.
|
||||
* Line ranges must be as short as possible for interpreting the issue (avoid ranges over 5–10 lines; pick the most suitable subrange).
|
||||
*Line ranges must be as short as possible for interpreting the issue (avoid ranges over 5–10 lines; pick the most suitable subrange).
|
||||
* The code_location should overlap with the diff.
|
||||
* Do not generate a PR fix.
|
||||
* Do not generate a PR fix.
|
||||
@@ -216,12 +216,10 @@ impl ModelClient {
|
||||
let verbosity = if self.config.model_family.support_verbosity {
|
||||
self.config.model_verbosity
|
||||
} else {
|
||||
if self.config.model_verbosity.is_some() {
|
||||
warn!(
|
||||
"model_verbosity is set but ignored as the model does not support verbosity: {}",
|
||||
self.config.model_family.family
|
||||
);
|
||||
}
|
||||
warn!(
|
||||
"model_verbosity is set but ignored as the model does not support verbosity: {}",
|
||||
self.config.model_family.family
|
||||
);
|
||||
None
|
||||
};
|
||||
|
||||
|
||||
@@ -116,6 +116,7 @@ use crate::user_instructions::DeveloperInstructions;
|
||||
use crate::user_instructions::UserInstructions;
|
||||
use crate::user_notification::UserNotification;
|
||||
use crate::util::backoff;
|
||||
use chrono::Local;
|
||||
use codex_async_utils::OrCancelExt;
|
||||
use codex_otel::otel_event_manager::OtelEventManager;
|
||||
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
@@ -267,6 +268,7 @@ pub(crate) struct TurnContext {
|
||||
/// the model as well as sandbox policies are resolved against this path
|
||||
/// instead of `std::env::current_dir()`.
|
||||
pub(crate) cwd: PathBuf,
|
||||
pub(crate) local_date_with_timezone: Option<String>,
|
||||
pub(crate) developer_instructions: Option<String>,
|
||||
pub(crate) base_instructions: Option<String>,
|
||||
pub(crate) compact_prompt: Option<String>,
|
||||
@@ -423,6 +425,7 @@ impl Session {
|
||||
sub_id,
|
||||
client,
|
||||
cwd: session_configuration.cwd.clone(),
|
||||
local_date_with_timezone: Some(Local::now().format("%Y-%m-%d %:z").to_string()),
|
||||
developer_instructions: session_configuration.developer_instructions.clone(),
|
||||
base_instructions: session_configuration.base_instructions.clone(),
|
||||
compact_prompt: session_configuration.compact_prompt.clone(),
|
||||
@@ -1003,16 +1006,11 @@ impl Session {
|
||||
items.push(DeveloperInstructions::new(developer_instructions.to_string()).into());
|
||||
}
|
||||
if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
|
||||
items.push(
|
||||
UserInstructions {
|
||||
text: user_instructions.to_string(),
|
||||
directory: turn_context.cwd.to_string_lossy().into_owned(),
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
items.push(UserInstructions::new(user_instructions.to_string()).into());
|
||||
}
|
||||
items.push(ResponseItem::from(EnvironmentContext::new(
|
||||
Some(turn_context.cwd.clone()),
|
||||
turn_context.local_date_with_timezone.clone(),
|
||||
Some(turn_context.approval_policy),
|
||||
Some(turn_context.sandbox_policy.clone()),
|
||||
Some(self.user_shell().clone()),
|
||||
@@ -1698,6 +1696,7 @@ async fn spawn_review_thread(
|
||||
sandbox_policy: parent_turn_context.sandbox_policy.clone(),
|
||||
shell_environment_policy: parent_turn_context.shell_environment_policy.clone(),
|
||||
cwd: parent_turn_context.cwd.clone(),
|
||||
local_date_with_timezone: parent_turn_context.local_date_with_timezone.clone(),
|
||||
final_output_json_schema: None,
|
||||
codex_linux_sandbox_exe: parent_turn_context.codex_linux_sandbox_exe.clone(),
|
||||
tool_call_gate: Arc::new(ReadinessFlag::new()),
|
||||
|
||||
@@ -13,7 +13,6 @@ use crate::protocol::ErrorEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::protocol::TurnContextItem;
|
||||
use crate::protocol::WarningEvent;
|
||||
use crate::truncate::truncate_middle;
|
||||
use crate::util::backoff;
|
||||
use askama::Template;
|
||||
@@ -169,11 +168,6 @@ async fn run_compact_task_inner(
|
||||
message: "Compact task completed".to_string(),
|
||||
});
|
||||
sess.send_event(&turn_context, event).await;
|
||||
|
||||
let warning = EventMsg::Warning(WarningEvent {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
});
|
||||
sess.send_event(&turn_context, warning).await;
|
||||
}
|
||||
|
||||
pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
|
||||
@@ -353,8 +347,7 @@ mod tests {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "# AGENTS.md instructions for project\n\n<INSTRUCTIONS>\ndo things\n</INSTRUCTIONS>"
|
||||
.to_string(),
|
||||
text: "<user_instructions>do things</user_instructions>".to_string(),
|
||||
}],
|
||||
},
|
||||
ResponseItem::Message {
|
||||
|
||||
@@ -769,8 +769,6 @@ impl ConfigToml {
|
||||
let mut forced_auto_mode_downgraded_on_windows = false;
|
||||
if cfg!(target_os = "windows")
|
||||
&& matches!(resolved_sandbox_mode, SandboxMode::WorkspaceWrite)
|
||||
// If the experimental Windows sandbox is enabled, do not force a downgrade.
|
||||
&& crate::safety::get_platform_sandbox().is_none()
|
||||
{
|
||||
sandbox_policy = SandboxPolicy::new_read_only_policy();
|
||||
forced_auto_mode_downgraded_on_windows = true;
|
||||
@@ -902,10 +900,6 @@ impl Config {
|
||||
};
|
||||
|
||||
let features = Features::from_config(&cfg, &config_profile, feature_overrides);
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
crate::safety::set_windows_sandbox_enabled(features.enabled(Feature::WindowsSandbox));
|
||||
}
|
||||
|
||||
let resolved_cwd = {
|
||||
use std::env;
|
||||
|
||||
@@ -73,6 +73,7 @@ impl ConversationHistory {
|
||||
pub(crate) fn get_history_for_prompt(&mut self) -> Vec<ResponseItem> {
|
||||
let mut history = self.get_history();
|
||||
Self::remove_ghost_snapshots(&mut history);
|
||||
Self::remove_reasoning_before_last_turn(&mut history);
|
||||
history
|
||||
}
|
||||
|
||||
@@ -124,6 +125,25 @@ impl ConversationHistory {
|
||||
items.retain(|item| !matches!(item, ResponseItem::GhostSnapshot { .. }));
|
||||
}
|
||||
|
||||
fn remove_reasoning_before_last_turn(items: &mut Vec<ResponseItem>) {
|
||||
// Responses API drops reasoning items before the last user message.
|
||||
// Sending them is harmless but can lead to validation errors when switching between API organizations.
|
||||
// https://cookbook.openai.com/examples/responses_api/reasoning_items#caching
|
||||
let Some(last_user_index) = items
|
||||
.iter()
|
||||
// Use last user message as the turn boundary.
|
||||
.rposition(|item| matches!(item, ResponseItem::Message { role, .. } if role == "user"))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let mut index = 0usize;
|
||||
items.retain(|item| {
|
||||
let keep = index >= last_user_index || !matches!(item, ResponseItem::Reasoning { .. });
|
||||
index += 1;
|
||||
keep
|
||||
});
|
||||
}
|
||||
|
||||
fn ensure_call_outputs_present(&mut self) {
|
||||
// Collect synthetic outputs to insert immediately after their calls.
|
||||
// Store the insertion position (index of call) alongside the item so
|
||||
@@ -366,10 +386,23 @@ impl ConversationHistory {
|
||||
match item {
|
||||
ResponseItem::FunctionCallOutput { call_id, output } => {
|
||||
let truncated = format_output_for_model_body(output.content.as_str());
|
||||
let truncated_items = output
|
||||
.content_items
|
||||
.as_ref()
|
||||
.map(|items| globally_truncate_function_output_items(items));
|
||||
let truncated_items = output.content_items.as_ref().map(|items| {
|
||||
items
|
||||
.iter()
|
||||
.map(|it| match it {
|
||||
FunctionCallOutputContentItem::InputText { text } => {
|
||||
FunctionCallOutputContentItem::InputText {
|
||||
text: format_output_for_model_body(text),
|
||||
}
|
||||
}
|
||||
FunctionCallOutputContentItem::InputImage { image_url } => {
|
||||
FunctionCallOutputContentItem::InputImage {
|
||||
image_url: image_url.clone(),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
});
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: FunctionCallOutputPayload {
|
||||
@@ -398,53 +431,6 @@ impl ConversationHistory {
|
||||
}
|
||||
}
|
||||
|
||||
fn globally_truncate_function_output_items(
|
||||
items: &[FunctionCallOutputContentItem],
|
||||
) -> Vec<FunctionCallOutputContentItem> {
|
||||
let mut out: Vec<FunctionCallOutputContentItem> = Vec::with_capacity(items.len());
|
||||
let mut remaining = MODEL_FORMAT_MAX_BYTES;
|
||||
let mut omitted_text_items = 0usize;
|
||||
|
||||
for it in items {
|
||||
match it {
|
||||
FunctionCallOutputContentItem::InputText { text } => {
|
||||
if remaining == 0 {
|
||||
omitted_text_items += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let len = text.len();
|
||||
if len <= remaining {
|
||||
out.push(FunctionCallOutputContentItem::InputText { text: text.clone() });
|
||||
remaining -= len;
|
||||
} else {
|
||||
let slice = take_bytes_at_char_boundary(text, remaining);
|
||||
if !slice.is_empty() {
|
||||
out.push(FunctionCallOutputContentItem::InputText {
|
||||
text: slice.to_string(),
|
||||
});
|
||||
}
|
||||
remaining = 0;
|
||||
}
|
||||
}
|
||||
// todo(aibrahim): handle input images; resize
|
||||
FunctionCallOutputContentItem::InputImage { image_url } => {
|
||||
out.push(FunctionCallOutputContentItem::InputImage {
|
||||
image_url: image_url.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if omitted_text_items > 0 {
|
||||
out.push(FunctionCallOutputContentItem::InputText {
|
||||
text: format!("[omitted {omitted_text_items} text items ...]"),
|
||||
});
|
||||
}
|
||||
|
||||
out
|
||||
}
|
||||
|
||||
pub(crate) fn format_output_for_model_body(content: &str) -> String {
|
||||
// Head+tail truncation for the model: show the beginning and end with an elision.
|
||||
// Clients still receive full streams; only this formatted summary is capped.
|
||||
@@ -554,6 +540,15 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
fn reasoning(id: &str) -> ResponseItem {
|
||||
ResponseItem::Reasoning {
|
||||
id: id.to_string(),
|
||||
summary: Vec::new(),
|
||||
content: None,
|
||||
encrypted_content: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn create_history_with_items(items: Vec<ResponseItem>) -> ConversationHistory {
|
||||
let mut h = ConversationHistory::new();
|
||||
h.record_items(items.iter());
|
||||
@@ -610,6 +605,40 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_history_drops_reasoning_before_last_user_message() {
|
||||
let mut history = ConversationHistory::new();
|
||||
let items = vec![
|
||||
user_msg("initial"),
|
||||
reasoning("first"),
|
||||
assistant_msg("ack"),
|
||||
user_msg("latest"),
|
||||
reasoning("second"),
|
||||
assistant_msg("ack"),
|
||||
reasoning("third"),
|
||||
];
|
||||
history.record_items(items.iter());
|
||||
|
||||
let filtered = history.get_history_for_prompt();
|
||||
assert_eq!(
|
||||
filtered,
|
||||
vec![
|
||||
user_msg("initial"),
|
||||
assistant_msg("ack"),
|
||||
user_msg("latest"),
|
||||
reasoning("second"),
|
||||
assistant_msg("ack"),
|
||||
reasoning("third"),
|
||||
]
|
||||
);
|
||||
let reasoning_count = history
|
||||
.contents()
|
||||
.iter()
|
||||
.filter(|item| matches!(item, ResponseItem::Reasoning { .. }))
|
||||
.count();
|
||||
assert_eq!(reasoning_count, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_history_for_prompt_drops_ghost_commits() {
|
||||
let items = vec![ResponseItem::GhostSnapshot {
|
||||
@@ -890,81 +919,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncates_across_multiple_under_limit_texts_and_reports_omitted() {
|
||||
// Arrange: several text items, none exceeding per-item limit, but total exceeds budget.
|
||||
let budget = MODEL_FORMAT_MAX_BYTES;
|
||||
let t1_len = (budget / 2).saturating_sub(10);
|
||||
let t2_len = (budget / 2).saturating_sub(10);
|
||||
let remaining_after_t1_t2 = budget.saturating_sub(t1_len + t2_len);
|
||||
let t3_len = 50; // gets truncated to remaining_after_t1_t2
|
||||
let t4_len = 5; // omitted
|
||||
let t5_len = 7; // omitted
|
||||
|
||||
let t1 = "a".repeat(t1_len);
|
||||
let t2 = "b".repeat(t2_len);
|
||||
let t3 = "c".repeat(t3_len);
|
||||
let t4 = "d".repeat(t4_len);
|
||||
let t5 = "e".repeat(t5_len);
|
||||
|
||||
let item = ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-omit".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "irrelevant".to_string(),
|
||||
content_items: Some(vec![
|
||||
FunctionCallOutputContentItem::InputText { text: t1 },
|
||||
FunctionCallOutputContentItem::InputText { text: t2 },
|
||||
FunctionCallOutputContentItem::InputImage {
|
||||
image_url: "img:mid".to_string(),
|
||||
},
|
||||
FunctionCallOutputContentItem::InputText { text: t3 },
|
||||
FunctionCallOutputContentItem::InputText { text: t4 },
|
||||
FunctionCallOutputContentItem::InputText { text: t5 },
|
||||
]),
|
||||
success: Some(true),
|
||||
},
|
||||
};
|
||||
|
||||
let mut history = ConversationHistory::new();
|
||||
history.record_items([&item]);
|
||||
assert_eq!(history.items.len(), 1);
|
||||
let json = serde_json::to_value(&history.items[0]).expect("serialize to json");
|
||||
|
||||
let output = json
|
||||
.get("output")
|
||||
.expect("output field")
|
||||
.as_array()
|
||||
.expect("array output");
|
||||
|
||||
// Expect: t1 (full), t2 (full), image, t3 (truncated), summary mentioning 2 omitted.
|
||||
assert_eq!(output.len(), 5);
|
||||
|
||||
let first = output[0].as_object().expect("first obj");
|
||||
assert_eq!(first.get("type").unwrap(), "input_text");
|
||||
let first_text = first.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(first_text.len(), t1_len);
|
||||
|
||||
let second = output[1].as_object().expect("second obj");
|
||||
assert_eq!(second.get("type").unwrap(), "input_text");
|
||||
let second_text = second.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(second_text.len(), t2_len);
|
||||
|
||||
assert_eq!(
|
||||
output[2],
|
||||
serde_json::json!({"type": "input_image", "image_url": "img:mid"})
|
||||
);
|
||||
|
||||
let fourth = output[3].as_object().expect("fourth obj");
|
||||
assert_eq!(fourth.get("type").unwrap(), "input_text");
|
||||
let fourth_text = fourth.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(fourth_text.len(), remaining_after_t1_t2);
|
||||
|
||||
let summary = output[4].as_object().expect("summary obj");
|
||||
assert_eq!(summary.get("type").unwrap(), "input_text");
|
||||
let summary_text = summary.get("text").unwrap().as_str().unwrap();
|
||||
assert!(summary_text.contains("omitted 2 text items"));
|
||||
}
|
||||
|
||||
//TODO(aibrahim): run CI in release mode.
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
|
||||
@@ -24,6 +24,7 @@ pub enum NetworkAccess {
|
||||
#[serde(rename = "environment_context", rename_all = "snake_case")]
|
||||
pub(crate) struct EnvironmentContext {
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub local_date: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox_mode: Option<SandboxMode>,
|
||||
pub network_access: Option<NetworkAccess>,
|
||||
@@ -34,12 +35,14 @@ pub(crate) struct EnvironmentContext {
|
||||
impl EnvironmentContext {
|
||||
pub fn new(
|
||||
cwd: Option<PathBuf>,
|
||||
local_date: Option<String>,
|
||||
approval_policy: Option<AskForApproval>,
|
||||
sandbox_policy: Option<SandboxPolicy>,
|
||||
shell: Option<Shell>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cwd,
|
||||
local_date,
|
||||
approval_policy,
|
||||
sandbox_mode: match sandbox_policy {
|
||||
Some(SandboxPolicy::DangerFullAccess) => Some(SandboxMode::DangerFullAccess),
|
||||
@@ -79,6 +82,7 @@ impl EnvironmentContext {
|
||||
pub fn equals_except_shell(&self, other: &EnvironmentContext) -> bool {
|
||||
let EnvironmentContext {
|
||||
cwd,
|
||||
local_date,
|
||||
approval_policy,
|
||||
sandbox_mode,
|
||||
network_access,
|
||||
@@ -88,6 +92,7 @@ impl EnvironmentContext {
|
||||
} = other;
|
||||
|
||||
self.cwd == *cwd
|
||||
&& self.local_date == *local_date
|
||||
&& self.approval_policy == *approval_policy
|
||||
&& self.sandbox_mode == *sandbox_mode
|
||||
&& self.network_access == *network_access
|
||||
@@ -100,6 +105,11 @@ impl EnvironmentContext {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let local_date = if before.local_date_with_timezone != after.local_date_with_timezone {
|
||||
after.local_date_with_timezone.clone()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let approval_policy = if before.approval_policy != after.approval_policy {
|
||||
Some(after.approval_policy)
|
||||
} else {
|
||||
@@ -110,7 +120,7 @@ impl EnvironmentContext {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
EnvironmentContext::new(cwd, approval_policy, sandbox_policy, None)
|
||||
EnvironmentContext::new(cwd, local_date, approval_policy, sandbox_policy, None)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,6 +128,7 @@ impl From<&TurnContext> for EnvironmentContext {
|
||||
fn from(turn_context: &TurnContext) -> Self {
|
||||
Self::new(
|
||||
Some(turn_context.cwd.clone()),
|
||||
turn_context.local_date_with_timezone.clone(),
|
||||
Some(turn_context.approval_policy),
|
||||
Some(turn_context.sandbox_policy.clone()),
|
||||
// Shell is not configurable from turn to turn
|
||||
@@ -134,6 +145,7 @@ impl EnvironmentContext {
|
||||
/// ```xml
|
||||
/// <environment_context>
|
||||
/// <cwd>...</cwd>
|
||||
/// <local_date>...</local_date>
|
||||
/// <approval_policy>...</approval_policy>
|
||||
/// <sandbox_mode>...</sandbox_mode>
|
||||
/// <writable_roots>...</writable_roots>
|
||||
@@ -146,6 +158,9 @@ impl EnvironmentContext {
|
||||
if let Some(cwd) = self.cwd {
|
||||
lines.push(format!(" <cwd>{}</cwd>", cwd.to_string_lossy()));
|
||||
}
|
||||
if let Some(local_date) = self.local_date {
|
||||
lines.push(format!(" <local_date>{local_date}</local_date>"));
|
||||
}
|
||||
if let Some(approval_policy) = self.approval_policy {
|
||||
lines.push(format!(
|
||||
" <approval_policy>{approval_policy}</approval_policy>"
|
||||
@@ -212,6 +227,7 @@ mod tests {
|
||||
fn serialize_workspace_write_environment_context() {
|
||||
let context = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo", "/tmp"], false)),
|
||||
None,
|
||||
@@ -219,6 +235,7 @@ mod tests {
|
||||
|
||||
let expected = r#"<environment_context>
|
||||
<cwd>/repo</cwd>
|
||||
<local_date>2025-01-01 +00:00</local_date>
|
||||
<approval_policy>on-request</approval_policy>
|
||||
<sandbox_mode>workspace-write</sandbox_mode>
|
||||
<network_access>restricted</network_access>
|
||||
@@ -235,12 +252,14 @@ mod tests {
|
||||
fn serialize_read_only_environment_context() {
|
||||
let context = EnvironmentContext::new(
|
||||
None,
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::Never),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
None,
|
||||
);
|
||||
|
||||
let expected = r#"<environment_context>
|
||||
<local_date>2025-01-01 +00:00</local_date>
|
||||
<approval_policy>never</approval_policy>
|
||||
<sandbox_mode>read-only</sandbox_mode>
|
||||
<network_access>restricted</network_access>
|
||||
@@ -253,12 +272,14 @@ mod tests {
|
||||
fn serialize_full_access_environment_context() {
|
||||
let context = EnvironmentContext::new(
|
||||
None,
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnFailure),
|
||||
Some(SandboxPolicy::DangerFullAccess),
|
||||
None,
|
||||
);
|
||||
|
||||
let expected = r#"<environment_context>
|
||||
<local_date>2025-01-01 +00:00</local_date>
|
||||
<approval_policy>on-failure</approval_policy>
|
||||
<sandbox_mode>danger-full-access</sandbox_mode>
|
||||
<network_access>enabled</network_access>
|
||||
@@ -272,12 +293,14 @@ mod tests {
|
||||
// Approval policy
|
||||
let context1 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo"], false)),
|
||||
None,
|
||||
);
|
||||
let context2 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::Never),
|
||||
Some(workspace_write_policy(vec!["/repo"], true)),
|
||||
None,
|
||||
@@ -289,12 +312,14 @@ mod tests {
|
||||
fn equals_except_shell_compares_sandbox_policy() {
|
||||
let context1 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::new_read_only_policy()),
|
||||
None,
|
||||
);
|
||||
let context2 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::new_workspace_write_policy()),
|
||||
None,
|
||||
@@ -307,12 +332,14 @@ mod tests {
|
||||
fn equals_except_shell_compares_workspace_write_policy() {
|
||||
let context1 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo", "/tmp", "/var"], false)),
|
||||
None,
|
||||
);
|
||||
let context2 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo", "/tmp"], true)),
|
||||
None,
|
||||
@@ -325,6 +352,7 @@ mod tests {
|
||||
fn equals_except_shell_ignores_shell() {
|
||||
let context1 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo"], false)),
|
||||
Some(Shell::Bash(BashShell {
|
||||
@@ -334,6 +362,7 @@ mod tests {
|
||||
);
|
||||
let context2 = EnvironmentContext::new(
|
||||
Some(PathBuf::from("/repo")),
|
||||
Some("2025-01-01 +00:00".to_string()),
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(workspace_write_policy(vec!["/repo"], false)),
|
||||
Some(Shell::Zsh(ZshShell {
|
||||
|
||||
@@ -4,8 +4,6 @@ use crate::token_data::KnownPlan;
|
||||
use crate::token_data::PlanType;
|
||||
use crate::truncate::truncate_middle;
|
||||
use chrono::DateTime;
|
||||
use chrono::Datelike;
|
||||
use chrono::Local;
|
||||
use chrono::Utc;
|
||||
use codex_async_utils::CancelErr;
|
||||
use codex_protocol::ConversationId;
|
||||
@@ -288,46 +286,28 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
}
|
||||
|
||||
fn retry_suffix(resets_at: Option<&DateTime<Utc>>) -> String {
|
||||
if let Some(resets_at) = resets_at {
|
||||
let formatted = format_retry_timestamp(resets_at);
|
||||
format!(" Try again at {formatted}.")
|
||||
if let Some(secs) = remaining_seconds(resets_at) {
|
||||
let reset_duration = format_reset_duration(secs);
|
||||
format!(" Try again in {reset_duration}.")
|
||||
} else {
|
||||
" Try again later.".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn retry_suffix_after_or(resets_at: Option<&DateTime<Utc>>) -> String {
|
||||
if let Some(resets_at) = resets_at {
|
||||
let formatted = format_retry_timestamp(resets_at);
|
||||
format!(" or try again at {formatted}.")
|
||||
if let Some(secs) = remaining_seconds(resets_at) {
|
||||
let reset_duration = format_reset_duration(secs);
|
||||
format!(" or try again in {reset_duration}.")
|
||||
} else {
|
||||
" or try again later.".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn format_retry_timestamp(resets_at: &DateTime<Utc>) -> String {
|
||||
let local_reset = resets_at.with_timezone(&Local);
|
||||
let local_now = now_for_retry().with_timezone(&Local);
|
||||
if local_reset.date_naive() == local_now.date_naive() {
|
||||
local_reset.format("%-I:%M %p").to_string()
|
||||
} else {
|
||||
let suffix = day_suffix(local_reset.day());
|
||||
local_reset
|
||||
.format(&format!("%b %-d{suffix}, %Y %-I:%M %p"))
|
||||
.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn day_suffix(day: u32) -> &'static str {
|
||||
match day {
|
||||
11..=13 => "th",
|
||||
_ => match day % 10 {
|
||||
1 => "st",
|
||||
2 => "nd", // codespell:ignore
|
||||
3 => "rd",
|
||||
_ => "th",
|
||||
},
|
||||
}
|
||||
fn remaining_seconds(resets_at: Option<&DateTime<Utc>>) -> Option<u64> {
|
||||
let resets_at = resets_at.cloned()?;
|
||||
let now = now_for_retry();
|
||||
let secs = resets_at.signed_duration_since(now).num_seconds();
|
||||
Some(if secs <= 0 { 0 } else { secs as u64 })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -346,6 +326,36 @@ fn now_for_retry() -> DateTime<Utc> {
|
||||
Utc::now()
|
||||
}
|
||||
|
||||
fn format_reset_duration(total_secs: u64) -> String {
|
||||
let days = total_secs / 86_400;
|
||||
let hours = (total_secs % 86_400) / 3_600;
|
||||
let minutes = (total_secs % 3_600) / 60;
|
||||
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
if days > 0 {
|
||||
let unit = if days == 1 { "day" } else { "days" };
|
||||
parts.push(format!("{days} {unit}"));
|
||||
}
|
||||
if hours > 0 {
|
||||
let unit = if hours == 1 { "hour" } else { "hours" };
|
||||
parts.push(format!("{hours} {unit}"));
|
||||
}
|
||||
if minutes > 0 {
|
||||
let unit = if minutes == 1 { "minute" } else { "minutes" };
|
||||
parts.push(format!("{minutes} {unit}"));
|
||||
}
|
||||
|
||||
if parts.is_empty() {
|
||||
return "less than a minute".to_string();
|
||||
}
|
||||
|
||||
match parts.len() {
|
||||
1 => parts[0].clone(),
|
||||
2 => format!("{} {}", parts[0], parts[1]),
|
||||
_ => format!("{} {} {}", parts[0], parts[1], parts[2]),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EnvVarError {
|
||||
/// Name of the environment variable that is missing.
|
||||
@@ -562,16 +572,15 @@ mod tests {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::hours(1);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Team)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. To get more access now, send a request to your admin or try again at {expected_time}."
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. To get more access now, send a request to your admin or try again in 1 hour."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -606,16 +615,15 @@ mod tests {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::hours(1);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Pro)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Visit chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Visit chatgpt.com/codex/settings/usage to purchase more credits or try again in 1 hour."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -624,14 +632,15 @@ mod tests {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::minutes(5);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Try again in 5 minutes."
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -640,16 +649,15 @@ mod tests {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::hours(3) + ChronoDuration::minutes(32);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Plus)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit chatgpt.com/codex/settings/usage to purchase more credits or try again in 3 hours 32 minutes."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -659,14 +667,15 @@ mod tests {
|
||||
let resets_at =
|
||||
base + ChronoDuration::days(2) + ChronoDuration::hours(3) + ChronoDuration::minutes(5);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Try again in 2 days 3 hours 5 minutes."
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -675,14 +684,15 @@ mod tests {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::seconds(30);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Try again in less than a minute."
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,19 +13,13 @@ use codex_protocol::user_input::UserInput;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::user_instructions::UserInstructions;
|
||||
|
||||
fn is_session_prefix(text: &str) -> bool {
|
||||
let trimmed = text.trim_start();
|
||||
let lowered = trimmed.to_ascii_lowercase();
|
||||
lowered.starts_with("<environment_context>")
|
||||
lowered.starts_with("<environment_context>") || lowered.starts_with("<user_instructions>")
|
||||
}
|
||||
|
||||
fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
if UserInstructions::is_user_instructions(message) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut content: Vec<UserInput> = Vec::new();
|
||||
|
||||
for content_item in message.iter() {
|
||||
@@ -173,38 +167,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_user_instructions_and_env() {
|
||||
let items = vec![
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<user_instructions>test_text</user_instructions>".to_string(),
|
||||
}],
|
||||
},
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<environment_context>test_text</environment_context>".to_string(),
|
||||
}],
|
||||
},
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "# AGENTS.md instructions for test_directory\n\n<INSTRUCTIONS>\ntest_text\n</INSTRUCTIONS>".to_string(),
|
||||
}],
|
||||
},
|
||||
];
|
||||
|
||||
for item in items {
|
||||
let turn_item = parse_turn_item(&item);
|
||||
assert!(turn_item.is_none(), "expected none, got {turn_item:?}");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_agent_message() {
|
||||
let item = ResponseItem::Message {
|
||||
|
||||
@@ -72,9 +72,6 @@ pub enum SandboxType {
|
||||
|
||||
/// Only available on Linux.
|
||||
LinuxSeccomp,
|
||||
|
||||
/// Only available on Windows.
|
||||
WindowsRestrictedToken,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -161,79 +158,11 @@ pub(crate) async fn execute_exec_env(
|
||||
};
|
||||
|
||||
let start = Instant::now();
|
||||
let raw_output_result = exec(params, sandbox, sandbox_policy, stdout_stream).await;
|
||||
let raw_output_result = exec(params, sandbox_policy, stdout_stream).await;
|
||||
let duration = start.elapsed();
|
||||
finalize_exec_result(raw_output_result, sandbox, duration)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn exec_windows_sandbox(
|
||||
params: ExecParams,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> Result<RawExecToolCallOutput> {
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
|
||||
let ExecParams {
|
||||
command,
|
||||
cwd,
|
||||
env,
|
||||
timeout_ms,
|
||||
..
|
||||
} = params;
|
||||
|
||||
let policy_str = match sandbox_policy {
|
||||
SandboxPolicy::DangerFullAccess => "workspace-write",
|
||||
SandboxPolicy::ReadOnly => "read-only",
|
||||
SandboxPolicy::WorkspaceWrite { .. } => "workspace-write",
|
||||
};
|
||||
|
||||
let sandbox_cwd = cwd.clone();
|
||||
let spawn_res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(policy_str, &sandbox_cwd, command, &cwd, env, timeout_ms)
|
||||
})
|
||||
.await;
|
||||
|
||||
let capture = match spawn_res {
|
||||
Ok(Ok(v)) => v,
|
||||
Ok(Err(err)) => {
|
||||
return Err(CodexErr::Io(io::Error::other(format!(
|
||||
"windows sandbox: {err}"
|
||||
))));
|
||||
}
|
||||
Err(join_err) => {
|
||||
return Err(CodexErr::Io(io::Error::other(format!(
|
||||
"windows sandbox join error: {join_err}"
|
||||
))));
|
||||
}
|
||||
};
|
||||
|
||||
let exit_status = synthetic_exit_status(capture.exit_code);
|
||||
let stdout = StreamOutput {
|
||||
text: capture.stdout,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: capture.stderr,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
// Best-effort aggregate: stdout then stderr
|
||||
let mut aggregated = Vec::with_capacity(stdout.text.len() + stderr.text.len());
|
||||
append_all(&mut aggregated, &stdout.text);
|
||||
append_all(&mut aggregated, &stderr.text);
|
||||
let aggregated_output = StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
Ok(RawExecToolCallOutput {
|
||||
exit_status,
|
||||
stdout,
|
||||
stderr,
|
||||
aggregated_output,
|
||||
timed_out: capture.timed_out,
|
||||
})
|
||||
}
|
||||
|
||||
fn finalize_exec_result(
|
||||
raw_output_result: std::result::Result<RawExecToolCallOutput, CodexErr>,
|
||||
sandbox_type: SandboxType,
|
||||
@@ -418,17 +347,11 @@ pub struct ExecToolCallOutput {
|
||||
pub timed_out: bool,
|
||||
}
|
||||
|
||||
#[cfg_attr(not(target_os = "windows"), allow(unused_variables))]
|
||||
async fn exec(
|
||||
params: ExecParams,
|
||||
sandbox: SandboxType,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
stdout_stream: Option<StdoutStream>,
|
||||
) -> Result<RawExecToolCallOutput> {
|
||||
#[cfg(target_os = "windows")]
|
||||
if sandbox == SandboxType::WindowsRestrictedToken {
|
||||
return exec_windows_sandbox(params, sandbox_policy).await;
|
||||
}
|
||||
let timeout = params.timeout_duration();
|
||||
let ExecParams {
|
||||
command,
|
||||
@@ -602,9 +525,8 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
|
||||
#[cfg(windows)]
|
||||
fn synthetic_exit_status(code: i32) -> ExitStatus {
|
||||
use std::os::windows::process::ExitStatusExt;
|
||||
// On Windows the raw status is a u32. Use a direct cast to avoid
|
||||
// panicking on negative i32 values produced by prior narrowing casts.
|
||||
std::process::ExitStatus::from_raw(code as u32)
|
||||
#[expect(clippy::unwrap_used)]
|
||||
std::process::ExitStatus::from_raw(code.try_into().unwrap())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -43,8 +43,6 @@ pub enum Feature {
|
||||
SandboxCommandAssessment,
|
||||
/// Create a ghost commit at each turn.
|
||||
GhostCommit,
|
||||
/// Enable Windows sandbox (restricted token) on Windows.
|
||||
WindowsSandbox,
|
||||
}
|
||||
|
||||
impl Feature {
|
||||
@@ -294,10 +292,4 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WindowsSandbox,
|
||||
key: "enable_experimental_windows_sandbox",
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -160,7 +160,7 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
|
||||
reasoning_summary_format: ReasoningSummaryFormat::Experimental,
|
||||
base_instructions: GPT_5_CODEX_INSTRUCTIONS.to_string(),
|
||||
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
|
||||
support_verbosity: false,
|
||||
support_verbosity: true,
|
||||
)
|
||||
} else if slug.starts_with("gpt-5") {
|
||||
model_family!(
|
||||
|
||||
@@ -46,7 +46,6 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
|
||||
| EventMsg::UndoCompleted(_)
|
||||
| EventMsg::TurnAborted(_) => true,
|
||||
EventMsg::Error(_)
|
||||
| EventMsg::Warning(_)
|
||||
| EventMsg::TaskStarted(_)
|
||||
| EventMsg::TaskComplete(_)
|
||||
| EventMsg::AgentMessageDelta(_)
|
||||
|
||||
@@ -10,23 +10,6 @@ use crate::exec::SandboxType;
|
||||
use crate::protocol::AskForApproval;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::sync::atomic::AtomicBool;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
static WINDOWS_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn set_windows_sandbox_enabled(enabled: bool) {
|
||||
WINDOWS_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
#[allow(dead_code)]
|
||||
pub fn set_windows_sandbox_enabled(_enabled: bool) {}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum SafetyCheck {
|
||||
AutoApprove {
|
||||
@@ -101,14 +84,6 @@ pub fn get_platform_sandbox() -> Option<SandboxType> {
|
||||
Some(SandboxType::MacosSeatbelt)
|
||||
} else if cfg!(target_os = "linux") {
|
||||
Some(SandboxType::LinuxSeccomp)
|
||||
} else if cfg!(target_os = "windows") {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if WINDOWS_SANDBOX_ENABLED.load(Ordering::Relaxed) {
|
||||
return Some(SandboxType::WindowsRestrictedToken);
|
||||
}
|
||||
}
|
||||
None
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
||||
@@ -74,13 +74,25 @@ impl SandboxManager {
|
||||
match pref {
|
||||
SandboxablePreference::Forbid => SandboxType::None,
|
||||
SandboxablePreference::Require => {
|
||||
// Require a platform sandbox when available; on Windows this
|
||||
// respects the enable_experimental_windows_sandbox feature.
|
||||
crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None)
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
return SandboxType::MacosSeatbelt;
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
return SandboxType::LinuxSeccomp;
|
||||
}
|
||||
#[allow(unreachable_code)]
|
||||
SandboxType::None
|
||||
}
|
||||
SandboxablePreference::Auto => match policy {
|
||||
SandboxPolicy::DangerFullAccess => SandboxType::None,
|
||||
_ => crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None),
|
||||
#[cfg(target_os = "macos")]
|
||||
_ => SandboxType::MacosSeatbelt,
|
||||
#[cfg(target_os = "linux")]
|
||||
_ => SandboxType::LinuxSeccomp,
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux")))]
|
||||
_ => SandboxType::None,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -131,14 +143,6 @@ impl SandboxManager {
|
||||
Some("codex-linux-sandbox".to_string()),
|
||||
)
|
||||
}
|
||||
// On Windows, the restricted token sandbox executes in-process via the
|
||||
// codex-windows-sandbox crate. We leave the command unchanged here and
|
||||
// branch during execution based on the sandbox type.
|
||||
#[cfg(target_os = "windows")]
|
||||
SandboxType::WindowsRestrictedToken => (command, HashMap::new(), None),
|
||||
// When building for non-Windows targets, this variant is never constructed.
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
SandboxType::WindowsRestrictedToken => (command, HashMap::new(), None),
|
||||
};
|
||||
|
||||
env.extend(sandbox_env);
|
||||
|
||||
@@ -89,10 +89,7 @@ impl SessionTask for UserShellCommandTask {
|
||||
let tool_call = ToolCall {
|
||||
tool_name: USER_SHELL_TOOL_NAME.to_string(),
|
||||
call_id: Uuid::new_v4().to_string(),
|
||||
payload: ToolPayload::LocalShell {
|
||||
params,
|
||||
is_user_shell_command: true,
|
||||
},
|
||||
payload: ToolPayload::LocalShell { params },
|
||||
};
|
||||
|
||||
let router = Arc::new(ToolRouter::from_config(&turn_context.tools_config, None));
|
||||
|
||||
@@ -40,7 +40,6 @@ pub enum ToolPayload {
|
||||
},
|
||||
LocalShell {
|
||||
params: ShellToolCallParams,
|
||||
is_user_shell_command: bool,
|
||||
},
|
||||
UnifiedExec {
|
||||
arguments: String,
|
||||
@@ -57,7 +56,7 @@ impl ToolPayload {
|
||||
match self {
|
||||
ToolPayload::Function { arguments } => Cow::Borrowed(arguments),
|
||||
ToolPayload::Custom { input } => Cow::Borrowed(input),
|
||||
ToolPayload::LocalShell { params, .. } => Cow::Owned(params.command.join(" ")),
|
||||
ToolPayload::LocalShell { params } => Cow::Owned(params.command.join(" ")),
|
||||
ToolPayload::UnifiedExec { arguments } => Cow::Borrowed(arguments),
|
||||
ToolPayload::Mcp { raw_arguments, .. } => Cow::Borrowed(raw_arguments),
|
||||
}
|
||||
|
||||
@@ -82,10 +82,7 @@ impl ToolHandler for ShellHandler {
|
||||
)
|
||||
.await
|
||||
}
|
||||
ToolPayload::LocalShell {
|
||||
params,
|
||||
is_user_shell_command,
|
||||
} => {
|
||||
ToolPayload::LocalShell { params } => {
|
||||
let exec_params = Self::to_exec_params(params, turn.as_ref());
|
||||
Self::run_exec_like(
|
||||
tool_name.as_str(),
|
||||
@@ -94,7 +91,7 @@ impl ToolHandler for ShellHandler {
|
||||
turn,
|
||||
tracker,
|
||||
call_id,
|
||||
is_user_shell_command,
|
||||
true,
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -222,7 +219,6 @@ impl ShellHandler {
|
||||
env: exec_params.env.clone(),
|
||||
with_escalated_permissions: exec_params.with_escalated_permissions,
|
||||
justification: exec_params.justification.clone(),
|
||||
is_user_shell_command,
|
||||
};
|
||||
let mut orchestrator = ToolOrchestrator::new();
|
||||
let mut runtime = ShellRuntime::new();
|
||||
|
||||
@@ -83,8 +83,6 @@ impl ToolOrchestrator {
|
||||
if tool.wants_escalated_first_attempt(req) {
|
||||
initial_sandbox = crate::exec::SandboxType::None;
|
||||
}
|
||||
// Platform-specific flag gating is handled by SandboxManager::select_initial
|
||||
// via crate::safety::get_platform_sandbox().
|
||||
let initial_attempt = SandboxAttempt {
|
||||
sandbox: initial_sandbox,
|
||||
policy: &turn_ctx.sandbox_policy,
|
||||
|
||||
@@ -120,10 +120,7 @@ impl ToolRouter {
|
||||
Ok(Some(ToolCall {
|
||||
tool_name: "local_shell".to_string(),
|
||||
call_id,
|
||||
payload: ToolPayload::LocalShell {
|
||||
params,
|
||||
is_user_shell_command: false,
|
||||
},
|
||||
payload: ToolPayload::LocalShell { params },
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,7 +34,6 @@ pub struct ShellRequest {
|
||||
pub env: std::collections::HashMap<String, String>,
|
||||
pub with_escalated_permissions: Option<bool>,
|
||||
pub justification: Option<String>,
|
||||
pub is_user_shell_command: bool,
|
||||
}
|
||||
|
||||
impl ProvidesSandboxRetryData for ShellRequest {
|
||||
@@ -122,9 +121,6 @@ impl Approvable<ShellRequest> for ShellRuntime {
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> bool {
|
||||
if req.is_user_shell_command {
|
||||
return false;
|
||||
}
|
||||
if is_known_safe_command(&req.command) {
|
||||
return false;
|
||||
}
|
||||
@@ -150,7 +146,7 @@ impl Approvable<ShellRequest> for ShellRuntime {
|
||||
}
|
||||
|
||||
fn wants_escalated_first_attempt(&self, req: &ShellRequest) -> bool {
|
||||
req.is_user_shell_command || req.with_escalated_permissions.unwrap_or(false)
|
||||
req.with_escalated_permissions.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,25 +3,29 @@ use serde::Serialize;
|
||||
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::USER_INSTRUCTIONS_CLOSE_TAG;
|
||||
use codex_protocol::protocol::USER_INSTRUCTIONS_OPEN_TAG;
|
||||
|
||||
pub const USER_INSTRUCTIONS_OPEN_TAG_LEGACY: &str = "<user_instructions>";
|
||||
pub const USER_INSTRUCTIONS_PREFIX: &str = "# AGENTS.md instructions for ";
|
||||
/// Wraps user instructions in a tag so the model can classify them easily.
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(rename = "user_instructions", rename_all = "snake_case")]
|
||||
pub(crate) struct UserInstructions {
|
||||
pub directory: String,
|
||||
pub text: String,
|
||||
text: String,
|
||||
}
|
||||
|
||||
impl UserInstructions {
|
||||
pub fn is_user_instructions(message: &[ContentItem]) -> bool {
|
||||
if let [ContentItem::InputText { text }] = message {
|
||||
text.starts_with(USER_INSTRUCTIONS_PREFIX)
|
||||
|| text.starts_with(USER_INSTRUCTIONS_OPEN_TAG_LEGACY)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
pub fn new<T: Into<String>>(text: T) -> Self {
|
||||
Self { text: text.into() }
|
||||
}
|
||||
|
||||
/// Serializes the user instructions to an XML-like tagged block that starts
|
||||
/// with <user_instructions> so clients can classify it.
|
||||
pub fn serialize_to_xml(self) -> String {
|
||||
format!(
|
||||
"{USER_INSTRUCTIONS_OPEN_TAG}\n\n{}\n\n{USER_INSTRUCTIONS_CLOSE_TAG}",
|
||||
self.text
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,11 +35,7 @@ impl From<UserInstructions> for ResponseItem {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: format!(
|
||||
"{USER_INSTRUCTIONS_PREFIX}{directory}\n\n<INSTRUCTIONS>\n{contents}\n</INSTRUCTIONS>",
|
||||
directory = ui.directory,
|
||||
contents = ui.text
|
||||
),
|
||||
text: ui.serialize_to_xml(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
@@ -68,51 +68,3 @@ impl From<DeveloperInstructions> for ResponseItem {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_user_instructions() {
|
||||
let user_instructions = UserInstructions {
|
||||
directory: "test_directory".to_string(),
|
||||
text: "test_text".to_string(),
|
||||
};
|
||||
let response_item: ResponseItem = user_instructions.into();
|
||||
|
||||
let ResponseItem::Message { role, content, .. } = response_item else {
|
||||
panic!("expected ResponseItem::Message");
|
||||
};
|
||||
|
||||
assert_eq!(role, "user");
|
||||
|
||||
let [ContentItem::InputText { text }] = content.as_slice() else {
|
||||
panic!("expected one InputText content item");
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
text,
|
||||
"# AGENTS.md instructions for test_directory\n\n<INSTRUCTIONS>\ntest_text\n</INSTRUCTIONS>",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_user_instructions() {
|
||||
assert!(UserInstructions::is_user_instructions(
|
||||
&[ContentItem::InputText {
|
||||
text: "# AGENTS.md instructions for test_directory\n\n<INSTRUCTIONS>\ntest_text\n</INSTRUCTIONS>".to_string(),
|
||||
}]
|
||||
));
|
||||
assert!(UserInstructions::is_user_instructions(&[
|
||||
ContentItem::InputText {
|
||||
text: "<user_instructions>test_text</user_instructions>".to_string(),
|
||||
}
|
||||
]));
|
||||
assert!(!UserInstructions::is_user_instructions(&[
|
||||
ContentItem::InputText {
|
||||
text: "test_text".to_string(),
|
||||
}
|
||||
]));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -479,7 +479,6 @@ pub async fn mount_sse_sequence(server: &MockServer, bodies: Vec<String>) -> Res
|
||||
|
||||
let (mock, response_mock) = base_mock();
|
||||
mock.respond_with(responder)
|
||||
.up_to_n_times(num_calls as u64)
|
||||
.expect(num_calls as u64)
|
||||
.mount(server)
|
||||
.await;
|
||||
|
||||
@@ -613,13 +613,8 @@ async fn includes_user_instructions_message_in_request() {
|
||||
.contains("be nice")
|
||||
);
|
||||
assert_message_role(&request_body["input"][0], "user");
|
||||
assert_message_starts_with(&request_body["input"][0], "# AGENTS.md instructions for ");
|
||||
assert_message_ends_with(&request_body["input"][0], "</INSTRUCTIONS>");
|
||||
let ui_text = request_body["input"][0]["content"][0]["text"]
|
||||
.as_str()
|
||||
.expect("invalid message content");
|
||||
assert!(ui_text.contains("<INSTRUCTIONS>"));
|
||||
assert!(ui_text.contains("be nice"));
|
||||
assert_message_starts_with(&request_body["input"][0], "<user_instructions>");
|
||||
assert_message_ends_with(&request_body["input"][0], "</user_instructions>");
|
||||
assert_message_role(&request_body["input"][1], "user");
|
||||
assert_message_starts_with(&request_body["input"][1], "<environment_context>");
|
||||
assert_message_ends_with(&request_body["input"][1], "</environment_context>");
|
||||
@@ -676,13 +671,8 @@ async fn includes_developer_instructions_message_in_request() {
|
||||
assert_message_role(&request_body["input"][0], "developer");
|
||||
assert_message_equals(&request_body["input"][0], "be useful");
|
||||
assert_message_role(&request_body["input"][1], "user");
|
||||
assert_message_starts_with(&request_body["input"][1], "# AGENTS.md instructions for ");
|
||||
assert_message_ends_with(&request_body["input"][1], "</INSTRUCTIONS>");
|
||||
let ui_text = request_body["input"][1]["content"][0]["text"]
|
||||
.as_str()
|
||||
.expect("invalid message content");
|
||||
assert!(ui_text.contains("<INSTRUCTIONS>"));
|
||||
assert!(ui_text.contains("be nice"));
|
||||
assert_message_starts_with(&request_body["input"][1], "<user_instructions>");
|
||||
assert_message_ends_with(&request_body["input"][1], "</user_instructions>");
|
||||
assert_message_role(&request_body["input"][2], "user");
|
||||
assert_message_starts_with(&request_body["input"][2], "<environment_context>");
|
||||
assert_message_ends_with(&request_body["input"][2], "</environment_context>");
|
||||
|
||||
@@ -8,7 +8,6 @@ use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::RolloutItem;
|
||||
use codex_core::protocol::RolloutLine;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::skip_if_no_network;
|
||||
@@ -46,7 +45,6 @@ const CONTEXT_LIMIT_MESSAGE: &str =
|
||||
const DUMMY_FUNCTION_NAME: &str = "unsupported_tool";
|
||||
const DUMMY_CALL_ID: &str = "call-multi-auto";
|
||||
const FUNCTION_CALL_LIMIT_MSG: &str = "function call limit push";
|
||||
pub(super) const COMPACT_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.";
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn summarize_context_three_requests_and_instructions() {
|
||||
@@ -120,11 +118,6 @@ async fn summarize_context_three_requests_and_instructions() {
|
||||
|
||||
// 2) Summarize – second hit should include the summarization prompt.
|
||||
codex.submit(Op::Compact).await.unwrap();
|
||||
let warning_event = wait_for_event(&codex, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning_event else {
|
||||
panic!("expected warning event after compact");
|
||||
};
|
||||
assert_eq!(message, COMPACT_WARNING_MESSAGE);
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
// 3) Next user input – third hit; history should include only the summary.
|
||||
@@ -295,11 +288,6 @@ async fn manual_compact_uses_custom_prompt() {
|
||||
.conversation;
|
||||
|
||||
codex.submit(Op::Compact).await.expect("trigger compact");
|
||||
let warning_event = wait_for_event(&codex, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning_event else {
|
||||
panic!("expected warning event after compact");
|
||||
};
|
||||
assert_eq!(message, COMPACT_WARNING_MESSAGE);
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = server.received_requests().await.expect("collect requests");
|
||||
@@ -754,6 +742,7 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex.submit(Op::Compact).await.unwrap();
|
||||
|
||||
let EventMsg::BackgroundEvent(event) =
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::BackgroundEvent(_))).await
|
||||
else {
|
||||
@@ -764,11 +753,6 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
"background event should mention trimmed item count: {}",
|
||||
event.message
|
||||
);
|
||||
let warning_event = wait_for_event(&codex, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning_event else {
|
||||
panic!("expected warning event after compact retry");
|
||||
};
|
||||
assert_eq!(message, COMPACT_WARNING_MESSAGE);
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = request_log.requests();
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
//! request payload that Codex would send to the model and assert that the
|
||||
//! model-visible history matches the expected sequence of messages.
|
||||
|
||||
use super::compact::COMPACT_WARNING_MESSAGE;
|
||||
use super::compact::FIRST_REPLY;
|
||||
use super::compact::SUMMARY_TEXT;
|
||||
use codex_core::CodexAuth;
|
||||
@@ -21,7 +20,6 @@ use codex_core::config::Config;
|
||||
use codex_core::config::OPENAI_DEFAULT_MODEL;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
@@ -815,11 +813,6 @@ async fn compact_conversation(conversation: &Arc<CodexConversation>) {
|
||||
.submit(Op::Compact)
|
||||
.await
|
||||
.expect("compact conversation");
|
||||
let warning_event = wait_for_event(conversation, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning_event else {
|
||||
panic!("expected warning event after compact");
|
||||
};
|
||||
assert_eq!(message, COMPACT_WARNING_MESSAGE);
|
||||
wait_for_event(conversation, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
}
|
||||
|
||||
|
||||
@@ -38,7 +38,6 @@ mod tool_harness;
|
||||
mod tool_parallelism;
|
||||
mod tools;
|
||||
mod truncation;
|
||||
mod undo;
|
||||
mod unified_exec;
|
||||
mod user_notification;
|
||||
mod user_shell_cmd;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
use chrono::Local;
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::ConversationManager;
|
||||
use codex_core::ModelProviderInfo;
|
||||
@@ -18,7 +19,10 @@ use codex_core::shell::default_user_shell;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::load_sse_fixture_with_id;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::responses::mount_sse_once;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use std::collections::HashMap;
|
||||
use tempfile::TempDir;
|
||||
@@ -37,9 +41,11 @@ fn text_user_input(text: String) -> serde_json::Value {
|
||||
}
|
||||
|
||||
fn default_env_context_str(cwd: &str, shell: &Shell) -> String {
|
||||
let local_date = Local::now().format("%Y-%m-%d %:z").to_string();
|
||||
format!(
|
||||
r#"<environment_context>
|
||||
<cwd>{}</cwd>
|
||||
<local_date>{local_date}</local_date>
|
||||
<approval_policy>on-request</approval_policy>
|
||||
<sandbox_mode>read-only</sandbox_mode>
|
||||
<network_access>restricted</network_access>
|
||||
@@ -341,23 +347,25 @@ async fn prefixes_context_and_instructions_once_and_consistently_across_requests
|
||||
|
||||
let shell = default_user_shell().await;
|
||||
|
||||
let expected_env_text = format!(
|
||||
r#"<environment_context>
|
||||
let expected_env_text = {
|
||||
let local_date = Local::now().format("%Y-%m-%d %:z").to_string();
|
||||
format!(
|
||||
r#"<environment_context>
|
||||
<cwd>{}</cwd>
|
||||
<local_date>{local_date}</local_date>
|
||||
<approval_policy>on-request</approval_policy>
|
||||
<sandbox_mode>read-only</sandbox_mode>
|
||||
<network_access>restricted</network_access>
|
||||
{}</environment_context>"#,
|
||||
cwd.path().to_string_lossy(),
|
||||
match shell.name() {
|
||||
Some(name) => format!(" <shell>{name}</shell>\n"),
|
||||
None => String::new(),
|
||||
}
|
||||
);
|
||||
let expected_ui_text = format!(
|
||||
"# AGENTS.md instructions for {}\n\n<INSTRUCTIONS>\nbe consistent and helpful\n</INSTRUCTIONS>",
|
||||
cwd.path().to_string_lossy()
|
||||
);
|
||||
cwd.path().to_string_lossy(),
|
||||
match shell.name() {
|
||||
Some(name) => format!(" <shell>{name}</shell>\n"),
|
||||
None => String::new(),
|
||||
}
|
||||
)
|
||||
};
|
||||
let expected_ui_text =
|
||||
"<user_instructions>\n\nbe consistent and helpful\n\n</user_instructions>";
|
||||
|
||||
let expected_env_msg = serde_json::json!({
|
||||
"type": "message",
|
||||
@@ -736,11 +744,9 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() {
|
||||
let body2 = requests[1].body_json::<serde_json::Value>().unwrap();
|
||||
|
||||
let shell = default_user_shell().await;
|
||||
let expected_ui_text = format!(
|
||||
"# AGENTS.md instructions for {}\n\n<INSTRUCTIONS>\nbe consistent and helpful\n</INSTRUCTIONS>",
|
||||
default_cwd.to_string_lossy()
|
||||
);
|
||||
let expected_ui_msg = text_user_input(expected_ui_text);
|
||||
let expected_ui_text =
|
||||
"<user_instructions>\n\nbe consistent and helpful\n\n</user_instructions>";
|
||||
let expected_ui_msg = text_user_input(expected_ui_text.to_string());
|
||||
|
||||
let expected_env_msg_1 = text_user_input(default_env_context_str(
|
||||
&cwd.path().to_string_lossy(),
|
||||
@@ -852,10 +858,8 @@ async fn send_user_turn_with_changes_sends_environment_context() {
|
||||
let body2 = requests[1].body_json::<serde_json::Value>().unwrap();
|
||||
|
||||
let shell = default_user_shell().await;
|
||||
let expected_ui_text = format!(
|
||||
"# AGENTS.md instructions for {}\n\n<INSTRUCTIONS>\nbe consistent and helpful\n</INSTRUCTIONS>",
|
||||
default_cwd.to_string_lossy()
|
||||
);
|
||||
let expected_ui_text =
|
||||
"<user_instructions>\n\nbe consistent and helpful\n\n</user_instructions>";
|
||||
let expected_ui_msg = serde_json::json!({
|
||||
"type": "message",
|
||||
"role": "user",
|
||||
@@ -889,3 +893,68 @@ async fn send_user_turn_with_changes_sends_environment_context() {
|
||||
]);
|
||||
assert_eq!(body2["input"], expected_input_2);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn cached_prompt_filters_reasoning_items_from_previous_turns() -> anyhow::Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let call_id = "shell-call";
|
||||
let shell_args = serde_json::json!({
|
||||
"command": ["/bin/echo", "tool output"],
|
||||
"timeout_ms": 1_000,
|
||||
});
|
||||
|
||||
let initial_response = responses::sse(vec![
|
||||
responses::ev_response_created("resp-first"),
|
||||
responses::ev_reasoning_item("reason-1", &["Planning shell command"], &[]),
|
||||
responses::ev_function_call(
|
||||
call_id,
|
||||
"shell",
|
||||
&serde_json::to_string(&shell_args).expect("serialize shell args"),
|
||||
),
|
||||
responses::ev_completed("resp-first"),
|
||||
]);
|
||||
let follow_up_response = responses::sse(vec![
|
||||
responses::ev_response_created("resp-follow-up"),
|
||||
responses::ev_reasoning_item(
|
||||
"reason-2",
|
||||
&["Shell execution completed"],
|
||||
&["stdout: tool output"],
|
||||
),
|
||||
responses::ev_assistant_message("assistant-1", "First turn reply"),
|
||||
responses::ev_completed("resp-follow-up"),
|
||||
]);
|
||||
let second_turn_response = responses::sse(vec![
|
||||
responses::ev_response_created("resp-second"),
|
||||
responses::ev_assistant_message("assistant-2", "Second turn reply"),
|
||||
responses::ev_completed("resp-second"),
|
||||
]);
|
||||
mount_sse_once(&server, initial_response).await;
|
||||
let second_request = mount_sse_once(&server, follow_up_response).await;
|
||||
let third_request = mount_sse_once(&server, second_turn_response).await;
|
||||
|
||||
let mut builder = test_codex();
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
test.submit_turn("hello 1").await?;
|
||||
test.submit_turn("hello 2").await?;
|
||||
|
||||
let second_request_input = second_request.single_request();
|
||||
let reasoning_items = second_request_input.inputs_of_type("reasoning");
|
||||
assert_eq!(
|
||||
reasoning_items.len(),
|
||||
1,
|
||||
"expected first turn follow-up to include reasoning item"
|
||||
);
|
||||
|
||||
let third_request_input = third_request.single_request();
|
||||
let cached_reasoning = third_request_input.inputs_of_type("reasoning");
|
||||
assert_eq!(
|
||||
cached_reasoning.len(),
|
||||
0,
|
||||
"expected cached prompt to filter out prior reasoning items"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -30,18 +30,6 @@ use serde_json::Value;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
|
||||
const FIXTURE_JSON: &str = r#"{
|
||||
"description": "This is an example JSON file.",
|
||||
"foo": "bar",
|
||||
"isTest": true,
|
||||
"testNumber": 123,
|
||||
"testArray": [1, 2, 3],
|
||||
"testObject": {
|
||||
"foo": "bar"
|
||||
}
|
||||
}
|
||||
"#;
|
||||
|
||||
async fn submit_turn(test: &TestCodex, prompt: &str, sandbox_policy: SandboxPolicy) -> Result<()> {
|
||||
let session_model = test.session_configured.model.clone();
|
||||
|
||||
@@ -237,154 +225,6 @@ freeform shell
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn shell_output_preserves_fixture_json_without_serialization() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config.features.disable(Feature::ApplyPatchFreeform);
|
||||
config.model = "gpt-5".to_string();
|
||||
config.model_family = find_family_for_model("gpt-5").expect("gpt-5 is a model family");
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
let fixture_path = test.cwd.path().join("fixture.json");
|
||||
fs::write(&fixture_path, FIXTURE_JSON)?;
|
||||
let fixture_path_str = fixture_path.to_string_lossy().to_string();
|
||||
|
||||
let call_id = "shell-json-fixture";
|
||||
let args = json!({
|
||||
"command": ["/usr/bin/sed", "-n", "p", fixture_path_str],
|
||||
"timeout_ms": 1_000,
|
||||
});
|
||||
let responses = vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_assistant_message("msg-1", "done"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
];
|
||||
mount_sse_sequence(&server, responses).await;
|
||||
|
||||
submit_turn(
|
||||
&test,
|
||||
"read the fixture JSON with sed",
|
||||
SandboxPolicy::DangerFullAccess,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let requests = server
|
||||
.received_requests()
|
||||
.await
|
||||
.expect("recorded requests present");
|
||||
let bodies = request_bodies(&requests)?;
|
||||
let output_item = find_function_call_output(&bodies, call_id).expect("shell output present");
|
||||
let output = output_item
|
||||
.get("output")
|
||||
.and_then(Value::as_str)
|
||||
.expect("shell output string");
|
||||
|
||||
let mut parsed: Value = serde_json::from_str(output)?;
|
||||
if let Some(metadata) = parsed.get_mut("metadata").and_then(Value::as_object_mut) {
|
||||
let _ = metadata.remove("duration_seconds");
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
parsed
|
||||
.get("metadata")
|
||||
.and_then(|metadata| metadata.get("exit_code"))
|
||||
.and_then(Value::as_i64),
|
||||
Some(0),
|
||||
"expected zero exit code when serialization is disabled",
|
||||
);
|
||||
let stdout = parsed
|
||||
.get("output")
|
||||
.and_then(Value::as_str)
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
assert_eq!(
|
||||
stdout, FIXTURE_JSON,
|
||||
"expected shell output to match the fixture contents"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn shell_output_structures_fixture_with_serialization() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config.features.enable(Feature::ApplyPatchFreeform);
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
let fixture_path = test.cwd.path().join("fixture.json");
|
||||
fs::write(&fixture_path, FIXTURE_JSON)?;
|
||||
let fixture_path_str = fixture_path.to_string_lossy().to_string();
|
||||
|
||||
let call_id = "shell-structured-fixture";
|
||||
let args = json!({
|
||||
"command": ["/usr/bin/sed", "-n", "p", fixture_path_str],
|
||||
"timeout_ms": 1_000,
|
||||
});
|
||||
let responses = vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_assistant_message("msg-1", "done"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
];
|
||||
mount_sse_sequence(&server, responses).await;
|
||||
|
||||
submit_turn(
|
||||
&test,
|
||||
"read the fixture JSON with structured output",
|
||||
SandboxPolicy::DangerFullAccess,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let requests = server
|
||||
.received_requests()
|
||||
.await
|
||||
.expect("recorded requests present");
|
||||
let bodies = request_bodies(&requests)?;
|
||||
let output_item =
|
||||
find_function_call_output(&bodies, call_id).expect("structured output present");
|
||||
let output = output_item
|
||||
.get("output")
|
||||
.and_then(Value::as_str)
|
||||
.expect("structured output string");
|
||||
|
||||
assert!(
|
||||
serde_json::from_str::<Value>(output).is_err(),
|
||||
"expected structured output to be plain text"
|
||||
);
|
||||
let (header, body) = output
|
||||
.split_once("Output:\n")
|
||||
.expect("structured output contains an Output section");
|
||||
assert_regex_match(
|
||||
r"(?s)^Exit code: 0\nWall time: [0-9]+(?:\.[0-9]+)? seconds$",
|
||||
header.trim_end(),
|
||||
);
|
||||
assert_eq!(
|
||||
body, FIXTURE_JSON,
|
||||
"expected Output section to include the fixture contents"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn shell_output_for_freeform_tool_records_duration() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
@@ -3,16 +3,9 @@
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use codex_core::config::types::McpServerConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::model_family::find_family_for_model;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::assert_regex_match;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
@@ -25,13 +18,10 @@ use core_test_support::responses::sse;
|
||||
use core_test_support::responses::start_mock_server;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use escargot::CargoBuild;
|
||||
use regex_lite::Regex;
|
||||
use serde_json::Value;
|
||||
use serde_json::json;
|
||||
use std::collections::HashMap;
|
||||
use std::time::Duration;
|
||||
use wiremock::matchers::any;
|
||||
|
||||
// Verifies byte-truncation formatting for function error output (RespondToModel errors)
|
||||
@@ -278,105 +268,3 @@ async fn mcp_tool_call_output_exceeds_limit_truncated_for_model() -> Result<()>
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Verifies that an MCP image tool output is serialized as content_items array with
|
||||
// the image preserved and no truncation summary appended (since there are no text items).
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
|
||||
async fn mcp_image_output_preserves_image_and_no_text_summary() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
|
||||
let call_id = "rmcp-image-no-trunc";
|
||||
let server_name = "rmcp";
|
||||
let tool_name = format!("mcp__{server_name}__image");
|
||||
|
||||
mount_sse_once_match(
|
||||
&server,
|
||||
any(),
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, &tool_name, "{}"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
)
|
||||
.await;
|
||||
let final_mock = mount_sse_once_match(
|
||||
&server,
|
||||
any(),
|
||||
sse(vec![
|
||||
ev_assistant_message("msg-1", "done"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Build the stdio rmcp server and pass a tiny PNG via data URL so it can construct ImageContent.
|
||||
let rmcp_test_server_bin = CargoBuild::new()
|
||||
.package("codex-rmcp-client")
|
||||
.bin("test_stdio_server")
|
||||
.run()?
|
||||
.path()
|
||||
.to_string_lossy()
|
||||
.into_owned();
|
||||
|
||||
// 1x1 PNG data URL
|
||||
let openai_png = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mP8/x8AAwMB/ee9bQAAAABJRU5ErkJggg==";
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.features.enable(Feature::RmcpClient);
|
||||
config.mcp_servers.insert(
|
||||
server_name.to_string(),
|
||||
McpServerConfig {
|
||||
transport: McpServerTransportConfig::Stdio {
|
||||
command: rmcp_test_server_bin,
|
||||
args: Vec::new(),
|
||||
env: Some(HashMap::from([(
|
||||
"MCP_TEST_IMAGE_DATA_URL".to_string(),
|
||||
openai_png.to_string(),
|
||||
)])),
|
||||
env_vars: Vec::new(),
|
||||
cwd: None,
|
||||
},
|
||||
enabled: true,
|
||||
startup_timeout_sec: Some(Duration::from_secs(10)),
|
||||
tool_timeout_sec: None,
|
||||
enabled_tools: None,
|
||||
disabled_tools: None,
|
||||
},
|
||||
);
|
||||
});
|
||||
let fixture = builder.build(&server).await?;
|
||||
let session_model = fixture.session_configured.model.clone();
|
||||
|
||||
fixture
|
||||
.codex
|
||||
.submit(Op::UserTurn {
|
||||
items: vec![UserInput::Text {
|
||||
text: "call the rmcp image tool".into(),
|
||||
}],
|
||||
final_output_json_schema: None,
|
||||
cwd: fixture.cwd.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
model: session_model,
|
||||
effort: None,
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Wait for completion to ensure the outbound request is captured.
|
||||
wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
let output_item = final_mock.single_request().function_call_output(call_id);
|
||||
// Expect exactly one array element: the image item; and no trailing summary text.
|
||||
let output = output_item.get("output").expect("output");
|
||||
assert!(output.is_array(), "expected array output");
|
||||
let arr = output.as_array().unwrap();
|
||||
assert_eq!(arr.len(), 1, "no truncation summary should be appended");
|
||||
assert_eq!(
|
||||
arr[0],
|
||||
json!({"type": "input_image", "image_url": openai_png})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,491 +0,0 @@
|
||||
#![cfg(not(target_os = "windows"))]
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use codex_core::CodexConversation;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::model_family::find_family_for_model;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::UndoCompletedEvent;
|
||||
use core_test_support::responses::ev_apply_patch_function_call;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
use core_test_support::responses::ev_response_created;
|
||||
use core_test_support::responses::mount_sse_sequence;
|
||||
use core_test_support::responses::sse;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use core_test_support::test_codex::TestCodexHarness;
|
||||
use core_test_support::wait_for_event_match;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[allow(clippy::expect_used)]
|
||||
async fn undo_harness() -> Result<TestCodexHarness> {
|
||||
TestCodexHarness::with_config(|config: &mut Config| {
|
||||
config.include_apply_patch_tool = true;
|
||||
config.model = "gpt-5".to_string();
|
||||
config.model_family = find_family_for_model("gpt-5").expect("gpt-5 is valid");
|
||||
config.features.enable(Feature::GhostCommit);
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
fn git(path: &Path, args: &[&str]) -> Result<()> {
|
||||
let status = Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(path)
|
||||
.status()
|
||||
.with_context(|| format!("failed to run git {args:?}"))?;
|
||||
if status.success() {
|
||||
return Ok(());
|
||||
}
|
||||
let exit_status = status;
|
||||
bail!("git {args:?} exited with {exit_status}");
|
||||
}
|
||||
|
||||
fn git_output(path: &Path, args: &[&str]) -> Result<String> {
|
||||
let output = Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(path)
|
||||
.output()
|
||||
.with_context(|| format!("failed to run git {args:?}"))?;
|
||||
if !output.status.success() {
|
||||
let exit_status = output.status;
|
||||
bail!("git {args:?} exited with {exit_status}");
|
||||
}
|
||||
String::from_utf8(output.stdout).context("stdout was not valid utf8")
|
||||
}
|
||||
|
||||
fn init_git_repo(path: &Path) -> Result<()> {
|
||||
// Use a consistent initial branch and config across environments to avoid
|
||||
// CI variance (default-branch hints, line ending differences, etc.).
|
||||
git(path, &["init", "--initial-branch=main"])?;
|
||||
git(path, &["config", "core.autocrlf", "false"])?;
|
||||
git(path, &["config", "user.name", "Codex Tests"])?;
|
||||
git(path, &["config", "user.email", "codex-tests@example.com"])?;
|
||||
|
||||
// Create README.txt
|
||||
let readme_path = path.join("README.txt");
|
||||
fs::write(&readme_path, "Test repository initialized by Codex.\n")?;
|
||||
|
||||
// Stage and commit
|
||||
git(path, &["add", "README.txt"])?;
|
||||
git(path, &["commit", "-m", "Add README.txt"])?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn apply_patch_responses(call_id: &str, patch: &str, assistant_msg: &str) -> Vec<String> {
|
||||
vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_apply_patch_function_call(call_id, patch),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_assistant_message("msg-1", assistant_msg),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
]
|
||||
}
|
||||
|
||||
async fn run_apply_patch_turn(
|
||||
harness: &TestCodexHarness,
|
||||
prompt: &str,
|
||||
call_id: &str,
|
||||
patch: &str,
|
||||
assistant_msg: &str,
|
||||
) -> Result<()> {
|
||||
mount_sse_sequence(
|
||||
harness.server(),
|
||||
apply_patch_responses(call_id, patch, assistant_msg),
|
||||
)
|
||||
.await;
|
||||
harness.submit(prompt).await
|
||||
}
|
||||
|
||||
async fn invoke_undo(codex: &Arc<CodexConversation>) -> Result<UndoCompletedEvent> {
|
||||
codex.submit(Op::Undo).await?;
|
||||
let event = wait_for_event_match(codex, |msg| match msg {
|
||||
EventMsg::UndoCompleted(done) => Some(done.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
async fn expect_successful_undo(codex: &Arc<CodexConversation>) -> Result<UndoCompletedEvent> {
|
||||
let event = invoke_undo(codex).await?;
|
||||
assert!(
|
||||
event.success,
|
||||
"expected undo to succeed but failed with message {:?}",
|
||||
event.message
|
||||
);
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
async fn expect_failed_undo(codex: &Arc<CodexConversation>) -> Result<UndoCompletedEvent> {
|
||||
let event = invoke_undo(codex).await?;
|
||||
assert!(
|
||||
!event.success,
|
||||
"expected undo to fail but succeeded with message {:?}",
|
||||
event.message
|
||||
);
|
||||
assert_eq!(
|
||||
event.message.as_deref(),
|
||||
Some("No ghost snapshot available to undo.")
|
||||
);
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_removes_new_file_created_during_turn() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let call_id = "undo-create-file";
|
||||
let patch = "*** Begin Patch\n*** Add File: new_file.txt\n+from turn\n*** End Patch";
|
||||
run_apply_patch_turn(&harness, "create file", call_id, patch, "ok").await?;
|
||||
|
||||
let new_path = harness.path("new_file.txt");
|
||||
assert_eq!(fs::read_to_string(&new_path)?, "from turn\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
let completed = expect_successful_undo(&codex).await?;
|
||||
assert!(completed.success, "undo failed: {:?}", completed.message);
|
||||
|
||||
assert!(!new_path.exists());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_restores_tracked_file_edit() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let tracked = harness.path("tracked.txt");
|
||||
fs::write(&tracked, "before\n")?;
|
||||
git(harness.cwd(), &["add", "tracked.txt"])?;
|
||||
git(harness.cwd(), &["commit", "-m", "track file"])?;
|
||||
|
||||
let patch = "*** Begin Patch\n*** Update File: tracked.txt\n@@\n-before\n+after\n*** End Patch";
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"update tracked file",
|
||||
"undo-tracked-edit",
|
||||
patch,
|
||||
"done",
|
||||
)
|
||||
.await?;
|
||||
println!(
|
||||
"apply_patch output: {}",
|
||||
harness.function_call_stdout("undo-tracked-edit").await
|
||||
);
|
||||
|
||||
assert_eq!(fs::read_to_string(&tracked)?, "after\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
let completed = expect_successful_undo(&codex).await?;
|
||||
assert!(completed.success, "undo failed: {:?}", completed.message);
|
||||
|
||||
assert_eq!(fs::read_to_string(&tracked)?, "before\n");
|
||||
let status = git_output(harness.cwd(), &["status", "--short"])?;
|
||||
assert_eq!(status, "");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_restores_untracked_file_edit() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
git(harness.cwd(), &["commit", "--allow-empty", "-m", "init"])?;
|
||||
|
||||
let notes = harness.path("notes.txt");
|
||||
fs::write(¬es, "original\n")?;
|
||||
let status_before = git_output(harness.cwd(), &["status", "--short", "--ignored"])?;
|
||||
assert!(status_before.contains("?? notes.txt"));
|
||||
|
||||
let patch =
|
||||
"*** Begin Patch\n*** Update File: notes.txt\n@@\n-original\n+modified\n*** End Patch";
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"edit untracked",
|
||||
"undo-untracked-edit",
|
||||
patch,
|
||||
"done",
|
||||
)
|
||||
.await?;
|
||||
|
||||
assert_eq!(fs::read_to_string(¬es)?, "modified\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
let completed = expect_successful_undo(&codex).await?;
|
||||
assert!(completed.success, "undo failed: {:?}", completed.message);
|
||||
|
||||
assert_eq!(fs::read_to_string(¬es)?, "original\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_reverts_only_latest_turn() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let call_id_one = "undo-turn-one";
|
||||
let add_patch = "*** Begin Patch\n*** Add File: story.txt\n+first version\n*** End Patch";
|
||||
run_apply_patch_turn(&harness, "create story", call_id_one, add_patch, "done").await?;
|
||||
let story = harness.path("story.txt");
|
||||
assert_eq!(fs::read_to_string(&story)?, "first version\n");
|
||||
|
||||
let call_id_two = "undo-turn-two";
|
||||
let update_patch = "*** Begin Patch\n*** Update File: story.txt\n@@\n-first version\n+second version\n*** End Patch";
|
||||
run_apply_patch_turn(&harness, "revise story", call_id_two, update_patch, "done").await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "second version\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
let completed = expect_successful_undo(&codex).await?;
|
||||
assert!(completed.success, "undo failed: {:?}", completed.message);
|
||||
|
||||
assert_eq!(fs::read_to_string(&story)?, "first version\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_does_not_touch_unrelated_files() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let tracked_constant = harness.path("stable.txt");
|
||||
fs::write(&tracked_constant, "stable\n")?;
|
||||
let target = harness.path("target.txt");
|
||||
fs::write(&target, "start\n")?;
|
||||
let gitignore = harness.path(".gitignore");
|
||||
fs::write(&gitignore, "ignored-stable.log\n")?;
|
||||
git(
|
||||
harness.cwd(),
|
||||
&["add", "stable.txt", "target.txt", ".gitignore"],
|
||||
)?;
|
||||
git(harness.cwd(), &["commit", "-m", "seed tracked"])?;
|
||||
|
||||
let preexisting_untracked = harness.path("scratch.txt");
|
||||
fs::write(&preexisting_untracked, "scratch before\n")?;
|
||||
let ignored = harness.path("ignored-stable.log");
|
||||
fs::write(&ignored, "ignored before\n")?;
|
||||
|
||||
let full_patch = "*** Begin Patch\n*** Update File: target.txt\n@@\n-start\n+edited\n*** Add File: temp.txt\n+ephemeral\n*** End Patch";
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"modify target",
|
||||
"undo-unrelated",
|
||||
full_patch,
|
||||
"done",
|
||||
)
|
||||
.await?;
|
||||
let temp = harness.path("temp.txt");
|
||||
assert_eq!(fs::read_to_string(&target)?, "edited\n");
|
||||
assert_eq!(fs::read_to_string(&temp)?, "ephemeral\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
let completed = expect_successful_undo(&codex).await?;
|
||||
assert!(completed.success, "undo failed: {:?}", completed.message);
|
||||
|
||||
assert_eq!(fs::read_to_string(&tracked_constant)?, "stable\n");
|
||||
assert_eq!(fs::read_to_string(&target)?, "start\n");
|
||||
assert_eq!(
|
||||
fs::read_to_string(&preexisting_untracked)?,
|
||||
"scratch before\n"
|
||||
);
|
||||
assert_eq!(fs::read_to_string(&ignored)?, "ignored before\n");
|
||||
assert!(!temp.exists());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_sequential_turns_consumes_snapshots() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let story = harness.path("story.txt");
|
||||
fs::write(&story, "initial\n")?;
|
||||
git(harness.cwd(), &["add", "story.txt"])?;
|
||||
git(harness.cwd(), &["commit", "-m", "seed story"])?;
|
||||
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"first change",
|
||||
"seq-turn-1",
|
||||
"*** Begin Patch\n*** Update File: story.txt\n@@\n-initial\n+turn one\n*** End Patch",
|
||||
"ok",
|
||||
)
|
||||
.await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "turn one\n");
|
||||
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"second change",
|
||||
"seq-turn-2",
|
||||
"*** Begin Patch\n*** Update File: story.txt\n@@\n-turn one\n+turn two\n*** End Patch",
|
||||
"ok",
|
||||
)
|
||||
.await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "turn two\n");
|
||||
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"third change",
|
||||
"seq-turn-3",
|
||||
"*** Begin Patch\n*** Update File: story.txt\n@@\n-turn two\n+turn three\n*** End Patch",
|
||||
"ok",
|
||||
)
|
||||
.await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "turn three\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
expect_successful_undo(&codex).await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "turn two\n");
|
||||
|
||||
expect_successful_undo(&codex).await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "turn one\n");
|
||||
|
||||
expect_successful_undo(&codex).await?;
|
||||
assert_eq!(fs::read_to_string(&story)?, "initial\n");
|
||||
|
||||
expect_failed_undo(&codex).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_without_snapshot_reports_failure() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
|
||||
expect_failed_undo(&codex).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_restores_moves_and_renames() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let source = harness.path("rename_me.txt");
|
||||
fs::write(&source, "original\n")?;
|
||||
git(harness.cwd(), &["add", "rename_me.txt"])?;
|
||||
git(harness.cwd(), &["commit", "-m", "add rename target"])?;
|
||||
|
||||
let patch = "*** Begin Patch\n*** Update File: rename_me.txt\n*** Move to: relocated/renamed.txt\n@@\n-original\n+renamed content\n*** End Patch";
|
||||
run_apply_patch_turn(&harness, "rename file", "undo-rename", patch, "done").await?;
|
||||
|
||||
let destination = harness.path("relocated/renamed.txt");
|
||||
assert!(!source.exists());
|
||||
assert_eq!(fs::read_to_string(&destination)?, "renamed content\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
expect_successful_undo(&codex).await?;
|
||||
|
||||
assert_eq!(fs::read_to_string(&source)?, "original\n");
|
||||
assert!(!destination.exists());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_does_not_touch_ignored_directory_contents() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let gitignore = harness.path(".gitignore");
|
||||
fs::write(&gitignore, "logs/\n")?;
|
||||
git(harness.cwd(), &["add", ".gitignore"])?;
|
||||
git(harness.cwd(), &["commit", "-m", "ignore logs directory"])?;
|
||||
|
||||
let logs_dir = harness.path("logs");
|
||||
fs::create_dir_all(&logs_dir)?;
|
||||
let preserved = logs_dir.join("persistent.log");
|
||||
fs::write(&preserved, "keep me\n")?;
|
||||
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"write log",
|
||||
"undo-log",
|
||||
"*** Begin Patch\n*** Add File: logs/session.log\n+ephemeral log\n*** End Patch",
|
||||
"ok",
|
||||
)
|
||||
.await?;
|
||||
|
||||
let new_log = logs_dir.join("session.log");
|
||||
assert_eq!(fs::read_to_string(&new_log)?, "ephemeral log\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
expect_successful_undo(&codex).await?;
|
||||
|
||||
assert!(new_log.exists());
|
||||
assert_eq!(fs::read_to_string(&preserved)?, "keep me\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn undo_overwrites_manual_edits_after_turn() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = undo_harness().await?;
|
||||
init_git_repo(harness.cwd())?;
|
||||
|
||||
let tracked = harness.path("tracked.txt");
|
||||
fs::write(&tracked, "baseline\n")?;
|
||||
git(harness.cwd(), &["add", "tracked.txt"])?;
|
||||
git(harness.cwd(), &["commit", "-m", "baseline tracked"])?;
|
||||
|
||||
run_apply_patch_turn(
|
||||
&harness,
|
||||
"modify tracked",
|
||||
"undo-manual-overwrite",
|
||||
"*** Begin Patch\n*** Update File: tracked.txt\n@@\n-baseline\n+turn change\n*** End Patch",
|
||||
"ok",
|
||||
)
|
||||
.await?;
|
||||
assert_eq!(fs::read_to_string(&tracked)?, "turn change\n");
|
||||
|
||||
fs::write(&tracked, "manual edit\n")?;
|
||||
assert_eq!(fs::read_to_string(&tracked)?, "manual edit\n");
|
||||
|
||||
let codex = Arc::clone(&harness.test().codex);
|
||||
expect_successful_undo(&codex).await?;
|
||||
|
||||
assert_eq!(fs::read_to_string(&tracked)?, "baseline\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -73,7 +73,6 @@ For complete documentation of the `Op` and `EventMsg` variants, refer to [protoc
|
||||
- `EventMsg::ExecApprovalRequest` – Request approval from user to execute a command
|
||||
- `EventMsg::TaskComplete` – A task completed successfully
|
||||
- `EventMsg::Error` – A task stopped with an error
|
||||
- `EventMsg::Warning` – A non-fatal warning that the client should surface to the user
|
||||
- `EventMsg::TurnComplete` – Contains a `response_id` bookmark for last `response_id` executed by the task. This can be used to continue the task at a later point in time, perhaps with additional user input.
|
||||
|
||||
The `response_id` returned from each task matches the OpenAI `response_id` stored in the API's `/responses` endpoint. It can be stored and used in future `Sessions` to resume threads of work.
|
||||
|
||||
@@ -21,7 +21,6 @@ use codex_core::protocol::StreamErrorEvent;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use codex_core::protocol::TurnAbortReason;
|
||||
use codex_core::protocol::TurnDiffEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::protocol::WebSearchEndEvent;
|
||||
use codex_protocol::num_format::format_with_separators;
|
||||
use owo_colors::OwoColorize;
|
||||
@@ -55,7 +54,6 @@ pub(crate) struct EventProcessorWithHumanOutput {
|
||||
red: Style,
|
||||
green: Style,
|
||||
cyan: Style,
|
||||
yellow: Style,
|
||||
|
||||
/// Whether to include `AgentReasoning` events in the output.
|
||||
show_agent_reasoning: bool,
|
||||
@@ -83,7 +81,6 @@ impl EventProcessorWithHumanOutput {
|
||||
red: Style::new().red(),
|
||||
green: Style::new().green(),
|
||||
cyan: Style::new().cyan(),
|
||||
yellow: Style::new().yellow(),
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
show_raw_agent_reasoning: config.show_raw_agent_reasoning,
|
||||
last_message_path,
|
||||
@@ -100,7 +97,6 @@ impl EventProcessorWithHumanOutput {
|
||||
red: Style::new(),
|
||||
green: Style::new(),
|
||||
cyan: Style::new(),
|
||||
yellow: Style::new(),
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
show_raw_agent_reasoning: config.show_raw_agent_reasoning,
|
||||
last_message_path,
|
||||
@@ -165,13 +161,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
let prefix = "ERROR:".style(self.red);
|
||||
ts_msg!(self, "{prefix} {message}");
|
||||
}
|
||||
EventMsg::Warning(WarningEvent { message }) => {
|
||||
ts_msg!(
|
||||
self,
|
||||
"{} {message}",
|
||||
"warning:".style(self.yellow).style(self.bold)
|
||||
);
|
||||
}
|
||||
EventMsg::DeprecationNotice(DeprecationNoticeEvent { summary, details }) => {
|
||||
ts_msg!(
|
||||
self,
|
||||
|
||||
@@ -8,7 +8,6 @@ use crate::event_processor::handle_last_message;
|
||||
use crate::exec_events::AgentMessageItem;
|
||||
use crate::exec_events::CommandExecutionItem;
|
||||
use crate::exec_events::CommandExecutionStatus;
|
||||
use crate::exec_events::ErrorItem;
|
||||
use crate::exec_events::FileChangeItem;
|
||||
use crate::exec_events::FileUpdateChange;
|
||||
use crate::exec_events::ItemCompletedEvent;
|
||||
@@ -130,15 +129,6 @@ impl EventProcessorWithJsonOutput {
|
||||
self.last_critical_error = Some(error.clone());
|
||||
vec![ThreadEvent::Error(error)]
|
||||
}
|
||||
EventMsg::Warning(ev) => {
|
||||
let item = ThreadItem {
|
||||
id: self.get_next_item_id(),
|
||||
details: ThreadItemDetails::Error(ErrorItem {
|
||||
message: ev.message.clone(),
|
||||
}),
|
||||
};
|
||||
vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })]
|
||||
}
|
||||
EventMsg::StreamError(ev) => vec![ThreadEvent::Error(ThreadErrorEvent {
|
||||
message: ev.message.clone(),
|
||||
})],
|
||||
|
||||
@@ -12,13 +12,11 @@ use codex_core::protocol::McpToolCallEndEvent;
|
||||
use codex_core::protocol::PatchApplyBeginEvent;
|
||||
use codex_core::protocol::PatchApplyEndEvent;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::protocol::WebSearchEndEvent;
|
||||
use codex_exec::event_processor_with_jsonl_output::EventProcessorWithJsonOutput;
|
||||
use codex_exec::exec_events::AgentMessageItem;
|
||||
use codex_exec::exec_events::CommandExecutionItem;
|
||||
use codex_exec::exec_events::CommandExecutionStatus;
|
||||
use codex_exec::exec_events::ErrorItem;
|
||||
use codex_exec::exec_events::ItemCompletedEvent;
|
||||
use codex_exec::exec_events::ItemStartedEvent;
|
||||
use codex_exec::exec_events::ItemUpdatedEvent;
|
||||
@@ -542,28 +540,6 @@ fn error_event_produces_error() {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn warning_event_produces_error_item() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
let out = ep.collect_thread_events(&event(
|
||||
"e1",
|
||||
EventMsg::Warning(WarningEvent {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
}),
|
||||
));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::Error(ErrorItem {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
}),
|
||||
},
|
||||
})]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stream_error_event_produces_error() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
|
||||
@@ -204,9 +204,6 @@ async fn run_codex_tool_session_inner(
|
||||
outgoing.send_response(request_id.clone(), result).await;
|
||||
break;
|
||||
}
|
||||
EventMsg::Warning(_) => {
|
||||
continue;
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
|
||||
call_id,
|
||||
reason,
|
||||
|
||||
@@ -438,10 +438,6 @@ pub enum EventMsg {
|
||||
/// Error while executing a submission
|
||||
Error(ErrorEvent),
|
||||
|
||||
/// Warning issued while processing a submission. Unlike `Error`, this
|
||||
/// indicates the task continued but the user should still be notified.
|
||||
Warning(WarningEvent),
|
||||
|
||||
/// Agent has started a task
|
||||
TaskStarted(TaskStartedEvent),
|
||||
|
||||
@@ -676,11 +672,6 @@ pub struct ErrorEvent {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
|
||||
pub struct WarningEvent {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
|
||||
pub struct TaskCompleteEvent {
|
||||
pub last_agent_message: Option<String>,
|
||||
|
||||
@@ -1,246 +0,0 @@
|
||||
<#
|
||||
Setup script for building codex-rs on Windows.
|
||||
|
||||
What it does:
|
||||
- Installs Rust toolchain (via winget rustup) and required components
|
||||
- Installs Visual Studio 2022 Build Tools (MSVC + Windows SDK)
|
||||
- Installs helpful CLIs used by the repo: git, ripgrep (rg), just, cmake
|
||||
- Installs cargo-insta (for snapshot tests) via cargo
|
||||
- Ensures PATH contains Cargo bin for the current session
|
||||
- Builds the workspace (cargo build)
|
||||
|
||||
Usage:
|
||||
- Right-click PowerShell and "Run as Administrator" (VS Build Tools require elevation)
|
||||
- From the repo root (codex-rs), run:
|
||||
powershell -ExecutionPolicy Bypass -File scripts/setup-windows.ps1
|
||||
|
||||
Notes:
|
||||
- Requires winget (Windows Package Manager). Most modern Windows 10/11 have it preinstalled.
|
||||
- The script is re-runnable; winget/cargo will skip/reinstall as appropriate.
|
||||
#>
|
||||
|
||||
param(
|
||||
[switch] $SkipBuild
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
function Ensure-Command($Name) {
|
||||
$exists = Get-Command $Name -ErrorAction SilentlyContinue
|
||||
return $null -ne $exists
|
||||
}
|
||||
|
||||
function Add-CargoBinToPath() {
|
||||
$cargoBin = Join-Path $env:USERPROFILE ".cargo\bin"
|
||||
if (Test-Path $cargoBin) {
|
||||
if (-not ($env:Path.Split(';') -contains $cargoBin)) {
|
||||
$env:Path = "$env:Path;$cargoBin"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Ensure-UserPathContains([string] $Segment) {
|
||||
try {
|
||||
$userPath = [Environment]::GetEnvironmentVariable('Path', 'User')
|
||||
if ($null -eq $userPath) { $userPath = '' }
|
||||
$parts = $userPath.Split(';') | Where-Object { $_ -ne '' }
|
||||
if (-not ($parts -contains $Segment)) {
|
||||
$newPath = if ($userPath) { "$userPath;$Segment" } else { $Segment }
|
||||
[Environment]::SetEnvironmentVariable('Path', $newPath, 'User')
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
function Ensure-UserEnvVar([string] $Name, [string] $Value) {
|
||||
try { [Environment]::SetEnvironmentVariable($Name, $Value, 'User') } catch {}
|
||||
}
|
||||
|
||||
function Ensure-VSComponents([string[]]$Components) {
|
||||
$vsInstaller = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vs_installer.exe"
|
||||
$vswhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe"
|
||||
if (-not (Test-Path $vsInstaller) -or -not (Test-Path $vswhere)) { return }
|
||||
|
||||
$instPath = & $vswhere -latest -products * -version "[17.0,18.0)" -requires Microsoft.VisualStudio.Workload.VCTools -property installationPath 2>$null
|
||||
if (-not $instPath) {
|
||||
# 2022 instance may be present without VC Tools; pick BuildTools 2022 and add components
|
||||
$instPath = & $vswhere -latest -products Microsoft.VisualStudio.Product.BuildTools -version "[17.0,18.0)" -property installationPath 2>$null
|
||||
}
|
||||
if (-not $instPath) {
|
||||
$instPath = & $vswhere -latest -products * -requires Microsoft.VisualStudio.Workload.VCTools -property installationPath 2>$null
|
||||
}
|
||||
if (-not $instPath) {
|
||||
$default2022 = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools'
|
||||
if (Test-Path $default2022) { $instPath = $default2022 }
|
||||
}
|
||||
if (-not $instPath) { return }
|
||||
|
||||
$vsDevCmd = Join-Path $instPath 'Common7\Tools\VsDevCmd.bat'
|
||||
$verb = if (Test-Path $vsDevCmd) { 'modify' } else { 'install' }
|
||||
$args = @($verb, '--installPath', $instPath, '--quiet', '--norestart', '--nocache')
|
||||
if ($verb -eq 'install') { $args += @('--productId', 'Microsoft.VisualStudio.Product.BuildTools') }
|
||||
foreach ($c in $Components) { $args += @('--add', $c) }
|
||||
Write-Host "-- Ensuring VS components installed: $($Components -join ', ')" -ForegroundColor DarkCyan
|
||||
& $vsInstaller @args | Out-Host
|
||||
}
|
||||
|
||||
function Enter-VsDevShell() {
    $vswhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe"
    if (-not (Test-Path $vswhere)) { return }

    $instPath = & $vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath 2>$null
    if (-not $instPath) {
        # Try ARM64 components
        $instPath = & $vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.ARM64 -property installationPath 2>$null
    }
    if (-not $instPath) { return }

    $vsDevCmd = Join-Path $instPath 'Common7\Tools\VsDevCmd.bat'
    if (-not (Test-Path $vsDevCmd)) { return }

    # Prefer ARM64 on ARM machines, otherwise x64
    $arch = if ($env:PROCESSOR_ARCHITEW6432 -eq 'ARM64' -or $env:PROCESSOR_ARCHITECTURE -eq 'ARM64') { 'arm64' } else { 'x64' }
    $devCmdStr = ('"{0}" -no_logo -arch={1} -host_arch={1} & set' -f $vsDevCmd, $arch)
    $envLines = & cmd.exe /c $devCmdStr
    foreach ($line in $envLines) {
        if ($line -match '^(.*?)=(.*)$') {
            $name = $matches[1]
            $value = $matches[2]
            try { [Environment]::SetEnvironmentVariable($name, $value, 'Process') } catch {}
        }
    }
}

Write-Host "==> Installing prerequisites via winget (may take a while)" -ForegroundColor Cyan
|
||||
|
||||
# Accept agreements up-front for non-interactive installs
|
||||
$WingetArgs = @('--accept-package-agreements', '--accept-source-agreements', '-e')
|
||||
|
||||
if (-not (Ensure-Command 'winget')) {
|
||||
throw "winget is required. Please update to the latest Windows 10/11 or install winget."
|
||||
}
|
||||
|
||||
# 1) Visual Studio 2022 Build Tools (MSVC toolchain + Windows SDK)
|
||||
# The VC Tools workload brings the required MSVC toolchains; include recommended components to pick up a Windows SDK.
|
||||
Write-Host "-- Installing Visual Studio Build Tools (VC Tools workload + ARM64 toolchains)" -ForegroundColor DarkCyan
|
||||
$vsOverride = @(
|
||||
'--quiet', '--wait', '--norestart', '--nocache',
|
||||
'--add', 'Microsoft.VisualStudio.Workload.VCTools',
|
||||
'--add', 'Microsoft.VisualStudio.Component.VC.Tools.ARM64',
|
||||
'--add', 'Microsoft.VisualStudio.Component.VC.Tools.ARM64EC',
|
||||
'--add', 'Microsoft.VisualStudio.Component.Windows11SDK.22000'
|
||||
) -join ' '
|
||||
winget install @WingetArgs --id Microsoft.VisualStudio.2022.BuildTools --override $vsOverride | Out-Host
|
||||
|
||||
# Ensure required VC components even if winget doesn't modify the instance
|
||||
$isArm64 = ($env:PROCESSOR_ARCHITEW6432 -eq 'ARM64' -or $env:PROCESSOR_ARCHITECTURE -eq 'ARM64')
|
||||
$components = @(
|
||||
'Microsoft.VisualStudio.Workload.VCTools',
|
||||
'Microsoft.VisualStudio.Component.VC.Tools.ARM64',
|
||||
'Microsoft.VisualStudio.Component.VC.Tools.ARM64EC',
|
||||
'Microsoft.VisualStudio.Component.Windows11SDK.22000'
|
||||
)
|
||||
Ensure-VSComponents -Components $components
|
||||
|
||||
# 2) Rustup
Write-Host "-- Installing rustup" -ForegroundColor DarkCyan
winget install @WingetArgs --id Rustlang.Rustup | Out-Host

# Make cargo available in this session
Add-CargoBinToPath

# 3) Git (often present, but ensure installed)
Write-Host "-- Installing Git" -ForegroundColor DarkCyan
winget install @WingetArgs --id Git.Git | Out-Host

# 4) ripgrep (rg)
Write-Host "-- Installing ripgrep (rg)" -ForegroundColor DarkCyan
winget install @WingetArgs --id BurntSushi.ripgrep.MSVC | Out-Host

# 5) just
Write-Host "-- Installing just" -ForegroundColor DarkCyan
winget install @WingetArgs --id Casey.Just | Out-Host

# 6) cmake (commonly needed by native crates)
Write-Host "-- Installing CMake" -ForegroundColor DarkCyan
winget install @WingetArgs --id Kitware.CMake | Out-Host

# Ensure cargo is available after rustup install
Add-CargoBinToPath
if (-not (Ensure-Command 'cargo')) {
    # Some shells need a re-login; attempt to source cargo.env if present
    $cargoEnv = Join-Path $env:USERPROFILE ".cargo\env"
    if (Test-Path $cargoEnv) { . $cargoEnv }
    Add-CargoBinToPath
}
if (-not (Ensure-Command 'cargo')) {
    throw "cargo not found in PATH after rustup install. Please open a new terminal and re-run the script."
}

Write-Host "==> Configuring Rust toolchain per rust-toolchain.toml" -ForegroundColor Cyan

# Pin to the workspace toolchain and install components
$toolchain = '1.90.0'
& rustup toolchain install $toolchain --profile minimal | Out-Host
& rustup default $toolchain | Out-Host
& rustup component add clippy rustfmt rust-src --toolchain $toolchain | Out-Host

# 6.5) LLVM/Clang (some crates/bindgen require clang/libclang)
function Add-LLVMToPath() {
    $llvmBin = 'C:\\Program Files\\LLVM\\bin'
    if (Test-Path $llvmBin) {
        if (-not ($env:Path.Split(';') -contains $llvmBin)) {
            $env:Path = "$env:Path;$llvmBin"
        }
        if (-not $env:LIBCLANG_PATH) {
            $env:LIBCLANG_PATH = $llvmBin
        }
        Ensure-UserPathContains $llvmBin
        Ensure-UserEnvVar -Name 'LIBCLANG_PATH' -Value $llvmBin

        $clang = Join-Path $llvmBin 'clang.exe'
        $clangxx = Join-Path $llvmBin 'clang++.exe'
        if (Test-Path $clang) {
            $env:CC = $clang
            Ensure-UserEnvVar -Name 'CC' -Value $clang
        }
        if (Test-Path $clangxx) {
            $env:CXX = $clangxx
            Ensure-UserEnvVar -Name 'CXX' -Value $clangxx
        }
    }
}

Write-Host "-- Installing LLVM/Clang" -ForegroundColor DarkCyan
winget install @WingetArgs --id LLVM.LLVM | Out-Host
Add-LLVMToPath

# 7) cargo-insta (used by snapshot tests)
# Ensure MSVC linker is available before building/cargo-install by entering VS dev shell
Enter-VsDevShell
$hasLink = $false
try { & where.exe link | Out-Null; $hasLink = $true } catch {}
if ($hasLink) {
    Write-Host "-- Installing cargo-insta" -ForegroundColor DarkCyan
    & cargo install cargo-insta --locked | Out-Host
} else {
    Write-Host "-- Skipping cargo-insta for now (MSVC linker not found yet)" -ForegroundColor Yellow
}

if ($SkipBuild) {
    Write-Host "==> Skipping cargo build (SkipBuild specified)" -ForegroundColor Yellow
    exit 0
}

Write-Host "==> Building workspace (cargo build)" -ForegroundColor Cyan
pushd "$PSScriptRoot\.." | Out-Null
try {
    # Clear RUSTFLAGS if coming from constrained environments
    $env:RUSTFLAGS = ''
    Enter-VsDevShell
    & cargo build
}
finally {
    popd | Out-Null
}

Write-Host "==> Build complete" -ForegroundColor Green

@@ -42,7 +42,6 @@ use codex_core::protocol::UndoCompletedEvent;
|
||||
use codex_core::protocol::UndoStartedEvent;
|
||||
use codex_core::protocol::UserMessageEvent;
|
||||
use codex_core::protocol::ViewImageToolCallEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::protocol::WebSearchBeginEvent;
|
||||
use codex_core::protocol::WebSearchEndEvent;
|
||||
use codex_protocol::ConversationId;
|
||||
@@ -520,11 +519,6 @@ impl ChatWidget {
|
||||
self.maybe_send_next_queued_input();
|
||||
}
|
||||
|
||||
fn on_warning(&mut self, message: String) {
|
||||
self.add_to_history(history_cell::new_warning_event(message));
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
/// Handle a turn aborted due to user interrupt (Esc).
|
||||
/// When there are queued user messages, restore them into the composer
|
||||
/// separated by newlines rather than auto‑submitting the next one.
|
||||
@@ -663,7 +657,7 @@ impl ChatWidget {
|
||||
}
|
||||
|
||||
fn on_shutdown_complete(&mut self) {
|
||||
self.request_exit();
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
|
||||
fn on_turn_diff(&mut self, unified_diff: String) {
|
||||
@@ -1235,8 +1229,8 @@ impl ChatWidget {
|
||||
SlashCommand::Approvals => {
|
||||
self.open_approvals_popup();
|
||||
}
|
||||
SlashCommand::Quit | SlashCommand::Exit => {
|
||||
self.request_exit();
|
||||
SlashCommand::Quit => {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
SlashCommand::Logout => {
|
||||
if let Err(e) = codex_core::auth::logout(
|
||||
@@ -1245,7 +1239,7 @@ impl ChatWidget {
|
||||
) {
|
||||
tracing::error!("failed to logout: {e}");
|
||||
}
|
||||
self.request_exit();
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
SlashCommand::Undo => {
|
||||
self.app_event_tx.send(AppEvent::CodexOp(Op::Undo));
|
||||
@@ -1483,7 +1477,6 @@ impl ChatWidget {
|
||||
self.set_token_info(ev.info);
|
||||
self.on_rate_limit_snapshot(ev.rate_limits);
|
||||
}
|
||||
EventMsg::Warning(WarningEvent { message }) => self.on_warning(message),
|
||||
EventMsg::Error(ErrorEvent { message }) => self.on_error(message),
|
||||
EventMsg::TurnAborted(ev) => match ev.reason {
|
||||
TurnAbortReason::Interrupted => {
|
||||
@@ -1600,10 +1593,6 @@ impl ChatWidget {
|
||||
}
|
||||
}
|
||||
|
||||
fn request_exit(&self) {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
|
||||
fn request_redraw(&mut self) {
|
||||
self.frame_requester.schedule_frame();
|
||||
}
|
||||
@@ -1873,10 +1862,7 @@ impl ChatWidget {
|
||||
current_approval == preset.approval && current_sandbox == preset.sandbox;
|
||||
let name = preset.label.to_string();
|
||||
let description_text = preset.description;
|
||||
let description = if cfg!(target_os = "windows")
|
||||
&& preset.id == "auto"
|
||||
&& codex_core::get_platform_sandbox().is_none()
|
||||
{
|
||||
let description = if cfg!(target_os = "windows") && preset.id == "auto" {
|
||||
Some(format!(
|
||||
"{description_text}\nRequires Windows Subsystem for Linux (WSL). Show installation instructions..."
|
||||
))
|
||||
@@ -1896,10 +1882,7 @@ impl ChatWidget {
|
||||
preset: preset_clone.clone(),
|
||||
});
|
||||
})]
|
||||
} else if cfg!(target_os = "windows")
|
||||
&& preset.id == "auto"
|
||||
&& codex_core::get_platform_sandbox().is_none()
|
||||
{
|
||||
} else if cfg!(target_os = "windows") && preset.id == "auto" {
|
||||
vec![Box::new(|tx| {
|
||||
tx.send(AppEvent::ShowWindowsAutoModeInstructions);
|
||||
})]
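
The two hunks above toggle whether the WSL hint is gated only on cfg!(target_os = "windows") && preset.id == "auto" or also on codex_core::get_platform_sandbox().is_none(). A minimal sketch of the combined gate, with the runtime probe abstracted into an Option (names are illustrative, not the widget's real helpers):

// Illustrative only: mirrors the condition shape from the diff above.
fn needs_wsl_hint(preset_id: &str, platform_sandbox: Option<&str>) -> bool {
    // cfg! is resolved at compile time but still yields a plain bool,
    // so it composes directly with the runtime check on the detected sandbox.
    cfg!(target_os = "windows") && preset_id == "auto" && platform_sandbox.is_none()
}
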
|
||||
|
||||
@@ -37,7 +37,6 @@ use codex_core::protocol::TaskStartedEvent;
|
||||
use codex_core::protocol::UndoCompletedEvent;
|
||||
use codex_core::protocol::UndoStartedEvent;
|
||||
use codex_core::protocol::ViewImageToolCallEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::plan_tool::PlanItemArg;
|
||||
@@ -57,8 +56,6 @@ use tempfile::tempdir;
|
||||
use tokio::sync::mpsc::error::TryRecvError;
|
||||
use tokio::sync::mpsc::unbounded_channel;
|
||||
|
||||
const TEST_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.";
|
||||
|
||||
fn test_config() -> Config {
|
||||
// Use base defaults to avoid depending on host state.
|
||||
Config::load_from_base_config_with_overrides(
|
||||
@@ -854,24 +851,6 @@ fn slash_init_skips_when_project_doc_exists() {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn slash_quit_requests_exit() {
|
||||
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
|
||||
|
||||
chat.dispatch_command(SlashCommand::Quit);
|
||||
|
||||
assert_matches!(rx.try_recv(), Ok(AppEvent::ExitRequest));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn slash_exit_requests_exit() {
|
||||
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
|
||||
|
||||
chat.dispatch_command(SlashCommand::Exit);
|
||||
|
||||
assert_matches!(rx.try_recv(), Ok(AppEvent::ExitRequest));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn slash_undo_sends_op() {
|
||||
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
|
||||
@@ -1585,8 +1564,7 @@ async fn binary_size_transcript_snapshot() {
|
||||
}
|
||||
has_emitted_history = true;
|
||||
transcript.push_str(&lines_to_single_string(&lines));
|
||||
crate::insert_history::insert_history_lines(&mut terminal, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
crate::insert_history::insert_history_lines(&mut terminal, lines);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1607,8 +1585,7 @@ async fn binary_size_transcript_snapshot() {
|
||||
}
|
||||
has_emitted_history = true;
|
||||
transcript.push_str(&lines_to_single_string(&lines));
|
||||
crate::insert_history::insert_history_lines(&mut terminal, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
crate::insert_history::insert_history_lines(&mut terminal, lines);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2448,25 +2425,6 @@ fn stream_error_updates_status_indicator() {
|
||||
assert_eq!(status.header(), msg);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn warning_event_adds_warning_history_cell() {
|
||||
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
|
||||
chat.handle_codex_event(Event {
|
||||
id: "sub-1".into(),
|
||||
msg: EventMsg::Warning(WarningEvent {
|
||||
message: TEST_WARNING_MESSAGE.to_string(),
|
||||
}),
|
||||
});
|
||||
|
||||
let cells = drain_insert_history(&mut rx);
|
||||
assert_eq!(cells.len(), 1, "expected one warning history cell");
|
||||
let rendered = lines_to_single_string(&cells[0]);
|
||||
assert!(
|
||||
rendered.contains(TEST_WARNING_MESSAGE),
|
||||
"warning cell missing content: {rendered}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_agent_messages_in_single_turn_emit_multiple_headers() {
|
||||
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
|
||||
@@ -2678,8 +2636,7 @@ fn chatwidget_exec_and_status_layout_vt100_snapshot() {
|
||||
term.set_viewport_area(viewport);
|
||||
|
||||
for lines in drain_insert_history(&mut rx) {
|
||||
crate::insert_history::insert_history_lines(&mut term, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
crate::insert_history::insert_history_lines(&mut term, lines);
|
||||
}
|
||||
|
||||
term.draw(|f| {
|
||||
@@ -2756,8 +2713,7 @@ printf 'fenced within fenced\n'
|
||||
while let Ok(app_ev) = rx.try_recv() {
|
||||
if let AppEvent::InsertHistoryCell(cell) = app_ev {
|
||||
let lines = cell.display_lines(width);
|
||||
crate::insert_history::insert_history_lines(&mut term, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
crate::insert_history::insert_history_lines(&mut term, lines);
|
||||
inserted_any = true;
|
||||
}
|
||||
}
|
||||
@@ -2775,8 +2731,7 @@ printf 'fenced within fenced\n'
|
||||
}),
|
||||
});
|
||||
for lines in drain_insert_history(&mut rx) {
|
||||
crate::insert_history::insert_history_lines(&mut term, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
crate::insert_history::insert_history_lines(&mut term, lines);
|
||||
}
|
||||
|
||||
assert_snapshot!(term.backend().vt100().screen().contents());
|
||||
|
||||
@@ -1424,7 +1424,7 @@ fn format_mcp_invocation<'a>(invocation: McpInvocation) -> Line<'a> {
|
||||
let args_str = invocation
|
||||
.arguments
|
||||
.as_ref()
|
||||
.map(|v: &serde_json::Value| {
|
||||
.map(|v| {
|
||||
// Use compact form to keep things short but readable.
|
||||
serde_json::to_string(v).unwrap_or_else(|_| v.to_string())
|
||||
})
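
The closure above prefers serde_json::to_string for a compact rendering of the MCP arguments and falls back to the Display form if serialization fails. A small self-contained sketch of that behavior:

// Sketch of the compact-JSON formatting used for MCP invocation arguments.
use serde_json::Value;
use serde_json::json;

fn compact(v: &Value) -> String {
    // Compact form keeps the rendered invocation short but readable; the
    // fallback to `v.to_string()` only matters if `to_string` ever errors.
    serde_json::to_string(v).unwrap_or_else(|_| v.to_string())
}

fn main() {
    let args = json!({"path": "README.md", "recursive": true});
    assert_eq!(compact(&args), r#"{"path":"README.md","recursive":true}"#);
}
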
|
||||
|
||||
@@ -24,10 +24,7 @@ use ratatui::text::Span;
|
||||
|
||||
/// Insert `lines` above the viewport using the terminal's backend writer
|
||||
/// (avoids direct stdout references).
|
||||
pub fn insert_history_lines<B>(
|
||||
terminal: &mut crate::custom_terminal::Terminal<B>,
|
||||
lines: Vec<Line>,
|
||||
) -> io::Result<()>
|
||||
pub fn insert_history_lines<B>(terminal: &mut crate::custom_terminal::Terminal<B>, lines: Vec<Line>)
|
||||
where
|
||||
B: Backend + Write,
|
||||
{
|
||||
@@ -54,13 +51,13 @@ where
|
||||
// 3) Emitting Reverse Index (RI, ESC M) `scroll_amount` times
|
||||
// 4) Resetting the scroll region back to full screen
|
||||
let top_1based = area.top() + 1; // Convert 0-based row to 1-based for DECSTBM
|
||||
queue!(writer, SetScrollRegion(top_1based..screen_size.height))?;
|
||||
queue!(writer, MoveTo(0, area.top()))?;
|
||||
queue!(writer, SetScrollRegion(top_1based..screen_size.height)).ok();
|
||||
queue!(writer, MoveTo(0, area.top())).ok();
|
||||
for _ in 0..scroll_amount {
|
||||
// Reverse Index (RI): ESC M
|
||||
queue!(writer, Print("\x1bM"))?;
|
||||
queue!(writer, Print("\x1bM")).ok();
|
||||
}
|
||||
queue!(writer, ResetScrollRegion)?;
|
||||
queue!(writer, ResetScrollRegion).ok();
|
||||
|
||||
let cursor_top = area.top().saturating_sub(1);
|
||||
area.y += scroll_amount;
|
||||
@@ -85,15 +82,15 @@ where
|
||||
// ││ ││
|
||||
// │╰────────────────────────────╯│
|
||||
// └──────────────────────────────┘
|
||||
queue!(writer, SetScrollRegion(1..area.top()))?;
|
||||
queue!(writer, SetScrollRegion(1..area.top())).ok();
|
||||
|
||||
// NB: we are using MoveTo instead of set_cursor_position here to avoid messing with the
|
||||
// terminal's last_known_cursor_position, which hopefully will still be accurate after we
|
||||
// fetch/restore the cursor position. insert_history_lines should be cursor-position-neutral :)
|
||||
queue!(writer, MoveTo(0, cursor_top))?;
|
||||
queue!(writer, MoveTo(0, cursor_top)).ok();
|
||||
|
||||
for line in wrapped {
|
||||
queue!(writer, Print("\r\n"))?;
|
||||
queue!(writer, Print("\r\n")).ok();
|
||||
queue!(
|
||||
writer,
|
||||
SetColors(Colors::new(
|
||||
@@ -106,8 +103,9 @@ where
|
||||
.map(std::convert::Into::into)
|
||||
.unwrap_or(CColor::Reset)
|
||||
))
|
||||
)?;
|
||||
queue!(writer, Clear(ClearType::UntilNewLine))?;
|
||||
)
|
||||
.ok();
|
||||
queue!(writer, Clear(ClearType::UntilNewLine)).ok();
|
||||
// Merge line-level style into each span so that ANSI colors reflect
|
||||
// line styles (e.g., blockquotes with green fg).
|
||||
let merged_spans: Vec<Span> = line
|
||||
@@ -118,20 +116,18 @@ where
|
||||
content: s.content.clone(),
|
||||
})
|
||||
.collect();
|
||||
write_spans(writer, merged_spans.iter())?;
|
||||
write_spans(writer, merged_spans.iter()).ok();
|
||||
}
|
||||
|
||||
queue!(writer, ResetScrollRegion)?;
|
||||
queue!(writer, ResetScrollRegion).ok();
|
||||
|
||||
// Restore the cursor position to where it was before we started.
|
||||
queue!(writer, MoveTo(last_cursor_pos.x, last_cursor_pos.y))?;
|
||||
queue!(writer, MoveTo(last_cursor_pos.x, last_cursor_pos.y)).ok();
|
||||
|
||||
let _ = writer;
|
||||
if should_update_area {
|
||||
terminal.set_viewport_area(area);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
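
This hunk switches insert_history_lines between returning io::Result<()> (propagating each queued terminal write with ?) and returning nothing while discarding write failures with .ok(). A minimal sketch of the best-effort variant of that pattern, using crossterm directly (simplified; the real function also handles wrapping, scroll regions, and cursor restoration):

// Best-effort terminal writes: queue commands and ignore io errors with .ok(),
// so the caller no longer has to handle a Result at all.
use std::io::Write;

use crossterm::queue;
use crossterm::style::Print;

fn print_line_best_effort<W: Write>(writer: &mut W, line: &str) {
    queue!(writer, Print(line)).ok();
    queue!(writer, Print("\r\n")).ok();
    writer.flush().ok();
}
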
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@@ -332,8 +328,7 @@ mod tests {
|
||||
// Build a blockquote-like line: apply line-level green style and prefix "> "
|
||||
let mut line: Line<'static> = Line::from(vec!["> ".into(), "Hello world".into()]);
|
||||
line = line.style(Color::Green);
|
||||
insert_history_lines(&mut term, vec![line])
|
||||
.expect("Failed to insert history lines in test");
|
||||
insert_history_lines(&mut term, vec![line]);
|
||||
|
||||
let mut saw_colored = false;
|
||||
'outer: for row in 0..height {
|
||||
@@ -371,8 +366,7 @@ mod tests {
|
||||
]);
|
||||
line = line.style(Color::Green);
|
||||
|
||||
insert_history_lines(&mut term, vec![line])
|
||||
.expect("Failed to insert history lines in test");
|
||||
insert_history_lines(&mut term, vec![line]);
|
||||
|
||||
// Parse and inspect the final screen buffer.
|
||||
let screen = term.backend().vt100().screen();
|
||||
@@ -434,8 +428,7 @@ mod tests {
|
||||
Span::raw("Hello world"),
|
||||
]);
|
||||
|
||||
insert_history_lines(&mut term, vec![line])
|
||||
.expect("Failed to insert history lines in test");
|
||||
insert_history_lines(&mut term, vec![line]);
|
||||
|
||||
let screen = term.backend().vt100().screen();
|
||||
|
||||
@@ -491,7 +484,7 @@ mod tests {
|
||||
let viewport = ratatui::layout::Rect::new(0, height - 1, width, 1);
|
||||
term.set_viewport_area(viewport);
|
||||
|
||||
insert_history_lines(&mut term, lines).expect("Failed to insert history lines in test");
|
||||
insert_history_lines(&mut term, lines);
|
||||
|
||||
let screen = term.backend().vt100().screen();
|
||||
|
||||
|
||||
@@ -989,7 +989,7 @@ mod tests {
|
||||
"type": "message",
|
||||
"role": "user",
|
||||
"content": [
|
||||
{ "type": "input_text", "text": "# AGENTS.md instructions for project\n\n<INSTRUCTIONS>\nhi\n</INSTRUCTIONS>" },
|
||||
{ "type": "input_text", "text": "<user_instructions>hi</user_instructions>" },
|
||||
]
|
||||
}),
|
||||
json!({
|
||||
|
||||
@@ -25,7 +25,6 @@ pub enum SlashCommand {
|
||||
Mcp,
|
||||
Logout,
|
||||
Quit,
|
||||
Exit,
|
||||
Feedback,
|
||||
Rollout,
|
||||
TestApproval,
|
||||
@@ -41,7 +40,7 @@ impl SlashCommand {
|
||||
SlashCommand::Compact => "summarize conversation to prevent hitting the context limit",
|
||||
SlashCommand::Review => "review my current changes and find issues",
|
||||
SlashCommand::Undo => "ask Codex to undo a turn",
|
||||
SlashCommand::Quit | SlashCommand::Exit => "exit Codex",
|
||||
SlashCommand::Quit => "exit Codex",
|
||||
SlashCommand::Diff => "show git diff (including untracked files)",
|
||||
SlashCommand::Mention => "mention a file",
|
||||
SlashCommand::Status => "show current session configuration and token usage",
|
||||
@@ -76,8 +75,7 @@ impl SlashCommand {
|
||||
| SlashCommand::Status
|
||||
| SlashCommand::Mcp
|
||||
| SlashCommand::Feedback
|
||||
| SlashCommand::Quit
|
||||
| SlashCommand::Exit => true,
|
||||
| SlashCommand::Quit => true,
|
||||
SlashCommand::Rollout => true,
|
||||
SlashCommand::TestApproval => true,
|
||||
}
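
Both slash-command hunks hinge on Rust's or-patterns: with the Exit variant present, Quit and Exit share one match arm; without it, the arm names Quit alone. A tiny illustration of the shared-arm form (standalone types, not the real enum):

// Or-patterns let several variants share one match arm.
enum Cmd {
    Quit,
    Exit,
    Diff,
}

fn description(cmd: &Cmd) -> &'static str {
    match cmd {
        Cmd::Quit | Cmd::Exit => "exit Codex",
        Cmd::Diff => "show git diff (including untracked files)",
    }
}
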
|
||||
|
||||
@@ -89,7 +89,7 @@ pub(crate) fn compose_account_display(config: &Config) -> Option<StatusAccountDi
|
||||
if let Some(tokens) = auth.tokens.as_ref() {
|
||||
let info = &tokens.id_token;
|
||||
let email = info.email.clone();
|
||||
let plan = info.get_chatgpt_plan_type().as_deref().map(title_case);
|
||||
let plan = info.get_chatgpt_plan_type().map(|plan| title_case(&plan));
|
||||
return Some(StatusAccountDisplay::ChatGpt { email, plan });
|
||||
}
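
The two lines above are the same mapping written two ways, assuming get_chatgpt_plan_type() yields an Option<String> and title_case takes a &str: .as_deref() converts Option<String> to Option<&str> so the function name can be passed to .map directly, while the closure form borrows explicitly. A small sketch of the equivalence:

// Both forms produce the same Option<String> (helper names are illustrative).
fn title_case(s: &str) -> String {
    let mut chars = s.chars();
    match chars.next() {
        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
        None => String::new(),
    }
}

fn main() {
    let plan: Option<String> = Some("plus".to_string());
    let a = plan.as_deref().map(title_case);
    let b = plan.map(|p| title_case(&p));
    assert_eq!(a, b);
}
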
|
||||
|
||||
|
||||
@@ -548,7 +548,7 @@ impl Tui {
|
||||
crate::insert_history::insert_history_lines(
|
||||
terminal,
|
||||
self.pending_history_lines.clone(),
|
||||
)?;
|
||||
);
|
||||
self.pending_history_lines.clear();
|
||||
}
|
||||
// Update the y position for suspending so Ctrl-Z can place the cursor correctly.
|
||||
|
||||
@@ -36,8 +36,7 @@ impl TestScenario {
|
||||
}
|
||||
|
||||
fn run_insert(&mut self, lines: Vec<Line<'static>>) {
|
||||
codex_tui::insert_history::insert_history_lines(&mut self.term, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
codex_tui::insert_history::insert_history_lines(&mut self.term, lines);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -26,8 +26,7 @@ fn live_001_commit_on_overflow() {
|
||||
let commit_rows = rb.drain_commit_ready(3);
|
||||
let lines: Vec<Line<'static>> = commit_rows.into_iter().map(|r| r.text.into()).collect();
|
||||
|
||||
codex_tui::insert_history::insert_history_lines(&mut term, lines)
|
||||
.expect("Failed to insert history lines in test");
|
||||
codex_tui::insert_history::insert_history_lines(&mut term, lines);
|
||||
|
||||
let screen = term.backend().vt100().screen();
|
||||
|
||||
|
||||
369
codex-rs/windows-sandbox-rs/Cargo.lock
generated
369
codex-rs/windows-sandbox-rs/Cargo.lock
generated
@@ -1,369 +0,0 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
|
||||
|
||||
[[package]]
|
||||
name = "codex-windows-sandbox"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"dirs-next",
|
||||
"rand",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs-next"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"dirs-sys-next",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs-sys-next"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"redox_users",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.177"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
|
||||
dependencies = [
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.101"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rand_chacha",
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
||||
dependencies = [
|
||||
"ppv-lite86",
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.6.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"libredox",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_core"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.145"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
"ryu",
|
||||
"serde",
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.11.1+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.8.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.8.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
@@ -1,43 +0,0 @@
|
||||
[package]
|
||||
name = "codex-windows-sandbox"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "codex_windows_sandbox"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
[dependencies.rand]
|
||||
version = "0.8"
|
||||
default-features = false
|
||||
features = ["std", "small_rng"]
|
||||
[dependencies.dirs-next]
|
||||
version = "2.0"
|
||||
[dependencies.windows-sys]
|
||||
version = "0.52"
|
||||
features = [
|
||||
"Win32_Foundation",
|
||||
"Win32_System_Diagnostics_Debug",
|
||||
"Win32_Security",
|
||||
"Win32_Security_Authorization",
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_JobObjects",
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_System_Environment",
|
||||
"Win32_System_Pipes",
|
||||
"Win32_System_WindowsProgramming",
|
||||
"Win32_System_IO",
|
||||
"Win32_System_Memory",
|
||||
"Win32_System_Kernel",
|
||||
"Win32_System_Console",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_Diagnostics_ToolHelp",
|
||||
"Win32_Networking_WinSock",
|
||||
"Win32_System_LibraryLoader",
|
||||
"Win32_System_Com",
|
||||
"Win32_Security_Authentication_Identity",
|
||||
]
|
||||
@@ -1,306 +0,0 @@
|
||||
# sandbox_smoketests.py
|
||||
# Run a suite of smoke tests against the Windows sandbox via the Codex CLI
|
||||
# Requires: Python 3.8+ on Windows. No pip requirements.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
def _resolve_codex_cmd() -> List[str]:
|
||||
"""Resolve the Codex CLI to invoke `codex sandbox windows`.
|
||||
|
||||
Prefer `codex` on PATH; if not found, try common local build locations.
|
||||
Returns the argv prefix to run Codex.
|
||||
"""
|
||||
# 1) Prefer PATH
|
||||
try:
|
||||
cp = subprocess.run(["where", "codex"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True)
|
||||
if cp.returncode == 0:
|
||||
for line in cp.stdout.splitlines():
|
||||
p = Path(line.strip())
|
||||
if p.exists():
|
||||
return [str(p)]
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# 2) Try workspace targets
|
||||
root = Path(__file__).parent
|
||||
ws_root = root.parent
|
||||
cargo_target = os.environ.get("CARGO_TARGET_DIR")
|
||||
candidates = [
|
||||
ws_root / "target" / "release" / "codex.exe",
|
||||
ws_root / "target" / "debug" / "codex.exe",
|
||||
]
|
||||
if cargo_target:
|
||||
candidates.extend([
|
||||
Path(cargo_target) / "release" / "codex.exe",
|
||||
Path(cargo_target) / "debug" / "codex.exe",
|
||||
])
|
||||
for p in candidates:
|
||||
if p.exists():
|
||||
return [str(p)]
|
||||
|
||||
raise FileNotFoundError(
|
||||
"Codex CLI not found. Build it first, e.g.\n"
|
||||
" cargo build -p codex-cli --release\n"
|
||||
"or for debug:\n"
|
||||
" cargo build -p codex-cli\n"
|
||||
)
|
||||
|
||||
CODEX_CMD = _resolve_codex_cmd()
|
||||
TIMEOUT_SEC = 20
|
||||
|
||||
WS_ROOT = Path(os.environ["USERPROFILE"]) / "sbx_ws_tests"
|
||||
OUTSIDE = Path(os.environ["USERPROFILE"]) / "sbx_ws_outside" # outside CWD for deny checks
|
||||
|
||||
ENV_BASE = {} # extend if needed
|
||||
|
||||
class CaseResult:
|
||||
def __init__(self, name: str, ok: bool, detail: str = ""):
|
||||
self.name, self.ok, self.detail = name, ok, detail
|
||||
|
||||
def run_sbx(policy: str, cmd_argv: List[str], cwd: Path, env_extra: Optional[dict] = None) -> Tuple[int, str, str]:
|
||||
env = os.environ.copy()
|
||||
env.update(ENV_BASE)
|
||||
if env_extra:
|
||||
env.update(env_extra)
|
||||
# Map policy to codex CLI flags
|
||||
# read-only => default; workspace-write => --full-auto
|
||||
if policy not in ("read-only", "workspace-write"):
|
||||
raise ValueError(f"unknown policy: {policy}")
|
||||
policy_flags: List[str] = ["--full-auto"] if policy == "workspace-write" else []
|
||||
|
||||
argv = [*CODEX_CMD, "sandbox", "windows", *policy_flags, "--", *cmd_argv]
|
||||
print(cmd_argv)
|
||||
cp = subprocess.run(argv, cwd=str(cwd), env=env,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
||||
timeout=TIMEOUT_SEC, text=True)
|
||||
return cp.returncode, cp.stdout, cp.stderr
|
||||
|
||||
def have(cmd: str) -> bool:
|
||||
try:
|
||||
cp = subprocess.run(["where", cmd], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True)
|
||||
return cp.returncode == 0 and any(Path(p.strip()).exists() for p in cp.stdout.splitlines())
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def make_dir_clean(p: Path) -> None:
|
||||
if p.exists():
|
||||
shutil.rmtree(p, ignore_errors=True)
|
||||
p.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def write_file(p: Path, content: str = "x") -> None:
|
||||
p.parent.mkdir(parents=True, exist_ok=True)
|
||||
p.write_text(content, encoding="utf-8")
|
||||
|
||||
def remove_if_exists(p: Path) -> None:
|
||||
try:
|
||||
if p.is_dir(): shutil.rmtree(p, ignore_errors=True)
|
||||
elif p.exists(): p.unlink(missing_ok=True)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def assert_exists(p: Path) -> bool:
|
||||
return p.exists()
|
||||
|
||||
def assert_not_exists(p: Path) -> bool:
|
||||
return not p.exists()
|
||||
|
||||
def summarize(results: List[CaseResult]) -> int:
|
||||
ok = sum(1 for r in results if r.ok)
|
||||
total = len(results)
|
||||
print("\n" + "=" * 72)
|
||||
print(f"Sandbox smoke tests: {ok}/{total} passed")
|
||||
for r in results:
|
||||
print(f"[{'PASS' if r.ok else 'FAIL'}] {r.name}" + (f" :: {r.detail.strip()}" if r.detail and not r.ok else ""))
|
||||
print("=" * 72)
|
||||
return 0 if ok == total else 1
|
||||
|
||||
def main() -> int:
|
||||
results: List[CaseResult] = []
|
||||
make_dir_clean(WS_ROOT)
|
||||
OUTSIDE.mkdir(exist_ok=True)
|
||||
# Environment probe: some hosts allow TEMP writes even under read-only
|
||||
# tokens due to ACLs and restricted SID semantics. Detect and adapt tests.
|
||||
probe_rc, _, _ = run_sbx(
|
||||
"read-only",
|
||||
["cmd", "/c", "echo probe > %TEMP%\\sbx_ro_probe.txt"],
|
||||
WS_ROOT,
|
||||
)
|
||||
ro_temp_denied = probe_rc != 0
|
||||
|
||||
def add(name: str, ok: bool, detail: str = ""):
|
||||
print('running', name)
|
||||
results.append(CaseResult(name, ok, detail))
|
||||
|
||||
# 1. RO: deny write in CWD
|
||||
target = WS_ROOT / "ro_should_fail.txt"
|
||||
remove_if_exists(target)
|
||||
rc, out, err = run_sbx("read-only", ["cmd", "/c", "echo nope > ro_should_fail.txt"], WS_ROOT)
|
||||
add("RO: write in CWD denied", rc != 0 and assert_not_exists(target), f"rc={rc}, err={err}")
|
||||
|
||||
# 2. WS: allow write in CWD
|
||||
target = WS_ROOT / "ws_ok.txt"
|
||||
remove_if_exists(target)
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "echo ok > ws_ok.txt"], WS_ROOT)
|
||||
add("WS: write in CWD allowed", rc == 0 and assert_exists(target), f"rc={rc}, err={err}")
|
||||
|
||||
# 3. WS: deny write outside workspace
|
||||
outside_file = OUTSIDE / "blocked.txt"
|
||||
remove_if_exists(outside_file)
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", f"echo nope > {outside_file}"], WS_ROOT)
|
||||
add("WS: write outside workspace denied", rc != 0 and assert_not_exists(outside_file), f"rc={rc}")
|
||||
|
||||
# 4. WS: allow TEMP write
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "echo tempok > %TEMP%\\ws_temp_ok.txt"], WS_ROOT)
|
||||
add("WS: TEMP write allowed", rc == 0, f"rc={rc}")
|
||||
|
||||
# 5. RO: deny TEMP write
|
||||
rc, out, err = run_sbx("read-only", ["cmd", "/c", "echo tempno > %TEMP%\\ro_temp_fail.txt"], WS_ROOT)
|
||||
if ro_temp_denied:
|
||||
add("RO: TEMP write denied", rc != 0, f"rc={rc}")
|
||||
else:
|
||||
add("RO: TEMP write denied (skipped on this host)", True)
|
||||
|
||||
# 6. WS: append OK in CWD
|
||||
target = WS_ROOT / "append.txt"
|
||||
remove_if_exists(target); write_file(target, "line1\n")
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "echo line2 >> append.txt"], WS_ROOT)
|
||||
add("WS: append allowed", rc == 0 and target.read_text().strip().endswith("line2"), f"rc={rc}")
|
||||
|
||||
# 7. RO: append denied
|
||||
target = WS_ROOT / "ro_append.txt"
|
||||
write_file(target, "line1\n")
|
||||
rc, out, err = run_sbx("read-only", ["cmd", "/c", "echo line2 >> ro_append.txt"], WS_ROOT)
|
||||
add("RO: append denied", rc != 0 and target.read_text() == "line1\n", f"rc={rc}")
|
||||
|
||||
# 8. WS: PowerShell Set-Content in CWD (OK)
|
||||
target = WS_ROOT / "ps_ok.txt"
|
||||
remove_if_exists(target)
|
||||
rc, out, err = run_sbx("workspace-write",
|
||||
["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"Set-Content -LiteralPath ps_ok.txt -Value 'hello' -Encoding ASCII"], WS_ROOT)
|
||||
add("WS: PowerShell Set-Content allowed", rc == 0 and assert_exists(target), f"rc={rc}, err={err}")
|
||||
|
||||
# 9. RO: PowerShell Set-Content denied
|
||||
target = WS_ROOT / "ps_ro_fail.txt"
|
||||
remove_if_exists(target)
|
||||
rc, out, err = run_sbx("read-only",
|
||||
["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"Set-Content -LiteralPath ps_ro_fail.txt -Value 'x'"], WS_ROOT)
|
||||
add("RO: PowerShell Set-Content denied", rc != 0 and assert_not_exists(target), f"rc={rc}")
|
||||
|
||||
# 10. WS: mkdir and write (OK)
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "mkdir sub && echo hi > sub\\in_sub.txt"], WS_ROOT)
|
||||
add("WS: mkdir+write allowed", rc == 0 and (WS_ROOT / "sub/in_sub.txt").exists(), f"rc={rc}")
|
||||
|
||||
# 11. WS: rename (EXPECTED SUCCESS on this host)
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "echo x > r.txt & ren r.txt r2.txt"], WS_ROOT)
|
||||
add("WS: rename succeeds (expected on this host)", rc == 0 and (WS_ROOT / "r2.txt").exists(), f"rc={rc}, err={err}")
|
||||
|
||||
# 12. WS: delete (EXPECTED SUCCESS on this host)
|
||||
target = WS_ROOT / "delme.txt"; write_file(target, "x")
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "del /q delme.txt"], WS_ROOT)
|
||||
add("WS: delete succeeds (expected on this host)", rc == 0 and not target.exists(), f"rc={rc}, err={err}")
|
||||
|
||||
# 13. RO: python tries to write (denied)
|
||||
pyfile = WS_ROOT / "py_should_fail.txt"; remove_if_exists(pyfile)
|
||||
rc, out, err = run_sbx("read-only", ["python", "-c", "open('py_should_fail.txt','w').write('x')"], WS_ROOT)
|
||||
add("RO: python file write denied", rc != 0 and assert_not_exists(pyfile), f"rc={rc}")
|
||||
|
||||
# 14. WS: python writes file (OK)
|
||||
pyfile = WS_ROOT / "py_ok.txt"; remove_if_exists(pyfile)
|
||||
rc, out, err = run_sbx("workspace-write", ["python", "-c", "open('py_ok.txt','w').write('x')"], WS_ROOT)
|
||||
add("WS: python file write allowed", rc == 0 and assert_exists(pyfile), f"rc={rc}, err={err}")
|
||||
|
||||
# 15. WS: curl network blocked (short timeout)
|
||||
rc, out, err = run_sbx("workspace-write", ["curl", "--connect-timeout", "1", "--max-time", "2", "https://example.com"], WS_ROOT)
|
||||
add("WS: curl network blocked", rc != 0, f"rc={rc}")
|
||||
|
||||
# 16. WS: iwr network blocked (HTTP)
|
||||
rc, out, err = run_sbx("workspace-write", ["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"try { iwr http://neverssl.com -TimeoutSec 2 } catch { exit 1 }"], WS_ROOT)
|
||||
add("WS: iwr network blocked", rc != 0, f"rc={rc}")
|
||||
|
||||
# 17. RO: deny TEMP writes via PowerShell
|
||||
rc, out, err = run_sbx("read-only",
|
||||
["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"Set-Content -LiteralPath $env:TEMP\\ro_tmpfail.txt -Value 'x'"], WS_ROOT)
|
||||
if ro_temp_denied:
|
||||
add("RO: TEMP write denied (PS)", rc != 0, f"rc={rc}")
|
||||
else:
|
||||
add("RO: TEMP write denied (PS, skipped)", True)
|
||||
|
||||
# 18. WS: curl version check — don't rely on stub, just succeed
|
||||
if have("curl"):
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "curl --version"], WS_ROOT)
|
||||
add("WS: curl present (version prints)", rc == 0, f"rc={rc}, err={err}")
|
||||
else:
|
||||
add("WS: curl present (optional, skipped)", True)
|
||||
|
||||
# 19. Optional: ripgrep version
|
||||
if have("rg"):
|
||||
rc, out, err = run_sbx("workspace-write", ["cmd", "/c", "rg --version"], WS_ROOT)
|
||||
add("WS: rg --version (optional)", rc == 0, f"rc={rc}, err={err}")
|
||||
else:
|
||||
add("WS: rg --version (optional, skipped)", True)
|
||||
|
||||
# 20. Optional: git --version
|
||||
if have("git"):
|
||||
rc, out, err = run_sbx("workspace-write", ["git", "--version"], WS_ROOT)
|
||||
add("WS: git --version (optional)", rc == 0, f"rc={rc}, err={err}")
|
||||
else:
|
||||
add("WS: git --version (optional, skipped)", True)
|
||||
|
||||
# 21–23. JSON policy: allow only .\allowed — note CWD is still allowed by current impl
|
||||
(WS_ROOT / "allowed").mkdir(exist_ok=True)
|
||||
(WS_ROOT / "blocked").mkdir(exist_ok=True)
|
||||
policy_json = '{"mode":"workspace-write","workspace_roots":[".\\\\allowed"]}'
|
||||
|
||||
# Allowed: inside .\allowed (OK)
|
||||
rc, out, err = run_sbx(policy_json, ["cmd", "/c", "echo ok > allowed\\in_allowed.txt"], WS_ROOT)
|
||||
add("JSON WS: write in allowed/ OK", rc == 0 and (WS_ROOT / "allowed/in_allowed.txt").exists(), f"rc={rc}")
|
||||
|
||||
# Outside CWD (deny)
|
||||
json_outside = OUTSIDE / "json_blocked.txt"; remove_if_exists(json_outside)
|
||||
rc, out, err = run_sbx(policy_json, ["cmd", "/c", f"echo nope > {json_outside}"], WS_ROOT)
|
||||
add("JSON WS: write outside allowed/ denied", rc != 0 and not json_outside.exists(), f"rc={rc}")
|
||||
|
||||
# CWD is still allowed by current sandbox (documented behavior)
|
||||
rc, out, err = run_sbx(policy_json, ["cmd", "/c", "echo ok > cwd_ok_under_json.txt"], WS_ROOT)
|
||||
add("JSON WS: write in CWD allowed (by design)", rc == 0 and (WS_ROOT / "cwd_ok_under_json.txt").exists(), f"rc={rc}")
|
||||
|
||||
# 24. WS: PS bytes write (OK)
|
||||
rc, out, err = run_sbx("workspace-write",
|
||||
["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"[IO.File]::WriteAllBytes('bytes_ok.bin',[byte[]](0..255))"], WS_ROOT)
|
||||
add("WS: PS bytes write allowed", rc == 0 and (WS_ROOT / "bytes_ok.bin").exists(), f"rc={rc}")
|
||||
|
||||
# 25. RO: PS bytes write denied
|
||||
rc, out, err = run_sbx("read-only",
|
||||
["powershell", "-NoLogo", "-NoProfile", "-Command",
|
||||
"[IO.File]::WriteAllBytes('bytes_fail.bin',[byte[]](0..10))"], WS_ROOT)
|
||||
add("RO: PS bytes write denied", rc != 0 and not (WS_ROOT / "bytes_fail.bin").exists(), f"rc={rc}")
|
||||
|
||||
# 26. WS: deep mkdir and write (OK)
|
||||
rc, out, err = run_sbx("workspace-write",
|
||||
["cmd", "/c", "mkdir deep\\nest && echo ok > deep\\nest\\f.txt"], WS_ROOT)
|
||||
add("WS: deep mkdir+write allowed", rc == 0 and (WS_ROOT / "deep/nest/f.txt").exists(), f"rc={rc}")
|
||||
|
||||
# 27. WS: move (EXPECTED SUCCESS on this host)
|
||||
rc, out, err = run_sbx("workspace-write",
|
||||
["cmd", "/c", "echo x > m1.txt & move /y m1.txt m2.txt"], WS_ROOT)
|
||||
add("WS: move succeeds (expected on this host)", rc == 0 and (WS_ROOT / "m2.txt").exists(), f"rc={rc}, err={err}")
|
||||
|
||||
# 28. RO: cmd redirection denied
|
||||
target = WS_ROOT / "cmd_ro.txt"; remove_if_exists(target)
|
||||
rc, out, err = run_sbx("read-only", ["cmd", "/c", "echo nope > cmd_ro.txt"], WS_ROOT)
|
||||
add("RO: cmd redirection denied", rc != 0 and not target.exists(), f"rc={rc}")
|
||||
|
||||
return summarize(results)
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
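
The deleted harness above drives the sandbox purely through the CLI, running `codex sandbox windows [--full-auto] -- <cmd>` and treating a non-zero exit code as a denial. A rough Rust equivalent of its run_sbx helper, using only the flags visible in the script (the CLI shape is taken from the Python code, not verified elsewhere):

// Rough sketch of the Python run_sbx helper: run a command under the Windows
// sandbox via the Codex CLI and report whether it was allowed to run.
use std::io;
use std::process::Command;

fn run_sandboxed(workspace_write: bool, cmd: &[&str]) -> io::Result<bool> {
    let mut args: Vec<&str> = vec!["sandbox", "windows"];
    if workspace_write {
        // read-only is the default; workspace-write maps to --full-auto.
        args.push("--full-auto");
    }
    args.push("--");
    args.extend_from_slice(cmd);
    // success() == false corresponds to the sandbox (or the command) failing,
    // which the smoke tests interpret as "denied".
    Ok(Command::new("codex").args(&args).status()?.success())
}
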
|
||||
@@ -1,286 +0,0 @@
|
||||
use crate::winutil::to_wide;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use std::ffi::c_void;
|
||||
use std::path::Path;
|
||||
use windows_sys::Win32::Foundation::CloseHandle;
|
||||
use windows_sys::Win32::Foundation::LocalFree;
|
||||
use windows_sys::Win32::Foundation::ERROR_SUCCESS;
|
||||
use windows_sys::Win32::Foundation::HLOCAL;
|
||||
use windows_sys::Win32::Foundation::INVALID_HANDLE_VALUE;
|
||||
use windows_sys::Win32::Security::AclSizeInformation;
|
||||
use windows_sys::Win32::Security::Authorization::GetNamedSecurityInfoW;
|
||||
use windows_sys::Win32::Security::Authorization::GetSecurityInfo;
|
||||
use windows_sys::Win32::Security::Authorization::SetEntriesInAclW;
|
||||
use windows_sys::Win32::Security::Authorization::SetNamedSecurityInfoW;
|
||||
use windows_sys::Win32::Security::Authorization::SetSecurityInfo;
|
||||
use windows_sys::Win32::Security::Authorization::EXPLICIT_ACCESS_W;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_IS_SID;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_IS_UNKNOWN;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_W;
|
||||
use windows_sys::Win32::Security::EqualSid;
|
||||
use windows_sys::Win32::Security::GetAce;
|
||||
use windows_sys::Win32::Security::GetAclInformation;
|
||||
use windows_sys::Win32::Security::ACCESS_ALLOWED_ACE;
|
||||
use windows_sys::Win32::Security::ACE_HEADER;
|
||||
use windows_sys::Win32::Security::ACL;
|
||||
use windows_sys::Win32::Security::ACL_SIZE_INFORMATION;
|
||||
use windows_sys::Win32::Security::DACL_SECURITY_INFORMATION;
|
||||
use windows_sys::Win32::Storage::FileSystem::CreateFileW;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_NORMAL;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_GENERIC_EXECUTE;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_GENERIC_READ;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_GENERIC_WRITE;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_SHARE_READ;
|
||||
use windows_sys::Win32::Storage::FileSystem::FILE_SHARE_WRITE;
|
||||
use windows_sys::Win32::Storage::FileSystem::OPEN_EXISTING;
|
||||
const SE_KERNEL_OBJECT: u32 = 6;
|
||||
const INHERIT_ONLY_ACE: u8 = 0x08;
|
||||
|
||||
pub unsafe fn dacl_has_write_allow_for_sid(p_dacl: *mut ACL, psid: *mut c_void) -> bool {
|
||||
if p_dacl.is_null() {
|
||||
return false;
|
||||
}
|
||||
let mut info: ACL_SIZE_INFORMATION = std::mem::zeroed();
|
||||
let ok = GetAclInformation(
|
||||
p_dacl as *const ACL,
|
||||
&mut info as *mut _ as *mut c_void,
|
||||
std::mem::size_of::<ACL_SIZE_INFORMATION>() as u32,
|
||||
AclSizeInformation,
|
||||
);
|
||||
if ok == 0 {
|
||||
return false;
|
||||
}
|
||||
let count = info.AceCount as usize;
|
||||
for i in 0..count {
|
||||
let mut p_ace: *mut c_void = std::ptr::null_mut();
|
||||
if GetAce(p_dacl as *const ACL, i as u32, &mut p_ace) == 0 {
|
||||
continue;
|
||||
}
|
||||
let hdr = &*(p_ace as *const ACE_HEADER);
|
||||
if hdr.AceType != 0 {
|
||||
continue; // ACCESS_ALLOWED_ACE_TYPE
|
||||
}
|
||||
// Ignore ACEs that are inherit-only (do not apply to the current object)
|
||||
if (hdr.AceFlags & INHERIT_ONLY_ACE) != 0 {
|
||||
continue;
|
||||
}
|
||||
let ace = &*(p_ace as *const ACCESS_ALLOWED_ACE);
|
||||
let mask = ace.Mask;
|
||||
let base = p_ace as usize;
|
||||
let sid_ptr =
|
||||
(base + std::mem::size_of::<ACE_HEADER>() + std::mem::size_of::<u32>()) as *mut c_void;
|
||||
let eq = EqualSid(sid_ptr, psid);
|
||||
if eq != 0 && (mask & FILE_GENERIC_WRITE) != 0 {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
// Compute effective rights for a trustee SID against a DACL and decide if write is effectively allowed.
|
||||
// This accounts for deny ACEs and ordering; falls back to a conservative per-ACE scan if the API fails.
|
||||
#[allow(dead_code)]
|
||||
pub unsafe fn dacl_effective_allows_write(p_dacl: *mut ACL, psid: *mut c_void) -> bool {
|
||||
if p_dacl.is_null() {
|
||||
return false;
|
||||
}
|
||||
use windows_sys::Win32::Security::Authorization::GetEffectiveRightsFromAclW;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_IS_SID;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_IS_UNKNOWN;
|
||||
use windows_sys::Win32::Security::Authorization::TRUSTEE_W;
|
||||
|
||||
let trustee = TRUSTEE_W {
|
||||
pMultipleTrustee: std::ptr::null_mut(),
|
||||
MultipleTrusteeOperation: 0,
|
||||
TrusteeForm: TRUSTEE_IS_SID,
|
||||
TrusteeType: TRUSTEE_IS_UNKNOWN,
|
||||
ptstrName: psid as *mut u16,
|
||||
};
|
||||
let mut access: u32 = 0;
|
||||
let ok = GetEffectiveRightsFromAclW(p_dacl, &trustee, &mut access);
|
||||
if ok != 0 {
|
||||
// Check for generic or specific write bits
|
||||
let write_bits = FILE_GENERIC_WRITE
|
||||
| windows_sys::Win32::Storage::FileSystem::FILE_WRITE_DATA
|
||||
| windows_sys::Win32::Storage::FileSystem::FILE_APPEND_DATA
|
||||
| windows_sys::Win32::Storage::FileSystem::FILE_WRITE_EA
|
||||
| windows_sys::Win32::Storage::FileSystem::FILE_WRITE_ATTRIBUTES;
|
||||
return (access & write_bits) != 0;
|
||||
}
|
||||
// Fallback: simple allow ACE scan (already ignores inherit-only)
|
||||
dacl_has_write_allow_for_sid(p_dacl, psid)
|
||||
}
|
||||
pub unsafe fn add_allow_ace(path: &Path, psid: *mut c_void) -> Result<bool> {
|
||||
let mut p_sd: *mut c_void = std::ptr::null_mut();
|
||||
let mut p_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code = GetNamedSecurityInfoW(
|
||||
to_wide(path).as_ptr(),
|
||||
1,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
&mut p_dacl,
|
||||
std::ptr::null_mut(),
|
||||
&mut p_sd,
|
||||
);
|
||||
if code != ERROR_SUCCESS {
|
||||
return Err(anyhow!("GetNamedSecurityInfoW failed: {}", code));
|
||||
}
|
||||
let mut added = false;
|
||||
if !dacl_has_write_allow_for_sid(p_dacl, psid) {
|
||||
let trustee = TRUSTEE_W {
|
||||
pMultipleTrustee: std::ptr::null_mut(),
|
||||
MultipleTrusteeOperation: 0,
|
||||
TrusteeForm: TRUSTEE_IS_SID,
|
||||
TrusteeType: TRUSTEE_IS_UNKNOWN,
|
||||
ptstrName: psid as *mut u16,
|
||||
};
|
||||
let mut explicit: EXPLICIT_ACCESS_W = std::mem::zeroed();
|
||||
explicit.grfAccessPermissions =
|
||||
FILE_GENERIC_READ | FILE_GENERIC_WRITE | FILE_GENERIC_EXECUTE;
|
||||
explicit.grfAccessMode = 2; // SET_ACCESS
|
||||
explicit.grfInheritance = CONTAINER_INHERIT_ACE | OBJECT_INHERIT_ACE;
|
||||
explicit.Trustee = trustee;
|
||||
let mut p_new_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code2 = SetEntriesInAclW(1, &explicit, p_dacl, &mut p_new_dacl);
|
||||
if code2 == ERROR_SUCCESS {
|
||||
let code3 = SetNamedSecurityInfoW(
|
||||
to_wide(path).as_ptr() as *mut u16,
|
||||
1,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
p_new_dacl,
|
||||
std::ptr::null_mut(),
|
||||
);
|
||||
if code3 == ERROR_SUCCESS {
|
||||
added = true;
|
||||
}
|
||||
if !p_new_dacl.is_null() {
|
||||
LocalFree(p_new_dacl as HLOCAL);
|
||||
}
|
||||
}
|
||||
}
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
Ok(added)
|
||||
}
|
||||
|
||||
pub unsafe fn revoke_ace(path: &Path, psid: *mut c_void) {
|
||||
let mut p_sd: *mut c_void = std::ptr::null_mut();
|
||||
let mut p_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code = GetNamedSecurityInfoW(
|
||||
to_wide(path).as_ptr(),
|
||||
1,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
&mut p_dacl,
|
||||
std::ptr::null_mut(),
|
||||
&mut p_sd,
|
||||
);
|
||||
if code != ERROR_SUCCESS {
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
return;
|
||||
}
|
||||
let trustee = TRUSTEE_W {
|
||||
pMultipleTrustee: std::ptr::null_mut(),
|
||||
MultipleTrusteeOperation: 0,
|
||||
TrusteeForm: TRUSTEE_IS_SID,
|
||||
TrusteeType: TRUSTEE_IS_UNKNOWN,
|
||||
ptstrName: psid as *mut u16,
|
||||
};
|
||||
let mut explicit: EXPLICIT_ACCESS_W = std::mem::zeroed();
|
||||
explicit.grfAccessPermissions = 0;
|
||||
explicit.grfAccessMode = 4; // REVOKE_ACCESS
|
||||
explicit.grfInheritance = CONTAINER_INHERIT_ACE | OBJECT_INHERIT_ACE;
|
||||
explicit.Trustee = trustee;
|
||||
let mut p_new_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code2 = SetEntriesInAclW(1, &explicit, p_dacl, &mut p_new_dacl);
|
||||
if code2 == ERROR_SUCCESS {
|
||||
let _ = SetNamedSecurityInfoW(
|
||||
to_wide(path).as_ptr() as *mut u16,
|
||||
1,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
p_new_dacl,
|
||||
std::ptr::null_mut(),
|
||||
);
|
||||
if !p_new_dacl.is_null() {
|
||||
LocalFree(p_new_dacl as HLOCAL);
|
||||
}
|
||||
}
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
}
|
||||
|
||||
pub unsafe fn allow_null_device(psid: *mut c_void) {
|
||||
let desired = 0x00020000 | 0x00040000; // READ_CONTROL | WRITE_DAC
|
||||
let h = CreateFileW(
|
||||
to_wide(r"\\.\NUL").as_ptr(),
|
||||
desired,
|
||||
FILE_SHARE_READ | FILE_SHARE_WRITE,
|
||||
std::ptr::null_mut(),
|
||||
OPEN_EXISTING,
|
||||
FILE_ATTRIBUTE_NORMAL,
|
||||
0,
|
||||
);
|
||||
if h == 0 || h == INVALID_HANDLE_VALUE {
|
||||
return;
|
||||
}
|
||||
let mut p_sd: *mut c_void = std::ptr::null_mut();
|
||||
let mut p_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code = GetSecurityInfo(
|
||||
h,
|
||||
SE_KERNEL_OBJECT as i32,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
&mut p_dacl,
|
||||
std::ptr::null_mut(),
|
||||
&mut p_sd,
|
||||
);
|
||||
if code == ERROR_SUCCESS {
|
||||
let trustee = TRUSTEE_W {
|
||||
pMultipleTrustee: std::ptr::null_mut(),
|
||||
MultipleTrusteeOperation: 0,
|
||||
TrusteeForm: TRUSTEE_IS_SID,
|
||||
TrusteeType: TRUSTEE_IS_UNKNOWN,
|
||||
ptstrName: psid as *mut u16,
|
||||
};
|
||||
let mut explicit: EXPLICIT_ACCESS_W = std::mem::zeroed();
|
||||
explicit.grfAccessPermissions =
|
||||
FILE_GENERIC_READ | FILE_GENERIC_WRITE | FILE_GENERIC_EXECUTE;
|
||||
explicit.grfAccessMode = 2; // SET_ACCESS
|
||||
explicit.grfInheritance = 0;
|
||||
explicit.Trustee = trustee;
|
||||
let mut p_new_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code2 = SetEntriesInAclW(1, &explicit, p_dacl, &mut p_new_dacl);
|
||||
if code2 == ERROR_SUCCESS {
|
||||
let _ = SetSecurityInfo(
|
||||
h,
|
||||
SE_KERNEL_OBJECT as i32,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
p_new_dacl,
|
||||
std::ptr::null_mut(),
|
||||
);
|
||||
if !p_new_dacl.is_null() {
|
||||
LocalFree(p_new_dacl as HLOCAL);
|
||||
}
|
||||
}
|
||||
}
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
CloseHandle(h);
|
||||
}
|
||||
const CONTAINER_INHERIT_ACE: u32 = 0x2;
|
||||
const OBJECT_INHERIT_ACE: u32 = 0x1;
|
||||
@@ -1,37 +0,0 @@
|
||||
use crate::policy::SandboxMode;
|
||||
use crate::policy::SandboxPolicy;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn compute_allow_paths(
|
||||
policy: &SandboxPolicy,
|
||||
_policy_cwd: &Path,
|
||||
command_cwd: &Path,
|
||||
env_map: &HashMap<String, String>,
|
||||
) -> Vec<PathBuf> {
|
||||
let mut allow: Vec<PathBuf> = Vec::new();
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
if matches!(policy.0, SandboxMode::WorkspaceWrite) {
|
||||
let abs = command_cwd.to_path_buf();
|
||||
if seen.insert(abs.to_string_lossy().to_string()) && abs.exists() {
|
||||
allow.push(abs);
|
||||
}
|
||||
}
|
||||
if !matches!(policy.0, SandboxMode::ReadOnly) {
|
||||
for key in ["TEMP", "TMP"] {
|
||||
if let Some(v) = env_map.get(key) {
|
||||
let abs = PathBuf::from(v);
|
||||
if seen.insert(abs.to_string_lossy().to_string()) && abs.exists() {
|
||||
allow.push(abs);
|
||||
}
|
||||
} else if let Ok(v) = std::env::var(key) {
|
||||
let abs = PathBuf::from(v);
|
||||
if seen.insert(abs.to_string_lossy().to_string()) && abs.exists() {
|
||||
allow.push(abs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
allow
|
||||
}
|
||||
@@ -1,147 +0,0 @@
|
||||
use crate::acl::dacl_effective_allows_write;
|
||||
use crate::token::world_sid;
|
||||
use crate::winutil::to_wide;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::c_void;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
use windows_sys::Win32::Foundation::LocalFree;
|
||||
use windows_sys::Win32::Foundation::ERROR_SUCCESS;
|
||||
use windows_sys::Win32::Foundation::HLOCAL;
|
||||
use windows_sys::Win32::Security::Authorization::GetNamedSecurityInfoW;
|
||||
use windows_sys::Win32::Security::ACL;
|
||||
use windows_sys::Win32::Security::DACL_SECURITY_INFORMATION;
|
||||
|
||||
fn unique_push(set: &mut HashSet<PathBuf>, out: &mut Vec<PathBuf>, p: PathBuf) {
|
||||
if let Ok(abs) = p.canonicalize() {
|
||||
if set.insert(abs.clone()) {
|
||||
out.push(abs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gather_candidates(cwd: &Path, env: &std::collections::HashMap<String, String>) -> Vec<PathBuf> {
|
||||
let mut set: HashSet<PathBuf> = HashSet::new();
|
||||
let mut out: Vec<PathBuf> = Vec::new();
|
||||
// Core roots
|
||||
for p in [
|
||||
PathBuf::from("C:/"),
|
||||
PathBuf::from("C:/Windows"),
|
||||
PathBuf::from("C:/ProgramData"),
|
||||
] {
|
||||
unique_push(&mut set, &mut out, p);
|
||||
}
|
||||
// User roots
|
||||
if let Some(up) = std::env::var_os("USERPROFILE") {
|
||||
unique_push(&mut set, &mut out, PathBuf::from(up));
|
||||
}
|
||||
if let Some(pubp) = std::env::var_os("PUBLIC") {
|
||||
unique_push(&mut set, &mut out, PathBuf::from(pubp));
|
||||
}
|
||||
// CWD
|
||||
unique_push(&mut set, &mut out, cwd.to_path_buf());
|
||||
// TEMP/TMP
|
||||
for k in ["TEMP", "TMP"] {
|
||||
if let Some(v) = env.get(k).cloned().or_else(|| std::env::var(k).ok()) {
|
||||
unique_push(&mut set, &mut out, PathBuf::from(v));
|
||||
}
|
||||
}
|
||||
// PATH entries
|
||||
if let Some(path) = env
|
||||
.get("PATH")
|
||||
.cloned()
|
||||
.or_else(|| std::env::var("PATH").ok())
|
||||
{
|
||||
// PATH entries are ';'-separated on Windows.
for part in path.split(';') {
|
||||
if !part.is_empty() {
|
||||
unique_push(&mut set, &mut out, PathBuf::from(part));
|
||||
}
|
||||
}
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
unsafe fn path_has_world_write_allow(path: &Path) -> Result<bool> {
|
||||
let mut p_sd: *mut c_void = std::ptr::null_mut();
|
||||
let mut p_dacl: *mut ACL = std::ptr::null_mut();
|
||||
let code = GetNamedSecurityInfoW(
|
||||
to_wide(path).as_ptr(),
|
||||
1,
|
||||
DACL_SECURITY_INFORMATION,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
&mut p_dacl,
|
||||
std::ptr::null_mut(),
|
||||
&mut p_sd,
|
||||
);
|
||||
if code != ERROR_SUCCESS {
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
return Ok(false);
|
||||
}
|
||||
let mut world = world_sid()?;
|
||||
let psid_world = world.as_mut_ptr() as *mut c_void;
|
||||
let has = dacl_effective_allows_write(p_dacl, psid_world);
|
||||
if !p_sd.is_null() {
|
||||
LocalFree(p_sd as HLOCAL);
|
||||
}
|
||||
Ok(has)
|
||||
}
|
||||
|
||||
pub fn audit_everyone_writable(
|
||||
cwd: &Path,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
) -> Result<()> {
|
||||
let start = Instant::now();
|
||||
let mut flagged: Vec<PathBuf> = Vec::new();
|
||||
let mut checked = 0usize;
|
||||
let candidates = gather_candidates(cwd, env);
|
||||
for root in candidates {
|
||||
if start.elapsed() > Duration::from_secs(5) || checked > 5000 {
|
||||
break;
|
||||
}
|
||||
checked += 1;
|
||||
if unsafe { path_has_world_write_allow(&root)? } {
|
||||
flagged.push(root.clone());
|
||||
}
|
||||
// one level down best-effort
|
||||
if let Ok(read) = std::fs::read_dir(&root) {
|
||||
for ent in read.flatten().take(50) {
|
||||
let p = ent.path();
|
||||
if start.elapsed() > Duration::from_secs(5) || checked > 5000 {
|
||||
break;
|
||||
}
|
||||
// Skip reparse points (symlinks/junctions) to avoid auditing link ACLs
|
||||
let ft = match ent.file_type() {
|
||||
Ok(ft) => ft,
|
||||
Err(_) => continue,
|
||||
};
|
||||
if ft.is_symlink() {
|
||||
continue;
|
||||
}
|
||||
if ft.is_dir() {
|
||||
checked += 1;
|
||||
if unsafe { path_has_world_write_allow(&p)? } {
|
||||
flagged.push(p);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if !flagged.is_empty() {
|
||||
let mut list = String::new();
|
||||
for p in flagged {
|
||||
list.push_str(&format!("\n - {}", p.display()));
|
||||
}
|
||||
return Err(anyhow!(
|
||||
"Refusing to run: found directories writable by Everyone: {}",
|
||||
list
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
use rand::rngs::SmallRng;
|
||||
use rand::RngCore;
|
||||
use rand::SeedableRng;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct CapSids {
|
||||
pub workspace: String,
|
||||
pub readonly: String,
|
||||
}
|
||||
|
||||
pub fn cap_sid_file(policy_cwd: &Path) -> PathBuf {
|
||||
policy_cwd.join(".codex").join("cap_sid")
|
||||
}
|
||||
|
||||
fn make_random_cap_sid_string() -> String {
|
||||
let mut rng = SmallRng::from_entropy();
|
||||
let a = rng.next_u32();
|
||||
let b = rng.next_u32();
|
||||
let c = rng.next_u32();
|
||||
let d = rng.next_u32();
|
||||
format!("S-1-5-21-{}-{}-{}-{}", a, b, c, d)
|
||||
}
|
||||
|
||||
pub fn load_or_create_cap_sids(policy_cwd: &Path) -> CapSids {
|
||||
let path = cap_sid_file(policy_cwd);
|
||||
if path.exists() {
|
||||
if let Ok(txt) = fs::read_to_string(&path) {
|
||||
let t = txt.trim();
|
||||
if t.starts_with('{') && t.ends_with('}') {
|
||||
if let Ok(obj) = serde_json::from_str::<CapSids>(t) {
|
||||
return obj;
|
||||
}
|
||||
} else if !t.is_empty() {
|
||||
return CapSids {
|
||||
workspace: t.to_string(),
|
||||
readonly: make_random_cap_sid_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
CapSids {
|
||||
workspace: make_random_cap_sid_string(),
|
||||
readonly: make_random_cap_sid_string(),
|
||||
}
|
||||
}
|
||||
@@ -1,165 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::fs::File;
|
||||
use std::fs::{self};
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn normalize_null_device_env(env_map: &mut HashMap<String, String>) {
|
||||
let keys: Vec<String> = env_map.keys().cloned().collect();
|
||||
for k in keys {
|
||||
if let Some(v) = env_map.get(&k).cloned() {
|
||||
let t = v.trim().to_ascii_lowercase();
|
||||
if t == "/dev/null" || t == "\\\\dev\\\\null" {
|
||||
env_map.insert(k, "NUL".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ensure_non_interactive_pager(env_map: &mut HashMap<String, String>) {
|
||||
env_map
|
||||
.entry("GIT_PAGER".into())
|
||||
.or_insert_with(|| "more.com".into());
|
||||
env_map
|
||||
.entry("PAGER".into())
|
||||
.or_insert_with(|| "more.com".into());
|
||||
env_map.entry("LESS".into()).or_insert_with(|| "".into());
|
||||
}
|
||||
|
||||
fn prepend_path(env_map: &mut HashMap<String, String>, prefix: &str) {
|
||||
let existing = env_map
|
||||
.get("PATH")
|
||||
.cloned()
|
||||
.or_else(|| env::var("PATH").ok())
|
||||
.unwrap_or_default();
|
||||
let parts: Vec<String> = existing.split(';').map(|s| s.to_string()).collect();
|
||||
if parts
|
||||
.first()
|
||||
.map(|p| p.eq_ignore_ascii_case(prefix))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return;
|
||||
}
|
||||
let mut new_path = String::new();
|
||||
new_path.push_str(prefix);
|
||||
if !existing.is_empty() {
|
||||
new_path.push(';');
|
||||
new_path.push_str(&existing);
|
||||
}
|
||||
env_map.insert("PATH".into(), new_path);
|
||||
}
|
||||
|
||||
fn reorder_pathext_for_stubs(env_map: &mut HashMap<String, String>) {
|
||||
let default = env_map
|
||||
.get("PATHEXT")
|
||||
.cloned()
|
||||
.or_else(|| env::var("PATHEXT").ok())
|
||||
.unwrap_or(".COM;.EXE;.BAT;.CMD".to_string());
|
||||
let exts: Vec<String> = default
|
||||
.split(';')
|
||||
.filter(|e| !e.is_empty())
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
let exts_norm: Vec<String> = exts.iter().map(|e| e.to_ascii_uppercase()).collect();
|
||||
let want = [".BAT", ".CMD"]; // move to front if present
|
||||
let mut front: Vec<String> = Vec::new();
|
||||
for w in want {
|
||||
if let Some(idx) = exts_norm.iter().position(|e| e == w) {
|
||||
front.push(exts[idx].clone());
|
||||
}
|
||||
}
|
||||
let rest: Vec<String> = exts
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|(i, _)| {
|
||||
let up = &exts_norm[*i];
|
||||
up != ".BAT" && up != ".CMD"
|
||||
})
|
||||
.map(|(_, e)| e)
|
||||
.collect();
|
||||
let mut combined = Vec::new();
|
||||
combined.extend(front);
|
||||
combined.extend(rest);
|
||||
env_map.insert("PATHEXT".into(), combined.join(";"));
|
||||
}
|
||||
|
||||
fn ensure_denybin(tools: &[&str], denybin_dir: Option<&Path>) -> Result<PathBuf> {
|
||||
let base = match denybin_dir {
|
||||
Some(p) => p.to_path_buf(),
|
||||
None => {
|
||||
let home = dirs_next::home_dir().ok_or_else(|| anyhow::anyhow!("no home dir"))?;
|
||||
home.join(".sbx-denybin")
|
||||
}
|
||||
};
|
||||
fs::create_dir_all(&base)?;
|
||||
for tool in tools {
|
||||
for ext in [".bat", ".cmd"] {
|
||||
let path = base.join(format!("{}{}", tool, ext));
|
||||
if !path.exists() {
|
||||
let mut f = File::create(&path)?;
|
||||
f.write_all(b"@echo off\r\nexit /b 1\r\n")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(base)
|
||||
}
|
||||
|
||||
pub fn apply_no_network_to_env(env_map: &mut HashMap<String, String>) -> Result<()> {
|
||||
env_map.insert("SBX_NONET_ACTIVE".into(), "1".into());
|
||||
env_map
|
||||
.entry("HTTP_PROXY".into())
|
||||
.or_insert_with(|| "http://127.0.0.1:9".into());
|
||||
env_map
|
||||
.entry("HTTPS_PROXY".into())
|
||||
.or_insert_with(|| "http://127.0.0.1:9".into());
|
||||
env_map
|
||||
.entry("ALL_PROXY".into())
|
||||
.or_insert_with(|| "http://127.0.0.1:9".into());
|
||||
env_map
|
||||
.entry("NO_PROXY".into())
|
||||
.or_insert_with(|| "localhost,127.0.0.1,::1".into());
|
||||
env_map
|
||||
.entry("PIP_NO_INDEX".into())
|
||||
.or_insert_with(|| "1".into());
|
||||
env_map
|
||||
.entry("PIP_DISABLE_PIP_VERSION_CHECK".into())
|
||||
.or_insert_with(|| "1".into());
|
||||
env_map
|
||||
.entry("NPM_CONFIG_OFFLINE".into())
|
||||
.or_insert_with(|| "true".into());
|
||||
env_map
|
||||
.entry("CARGO_NET_OFFLINE".into())
|
||||
.or_insert_with(|| "true".into());
|
||||
env_map
|
||||
.entry("GIT_HTTP_PROXY".into())
|
||||
.or_insert_with(|| "http://127.0.0.1:9".into());
|
||||
env_map
|
||||
.entry("GIT_HTTPS_PROXY".into())
|
||||
.or_insert_with(|| "http://127.0.0.1:9".into());
|
||||
env_map
|
||||
.entry("GIT_SSH_COMMAND".into())
|
||||
.or_insert_with(|| "cmd /c exit 1".into());
|
||||
env_map
|
||||
.entry("GIT_ALLOW_PROTOCOLS".into())
|
||||
.or_insert_with(|| "".into());
|
||||
|
||||
// Block interactive network tools that bypass HTTP(S) proxy settings, but
|
||||
// allow curl/wget to run so commands like `curl --version` still succeed.
|
||||
// Network access is disabled via proxy envs above.
|
||||
let base = ensure_denybin(&["ssh", "scp"], None)?;
|
||||
// Clean up any stale stubs from previous runs so real curl/wget can run.
|
||||
for tool in ["curl", "wget"] {
|
||||
for ext in [".bat", ".cmd"] {
|
||||
let p = base.join(format!("{}{}", tool, ext));
|
||||
if p.exists() {
|
||||
let _ = std::fs::remove_file(&p);
|
||||
}
|
||||
}
|
||||
}
|
||||
prepend_path(env_map, &base.to_string_lossy());
|
||||
reorder_pathext_for_stubs(env_map);
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,452 +0,0 @@
|
||||
macro_rules! windows_modules {
|
||||
($($name:ident),+ $(,)?) => {
|
||||
$(#[cfg(target_os = "windows")] mod $name;)+
|
||||
};
|
||||
}
|
||||
|
||||
windows_modules!(acl, allow, audit, cap, env, logging, policy, token, winutil);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub use windows_impl::preflight_audit_everyone_writable;
|
||||
#[cfg(target_os = "windows")]
|
||||
pub use windows_impl::run_windows_sandbox_capture;
|
||||
#[cfg(target_os = "windows")]
|
||||
pub use windows_impl::CaptureResult;
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub use stub::preflight_audit_everyone_writable;
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub use stub::run_windows_sandbox_capture;
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub use stub::CaptureResult;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows_impl {
|
||||
use super::acl::add_allow_ace;
|
||||
use super::acl::allow_null_device;
|
||||
use super::acl::revoke_ace;
|
||||
use super::allow::compute_allow_paths;
|
||||
use super::audit;
|
||||
use super::cap::cap_sid_file;
|
||||
use super::cap::load_or_create_cap_sids;
|
||||
use super::env::apply_no_network_to_env;
|
||||
use super::env::ensure_non_interactive_pager;
|
||||
use super::env::normalize_null_device_env;
|
||||
use super::logging::debug_log;
|
||||
use super::logging::log_failure;
|
||||
use super::logging::log_start;
|
||||
use super::logging::log_success;
|
||||
use super::policy::SandboxMode;
|
||||
use super::policy::SandboxPolicy;
|
||||
use super::token::convert_string_sid_to_sid;
|
||||
use super::winutil::format_last_error;
|
||||
use super::winutil::to_wide;
|
||||
use anyhow::Result;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::c_void;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::ptr;
|
||||
use windows_sys::Win32::Foundation::CloseHandle;
|
||||
use windows_sys::Win32::Foundation::GetLastError;
|
||||
use windows_sys::Win32::Foundation::SetHandleInformation;
|
||||
use windows_sys::Win32::Foundation::HANDLE;
|
||||
use windows_sys::Win32::Foundation::HANDLE_FLAG_INHERIT;
|
||||
use windows_sys::Win32::System::Pipes::CreatePipe;
|
||||
use windows_sys::Win32::System::Threading::CreateProcessAsUserW;
|
||||
use windows_sys::Win32::System::Threading::GetExitCodeProcess;
|
||||
use windows_sys::Win32::System::Threading::WaitForSingleObject;
|
||||
use windows_sys::Win32::System::Threading::CREATE_UNICODE_ENVIRONMENT;
|
||||
use windows_sys::Win32::System::Threading::INFINITE;
|
||||
use windows_sys::Win32::System::Threading::PROCESS_INFORMATION;
|
||||
use windows_sys::Win32::System::Threading::STARTF_USESTDHANDLES;
|
||||
use windows_sys::Win32::System::Threading::STARTUPINFOW;
|
||||
|
||||
type PipeHandles = ((HANDLE, HANDLE), (HANDLE, HANDLE), (HANDLE, HANDLE));
|
||||
|
||||
fn ensure_dir(p: &Path) -> Result<()> {
|
||||
if let Some(d) = p.parent() {
|
||||
std::fs::create_dir_all(d)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn make_env_block(env: &HashMap<String, String>) -> Vec<u16> {
|
||||
let mut items: Vec<(String, String)> =
|
||||
env.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
|
||||
items.sort_by(|a, b| {
|
||||
a.0.to_uppercase()
|
||||
.cmp(&b.0.to_uppercase())
|
||||
.then(a.0.cmp(&b.0))
|
||||
});
|
||||
let mut w: Vec<u16> = Vec::new();
|
||||
for (k, v) in items {
|
||||
let mut s = to_wide(format!("{}={}", k, v));
|
||||
s.pop();
|
||||
w.extend_from_slice(&s);
|
||||
w.push(0);
|
||||
}
|
||||
w.push(0);
|
||||
w
|
||||
}
|
||||
|
||||
// Quote a single Windows command-line argument following the rules used by
|
||||
// CommandLineToArgvW/CRT so that spaces, quotes, and backslashes are preserved.
|
||||
// Reference behavior matches Rust std::process::Command on Windows.
|
||||
fn quote_windows_arg(arg: &str) -> String {
|
||||
let needs_quotes = arg.is_empty()
|
||||
|| arg
|
||||
.chars()
|
||||
.any(|c| matches!(c, ' ' | '\t' | '\n' | '\r' | '"'));
|
||||
if !needs_quotes {
|
||||
return arg.to_string();
|
||||
}
|
||||
|
||||
let mut quoted = String::with_capacity(arg.len() + 2);
|
||||
quoted.push('"');
|
||||
let mut backslashes = 0;
|
||||
for ch in arg.chars() {
|
||||
match ch {
|
||||
'\\' => {
|
||||
backslashes += 1;
|
||||
}
|
||||
'"' => {
|
||||
quoted.push_str(&"\\".repeat(backslashes * 2 + 1));
|
||||
quoted.push('"');
|
||||
backslashes = 0;
|
||||
}
|
||||
_ => {
|
||||
if backslashes > 0 {
|
||||
quoted.push_str(&"\\".repeat(backslashes));
|
||||
backslashes = 0;
|
||||
}
|
||||
quoted.push(ch);
|
||||
}
|
||||
}
|
||||
}
|
||||
if backslashes > 0 {
|
||||
quoted.push_str(&"\\".repeat(backslashes * 2));
|
||||
}
|
||||
quoted.push('"');
|
||||
quoted
|
||||
}
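// Illustration (added for clarity, not part of the original file): expected
// behavior of quote_windows_arg for a few representative inputs under the
// CommandLineToArgvW rules referenced above.
//   quote_windows_arg("simple")      -> simple            (no quoting needed)
//   quote_windows_arg("has space")   -> "has space"
//   quote_windows_arg("say \"hi\"")  -> "say \"hi\""      (inner quotes escaped)
//   quote_windows_arg("")            -> ""                (empty arg is quoted)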
|
||||
|
||||
unsafe fn setup_stdio_pipes() -> io::Result<PipeHandles> {
|
||||
let mut in_r: HANDLE = 0;
|
||||
let mut in_w: HANDLE = 0;
|
||||
let mut out_r: HANDLE = 0;
|
||||
let mut out_w: HANDLE = 0;
|
||||
let mut err_r: HANDLE = 0;
|
||||
let mut err_w: HANDLE = 0;
|
||||
if CreatePipe(&mut in_r, &mut in_w, ptr::null_mut(), 0) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
if CreatePipe(&mut out_r, &mut out_w, ptr::null_mut(), 0) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
if CreatePipe(&mut err_r, &mut err_w, ptr::null_mut(), 0) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
if SetHandleInformation(in_r, HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
if SetHandleInformation(out_w, HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
if SetHandleInformation(err_w, HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT) == 0 {
|
||||
return Err(io::Error::from_raw_os_error(GetLastError() as i32));
|
||||
}
|
||||
Ok(((in_r, in_w), (out_r, out_w), (err_r, err_w)))
|
||||
}
|
||||
|
||||
pub struct CaptureResult {
|
||||
pub exit_code: i32,
|
||||
pub stdout: Vec<u8>,
|
||||
pub stderr: Vec<u8>,
|
||||
pub timed_out: bool,
|
||||
}
|
||||
|
||||
pub fn preflight_audit_everyone_writable(
|
||||
cwd: &Path,
|
||||
env_map: &HashMap<String, String>,
|
||||
) -> Result<()> {
|
||||
audit::audit_everyone_writable(cwd, env_map)
|
||||
}
|
||||
|
||||
pub fn run_windows_sandbox_capture(
|
||||
policy_json_or_preset: &str,
|
||||
sandbox_policy_cwd: &Path,
|
||||
command: Vec<String>,
|
||||
cwd: &Path,
|
||||
mut env_map: HashMap<String, String>,
|
||||
timeout_ms: Option<u64>,
|
||||
) -> Result<CaptureResult> {
|
||||
let policy = SandboxPolicy::parse(policy_json_or_preset)?;
|
||||
normalize_null_device_env(&mut env_map);
|
||||
ensure_non_interactive_pager(&mut env_map);
|
||||
apply_no_network_to_env(&mut env_map)?;
|
||||
|
||||
let current_dir = cwd.to_path_buf();
|
||||
// for now, don't fail if we detect world-writable directories
|
||||
// audit::audit_everyone_writable(&current_dir, &env_map)?;
|
||||
log_start(&command);
|
||||
let (h_token, psid_to_use): (HANDLE, *mut c_void) = unsafe {
|
||||
match &policy.0 {
|
||||
SandboxMode::ReadOnly => {
|
||||
let caps = load_or_create_cap_sids(sandbox_policy_cwd);
|
||||
ensure_dir(&cap_sid_file(sandbox_policy_cwd))?;
|
||||
fs::write(
|
||||
cap_sid_file(sandbox_policy_cwd),
|
||||
serde_json::to_string(&caps)?,
|
||||
)?;
|
||||
let psid = convert_string_sid_to_sid(&caps.readonly).unwrap();
|
||||
super::token::create_readonly_token_with_cap(psid)?
|
||||
}
|
||||
SandboxMode::WorkspaceWrite => {
|
||||
let caps = load_or_create_cap_sids(sandbox_policy_cwd);
|
||||
ensure_dir(&cap_sid_file(sandbox_policy_cwd))?;
|
||||
fs::write(
|
||||
cap_sid_file(sandbox_policy_cwd),
|
||||
serde_json::to_string(&caps)?,
|
||||
)?;
|
||||
let psid = convert_string_sid_to_sid(&caps.workspace).unwrap();
|
||||
super::token::create_workspace_write_token_with_cap(psid)?
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
unsafe {
|
||||
if matches!(policy.0, SandboxMode::WorkspaceWrite) {
|
||||
if let Ok(base) = super::token::get_current_token_for_restriction() {
|
||||
if let Ok(bytes) = super::token::get_logon_sid_bytes(base) {
|
||||
let mut tmp = bytes.clone();
|
||||
let psid2 = tmp.as_mut_ptr() as *mut c_void;
|
||||
allow_null_device(psid2);
|
||||
}
|
||||
windows_sys::Win32::Foundation::CloseHandle(base);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let persist_aces = matches!(policy.0, SandboxMode::WorkspaceWrite);
|
||||
let allow = compute_allow_paths(&policy, sandbox_policy_cwd, &current_dir, &env_map);
|
||||
let mut guards: Vec<(PathBuf, *mut c_void)> = Vec::new();
|
||||
unsafe {
|
||||
for p in &allow {
|
||||
if let Ok(added) = add_allow_ace(p, psid_to_use) {
|
||||
if added {
|
||||
if persist_aces {
|
||||
if p.is_dir() {
|
||||
// best-effort seeding omitted intentionally
|
||||
}
|
||||
} else {
|
||||
guards.push((p.clone(), psid_to_use));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
allow_null_device(psid_to_use);
|
||||
}
|
||||
|
||||
let (stdin_pair, stdout_pair, stderr_pair) = unsafe { setup_stdio_pipes()? };
|
||||
let ((in_r, in_w), (out_r, out_w), (err_r, err_w)) = (stdin_pair, stdout_pair, stderr_pair);
|
||||
let mut si: STARTUPINFOW = unsafe { std::mem::zeroed() };
|
||||
si.cb = std::mem::size_of::<STARTUPINFOW>() as u32;
|
||||
si.dwFlags |= STARTF_USESTDHANDLES;
|
||||
si.hStdInput = in_r;
|
||||
si.hStdOutput = out_w;
|
||||
si.hStdError = err_w;
|
||||
|
||||
let mut pi: PROCESS_INFORMATION = unsafe { std::mem::zeroed() };
|
||||
let cmdline_str = command
|
||||
.iter()
|
||||
.map(|a| quote_windows_arg(a))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let mut cmdline: Vec<u16> = to_wide(&cmdline_str);
|
||||
let env_block = make_env_block(&env_map);
|
||||
let desktop = to_wide("Winsta0\\Default");
|
||||
si.lpDesktop = desktop.as_ptr() as *mut u16;
|
||||
let spawn_res = unsafe {
|
||||
CreateProcessAsUserW(
|
||||
h_token,
|
||||
ptr::null(),
|
||||
cmdline.as_mut_ptr(),
|
||||
ptr::null_mut(),
|
||||
ptr::null_mut(),
|
||||
1,
|
||||
CREATE_UNICODE_ENVIRONMENT,
|
||||
env_block.as_ptr() as *mut c_void,
|
||||
to_wide(cwd).as_ptr(),
|
||||
&si,
|
||||
&mut pi,
|
||||
)
|
||||
};
|
||||
if spawn_res == 0 {
|
||||
let err = unsafe { GetLastError() } as i32;
|
||||
let dbg = format!(
|
||||
"CreateProcessAsUserW failed: {} ({}) | cwd={} | cmd={} | env_u16_len={} | si_flags={}",
|
||||
err,
|
||||
format_last_error(err),
|
||||
cwd.display(),
|
||||
cmdline_str,
|
||||
env_block.len(),
|
||||
si.dwFlags,
|
||||
);
|
||||
debug_log(&dbg);
|
||||
unsafe {
|
||||
CloseHandle(in_r);
|
||||
CloseHandle(in_w);
|
||||
CloseHandle(out_r);
|
||||
CloseHandle(out_w);
|
||||
CloseHandle(err_r);
|
||||
CloseHandle(err_w);
|
||||
CloseHandle(h_token);
|
||||
}
|
||||
return Err(anyhow::anyhow!("CreateProcessAsUserW failed: {}", err));
|
||||
}
|
||||
|
||||
unsafe {
|
||||
CloseHandle(in_r);
|
||||
// Close the parent's stdin write end so the child sees EOF immediately.
|
||||
CloseHandle(in_w);
|
||||
CloseHandle(out_w);
|
||||
CloseHandle(err_w);
|
||||
}
|
||||
|
||||
let (tx_out, rx_out) = std::sync::mpsc::channel::<Vec<u8>>();
|
||||
let (tx_err, rx_err) = std::sync::mpsc::channel::<Vec<u8>>();
|
||||
let t_out = std::thread::spawn(move || {
|
||||
let mut buf = Vec::new();
|
||||
let mut tmp = [0u8; 8192];
|
||||
loop {
|
||||
let mut read_bytes: u32 = 0;
|
||||
let ok = unsafe {
|
||||
windows_sys::Win32::Storage::FileSystem::ReadFile(
|
||||
out_r,
|
||||
tmp.as_mut_ptr(),
|
||||
tmp.len() as u32,
|
||||
&mut read_bytes,
|
||||
std::ptr::null_mut(),
|
||||
)
|
||||
};
|
||||
if ok == 0 || read_bytes == 0 {
|
||||
break;
|
||||
}
|
||||
buf.extend_from_slice(&tmp[..read_bytes as usize]);
|
||||
}
|
||||
let _ = tx_out.send(buf);
|
||||
});
|
||||
let t_err = std::thread::spawn(move || {
|
||||
let mut buf = Vec::new();
|
||||
let mut tmp = [0u8; 8192];
|
||||
loop {
|
||||
let mut read_bytes: u32 = 0;
|
||||
let ok = unsafe {
|
||||
windows_sys::Win32::Storage::FileSystem::ReadFile(
|
||||
err_r,
|
||||
tmp.as_mut_ptr(),
|
||||
tmp.len() as u32,
|
||||
&mut read_bytes,
|
||||
std::ptr::null_mut(),
|
||||
)
|
||||
};
|
||||
if ok == 0 || read_bytes == 0 {
|
||||
break;
|
||||
}
|
||||
buf.extend_from_slice(&tmp[..read_bytes as usize]);
|
||||
}
|
||||
let _ = tx_err.send(buf);
|
||||
});
|
||||
|
||||
let timeout = timeout_ms.map(|ms| ms as u32).unwrap_or(INFINITE);
|
||||
let res = unsafe { WaitForSingleObject(pi.hProcess, timeout) };
|
||||
let timed_out = res == 0x0000_0102;
|
||||
let mut exit_code_u32: u32 = 1;
|
||||
if !timed_out {
|
||||
unsafe {
|
||||
GetExitCodeProcess(pi.hProcess, &mut exit_code_u32);
|
||||
}
|
||||
} else {
|
||||
unsafe {
|
||||
windows_sys::Win32::System::Threading::TerminateProcess(pi.hProcess, 1);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe {
|
||||
if pi.hThread != 0 {
|
||||
CloseHandle(pi.hThread);
|
||||
}
|
||||
if pi.hProcess != 0 {
|
||||
CloseHandle(pi.hProcess);
|
||||
}
|
||||
CloseHandle(h_token);
|
||||
}
|
||||
let _ = t_out.join();
|
||||
let _ = t_err.join();
|
||||
let stdout = rx_out.recv().unwrap_or_default();
|
||||
let stderr = rx_err.recv().unwrap_or_default();
|
||||
let exit_code = if timed_out {
|
||||
128 + 64
|
||||
} else {
|
||||
exit_code_u32 as i32
|
||||
};
|
||||
|
||||
if exit_code == 0 {
|
||||
log_success(&command);
|
||||
} else {
|
||||
log_failure(&command, &format!("exit code {}", exit_code));
|
||||
}
|
||||
|
||||
if !persist_aces {
|
||||
unsafe {
|
||||
for (p, sid) in guards {
|
||||
revoke_ace(&p, sid);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(CaptureResult {
|
||||
exit_code,
|
||||
stdout,
|
||||
stderr,
|
||||
timed_out,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
mod stub {
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct CaptureResult {
|
||||
pub exit_code: i32,
|
||||
pub stdout: Vec<u8>,
|
||||
pub stderr: Vec<u8>,
|
||||
pub timed_out: bool,
|
||||
}
|
||||
|
||||
pub fn preflight_audit_everyone_writable(
|
||||
_cwd: &Path,
|
||||
_env_map: &HashMap<String, String>,
|
||||
) -> Result<()> {
|
||||
bail!("Windows sandbox is only available on Windows")
|
||||
}
|
||||
|
||||
pub fn run_windows_sandbox_capture(
|
||||
_policy_json_or_preset: &str,
|
||||
_sandbox_policy_cwd: &Path,
|
||||
_command: Vec<String>,
|
||||
_cwd: &Path,
|
||||
_env_map: HashMap<String, String>,
|
||||
_timeout_ms: Option<u64>,
|
||||
) -> Result<CaptureResult> {
|
||||
bail!("Windows sandbox is only available on Windows")
|
||||
}
|
||||
}
|
||||
@@ -1,47 +0,0 @@
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
|
||||
const LOG_COMMAND_PREVIEW_LIMIT: usize = 200;
|
||||
pub const LOG_FILE_NAME: &str = "sandbox_commands.rust.log";
|
||||
|
||||
fn preview(command: &[String]) -> String {
|
||||
let joined = command.join(" ");
|
||||
if joined.len() <= LOG_COMMAND_PREVIEW_LIMIT {
|
||||
joined
|
||||
} else {
|
||||
joined[..LOG_COMMAND_PREVIEW_LIMIT].to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn append_line(line: &str) {
|
||||
if let Ok(mut f) = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(LOG_FILE_NAME)
|
||||
{
|
||||
let _ = writeln!(f, "{}", line);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn log_start(command: &[String]) {
|
||||
let p = preview(command);
|
||||
append_line(&format!("START: {}", p));
|
||||
}
|
||||
|
||||
pub fn log_success(command: &[String]) {
|
||||
let p = preview(command);
|
||||
append_line(&format!("SUCCESS: {}", p));
|
||||
}
|
||||
|
||||
pub fn log_failure(command: &[String], detail: &str) {
|
||||
let p = preview(command);
|
||||
append_line(&format!("FAILURE: {} ({})", p, detail));
|
||||
}
|
||||
|
||||
// Debug logging helper. Emits only when SBX_DEBUG=1 to avoid noisy logs.
|
||||
pub fn debug_log(msg: &str) {
|
||||
if std::env::var("SBX_DEBUG").ok().as_deref() == Some("1") {
|
||||
append_line(&format!("DEBUG: {}", msg));
|
||||
eprintln!("{}", msg);
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PolicyJson {
|
||||
pub mode: String,
|
||||
#[serde(default)]
|
||||
pub workspace_roots: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum SandboxMode {
|
||||
ReadOnly,
|
||||
WorkspaceWrite,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct SandboxPolicy(pub SandboxMode);
|
||||
|
||||
impl SandboxPolicy {
|
||||
pub fn parse(value: &str) -> Result<Self> {
|
||||
match value {
|
||||
"read-only" => Ok(SandboxPolicy(SandboxMode::ReadOnly)),
|
||||
"workspace-write" => Ok(SandboxPolicy(SandboxMode::WorkspaceWrite)),
|
||||
other => {
|
||||
let pj: PolicyJson = serde_json::from_str(other)?;
|
||||
Ok(match pj.mode.as_str() {
|
||||
"read-only" => SandboxPolicy(SandboxMode::ReadOnly),
|
||||
"workspace-write" => SandboxPolicy(SandboxMode::WorkspaceWrite),
|
||||
_ => SandboxPolicy(SandboxMode::ReadOnly),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
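// Illustration (added for clarity, not part of the original file): parse
// accepts either a preset name or a JSON policy document, e.g.
//   SandboxPolicy::parse("workspace-write")
//   SandboxPolicy::parse(r#"{"mode":"read-only"}"#)
// Unrecognized JSON modes fall back to ReadOnly, as implemented above.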
|
||||
@@ -1,193 +0,0 @@
|
||||
use crate::logging;
|
||||
use crate::winutil::format_last_error;
|
||||
use crate::winutil::to_wide;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::c_void;
|
||||
use std::path::Path;
|
||||
use windows_sys::Win32::Foundation::GetLastError;
|
||||
use windows_sys::Win32::Foundation::SetHandleInformation;
|
||||
use windows_sys::Win32::Foundation::HANDLE;
|
||||
use windows_sys::Win32::Foundation::HANDLE_FLAG_INHERIT;
|
||||
use windows_sys::Win32::Foundation::INVALID_HANDLE_VALUE;
|
||||
use windows_sys::Win32::System::Console::GetStdHandle;
|
||||
use windows_sys::Win32::System::Console::STD_ERROR_HANDLE;
|
||||
use windows_sys::Win32::System::Console::STD_INPUT_HANDLE;
|
||||
use windows_sys::Win32::System::Console::STD_OUTPUT_HANDLE;
|
||||
use windows_sys::Win32::System::JobObjects::AssignProcessToJobObject;
|
||||
use windows_sys::Win32::System::JobObjects::CreateJobObjectW;
|
||||
use windows_sys::Win32::System::JobObjects::JobObjectExtendedLimitInformation;
|
||||
use windows_sys::Win32::System::JobObjects::SetInformationJobObject;
|
||||
use windows_sys::Win32::System::JobObjects::JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
|
||||
use windows_sys::Win32::System::JobObjects::JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
|
||||
use windows_sys::Win32::System::Threading::CreateProcessAsUserW;
|
||||
use windows_sys::Win32::System::Threading::GetExitCodeProcess;
|
||||
use windows_sys::Win32::System::Threading::WaitForSingleObject;
|
||||
use windows_sys::Win32::System::Threading::CREATE_UNICODE_ENVIRONMENT;
|
||||
use windows_sys::Win32::System::Threading::INFINITE;
|
||||
use windows_sys::Win32::System::Threading::PROCESS_INFORMATION;
|
||||
use windows_sys::Win32::System::Threading::STARTF_USESTDHANDLES;
|
||||
use windows_sys::Win32::System::Threading::STARTUPINFOW;
|
||||
|
||||
pub fn make_env_block(env: &HashMap<String, String>) -> Vec<u16> {
|
||||
let mut items: Vec<(String, String)> =
|
||||
env.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
|
||||
items.sort_by(|a, b| {
|
||||
a.0.to_uppercase()
|
||||
.cmp(&b.0.to_uppercase())
|
||||
.then(a.0.cmp(&b.0))
|
||||
});
|
||||
let mut w: Vec<u16> = Vec::new();
|
||||
for (k, v) in items {
|
||||
let mut s = to_wide(format!("{}={}", k, v));
|
||||
s.pop();
|
||||
w.extend_from_slice(&s);
|
||||
w.push(0);
|
||||
}
|
||||
w.push(0);
|
||||
w
|
||||
}
|
||||
|
||||
fn quote_arg(a: &str) -> String {
|
||||
let needs_quote = a.is_empty() || a.chars().any(|ch| ch.is_whitespace() || ch == '"');
|
||||
if !needs_quote {
|
||||
return a.to_string();
|
||||
}
|
||||
let mut out = String::from("\"");
|
||||
let mut bs: usize = 0;
|
||||
for ch in a.chars() {
|
||||
if (ch as u32) == 92 {
|
||||
bs += 1;
|
||||
continue;
|
||||
}
|
||||
if ch == '"' {
|
||||
out.push_str(&"\\".repeat(bs * 2 + 1));
|
||||
out.push('"');
|
||||
bs = 0;
|
||||
continue;
|
||||
}
|
||||
if bs > 0 {
|
||||
out.push_str(&"\\".repeat(bs * 2));
|
||||
bs = 0;
|
||||
}
|
||||
out.push(ch);
|
||||
}
|
||||
if bs > 0 {
|
||||
out.push_str(&"\\".repeat(bs * 2));
|
||||
}
|
||||
out.push('"');
|
||||
out
|
||||
}
|
||||
unsafe fn ensure_inheritable_stdio(si: &mut STARTUPINFOW) -> Result<()> {
|
||||
for kind in [STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE] {
|
||||
let h = GetStdHandle(kind);
|
||||
if h == 0 || h == INVALID_HANDLE_VALUE {
|
||||
return Err(anyhow!("GetStdHandle failed: {}", GetLastError()));
|
||||
}
|
||||
if SetHandleInformation(h, HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT) == 0 {
|
||||
return Err(anyhow!("SetHandleInformation failed: {}", GetLastError()));
|
||||
}
|
||||
}
|
||||
si.dwFlags |= STARTF_USESTDHANDLES;
|
||||
si.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
|
||||
si.hStdOutput = GetStdHandle(STD_OUTPUT_HANDLE);
|
||||
si.hStdError = GetStdHandle(STD_ERROR_HANDLE);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub unsafe fn create_process_as_user(
|
||||
h_token: HANDLE,
|
||||
argv: &[String],
|
||||
cwd: &Path,
|
||||
env_map: &HashMap<String, String>,
|
||||
) -> Result<(PROCESS_INFORMATION, STARTUPINFOW)> {
|
||||
let cmdline_str = argv
|
||||
.iter()
|
||||
.map(|a| quote_arg(a))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let mut cmdline: Vec<u16> = to_wide(&cmdline_str);
|
||||
let env_block = make_env_block(env_map);
|
||||
let mut si: STARTUPINFOW = std::mem::zeroed();
|
||||
si.cb = std::mem::size_of::<STARTUPINFOW>() as u32;
|
||||
// Some processes (e.g., PowerShell) can fail with STATUS_DLL_INIT_FAILED
|
||||
// if lpDesktop is not set when launching with a restricted token.
|
||||
// Point explicitly at the interactive desktop.
|
||||
let desktop = to_wide("Winsta0\\Default");
|
||||
si.lpDesktop = desktop.as_ptr() as *mut u16;
|
||||
ensure_inheritable_stdio(&mut si)?;
|
||||
let mut pi: PROCESS_INFORMATION = std::mem::zeroed();
|
||||
let ok = CreateProcessAsUserW(
|
||||
h_token,
|
||||
std::ptr::null(),
|
||||
cmdline.as_mut_ptr(),
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
1,
|
||||
CREATE_UNICODE_ENVIRONMENT,
|
||||
env_block.as_ptr() as *mut c_void,
|
||||
to_wide(cwd).as_ptr(),
|
||||
&si,
|
||||
&mut pi,
|
||||
);
|
||||
if ok == 0 {
|
||||
let err = GetLastError() as i32;
|
||||
let msg = format!(
|
||||
"CreateProcessAsUserW failed: {} ({}) | cwd={} | cmd={} | env_u16_len={} | si_flags={}",
|
||||
err,
|
||||
format_last_error(err),
|
||||
cwd.display(),
|
||||
cmdline_str,
|
||||
env_block.len(),
|
||||
si.dwFlags,
|
||||
);
|
||||
logging::debug_log(&msg);
|
||||
return Err(anyhow!("CreateProcessAsUserW failed: {}", err));
|
||||
}
|
||||
Ok((pi, si))
|
||||
}
|
||||
|
||||
pub unsafe fn wait_process_and_exitcode(pi: &PROCESS_INFORMATION) -> Result<i32> {
|
||||
let res = WaitForSingleObject(pi.hProcess, INFINITE);
|
||||
if res != 0 {
|
||||
return Err(anyhow!("WaitForSingleObject failed: {}", GetLastError()));
|
||||
}
|
||||
let mut code: u32 = 0;
|
||||
if GetExitCodeProcess(pi.hProcess, &mut code) == 0 {
|
||||
return Err(anyhow!("GetExitCodeProcess failed: {}", GetLastError()));
|
||||
}
|
||||
Ok(code as i32)
|
||||
}
|
||||
|
||||
pub unsafe fn create_job_kill_on_close() -> Result<HANDLE> {
|
||||
let h = CreateJobObjectW(std::ptr::null_mut(), std::ptr::null());
|
||||
if h == 0 {
|
||||
return Err(anyhow!("CreateJobObjectW failed: {}", GetLastError()));
|
||||
}
|
||||
let mut limits: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = std::mem::zeroed();
|
||||
limits.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
|
||||
let ok = SetInformationJobObject(
|
||||
h,
|
||||
JobObjectExtendedLimitInformation,
|
||||
&mut limits as *mut _ as *mut c_void,
|
||||
std::mem::size_of::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>() as u32,
|
||||
);
|
||||
if ok == 0 {
|
||||
return Err(anyhow!(
|
||||
"SetInformationJobObject failed: {}",
|
||||
GetLastError()
|
||||
));
|
||||
}
|
||||
Ok(h)
|
||||
}
|
||||
|
||||
pub unsafe fn assign_to_job(h_job: HANDLE, h_process: HANDLE) -> Result<()> {
|
||||
if AssignProcessToJobObject(h_job, h_process) == 0 {
|
||||
return Err(anyhow!(
|
||||
"AssignProcessToJobObject failed: {}",
|
||||
GetLastError()
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,272 +0,0 @@
|
||||
use crate::winutil::to_wide;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use std::ffi::c_void;
|
||||
use windows_sys::Win32::Foundation::CloseHandle;
|
||||
use windows_sys::Win32::Foundation::GetLastError;
|
||||
use windows_sys::Win32::Foundation::HANDLE;
|
||||
use windows_sys::Win32::Foundation::LUID;
|
||||
use windows_sys::Win32::Security::AdjustTokenPrivileges;
|
||||
use windows_sys::Win32::Security::CopySid;
|
||||
use windows_sys::Win32::Security::CreateRestrictedToken;
|
||||
use windows_sys::Win32::Security::CreateWellKnownSid;
|
||||
use windows_sys::Win32::Security::GetLengthSid;
|
||||
use windows_sys::Win32::Security::GetTokenInformation;
|
||||
use windows_sys::Win32::Security::LookupPrivilegeValueW;
|
||||
|
||||
use windows_sys::Win32::Security::TokenGroups;
|
||||
use windows_sys::Win32::Security::SID_AND_ATTRIBUTES;
|
||||
use windows_sys::Win32::Security::TOKEN_ADJUST_DEFAULT;
|
||||
use windows_sys::Win32::Security::TOKEN_ADJUST_PRIVILEGES;
|
||||
use windows_sys::Win32::Security::TOKEN_ADJUST_SESSIONID;
|
||||
use windows_sys::Win32::Security::TOKEN_ASSIGN_PRIMARY;
|
||||
use windows_sys::Win32::Security::TOKEN_DUPLICATE;
|
||||
use windows_sys::Win32::Security::TOKEN_PRIVILEGES;
|
||||
use windows_sys::Win32::Security::TOKEN_QUERY;
|
||||
use windows_sys::Win32::System::Threading::GetCurrentProcess;
|
||||
|
||||
const DISABLE_MAX_PRIVILEGE: u32 = 0x01;
|
||||
const LUA_TOKEN: u32 = 0x04;
|
||||
const WRITE_RESTRICTED: u32 = 0x08;
|
||||
const WIN_WORLD_SID: i32 = 1;
|
||||
const SE_GROUP_LOGON_ID: u32 = 0xC0000000;
|
||||
|
||||
pub unsafe fn world_sid() -> Result<Vec<u8>> {
|
||||
let mut size: u32 = 0;
|
||||
CreateWellKnownSid(
|
||||
WIN_WORLD_SID,
|
||||
std::ptr::null_mut(),
|
||||
std::ptr::null_mut(),
|
||||
&mut size,
|
||||
);
|
||||
let mut buf: Vec<u8> = vec![0u8; size as usize];
|
||||
let ok = CreateWellKnownSid(
|
||||
WIN_WORLD_SID,
|
||||
std::ptr::null_mut(),
|
||||
buf.as_mut_ptr() as *mut c_void,
|
||||
&mut size,
|
||||
);
|
||||
if ok == 0 {
|
||||
return Err(anyhow!("CreateWellKnownSid failed: {}", GetLastError()));
|
||||
}
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
pub unsafe fn convert_string_sid_to_sid(s: &str) -> Option<*mut c_void> {
|
||||
#[link(name = "advapi32")]
|
||||
extern "system" {
|
||||
fn ConvertStringSidToSidW(StringSid: *const u16, Sid: *mut *mut c_void) -> i32;
|
||||
}
|
||||
let mut psid: *mut c_void = std::ptr::null_mut();
|
||||
let ok = unsafe { ConvertStringSidToSidW(to_wide(s).as_ptr(), &mut psid) };
|
||||
if ok != 0 {
|
||||
Some(psid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub unsafe fn get_current_token_for_restriction() -> Result<HANDLE> {
|
||||
let desired = TOKEN_DUPLICATE
|
||||
| TOKEN_QUERY
|
||||
| TOKEN_ASSIGN_PRIMARY
|
||||
| TOKEN_ADJUST_DEFAULT
|
||||
| TOKEN_ADJUST_SESSIONID
|
||||
| TOKEN_ADJUST_PRIVILEGES;
|
||||
let mut h: HANDLE = 0;
|
||||
#[link(name = "advapi32")]
|
||||
extern "system" {
|
||||
fn OpenProcessToken(
|
||||
ProcessHandle: HANDLE,
|
||||
DesiredAccess: u32,
|
||||
TokenHandle: *mut HANDLE,
|
||||
) -> i32;
|
||||
}
|
||||
let ok = unsafe { OpenProcessToken(GetCurrentProcess(), desired, &mut h) };
|
||||
if ok == 0 {
|
||||
return Err(anyhow!("OpenProcessToken failed: {}", GetLastError()));
|
||||
}
|
||||
Ok(h)
|
||||
}
|
||||
|
||||
pub unsafe fn get_logon_sid_bytes(h_token: HANDLE) -> Result<Vec<u8>> {
|
||||
unsafe fn scan_token_groups_for_logon(h: HANDLE) -> Option<Vec<u8>> {
|
||||
let mut needed: u32 = 0;
|
||||
GetTokenInformation(h, TokenGroups, std::ptr::null_mut(), 0, &mut needed);
|
||||
if needed == 0 {
|
||||
return None;
|
||||
}
|
||||
let mut buf: Vec<u8> = vec![0u8; needed as usize];
|
||||
let ok = GetTokenInformation(
|
||||
h,
|
||||
TokenGroups,
|
||||
buf.as_mut_ptr() as *mut c_void,
|
||||
needed,
|
||||
&mut needed,
|
||||
);
|
||||
if ok == 0 || (needed as usize) < std::mem::size_of::<u32>() {
|
||||
return None;
|
||||
}
|
||||
let group_count = std::ptr::read_unaligned(buf.as_ptr() as *const u32) as usize;
|
||||
// TOKEN_GROUPS layout is: DWORD GroupCount; SID_AND_ATTRIBUTES Groups[];
|
||||
// On 64-bit, Groups is aligned to pointer alignment after 4-byte GroupCount.
|
||||
let after_count = unsafe { buf.as_ptr().add(std::mem::size_of::<u32>()) } as usize;
|
||||
let align = std::mem::align_of::<SID_AND_ATTRIBUTES>();
|
||||
let aligned = (after_count + (align - 1)) & !(align - 1);
|
||||
let groups_ptr = aligned as *const SID_AND_ATTRIBUTES;
|
||||
for i in 0..group_count {
|
||||
let entry: SID_AND_ATTRIBUTES = std::ptr::read_unaligned(groups_ptr.add(i));
|
||||
if (entry.Attributes & SE_GROUP_LOGON_ID) == SE_GROUP_LOGON_ID {
|
||||
let sid = entry.Sid;
|
||||
let sid_len = GetLengthSid(sid);
|
||||
if sid_len == 0 {
|
||||
return None;
|
||||
}
|
||||
let mut out = vec![0u8; sid_len as usize];
|
||||
if CopySid(sid_len, out.as_mut_ptr() as *mut c_void, sid) == 0 {
|
||||
return None;
|
||||
}
|
||||
return Some(out);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
if let Some(v) = scan_token_groups_for_logon(h_token) {
|
||||
return Ok(v);
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
struct TOKEN_LINKED_TOKEN {
|
||||
linked_token: HANDLE,
|
||||
}
|
||||
const TOKEN_LINKED_TOKEN_CLASS: i32 = 19; // TokenLinkedToken
|
||||
let mut ln_needed: u32 = 0;
|
||||
GetTokenInformation(
|
||||
h_token,
|
||||
TOKEN_LINKED_TOKEN_CLASS,
|
||||
std::ptr::null_mut(),
|
||||
0,
|
||||
&mut ln_needed,
|
||||
);
|
||||
if ln_needed >= std::mem::size_of::<TOKEN_LINKED_TOKEN>() as u32 {
|
||||
let mut ln_buf: Vec<u8> = vec![0u8; ln_needed as usize];
|
||||
let ok = GetTokenInformation(
|
||||
h_token,
|
||||
TOKEN_LINKED_TOKEN_CLASS,
|
||||
ln_buf.as_mut_ptr() as *mut c_void,
|
||||
ln_needed,
|
||||
&mut ln_needed,
|
||||
);
|
||||
if ok != 0 {
|
||||
let lt: TOKEN_LINKED_TOKEN =
|
||||
std::ptr::read_unaligned(ln_buf.as_ptr() as *const TOKEN_LINKED_TOKEN);
|
||||
if lt.linked_token != 0 {
|
||||
let res = scan_token_groups_for_logon(lt.linked_token);
|
||||
CloseHandle(lt.linked_token);
|
||||
if let Some(v) = res {
|
||||
return Ok(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!("Logon SID not present on token"))
|
||||
}
|
||||
unsafe fn enable_single_privilege(h_token: HANDLE, name: &str) -> Result<()> {
|
||||
let mut luid = LUID {
|
||||
LowPart: 0,
|
||||
HighPart: 0,
|
||||
};
|
||||
let ok = LookupPrivilegeValueW(std::ptr::null(), to_wide(name).as_ptr(), &mut luid);
|
||||
if ok == 0 {
|
||||
return Err(anyhow!("LookupPrivilegeValueW failed: {}", GetLastError()));
|
||||
}
|
||||
let mut tp: TOKEN_PRIVILEGES = std::mem::zeroed();
|
||||
tp.PrivilegeCount = 1;
|
||||
tp.Privileges[0].Luid = luid;
|
||||
tp.Privileges[0].Attributes = 0x00000002; // SE_PRIVILEGE_ENABLED
|
||||
let ok2 = AdjustTokenPrivileges(h_token, 0, &tp, 0, std::ptr::null_mut(), std::ptr::null_mut());
|
||||
if ok2 == 0 {
|
||||
return Err(anyhow!("AdjustTokenPrivileges failed: {}", GetLastError()));
|
||||
}
|
||||
let err = GetLastError();
|
||||
if err != 0 {
|
||||
return Err(anyhow!("AdjustTokenPrivileges error {}", err));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// removed unused create_write_restricted_token_strict
|
||||
|
||||
pub unsafe fn create_workspace_write_token_with_cap(
|
||||
psid_capability: *mut c_void,
|
||||
) -> Result<(HANDLE, *mut c_void)> {
|
||||
let base = get_current_token_for_restriction()?;
|
||||
let mut logon_sid_bytes = get_logon_sid_bytes(base)?;
|
||||
let psid_logon = logon_sid_bytes.as_mut_ptr() as *mut c_void;
|
||||
let mut everyone = world_sid()?;
|
||||
let psid_everyone = everyone.as_mut_ptr() as *mut c_void;
|
||||
let mut entries: [SID_AND_ATTRIBUTES; 3] = std::mem::zeroed();
|
||||
// Exact set and order: Capability, Logon, Everyone
|
||||
entries[0].Sid = psid_capability;
|
||||
entries[0].Attributes = 0;
|
||||
entries[1].Sid = psid_logon;
|
||||
entries[1].Attributes = 0;
|
||||
entries[2].Sid = psid_everyone;
|
||||
entries[2].Attributes = 0;
|
||||
let mut new_token: HANDLE = 0;
|
||||
let flags = DISABLE_MAX_PRIVILEGE | LUA_TOKEN | WRITE_RESTRICTED;
|
||||
let ok = CreateRestrictedToken(
|
||||
base,
|
||||
flags,
|
||||
0,
|
||||
std::ptr::null(),
|
||||
0,
|
||||
std::ptr::null(),
|
||||
3,
|
||||
entries.as_mut_ptr(),
|
||||
&mut new_token,
|
||||
);
|
||||
if ok == 0 {
|
||||
return Err(anyhow!("CreateRestrictedToken failed: {}", GetLastError()));
|
||||
}
|
||||
enable_single_privilege(new_token, "SeChangeNotifyPrivilege")?;
|
||||
Ok((new_token, psid_capability))
|
||||
}
|
||||
|
||||
pub unsafe fn create_readonly_token_with_cap(
|
||||
psid_capability: *mut c_void,
|
||||
) -> Result<(HANDLE, *mut c_void)> {
|
||||
let base = get_current_token_for_restriction()?;
|
||||
let mut logon_sid_bytes = get_logon_sid_bytes(base)?;
|
||||
let psid_logon = logon_sid_bytes.as_mut_ptr() as *mut c_void;
|
||||
let mut everyone = world_sid()?;
|
||||
let psid_everyone = everyone.as_mut_ptr() as *mut c_void;
|
||||
let mut entries: [SID_AND_ATTRIBUTES; 3] = std::mem::zeroed();
|
||||
// Exact set and order: Capability, Logon, Everyone
|
||||
entries[0].Sid = psid_capability;
|
||||
entries[0].Attributes = 0;
|
||||
entries[1].Sid = psid_logon;
|
||||
entries[1].Attributes = 0;
|
||||
entries[2].Sid = psid_everyone;
|
||||
entries[2].Attributes = 0;
|
||||
let mut new_token: HANDLE = 0;
|
||||
let flags = DISABLE_MAX_PRIVILEGE | LUA_TOKEN | WRITE_RESTRICTED;
|
||||
let ok = CreateRestrictedToken(
|
||||
base,
|
||||
flags,
|
||||
0,
|
||||
std::ptr::null(),
|
||||
0,
|
||||
std::ptr::null(),
|
||||
3,
|
||||
entries.as_mut_ptr(),
|
||||
&mut new_token,
|
||||
);
|
||||
if ok == 0 {
|
||||
return Err(anyhow!("CreateRestrictedToken failed: {}", GetLastError()));
|
||||
}
|
||||
enable_single_privilege(new_token, "SeChangeNotifyPrivilege")?;
|
||||
Ok((new_token, psid_capability))
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
use std::ffi::OsStr;
|
||||
use std::os::windows::ffi::OsStrExt;
|
||||
use windows_sys::Win32::Foundation::LocalFree;
|
||||
use windows_sys::Win32::Foundation::HLOCAL;
|
||||
use windows_sys::Win32::System::Diagnostics::Debug::FormatMessageW;
|
||||
use windows_sys::Win32::System::Diagnostics::Debug::FORMAT_MESSAGE_ALLOCATE_BUFFER;
|
||||
use windows_sys::Win32::System::Diagnostics::Debug::FORMAT_MESSAGE_FROM_SYSTEM;
|
||||
use windows_sys::Win32::System::Diagnostics::Debug::FORMAT_MESSAGE_IGNORE_INSERTS;
|
||||
|
||||
pub fn to_wide<S: AsRef<OsStr>>(s: S) -> Vec<u16> {
|
||||
let mut v: Vec<u16> = s.as_ref().encode_wide().collect();
|
||||
v.push(0);
|
||||
v
|
||||
}
|
||||
|
||||
// Produce a readable description for a Win32 error code.
|
||||
pub fn format_last_error(err: i32) -> String {
|
||||
unsafe {
|
||||
let mut buf_ptr: *mut u16 = std::ptr::null_mut();
|
||||
let flags = FORMAT_MESSAGE_ALLOCATE_BUFFER
|
||||
| FORMAT_MESSAGE_FROM_SYSTEM
|
||||
| FORMAT_MESSAGE_IGNORE_INSERTS;
|
||||
let len = FormatMessageW(
|
||||
flags,
|
||||
std::ptr::null(),
|
||||
err as u32,
|
||||
0,
|
||||
// FORMAT_MESSAGE_ALLOCATE_BUFFER expects a pointer to receive the allocated buffer.
|
||||
// Cast &mut *mut u16 to *mut u16 as required by windows-sys.
|
||||
(&mut buf_ptr as *mut *mut u16) as *mut u16,
|
||||
0,
|
||||
std::ptr::null_mut(),
|
||||
);
|
||||
if len == 0 || buf_ptr.is_null() {
|
||||
return format!("Win32 error {}", err);
|
||||
}
|
||||
let slice = std::slice::from_raw_parts(buf_ptr, len as usize);
|
||||
let mut s = String::from_utf16_lossy(slice);
|
||||
s = s.trim().to_string();
|
||||
let _ = LocalFree(buf_ptr as HLOCAL);
|
||||
s
|
||||
}
|
||||
}
|
||||
@@ -69,12 +69,6 @@ The mechanism Codex uses to enforce the sandbox policy depends on your OS:

- **macOS 12+** uses **Apple Seatbelt**. Codex invokes `sandbox-exec` with a profile that corresponds to the selected `--sandbox` mode, constraining filesystem and network access at the OS level.
- **Linux** combines **Landlock** and **seccomp** APIs to approximate the same guarantees. Kernel support is required; older kernels may not expose the necessary features.
- **Windows (experimental)**:
  - Launches commands inside a restricted token derived from an AppContainer profile.
  - Grants only specifically requested filesystem capabilities by attaching capability SIDs to that profile.
  - Disables outbound network access by overriding proxy-related environment variables and inserting stub executables for common network tools (see the sketch after this list).
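
A minimal sketch of that environment-override idea, mirroring the variable names and values used by the removed `apply_no_network_to_env` code above (the exact set Codex applies, and the stub-executable handling, may differ):

```rust
use std::collections::HashMap;

/// Minimal sketch: point proxy-aware tools at an unroutable local port and
/// switch common package managers into offline mode, preserving any values
/// the caller already set.
fn apply_no_network_sketch(env: &mut HashMap<String, String>) {
    let dead_proxy = "http://127.0.0.1:9";
    for key in ["HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"] {
        env.entry(key.to_string())
            .or_insert_with(|| dead_proxy.to_string());
    }
    env.entry("NO_PROXY".to_string())
        .or_insert_with(|| "localhost,127.0.0.1,::1".to_string());
    env.entry("PIP_NO_INDEX".to_string())
        .or_insert_with(|| "1".to_string());
    env.entry("NPM_CONFIG_OFFLINE".to_string())
        .or_insert_with(|| "true".to_string());
    env.entry("CARGO_NET_OFFLINE".to_string())
        .or_insert_with(|| "true".to_string());
}
```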

Windows sandbox support remains highly experimental. It cannot prevent file writes, deletions, or creations in any directory where the Everyone SID already has write permissions (for example, world-writable folders).

In containerized Linux environments (for example Docker), sandboxing may not work when the host or container configuration does not expose Landlock/seccomp. In those cases, configure the container to provide the isolation you need and run Codex with `--sandbox danger-full-access` (or the shorthand `--dangerously-bypass-approvals-and-sandbox`) inside that container.