Compare commits

..

12 Commits

Author SHA1 Message Date
Channing Conger
e213999759 merge fix 2026-01-05 12:41:13 -08:00
Channing Conger
0124a95c83 Move compression into the request builder 2026-01-05 12:41:13 -08:00
Channing Conger
7361c1fea2 await on default_config_for_test 2026-01-05 12:41:13 -08:00
Channing Conger
f5adf8291a Request compression is no longer tied to the provider 2026-01-05 12:41:12 -08:00
Channing Conger
47366e8417 Revert to boolean for the feature since they only operate on booleans 2026-01-05 12:41:12 -08:00
Channing Conger
eef24681c8 Update to use Feature, with log line about time and compression ratio 2026-01-05 12:41:12 -08:00
Channing Conger
c069660811 Fix test 2026-01-05 12:41:12 -08:00
Channing Conger
55acaaffac Request compression.
Add new model_provider flag for compression to enable request
compression.  We support zstd and gzip; the server also supports brotli

You can test this against the sign in with chatgpt flow by adding the
following profile:

```
[profiles.compressed]
name = "compressed"
model_provider = "openai-zstd"

[model_providers.openai-zstd]
name = "OpenAI (ChatGPT, zstd)"
wire_api = "responses"
request_compression = "zstd"
requires_openai_auth = true
```

This will zstd compress your request before sending it to the server.
2026-01-05 12:41:08 -08:00
iceweasel-oai
07f077dfb3 best effort to "hide" Sandbox users (#8492)
The elevated sandbox creates two new Windows users - CodexSandboxOffline
and CodexSandboxOnline. Creating these users is necessary, so this PR does
all that it can to "hide" them. It uses the registry plus directory flags (on
their home directories) to get them to show up as little as possible.
2026-01-05 12:29:10 -08:00
Abrar Ahmed
7cf6f1c723 Use issuer URL in device auth prompt link (#7858)
## Summary

When using device-code login with a custom issuer
(`--experimental_issuer`), Codex correctly uses that issuer for the auth
flow — but the **terminal prompt still told users to open the default
OpenAI device URL** (`https://auth.openai.com/codex/device`). That’s
confusing and can send users to the **wrong domain** (especially for
enterprise/staging issuers). This PR updates the prompt (and related
URLs) to consistently use the configured issuer. 🎯

---

## 🔧 What changed

* 🔗 **Device auth prompt link** now uses the configured issuer (instead
of a hard-coded OpenAI URL)
* 🧭 **Redirect callback URL** is derived from the same issuer for
consistency
* 🧼 Minor cleanup: normalize the issuer base URL once and reuse it
(avoids formatting quirks like trailing `/`)

---

## 🧪 Repro + Before/After

### ▶️ Command

```bash
codex login --device-auth --experimental_issuer https://auth.example.com
```

###  Before (wrong link shown)

```text
1. Open this link in your browser and sign in to your account
   https://auth.openai.com/codex/device
```

###  After (correct link shown)

```text
1. Open this link in your browser and sign in to your account
   https://auth.example.com/codex/device
```

Full example output (same as before, but with the correct URL):

```text
Welcome to Codex [v0.72.0]
OpenAI's command-line coding agent

Follow these steps to sign in with ChatGPT using device code authorization:

1. Open this link in your browser and sign in to your account
   https://auth.example.com/codex/device

2. Enter this one-time code (expires in 15 minutes)
   BUT6-0M8K4

Device codes are a common phishing target. Never share this code.
```

---

##  Test plan

* 🟦 `codex login --device-auth` (default issuer): output remains
unchanged
* 🟩 `codex login --device-auth --experimental_issuer
https://auth.example.com`:

  * prompt link points to the issuer 
  * callback URL is derived from the same issuer 
  * no double slashes / mismatched domains 

Co-authored-by: Eric Traut <etraut@openai.com>
2026-01-05 13:09:05 -07:00
Gav Verma
57f8158608 chore: improve skills render section (#8459)
This change improves the skills render section
- Separate the skills list from usage rules with clear subheadings
- Define skill more clearly upfront
- Remove confusing trigger/discovery wording and make reference-following guidance more actionable
2026-01-05 11:55:26 -08:00
iceweasel-oai
95580f229e never let sandbox write to .codex/ or .codex/.sandbox/ (#8683)
Never treat .codex or .codex/.sandbox as a workspace root.
Handle write permissions to .codex/.sandbox in a single method so that
the sandbox setup/runner can write logs and other setup files to that
directory.
2026-01-05 11:54:21 -08:00
49 changed files with 862 additions and 357 deletions

45
codex-rs/Cargo.lock generated
View File

@@ -819,6 +819,8 @@ version = "1.2.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
dependencies = [
"jobserver",
"libc",
"shlex",
]
@@ -985,6 +987,7 @@ dependencies = [
"tokio-util",
"tracing",
"wiremock",
"zstd",
]
[[package]]
@@ -1348,6 +1351,7 @@ dependencies = [
"which",
"wildmatch",
"wiremock",
"zstd",
]
[[package]]
@@ -2109,16 +2113,19 @@ dependencies = [
"codex-protocol",
"codex-utils-absolute-path",
"codex-utils-cargo-bin",
"http 1.3.1",
"notify",
"pretty_assertions",
"regex-lite",
"reqwest",
"serde",
"serde_json",
"shlex",
"tempfile",
"tokio",
"walkdir",
"wiremock",
"zstd",
]
[[package]]
@@ -3924,6 +3931,16 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.3",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.77"
@@ -8809,6 +8826,34 @@ dependencies = [
"syn 2.0.104",
]
[[package]]
name = "zstd"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.16+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
dependencies = [
"cc",
"pkg-config",
]
[[package]]
name = "zune-core"
version = "0.4.12"

View File

@@ -235,6 +235,7 @@ wildmatch = "2.6.1"
wiremock = "0.6"
zeroize = "1.8.2"
zstd = "0.13"
[workspace.lints]
rust = {}

View File

@@ -13,7 +13,6 @@ use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use clap::ArgAction;
use clap::Parser;
use clap::Subcommand;
use codex_app_server_protocol::AddConversationListenerParams;
@@ -66,19 +65,6 @@ struct Cli {
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
codex_bin: String,
/// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
///
/// Example:
/// `--config 'model_providers.mock.base_url="http://localhost:4010/v2"'`
#[arg(
short = 'c',
long = "config",
value_name = "key=value",
action = ArgAction::Append,
global = true
)]
config_overrides: Vec<String>,
#[command(subcommand)]
command: CliCommand,
}
@@ -130,42 +116,29 @@ enum CliCommand {
}
fn main() -> Result<()> {
let Cli {
codex_bin,
config_overrides,
command,
} = Cli::parse();
let Cli { codex_bin, command } = Cli::parse();
match command {
CliCommand::SendMessage { user_message } => {
send_message(&codex_bin, &config_overrides, user_message)
}
CliCommand::SendMessageV2 { user_message } => {
send_message_v2(&codex_bin, &config_overrides, user_message)
}
CliCommand::SendMessage { user_message } => send_message(codex_bin, user_message),
CliCommand::SendMessageV2 { user_message } => send_message_v2(codex_bin, user_message),
CliCommand::TriggerCmdApproval { user_message } => {
trigger_cmd_approval(&codex_bin, &config_overrides, user_message)
trigger_cmd_approval(codex_bin, user_message)
}
CliCommand::TriggerPatchApproval { user_message } => {
trigger_patch_approval(&codex_bin, &config_overrides, user_message)
trigger_patch_approval(codex_bin, user_message)
}
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(&codex_bin, &config_overrides),
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(codex_bin),
CliCommand::SendFollowUpV2 {
first_message,
follow_up_message,
} => send_follow_up_v2(
&codex_bin,
&config_overrides,
first_message,
follow_up_message,
),
CliCommand::TestLogin => test_login(&codex_bin, &config_overrides),
CliCommand::GetAccountRateLimits => get_account_rate_limits(&codex_bin, &config_overrides),
} => send_follow_up_v2(codex_bin, first_message, follow_up_message),
CliCommand::TestLogin => test_login(codex_bin),
CliCommand::GetAccountRateLimits => get_account_rate_limits(codex_bin),
}
}
fn send_message(codex_bin: &str, config_overrides: &[String], user_message: String) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn send_message(codex_bin: String, user_message: String) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -186,61 +159,46 @@ fn send_message(codex_bin: &str, config_overrides: &[String], user_message: Stri
Ok(())
}
fn send_message_v2(
codex_bin: &str,
config_overrides: &[String],
user_message: String,
) -> Result<()> {
send_message_v2_with_policies(codex_bin, config_overrides, user_message, None, None)
fn send_message_v2(codex_bin: String, user_message: String) -> Result<()> {
send_message_v2_with_policies(codex_bin, user_message, None, None)
}
fn trigger_cmd_approval(
codex_bin: &str,
config_overrides: &[String],
user_message: Option<String>,
) -> Result<()> {
fn trigger_cmd_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
let default_prompt =
"Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
send_message_v2_with_policies(
codex_bin,
config_overrides,
message,
Some(AskForApproval::OnRequest),
Some(SandboxPolicy::ReadOnly),
)
}
fn trigger_patch_approval(
codex_bin: &str,
config_overrides: &[String],
user_message: Option<String>,
) -> Result<()> {
fn trigger_patch_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
let default_prompt =
"Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
send_message_v2_with_policies(
codex_bin,
config_overrides,
message,
Some(AskForApproval::OnRequest),
Some(SandboxPolicy::ReadOnly),
)
}
fn no_trigger_cmd_approval(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
fn no_trigger_cmd_approval(codex_bin: String) -> Result<()> {
let prompt = "Run `touch should_not_trigger_approval.txt`";
send_message_v2_with_policies(codex_bin, config_overrides, prompt.to_string(), None, None)
send_message_v2_with_policies(codex_bin, prompt.to_string(), None, None)
}
fn send_message_v2_with_policies(
codex_bin: &str,
config_overrides: &[String],
codex_bin: String,
user_message: String,
approval_policy: Option<AskForApproval>,
sandbox_policy: Option<SandboxPolicy>,
) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let mut client = CodexClient::spawn(codex_bin)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -264,12 +222,11 @@ fn send_message_v2_with_policies(
}
fn send_follow_up_v2(
codex_bin: &str,
config_overrides: &[String],
codex_bin: String,
first_message: String,
follow_up_message: String,
) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let mut client = CodexClient::spawn(codex_bin)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -302,8 +259,8 @@ fn send_follow_up_v2(
Ok(())
}
fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn test_login(codex_bin: String) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -332,8 +289,8 @@ fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
}
}
fn get_account_rate_limits(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn get_account_rate_limits(codex_bin: String) -> Result<()> {
let mut client = CodexClient::spawn(codex_bin)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -352,12 +309,8 @@ struct CodexClient {
}
impl CodexClient {
fn spawn(codex_bin: &str, config_overrides: &[String]) -> Result<Self> {
let mut cmd = Command::new(codex_bin);
for override_kv in config_overrides {
cmd.arg("--config").arg(override_kv);
}
let mut codex_app_server = cmd
fn spawn(codex_bin: String) -> Result<Self> {
let mut codex_app_server = Command::new(&codex_bin)
.arg("app-server")
.stdin(Stdio::piped())
.stdout(Stdio::piped())

View File

@@ -82,7 +82,7 @@ Example (from OpenAI's official VSCode extension):
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
- `feedback/upload` — submit a feedback report (classification + optional reason/logs and conversation_id); returns the tracking thread id.
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `config/read` — fetch the effective config on disk after resolving config layering (thread-agnostic; does not include in-repo `.codex/` layers).
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.

View File

@@ -1,5 +1,4 @@
use crate::bespoke_event_handling::apply_bespoke_event_handling;
use crate::config_api::ConfigApi;
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::fuzzy_file_search::run_fuzzy_file_search;
@@ -156,6 +155,7 @@ use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
use codex_protocol::user_input::UserInput as CoreInputItem;
use codex_rmcp_client::perform_oauth_login_return_url;
use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
@@ -215,7 +215,7 @@ pub(crate) struct CodexMessageProcessor {
outgoing: Arc<OutgoingMessageSender>,
codex_linux_sandbox_exe: Option<PathBuf>,
config: Arc<Config>,
config_api: ConfigApi,
cli_overrides: Vec<(String, TomlValue)>,
conversation_listeners: HashMap<Uuid, oneshot::Sender<()>>,
active_login: Arc<Mutex<Option<ActiveLogin>>>,
// Queue of pending interrupt requests per conversation. We reply when TurnAborted arrives.
@@ -265,14 +265,13 @@ impl CodexMessageProcessor {
cli_overrides: Vec<(String, TomlValue)>,
feedback: CodexFeedback,
) -> Self {
let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides.clone());
Self {
auth_manager,
conversation_manager,
outgoing,
codex_linux_sandbox_exe,
config,
config_api,
cli_overrides,
conversation_listeners: HashMap::new(),
active_login: Arc::new(Mutex::new(None)),
pending_interrupts: Arc::new(Mutex::new(HashMap::new())),
@@ -283,7 +282,13 @@ impl CodexMessageProcessor {
}
async fn load_latest_config(&self) -> Result<Config, JSONRPCErrorError> {
self.config_api.load_latest_thread_agnostic_config().await
Config::load_with_cli_overrides(self.cli_overrides.clone())
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to reload config: {err}"),
data: None,
})
}
fn review_request_from_target(
@@ -1273,20 +1278,18 @@ impl CodexMessageProcessor {
);
}
let config =
match derive_config_from_params(&self.config_api, overrides, Some(cli_overrides)).await
{
Ok(config) => config,
Err(err) => {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
};
let config = match derive_config_from_params(overrides, Some(cli_overrides)).await {
Ok(config) => config,
Err(err) => {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
};
match self.conversation_manager.new_conversation(config).await {
Ok(conversation_id) => {
@@ -1325,19 +1328,18 @@ impl CodexMessageProcessor {
params.developer_instructions,
);
let config =
match derive_config_from_params(&self.config_api, overrides, params.config).await {
Ok(config) => config,
Err(err) => {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
};
let config = match derive_config_from_params(overrides, params.config).await {
Ok(config) => config,
Err(err) => {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
};
match self.conversation_manager.new_conversation(config).await {
Ok(new_conv) => {
@@ -1565,7 +1567,7 @@ impl CodexMessageProcessor {
base_instructions,
developer_instructions,
);
match derive_config_from_params(&self.config_api, overrides, cli_overrides).await {
match derive_config_from_params(overrides, cli_overrides).await {
Ok(config) => config,
Err(err) => {
let error = JSONRPCErrorError {
@@ -2226,7 +2228,7 @@ impl CodexMessageProcessor {
..Default::default()
};
derive_config_from_params(&self.config_api, overrides, Some(cli_overrides)).await
derive_config_from_params(overrides, Some(cli_overrides)).await
}
None => Ok(self.config.as_ref().clone()),
};
@@ -3342,13 +3344,16 @@ fn errors_to_info(
}
async fn derive_config_from_params(
config_api: &ConfigApi,
overrides: ConfigOverrides,
cli_overrides: Option<HashMap<String, serde_json::Value>>,
) -> std::io::Result<Config> {
config_api
.load_thread_agnostic_config(overrides, cli_overrides)
.await
let cli_overrides = cli_overrides
.unwrap_or_default()
.into_iter()
.map(|(k, v)| (k, json_to_toml(v)))
.collect();
Config::load_with_cli_overrides_and_harness_overrides(cli_overrides, overrides).await
}
async fn read_summary_from_rollout(

View File

@@ -7,28 +7,21 @@ use codex_app_server_protocol::ConfigValueWriteParams;
use codex_app_server_protocol::ConfigWriteErrorCode;
use codex_app_server_protocol::ConfigWriteResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use codex_utils_json_to_toml::json_to_toml;
use serde_json::json;
use std::path::PathBuf;
use toml::Value as TomlValue;
#[derive(Clone)]
pub(crate) struct ConfigApi {
codex_home: PathBuf,
cli_overrides: Vec<(String, TomlValue)>,
service: ConfigService,
}
impl ConfigApi {
pub(crate) fn new(codex_home: PathBuf, cli_overrides: Vec<(String, TomlValue)>) -> Self {
Self {
service: ConfigService::new(codex_home.clone(), cli_overrides.clone()),
codex_home,
cli_overrides,
service: ConfigService::new(codex_home, cli_overrides),
}
}
@@ -39,30 +32,6 @@ impl ConfigApi {
self.service.read(params).await.map_err(map_error)
}
pub(crate) async fn load_thread_agnostic_config(
&self,
overrides: codex_core::config::ConfigOverrides,
request_cli_overrides: Option<std::collections::HashMap<String, serde_json::Value>>,
) -> std::io::Result<Config> {
// Apply the app server's startup `--config` overrides, then apply request-scoped overrides
// with higher precedence.
let mut merged_cli_overrides = self.cli_overrides.clone();
merged_cli_overrides.extend(
request_cli_overrides
.unwrap_or_default()
.into_iter()
.map(|(k, v)| (k, json_to_toml(v))),
);
ConfigBuilder::default()
.codex_home(self.codex_home.clone())
.cli_overrides(merged_cli_overrides)
.harness_overrides(overrides)
.thread_agnostic()
.build()
.await
}
pub(crate) async fn write_value(
&self,
params: ConfigValueWriteParams,
@@ -76,18 +45,6 @@ impl ConfigApi {
) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
self.service.batch_write(params).await.map_err(map_error)
}
pub(crate) async fn load_latest_thread_agnostic_config(
&self,
) -> Result<Config, JSONRPCErrorError> {
self.load_thread_agnostic_config(codex_core::config::ConfigOverrides::default(), None)
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: format!("failed to reload config: {err}"),
data: None,
})
}
}
fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {

View File

@@ -19,6 +19,7 @@ tracing = { workspace = true }
eventsource-stream = { workspace = true }
regex-lite = { workspace = true }
tokio-util = { workspace = true, features = ["codec"] }
zstd = { workspace = true }
[dev-dependencies]
anyhow = { workspace = true }

View File

@@ -6,7 +6,10 @@ use crate::common::ResponseStream;
use crate::endpoint::streaming::StreamingClient;
use crate::error::ApiError;
use crate::provider::Provider;
use crate::provider::RequestCompression;
use crate::provider::WireApi;
use crate::requests::body::encode_body;
use crate::requests::body::insert_compression_headers;
use crate::sse::chat::spawn_chat_stream;
use crate::telemetry::SseTelemetry;
use codex_client::HttpTransport;
@@ -45,8 +48,13 @@ impl<T: HttpTransport, A: AuthProvider> ChatClient<T, A> {
}
}
pub async fn stream_request(&self, request: ChatRequest) -> Result<ResponseStream, ApiError> {
self.stream(request.body, request.headers).await
pub async fn stream_request(
&self,
request: ChatRequest,
request_compression: RequestCompression,
) -> Result<ResponseStream, ApiError> {
self.stream(request.body, request.headers, request_compression)
.await
}
pub async fn stream_prompt(
@@ -55,6 +63,7 @@ impl<T: HttpTransport, A: AuthProvider> ChatClient<T, A> {
prompt: &ApiPrompt,
conversation_id: Option<String>,
session_source: Option<SessionSource>,
request_compression: RequestCompression,
) -> Result<ResponseStream, ApiError> {
use crate::requests::ChatRequestBuilder;
@@ -64,7 +73,7 @@ impl<T: HttpTransport, A: AuthProvider> ChatClient<T, A> {
.session_source(session_source)
.build(self.streaming.provider())?;
self.stream_request(request).await
self.stream_request(request, request_compression).await
}
fn path(&self) -> &'static str {
@@ -78,9 +87,13 @@ impl<T: HttpTransport, A: AuthProvider> ChatClient<T, A> {
&self,
body: Value,
extra_headers: HeaderMap,
request_compression: RequestCompression,
) -> Result<ResponseStream, ApiError> {
let mut headers = extra_headers;
insert_compression_headers(&mut headers, request_compression);
let encoded_body = encode_body(&body, request_compression)?;
self.streaming
.stream(self.path(), body, extra_headers, spawn_chat_stream)
.stream(self.path(), encoded_body, headers, spawn_chat_stream)
.await
}
}

View File

@@ -5,6 +5,7 @@ use crate::error::ApiError;
use crate::provider::Provider;
use crate::provider::WireApi;
use crate::telemetry::run_with_request_telemetry;
use codex_client::Body;
use codex_client::HttpTransport;
use codex_client::RequestTelemetry;
use codex_protocol::models::ResponseItem;
@@ -54,7 +55,7 @@ impl<T: HttpTransport, A: AuthProvider> CompactClient<T, A> {
let builder = || {
let mut req = self.provider.build_request(Method::POST, path);
req.headers.extend(extra_headers.clone());
req.body = Some(body.clone());
req.body = Some(Body::Json(body.clone()));
add_auth_headers(&self.auth, req)
};
@@ -89,6 +90,7 @@ struct CompactHistoryResponse {
#[cfg(test)]
mod tests {
use super::*;
use crate::provider::RetryConfig;
use async_trait::async_trait;
use codex_client::Request;

View File

@@ -6,6 +6,7 @@ use crate::common::TextControls;
use crate::endpoint::streaming::StreamingClient;
use crate::error::ApiError;
use crate::provider::Provider;
use crate::provider::RequestCompression;
use crate::provider::WireApi;
use crate::requests::ResponsesRequest;
use crate::requests::ResponsesRequestBuilder;
@@ -15,7 +16,6 @@ use codex_client::HttpTransport;
use codex_client::RequestTelemetry;
use codex_protocol::protocol::SessionSource;
use http::HeaderMap;
use serde_json::Value;
use std::sync::Arc;
use tracing::instrument;
@@ -33,6 +33,7 @@ pub struct ResponsesOptions {
pub conversation_id: Option<String>,
pub session_source: Option<SessionSource>,
pub extra_headers: HeaderMap,
pub request_compression: RequestCompression,
}
impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
@@ -56,7 +57,7 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
&self,
request: ResponsesRequest,
) -> Result<ResponseStream, ApiError> {
self.stream(request.body, request.headers).await
self.stream(request).await
}
#[instrument(level = "trace", skip_all, err)]
@@ -75,6 +76,7 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
conversation_id,
session_source,
extra_headers,
request_compression,
} = options;
let request = ResponsesRequestBuilder::new(model, &prompt.instructions, &prompt.input)
@@ -88,6 +90,7 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
.session_source(session_source)
.store_override(store_override)
.extra_headers(extra_headers)
.request_compression(request_compression)
.build(self.streaming.provider())?;
self.stream_request(request).await
@@ -100,13 +103,14 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
}
}
pub async fn stream(
&self,
body: Value,
extra_headers: HeaderMap,
) -> Result<ResponseStream, ApiError> {
pub async fn stream(&self, request: ResponsesRequest) -> Result<ResponseStream, ApiError> {
self.streaming
.stream(self.path(), body, extra_headers, spawn_response_stream)
.stream(
self.path(),
request.body,
request.headers,
spawn_response_stream,
)
.await
}
}

View File

@@ -5,12 +5,15 @@ use crate::error::ApiError;
use crate::provider::Provider;
use crate::telemetry::SseTelemetry;
use crate::telemetry::run_with_request_telemetry;
use codex_client::Body;
use codex_client::HttpTransport;
use codex_client::RequestTelemetry;
use codex_client::StreamResponse;
use http::HeaderMap;
use http::HeaderValue;
use http::Method;
use serde_json::Value;
use http::header::ACCEPT;
use http::header::CONTENT_TYPE;
use std::sync::Arc;
use std::time::Duration;
@@ -50,17 +53,18 @@ impl<T: HttpTransport, A: AuthProvider> StreamingClient<T, A> {
pub(crate) async fn stream(
&self,
path: &str,
body: Value,
body: Body,
extra_headers: HeaderMap,
spawner: fn(StreamResponse, Duration, Option<Arc<dyn SseTelemetry>>) -> ResponseStream,
) -> Result<ResponseStream, ApiError> {
let builder = || {
let mut req = self.provider.build_request(Method::POST, path);
req.headers.extend(extra_headers.clone());
req.headers.insert(
http::header::ACCEPT,
http::HeaderValue::from_static("text/event-stream"),
);
req.headers
.insert(ACCEPT, HeaderValue::from_static("text/event-stream"));
req.headers
.entry(CONTENT_TYPE)
.or_insert_with(|| HeaderValue::from_static("application/json"));
req.body = Some(body.clone());
add_auth_headers(&self.auth, req)
};

View File

@@ -41,6 +41,13 @@ impl RetryConfig {
}
}
/// Compression scheme applied to outgoing request bodies.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum RequestCompression {
/// Send the body uncompressed (the default).
#[default]
None,
/// Compress the body with zstd before sending.
Zstd,
}
/// HTTP endpoint configuration used to talk to a concrete API deployment.
///
/// Encapsulates base URL, default headers, query params, retry policy, and

View File

@@ -0,0 +1,40 @@
use crate::error::ApiError;
use crate::provider::RequestCompression;
use bytes::Bytes;
use codex_client::Body;
use http::HeaderMap;
use http::HeaderValue;
use http::header::CONTENT_ENCODING;
use serde_json::Value;
use std::time::Instant;
use tracing::info;
use zstd::stream::encode_all;
/// Serialize `body` to JSON and apply the requested transport compression.
///
/// - `RequestCompression::None`: returns the JSON value as-is in `Body::Json`.
/// - `RequestCompression::Zstd`: serializes to bytes, compresses with
///   `zstd::stream::encode_all` at level 0 (the `zstd` crate maps 0 to its
///   default compression level), and returns `Body::Bytes`.
///
/// Serialization or compression failures are surfaced as `ApiError::Stream`.
pub(crate) fn encode_body(body: &Value, compression: RequestCompression) -> Result<Body, ApiError> {
match compression {
// No compression requested: pass the JSON value through unchanged.
RequestCompression::None => Ok(Body::Json(body.clone())),
RequestCompression::Zstd => {
let json = serde_json::to_vec(body).map_err(|err| {
ApiError::Stream(format!("failed to encode request body as json: {err}"))
})?;
// Time only the compression step so the log reflects its cost alone.
let started_at = Instant::now();
let compressed = encode_all(json.as_slice(), 0).map_err(|err| {
ApiError::Stream(format!("failed to compress request body: {err}"))
})?;
let elapsed = started_at.elapsed();
// Log input/output sizes and elapsed time at info level so the
// compression ratio and latency can be observed in the logs.
info!(
input_bytes = json.len(),
output_bytes = compressed.len(),
elapsed_ms = elapsed.as_millis(),
"compressed request body"
);
Ok(Body::Bytes(Bytes::from(compressed)))
}
}
}
/// Add the `Content-Encoding: zstd` header when zstd compression is in use.
/// For `RequestCompression::None` this is a no-op and leaves `headers` untouched.
pub(crate) fn insert_compression_headers(headers: &mut HeaderMap, compression: RequestCompression) {
if matches!(compression, RequestCompression::Zstd) {
headers.insert(CONTENT_ENCODING, HeaderValue::from_static("zstd"));
}
}

View File

@@ -351,6 +351,7 @@ fn push_tool_call_message(messages: &mut Vec<Value>, tool_call: Value, reasoning
#[cfg(test)]
mod tests {
use super::*;
use crate::provider::RetryConfig;
use crate::provider::WireApi;
use codex_protocol::models::FunctionCallOutputPayload;

View File

@@ -1,3 +1,4 @@
pub(crate) mod body;
pub mod chat;
pub(crate) mod headers;
pub mod responses;

View File

@@ -3,9 +3,13 @@ use crate::common::ResponsesApiRequest;
use crate::common::TextControls;
use crate::error::ApiError;
use crate::provider::Provider;
use crate::provider::RequestCompression;
use crate::requests::body::encode_body;
use crate::requests::body::insert_compression_headers;
use crate::requests::headers::build_conversation_headers;
use crate::requests::headers::insert_header;
use crate::requests::headers::subagent_header;
use codex_client::Body;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::SessionSource;
use http::HeaderMap;
@@ -13,7 +17,7 @@ use serde_json::Value;
/// Assembled request body plus headers for a Responses stream request.
pub struct ResponsesRequest {
pub body: Value,
pub body: Body,
pub headers: HeaderMap,
}
@@ -32,6 +36,7 @@ pub struct ResponsesRequestBuilder<'a> {
session_source: Option<SessionSource>,
store_override: Option<bool>,
headers: HeaderMap,
request_compression: RequestCompression,
}
impl<'a> ResponsesRequestBuilder<'a> {
@@ -94,6 +99,11 @@ impl<'a> ResponsesRequestBuilder<'a> {
self
}
pub fn request_compression(mut self, request_compression: RequestCompression) -> Self {
self.request_compression = request_compression;
self
}
pub fn build(self, provider: &Provider) -> Result<ResponsesRequest, ApiError> {
let model = self
.model
@@ -137,6 +147,8 @@ impl<'a> ResponsesRequestBuilder<'a> {
if let Some(subagent) = subagent_header(&self.session_source) {
insert_header(&mut headers, "x-openai-subagent", &subagent);
}
insert_compression_headers(&mut headers, self.request_compression);
let body = encode_body(&body, self.request_compression)?;
Ok(ResponsesRequest { body, headers })
}
@@ -172,8 +184,10 @@ fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) {
#[cfg(test)]
mod tests {
use super::*;
use crate::provider::RetryConfig;
use crate::provider::WireApi;
use codex_client::Body;
use codex_protocol::protocol::SubAgentSource;
use http::HeaderValue;
use pretty_assertions::assert_eq;
@@ -219,10 +233,12 @@ mod tests {
.build(&provider)
.expect("request");
assert_eq!(request.body.get("store"), Some(&Value::Bool(true)));
let Body::Json(body) = &request.body else {
panic!("expected json body for responses request");
};
assert_eq!(body.get("store"), Some(&Value::Bool(true)));
let ids: Vec<Option<String>> = request
.body
let ids: Vec<Option<String>> = body
.get("input")
.and_then(|v| v.as_array())
.into_iter()

View File

@@ -10,7 +10,9 @@ use codex_api::ChatClient;
use codex_api::Provider;
use codex_api::ResponsesClient;
use codex_api::ResponsesOptions;
use codex_api::ResponsesRequest;
use codex_api::WireApi;
use codex_client::Body;
use codex_client::HttpTransport;
use codex_client::Request;
use codex_client::Response;
@@ -136,6 +138,13 @@ fn provider(name: &str, wire: WireApi) -> Provider {
}
}
/// Builds a minimal `ResponsesRequest` for tests: the given JSON value as the
/// body and no extra headers.
fn responses_request(body: Value) -> ResponsesRequest {
    let headers = HeaderMap::new();
    let body = Body::Json(body);
    ResponsesRequest { body, headers }
}
#[derive(Clone)]
struct FlakyTransport {
state: Arc<Mutex<i64>>,
@@ -201,7 +210,9 @@ async fn chat_client_uses_chat_completions_path_for_chat_wire() -> Result<()> {
let client = ChatClient::new(transport, provider("openai", WireApi::Chat), NoAuth);
let body = serde_json::json!({ "echo": true });
let _stream = client.stream(body, HeaderMap::new()).await?;
let _stream = client
.stream(body, HeaderMap::new(), Default::default())
.await?;
let requests = state.take_stream_requests();
assert_path_ends_with(&requests, "/chat/completions");
@@ -215,7 +226,9 @@ async fn chat_client_uses_responses_path_for_responses_wire() -> Result<()> {
let client = ChatClient::new(transport, provider("openai", WireApi::Responses), NoAuth);
let body = serde_json::json!({ "echo": true });
let _stream = client.stream(body, HeaderMap::new()).await?;
let _stream = client
.stream(body, HeaderMap::new(), Default::default())
.await?;
let requests = state.take_stream_requests();
assert_path_ends_with(&requests, "/responses");
@@ -228,8 +241,8 @@ async fn responses_client_uses_responses_path_for_responses_wire() -> Result<()>
let transport = RecordingTransport::new(state.clone());
let client = ResponsesClient::new(transport, provider("openai", WireApi::Responses), NoAuth);
let body = serde_json::json!({ "echo": true });
let _stream = client.stream(body, HeaderMap::new()).await?;
let request = responses_request(serde_json::json!({ "echo": true }));
let _stream = client.stream(request).await?;
let requests = state.take_stream_requests();
assert_path_ends_with(&requests, "/responses");
@@ -242,8 +255,8 @@ async fn responses_client_uses_chat_path_for_chat_wire() -> Result<()> {
let transport = RecordingTransport::new(state.clone());
let client = ResponsesClient::new(transport, provider("openai", WireApi::Chat), NoAuth);
let body = serde_json::json!({ "echo": true });
let _stream = client.stream(body, HeaderMap::new()).await?;
let request = responses_request(serde_json::json!({ "echo": true }));
let _stream = client.stream(request).await?;
let requests = state.take_stream_requests();
assert_path_ends_with(&requests, "/chat/completions");
@@ -257,8 +270,8 @@ async fn streaming_client_adds_auth_headers() -> Result<()> {
let auth = StaticAuth::new("secret-token", "acct-1");
let client = ResponsesClient::new(transport, provider("openai", WireApi::Responses), auth);
let body = serde_json::json!({ "model": "gpt-test" });
let _stream = client.stream(body, HeaderMap::new()).await?;
let request = responses_request(serde_json::json!({ "model": "gpt-test" }));
let _stream = client.stream(request).await?;
let requests = state.take_stream_requests();
assert_eq!(requests.len(), 1);

View File

@@ -8,7 +8,9 @@ use codex_api::AuthProvider;
use codex_api::Provider;
use codex_api::ResponseEvent;
use codex_api::ResponsesClient;
use codex_api::ResponsesRequest;
use codex_api::WireApi;
use codex_client::Body;
use codex_client::HttpTransport;
use codex_client::Request;
use codex_client::Response;
@@ -94,6 +96,13 @@ fn build_responses_body(events: Vec<Value>) -> String {
body
}
/// Test helper: wraps a JSON value into a `ResponsesRequest` with empty headers.
fn responses_request(body: Value) -> ResponsesRequest {
    let headers = HeaderMap::new();
    let body = Body::Json(body);
    ResponsesRequest { body, headers }
}
#[tokio::test]
async fn responses_stream_parses_items_and_completed_end_to_end() -> Result<()> {
let item1 = serde_json::json!({
@@ -123,9 +132,8 @@ async fn responses_stream_parses_items_and_completed_end_to_end() -> Result<()>
let transport = FixtureSseTransport::new(body);
let client = ResponsesClient::new(transport, provider("openai", WireApi::Responses), NoAuth);
let mut stream = client
.stream(serde_json::json!({"echo": true}), HeaderMap::new())
.await?;
let request = responses_request(serde_json::json!({"echo": true}));
let mut stream = client.stream(request).await?;
let mut events = Vec::new();
while let Some(ev) = stream.next().await {
@@ -188,9 +196,8 @@ async fn responses_stream_aggregates_output_text_deltas() -> Result<()> {
let transport = FixtureSseTransport::new(body);
let client = ResponsesClient::new(transport, provider("openai", WireApi::Responses), NoAuth);
let stream = client
.stream(serde_json::json!({"echo": true}), HeaderMap::new())
.await?;
let request = responses_request(serde_json::json!({"echo": true}));
let stream = client.stream(request).await?;
let mut stream = stream.aggregate();
let mut events = Vec::new();

View File

@@ -104,6 +104,13 @@ impl CodexRequestBuilder {
self.map(|builder| builder.json(value))
}
/// Sets a raw request body (anything convertible into `reqwest::Body`),
/// delegating to the inner `reqwest` builder.
pub fn body<T: Into<reqwest::Body>>(self, body: T) -> Self {
    self.map(move |builder| builder.body(body))
}
pub async fn send(self) -> Result<Response, reqwest::Error> {
let headers = trace_headers();

View File

@@ -10,6 +10,7 @@ pub use crate::default_client::CodexHttpClient;
pub use crate::default_client::CodexRequestBuilder;
pub use crate::error::StreamError;
pub use crate::error::TransportError;
pub use crate::request::Body;
pub use crate::request::Request;
pub use crate::request::Response;
pub use crate::retry::RetryOn;

View File

@@ -5,12 +5,18 @@ use serde::Serialize;
use serde_json::Value;
use std::time::Duration;
/// Request payload handed to the HTTP transport.
#[derive(Debug, Clone)]
pub enum Body {
// A JSON value; the transport serializes it via `reqwest`'s `json(...)`.
Json(Value),
// Pre-encoded raw bytes sent as-is (e.g. an already-compressed body).
Bytes(Bytes),
}
#[derive(Debug, Clone)]
pub struct Request {
pub method: Method,
pub url: String,
pub headers: HeaderMap,
pub body: Option<Value>,
pub body: Option<Body>,
pub timeout: Option<Duration>,
}
@@ -26,7 +32,7 @@ impl Request {
}
pub fn with_json<T: Serialize>(mut self, body: &T) -> Self {
self.body = serde_json::to_value(body).ok();
self.body = serde_json::to_value(body).ok().map(Body::Json);
self
}
}

View File

@@ -1,6 +1,7 @@
use crate::default_client::CodexHttpClient;
use crate::default_client::CodexRequestBuilder;
use crate::error::TransportError;
use crate::request::Body;
use crate::request::Request;
use crate::request::Response;
use async_trait::async_trait;
@@ -52,7 +53,10 @@ impl ReqwestTransport {
builder = builder.timeout(timeout);
}
if let Some(body) = req.body {
builder = builder.json(&body);
builder = match body {
Body::Json(value) => builder.json(&value),
Body::Bytes(bytes) => builder.body(bytes),
};
}
Ok(builder)
}
@@ -101,10 +105,10 @@ impl HttpTransport for ReqwestTransport {
async fn stream(&self, req: Request) -> Result<StreamResponse, TransportError> {
if enabled!(Level::TRACE) {
trace!(
"{} to {}: {}",
req.method,
req.url,
req.body.as_ref().unwrap_or_default()
method = %req.method,
url = %req.url,
body = ?req.body,
"Sending streaming request"
);
}

View File

@@ -89,6 +89,7 @@ url = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
which = { workspace = true }
wildmatch = { workspace = true }
zstd = { workspace = true }
[features]
deterministic_process_ids = []

View File

@@ -156,6 +156,9 @@ impl ModelClient {
let mut refreshed = false;
loop {
let auth = auth_manager.as_ref().and_then(|m| m.auth());
let request_compression = self
.provider
.request_compression_for(auth.as_ref().map(|a| a.mode), &self.config.features);
let api_provider = self
.provider
.to_api_provider(auth.as_ref().map(|a| a.mode))?;
@@ -171,6 +174,7 @@ impl ModelClient {
&api_prompt,
Some(conversation_id.clone()),
Some(session_source.clone()),
request_compression,
)
.await;
@@ -245,6 +249,9 @@ impl ModelClient {
let mut refreshed = false;
loop {
let auth = auth_manager.as_ref().and_then(|m| m.auth());
let request_compression = self
.provider
.request_compression_for(auth.as_ref().map(|a| a.mode), &self.config.features);
let api_provider = self
.provider
.to_api_provider(auth.as_ref().map(|a| a.mode))?;
@@ -263,6 +270,7 @@ impl ModelClient {
conversation_id: Some(conversation_id.clone()),
session_source: Some(session_source.clone()),
extra_headers: beta_feature_headers(&self.config),
request_compression,
};
let stream_result = client

View File

@@ -363,7 +363,6 @@ pub struct ConfigBuilder {
cli_overrides: Option<Vec<(String, TomlValue)>>,
harness_overrides: Option<ConfigOverrides>,
loader_overrides: Option<LoaderOverrides>,
thread_agnostic: bool,
}
impl ConfigBuilder {
@@ -372,13 +371,6 @@ impl ConfigBuilder {
self
}
/// Load a "thread-agnostic" config stack, which intentionally ignores any
/// in-repo `.codex/` config layers (because there is no cwd/project context).
pub fn thread_agnostic(mut self) -> Self {
self.thread_agnostic = true;
self
}
pub fn cli_overrides(mut self, cli_overrides: Vec<(String, TomlValue)>) -> Self {
self.cli_overrides = Some(cli_overrides);
self
@@ -400,22 +392,18 @@ impl ConfigBuilder {
cli_overrides,
harness_overrides,
loader_overrides,
thread_agnostic,
} = self;
let codex_home = codex_home.map_or_else(find_codex_home, std::io::Result::Ok)?;
let cli_overrides = cli_overrides.unwrap_or_default();
let harness_overrides = harness_overrides.unwrap_or_default();
let loader_overrides = loader_overrides.unwrap_or_default();
let cwd = if thread_agnostic {
None
} else {
Some(match harness_overrides.cwd.as_deref() {
Some(path) => AbsolutePathBuf::try_from(path)?,
None => AbsolutePathBuf::current_dir()?,
})
let cwd = match harness_overrides.cwd.as_deref() {
Some(path) => AbsolutePathBuf::try_from(path)?,
None => AbsolutePathBuf::current_dir()?,
};
let config_layer_stack =
load_config_layers_state(&codex_home, cwd, &cli_overrides, loader_overrides).await?;
load_config_layers_state(&codex_home, Some(cwd), &cli_overrides, loader_overrides)
.await?;
let merged_toml = config_layer_stack.effective_config();
// Note that each layer in ConfigLayerStack should have resolved
@@ -2094,43 +2082,6 @@ trust_level = "trusted"
Ok(())
}
#[tokio::test]
async fn config_builder_thread_agnostic_ignores_project_layers() -> anyhow::Result<()> {
let tmp = TempDir::new()?;
let codex_home = tmp.path().join("codex_home");
std::fs::create_dir_all(&codex_home)?;
std::fs::write(codex_home.join(CONFIG_TOML_FILE), "model = \"from-user\"\n")?;
let project = tmp.path().join("project");
std::fs::create_dir_all(project.join(".codex"))?;
std::fs::write(
project.join(".codex").join(CONFIG_TOML_FILE),
"model = \"from-project\"\n",
)?;
let harness_overrides = ConfigOverrides {
cwd: Some(project),
..Default::default()
};
let with_project_layers = ConfigBuilder::default()
.codex_home(codex_home.clone())
.harness_overrides(harness_overrides.clone())
.build()
.await?;
assert_eq!(with_project_layers.model.as_deref(), Some("from-project"));
let thread_agnostic = ConfigBuilder::default()
.codex_home(codex_home)
.harness_overrides(harness_overrides)
.thread_agnostic()
.build()
.await?;
assert_eq!(thread_agnostic.model.as_deref(), Some("from-user"));
Ok(())
}
#[tokio::test]
async fn load_global_mcp_servers_returns_empty_if_missing() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;

View File

@@ -24,7 +24,6 @@ use std::sync::OnceLock;
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));
pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
#[derive(Debug, Clone)]
pub struct Originator {
pub value: String,

View File

@@ -8,7 +8,6 @@
use crate::config::ConfigToml;
use crate::config::profile::ConfigProfile;
use serde::Deserialize;
use serde::Serialize;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
@@ -74,6 +73,8 @@ pub enum Feature {
ApplyPatchFreeform,
/// Allow the model to request web searches.
WebSearchRequest,
/// Allow request body compression when using ChatGPT auth.
RequestCompression,
/// Gate the execpolicy enforcement for shell/unified exec.
ExecPolicy,
/// Enable Windows sandbox (restricted token) on Windows.
@@ -150,16 +151,16 @@ impl FeatureOverrides {
impl Features {
/// Starts with built-in defaults.
pub fn with_defaults() -> Self {
let mut set = BTreeSet::new();
let mut features = Self {
enabled: BTreeSet::new(),
legacy_usages: BTreeSet::new(),
};
for spec in FEATURES {
if spec.default_enabled {
set.insert(spec.id);
features.enable(spec.id);
}
}
Self {
enabled: set,
legacy_usages: BTreeSet::new(),
}
features
}
pub fn enabled(&self, f: Feature) -> bool {
@@ -196,7 +197,7 @@ impl Features {
.map(|usage| (usage.alias.as_str(), usage.feature))
}
/// Apply a table of key -> bool toggles (e.g. from TOML).
/// Apply a table of key -> value toggles (e.g. from TOML).
pub fn apply_map(&mut self, m: &BTreeMap<String, bool>) {
for (k, v) in m {
match feature_for_key(k) {
@@ -330,6 +331,12 @@ pub const FEATURES: &[FeatureSpec] = &[
stage: Stage::Stable,
default_enabled: false,
},
FeatureSpec {
id: Feature::RequestCompression,
key: "request_compression",
stage: Stage::Experimental,
default_enabled: false,
},
// Beta program. Rendered in the `/experimental` menu for users.
FeatureSpec {
id: Feature::UnifiedExec,

View File

@@ -19,6 +19,8 @@ use std::env::VarError;
use std::time::Duration;
use crate::error::EnvVarError;
use crate::features::Feature;
use crate::features::Features;
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
@@ -253,6 +255,21 @@ impl ModelProviderInfo {
pub fn is_openai(&self) -> bool {
self.name == OPENAI_PROVIDER_NAME
}
/// Decides which request-body compression to use for this provider.
///
/// Compression (zstd) is enabled only when all three hold: this is the
/// OpenAI provider, the caller is authenticated via ChatGPT, and the
/// `request_compression` feature flag is on. Otherwise no compression.
pub fn request_compression_for(
    &self,
    auth_mode: Option<AuthMode>,
    features: &Features,
) -> codex_api::provider::RequestCompression {
    use codex_api::provider::RequestCompression;

    let chatgpt_auth = matches!(auth_mode, Some(AuthMode::ChatGPT));
    let compression_enabled = features.enabled(Feature::RequestCompression);
    if self.is_openai() && chatgpt_auth && compression_enabled {
        RequestCompression::Zstd
    } else {
        RequestCompression::None
    }
}
}
pub const DEFAULT_LMSTUDIO_PORT: u16 = 1234;

View File

@@ -513,9 +513,9 @@ mod tests {
)
.unwrap_or_else(|_| cfg.codex_home.join("skills/pdf-processing/SKILL.md"));
let expected_path_str = expected_path.to_string_lossy().replace('\\', "/");
let usage_rules = "- Discovery: Available skills are listed in project docs and may also appear in a runtime \"## Skills\" section (name + description + file path). These are the sources of truth; skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Description as trigger: The YAML `description` in `SKILL.md` is the primary trigger signal; rely on it to decide applicability. If unsure, ask a brief clarification before proceeding.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). 
If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deeply nested references; prefer one-hop files explicitly linked from `SKILL.md`.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let expected = format!(
"base doc\n\n## Skills\nThese skills are discovered at startup from multiple local sources. Each entry includes a name, description, and file path so you can open the source for full instructions.\n- pdf-processing: extract from pdfs (file: {expected_path_str})\n{usage_rules}"
"base doc\n\n## Skills\nA skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.\n### Available skills\n- pdf-processing: extract from pdfs (file: {expected_path_str})\n### How to use skills\n{usage_rules}"
);
assert_eq!(res, expected);
}
@@ -537,9 +537,9 @@ mod tests {
dunce::canonicalize(cfg.codex_home.join("skills/linting/SKILL.md").as_path())
.unwrap_or_else(|_| cfg.codex_home.join("skills/linting/SKILL.md"));
let expected_path_str = expected_path.to_string_lossy().replace('\\', "/");
let usage_rules = "- Discovery: Available skills are listed in project docs and may also appear in a runtime \"## Skills\" section (name + description + file path). These are the sources of truth; skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Description as trigger: The YAML `description` in `SKILL.md` is the primary trigger signal; rely on it to decide applicability. If unsure, ask a brief clarification before proceeding.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). 
If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deeply nested references; prefer one-hop files explicitly linked from `SKILL.md`.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let expected = format!(
"## Skills\nThese skills are discovered at startup from multiple local sources. Each entry includes a name, description, and file path so you can open the source for full instructions.\n- linting: run clippy (file: {expected_path_str})\n{usage_rules}"
"## Skills\nA skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.\n### Available skills\n- linting: run clippy (file: {expected_path_str})\n### How to use skills\n{usage_rules}"
);
assert_eq!(res, expected);
}

View File

@@ -7,7 +7,8 @@ pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
let mut lines: Vec<String> = Vec::new();
lines.push("## Skills".to_string());
lines.push("These skills are discovered at startup from multiple local sources. Each entry includes a name, description, and file path so you can open the source for full instructions.".to_string());
lines.push("A skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.".to_string());
lines.push("### Available skills".to_string());
for skill in skills {
let path_str = skill.path.to_string_lossy().replace('\\', "/");
@@ -16,22 +17,22 @@ pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
lines.push(format!("- {name}: {description} (file: {path_str})"));
}
lines.push("### How to use skills".to_string());
lines.push(
r###"- Discovery: Available skills are listed in project docs and may also appear in a runtime "## Skills" section (name + description + file path). These are the sources of truth; skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
r###"- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
4) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Description as trigger: The YAML `description` in `SKILL.md` is the primary trigger signal; rely on it to decide applicability. If unsure, ask a brief clarification before proceeding.
- Coordination and sequencing:
- If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
- Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.
- Context hygiene:
- Keep context small: summarize long sections instead of pasting them; only load extra files when needed.
- Avoid deeply nested references; prefer one-hop files explicitly linked from `SKILL.md`.
- Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.
- When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.
- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue."###
.to_string(),

View File

@@ -15,14 +15,17 @@ codex-core = { workspace = true, features = ["test-support"] }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
http = { workspace = true }
notify = { workspace = true }
regex-lite = { workspace = true }
serde_json = { workspace = true }
serde = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["time"] }
walkdir = { workspace = true }
wiremock = { workspace = true }
shlex = { workspace = true }
zstd = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }

View File

@@ -11,11 +11,15 @@ use regex_lite::Regex;
use std::path::PathBuf;
pub mod process;
pub mod request;
pub mod responses;
pub mod streaming_sse;
pub mod test_codex;
pub mod test_codex_exec;
pub use request::RequestBodyExt;
pub use request::body_contains;
#[track_caller]
pub fn assert_regex_match<'s>(pattern: &str, actual: &'s str) -> regex_lite::Captures<'s> {
let regex = Regex::new(pattern).unwrap_or_else(|err| {
@@ -178,7 +182,7 @@ where
F: FnMut(&codex_core::protocol::EventMsg) -> bool,
{
use tokio::time::Duration;
wait_for_event_with_timeout(codex, predicate, Duration::from_secs(1)).await
wait_for_event_with_timeout(codex, predicate, Duration::from_secs(10)).await
}
pub async fn wait_for_event_match<T, F>(codex: &CodexConversation, matcher: F) -> T

View File

@@ -0,0 +1,59 @@
use http::header::CONTENT_ENCODING;
use serde::de::DeserializeOwned;
use wiremock::Match;
/// Returns the request body, transparently zstd-decompressing it when the
/// `content-encoding: zstd` header is present. Panics on a malformed
/// zstd payload (test helper).
pub fn decoded_body_bytes(request: &wiremock::Request) -> Vec<u8> {
    if !is_zstd_encoded(request) {
        return request.body.clone();
    }
    match zstd::decode_all(request.body.as_slice()) {
        Ok(decoded) => decoded,
        Err(err) => panic!("failed to decode zstd-encoded request body: {err}"),
    }
}
/// Returns the (possibly decompressed) request body as a lossily
/// UTF-8-decoded string.
pub fn decoded_body_string(request: &wiremock::Request) -> String {
    let bytes = decoded_body_bytes(request);
    String::from_utf8_lossy(&bytes).into_owned()
}
/// Convenience accessors for reading (possibly compressed) request bodies in tests.
pub trait RequestBodyExt {
/// Decodes the body and deserializes it as JSON into `T`; panics on invalid JSON.
fn json_body<T: DeserializeOwned>(&self) -> T;
/// Decodes the body and returns it as a (lossy UTF-8) string.
fn text_body(&self) -> String;
}
impl RequestBodyExt for wiremock::Request {
    /// Decompresses (if needed) and deserializes the body as JSON;
    /// panics with the parse error on failure (test helper).
    fn json_body<T: DeserializeOwned>(&self) -> T {
        let bytes = decoded_body_bytes(self);
        match serde_json::from_slice(&bytes) {
            Ok(value) => value,
            Err(err) => panic!("failed to decode request body as JSON: {err}"),
        }
    }

    /// Decompresses (if needed) and returns the body as a string.
    fn text_body(&self) -> String {
        decoded_body_string(self)
    }
}
/// Wiremock matcher: succeeds when the decoded request body contains `needle`.
pub fn body_contains(needle: impl Into<String>) -> impl Match {
    let needle = needle.into();
    BodyContains { needle }
}
/// Matcher state for [`body_contains`]: the substring searched for in the
/// decoded request body.
struct BodyContains {
needle: String,
}
impl Match for BodyContains {
    /// True when the (possibly decompressed) body contains the needle.
    fn matches(&self, request: &wiremock::Request) -> bool {
        let body = decoded_body_string(request);
        body.contains(self.needle.as_str())
    }
}
/// Checks whether the request advertises a zstd-compressed body via the
/// `content-encoding` header (case-insensitive comparison).
fn is_zstd_encoded(request: &wiremock::Request) -> bool {
    let Some(value) = request.headers.get(CONTENT_ENCODING) else {
        return false;
    };
    value
        .to_str()
        .is_ok_and(|encoding| encoding.eq_ignore_ascii_case("zstd"))
}

View File

@@ -15,6 +15,7 @@ use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path_regex;
use crate::RequestBodyExt;
use crate::test_codex::ApplyPatchModelOutput;
#[derive(Debug, Clone)]
@@ -67,7 +68,7 @@ pub struct ResponsesRequest(wiremock::Request);
impl ResponsesRequest {
pub fn body_json(&self) -> Value {
self.0.body_json().unwrap()
self.0.json_body()
}
/// Returns all `input_text` spans from `message` inputs for the provided role.
@@ -83,7 +84,7 @@ impl ResponsesRequest {
}
pub fn input(&self) -> Vec<Value> {
self.0.body_json::<Value>().unwrap()["input"]
self.body_json()["input"]
.as_array()
.expect("input array not found in request")
.clone()
@@ -721,10 +722,7 @@ pub async fn get_responses_request_bodies(server: &MockServer) -> Vec<Value> {
get_responses_requests(server)
.await
.into_iter()
.map(|req| {
req.body_json::<Value>()
.expect("request body to be valid JSON")
})
.map(|req| req.json_body::<Value>())
.collect()
}

View File

@@ -10,6 +10,7 @@ use codex_core::Prompt;
use codex_core::ResponseEvent;
use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_core::features::Feature;
use codex_core::models_manager::manager::ModelsManager;
use codex_otel::otel_manager::OtelManager;
use codex_protocol::ConversationId;
@@ -317,3 +318,191 @@ async fn responses_respects_model_family_overrides_from_config() {
Some("detailed")
);
}
/// Verifies that enabling `Feature::RequestCompression` causes the client to
/// send the Responses API request body zstd-compressed
/// (`content-encoding: zstd`) while keeping the JSON `content-type`, and that
/// the (decoded) body still carries the expected fields.
#[tokio::test]
async fn responses_request_body_is_zstd_encoded() {
    core_test_support::skip_if_no_network!();

    // Mock server answering with a minimal created -> completed SSE stream and
    // recording the single request for inspection below.
    let server = responses::start_mock_server().await;
    let response_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_completed("resp-1"),
    ]);
    let request_recorder = responses::mount_sse_once(&server, response_body).await;

    // Provider pointed at the mock server; retries disabled so any failure
    // surfaces immediately instead of being masked by retry logic.
    let provider = ModelProviderInfo {
        name: "OpenAI".into(),
        base_url: Some(format!("{}/v1", server.uri())),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api: WireApi::Responses,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: Some(0),
        stream_max_retries: Some(0),
        stream_idle_timeout_ms: Some(5_000),
        requires_openai_auth: false,
    };

    let codex_home = TempDir::new().expect("failed to create TempDir");
    let mut config = load_default_config_for_test(&codex_home).await;
    config.model_provider_id = provider.name.clone();
    config.model_provider = provider.clone();
    // The behavior under test: opt in to request compression.
    config.features.enable(Feature::RequestCompression);
    let effort = config.model_reasoning_effort;
    let summary = config.model_reasoning_summary;
    let model = ModelsManager::get_model_offline(config.model.as_deref());
    config.model = Some(model.clone());
    let config = Arc::new(config);

    // Assemble the client with dummy ChatGPT auth and offline model metadata.
    let conversation_id = ConversationId::new();
    let session_source = SessionSource::Exec;
    let model_family = ModelsManager::construct_model_family_offline(model.as_str(), &config);
    let otel_manager = OtelManager::new(
        conversation_id,
        model.as_str(),
        model_family.slug.as_str(),
        None,
        Some("test@test.com".to_string()),
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
        session_source.clone(),
    );
    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    let client = ModelClient::new(
        Arc::clone(&config),
        Some(auth_manager),
        model_family,
        otel_manager,
        provider,
        effort,
        summary,
        conversation_id,
        session_source,
    );

    // Send a single user message and drain the stream until completion so the
    // request is fully flushed to the mock server.
    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
        id: None,
        role: "user".into(),
        content: vec![ContentItem::InputText {
            text: "hello".into(),
        }],
    }];
    let mut stream = client.stream(&prompt).await.expect("stream failed");
    while let Some(event) = stream.next().await {
        if matches!(event, Ok(ResponseEvent::Completed { .. })) {
            break;
        }
    }

    // The recorded request must advertise zstd while keeping the JSON
    // content-type, and the body (after decoding) must match what we sent.
    let request = request_recorder.single_request();
    assert_eq!(request.header("content-encoding").as_deref(), Some("zstd"));
    assert_eq!(
        request.header("content-type").as_deref(),
        Some("application/json")
    );
    let request_body = request.body_json();
    assert_eq!(request_body["stream"].as_bool(), Some(true));
    assert_eq!(
        request_body["input"][0]["content"][0]["text"].as_str(),
        Some("hello")
    );
}
/// Counterpart to `responses_request_body_is_zstd_encoded`: with the
/// compression feature left at its default (not enabled), the request body
/// must go out uncompressed — no `content-encoding` header at all.
#[tokio::test]
async fn responses_request_body_is_uncompressed_when_disabled() {
    core_test_support::skip_if_no_network!();

    // Mock server answering with a minimal created -> completed SSE stream and
    // recording the single request for inspection below.
    let server = responses::start_mock_server().await;
    let response_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_completed("resp-1"),
    ]);
    let request_recorder = responses::mount_sse_once(&server, response_body).await;

    // Provider pointed at the mock server; retries disabled so any failure
    // surfaces immediately instead of being masked by retry logic.
    let provider = ModelProviderInfo {
        name: "OpenAI".into(),
        base_url: Some(format!("{}/v1", server.uri())),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api: WireApi::Responses,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: Some(0),
        stream_max_retries: Some(0),
        stream_idle_timeout_ms: Some(5_000),
        requires_openai_auth: false,
    };

    // NOTE: unlike the zstd test, `Feature::RequestCompression` is never
    // enabled here — that is the condition under test.
    let codex_home = TempDir::new().expect("failed to create TempDir");
    let mut config = load_default_config_for_test(&codex_home).await;
    config.model_provider_id = provider.name.clone();
    config.model_provider = provider.clone();
    let effort = config.model_reasoning_effort;
    let summary = config.model_reasoning_summary;
    let model = ModelsManager::get_model_offline(config.model.as_deref());
    config.model = Some(model.clone());
    let config = Arc::new(config);

    // Assemble the client with dummy ChatGPT auth and offline model metadata.
    let conversation_id = ConversationId::new();
    let session_source = SessionSource::Exec;
    let model_family = ModelsManager::construct_model_family_offline(model.as_str(), &config);
    let otel_manager = OtelManager::new(
        conversation_id,
        model.as_str(),
        model_family.slug.as_str(),
        None,
        Some("test@test.com".to_string()),
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
        session_source.clone(),
    );
    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    let client = ModelClient::new(
        Arc::clone(&config),
        Some(auth_manager),
        model_family,
        otel_manager,
        provider,
        effort,
        summary,
        conversation_id,
        session_source,
    );

    // Send a single user message and drain the stream until completion so the
    // request is fully flushed to the mock server.
    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
        id: None,
        role: "user".into(),
        content: vec![ContentItem::InputText {
            text: "hello".into(),
        }],
    }];
    let mut stream = client.stream(&prompt).await.expect("stream failed");
    while let Some(event) = stream.next().await {
        if matches!(event, Ok(ResponseEvent::Completed { .. })) {
            break;
        }
    }

    // With compression disabled the header must be absent entirely; the
    // content-type is still plain JSON.
    let request = request_recorder.single_request();
    assert_eq!(request.header("content-encoding"), None);
    assert_eq!(
        request.header("content-type").as_deref(),
        Some("application/json")
    );
}

View File

@@ -29,6 +29,8 @@ use codex_protocol::models::ReasoningItemReasoningSummary;
use codex_protocol::models::WebSearchAction;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::user_input::UserInput;
use core_test_support::RequestBodyExt;
use core_test_support::body_contains;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::responses::ev_completed_with_tokens;
@@ -51,7 +53,6 @@ use uuid::Uuid;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_string_contains;
use wiremock::matchers::header_regex;
use wiremock::matchers::method;
use wiremock::matchers::path;
@@ -507,7 +508,7 @@ async fn chatgpt_auth_sends_correct_request() {
let request_authorization = request.headers.get("authorization").unwrap();
let request_originator = request.headers.get("originator").unwrap();
let request_chatgpt_account_id = request.headers.get("chatgpt-account-id").unwrap();
let request_body = request.body_json::<serde_json::Value>().unwrap();
let request_body = request.json_body::<serde_json::Value>();
assert_eq!(
request_conversation_id.to_str().unwrap(),
@@ -1495,7 +1496,7 @@ async fn context_window_error_sets_total_tokens_to_model_window() -> anyhow::Res
mount_sse_once_match(
&server,
body_string_contains("trigger context window"),
body_contains("trigger context window"),
sse_failed(
"resp_context_window",
"context_length_exceeded",
@@ -1506,7 +1507,7 @@ async fn context_window_error_sets_total_tokens_to_model_window() -> anyhow::Res
mount_sse_once_match(
&server,
body_string_contains("seed turn"),
body_contains("seed turn"),
sse_completed("resp_seed"),
)
.await;
@@ -1882,8 +1883,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
]);
let r3_input_array = requests[2]
.body_json::<serde_json::Value>()
.unwrap()
.json_body::<serde_json::Value>()
.get("input")
.and_then(|v| v.as_array())
.cloned()

View File

@@ -17,6 +17,7 @@ use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::WarningEvent;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::user_input::UserInput;
use core_test_support::RequestBodyExt;
use core_test_support::load_default_config_for_test;
use core_test_support::responses::ev_local_shell_call;
use core_test_support::responses::ev_reasoning_item;
@@ -132,7 +133,6 @@ async fn summarize_context_three_requests_and_instructions() {
// SSE 3: minimal completed; we only need to capture the request body.
let sse3 = sse(vec![ev_completed("r3")]);
// Mount the three expected requests in sequence so the assertions below can
// inspect them without relying on specific prompt markers.
let request_log = mount_sse_sequence(&server, vec![sse1, sse2, sse3]).await;
@@ -361,7 +361,8 @@ async fn manual_compact_uses_custom_prompt() {
let requests = get_responses_requests(&server).await;
let body = requests
.iter()
.find_map(|req| req.body_json::<serde_json::Value>().ok())
.map(core_test_support::RequestBodyExt::json_body::<serde_json::Value>)
.next()
.expect("summary request body");
let input = body
@@ -591,9 +592,7 @@ async fn multiple_auto_compact_per_task_runs_after_token_limit_hit() {
// collect the requests payloads from the model
let requests_payloads = get_responses_requests(&server).await;
let body = requests_payloads[0]
.body_json::<serde_json::Value>()
.unwrap();
let body = requests_payloads[0].json_body::<serde_json::Value>();
let input = body.get("input").and_then(|v| v.as_array()).unwrap();
fn normalize_inputs(values: &[serde_json::Value]) -> Vec<serde_json::Value> {
@@ -634,9 +633,7 @@ async fn multiple_auto_compact_per_task_runs_after_token_limit_hit() {
prefixed_third_summary.as_str(),
];
for (i, expected_summary) in compaction_indices.into_iter().zip(expected_summaries) {
let body = requests_payloads.clone()[i]
.body_json::<serde_json::Value>()
.unwrap();
let body = requests_payloads.clone()[i].json_body::<serde_json::Value>();
let input = body.get("input").and_then(|v| v.as_array()).unwrap();
let input = normalize_inputs(input);
assert_eq!(input.len(), 3);
@@ -999,7 +996,7 @@ async fn multiple_auto_compact_per_task_runs_after_token_limit_hit() {
]);
for (i, request) in requests_payloads.iter().enumerate() {
let body = request.body_json::<serde_json::Value>().unwrap();
let body = request.json_body::<serde_json::Value>();
let input = body.get("input").and_then(|v| v.as_array()).unwrap();
let expected_input = expected_requests_inputs[i].as_array().unwrap();
assert_eq!(normalize_inputs(input), normalize_inputs(expected_input));
@@ -1038,30 +1035,30 @@ async fn auto_compact_runs_after_token_limit_hit() {
let prefixed_auto_summary = AUTO_SUMMARY_TEXT;
let first_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains(FIRST_AUTO_MSG)
&& !body.contains(SECOND_AUTO_MSG)
&& !body_contains_text(body, SUMMARIZATION_PROMPT)
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, first_matcher, sse1).await;
let second_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains(SECOND_AUTO_MSG)
&& body.contains(FIRST_AUTO_MSG)
&& !body_contains_text(body, SUMMARIZATION_PROMPT)
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, second_matcher, sse2).await;
let third_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
body_contains_text(body, SUMMARIZATION_PROMPT)
let body = req.text_body();
body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, third_matcher, sse3).await;
let fourth_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
body.contains(POST_AUTO_USER_MSG) && !body_contains_text(body, SUMMARIZATION_PROMPT)
let body = req.text_body();
body.contains(POST_AUTO_USER_MSG)
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, fourth_matcher, sse4).await;
@@ -1126,10 +1123,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
requests.len()
);
let is_auto_compact = |req: &wiremock::Request| {
body_contains_text(
std::str::from_utf8(&req.body).unwrap_or(""),
SUMMARIZATION_PROMPT,
)
body_contains_text(req.text_body().as_str(), SUMMARIZATION_PROMPT)
};
let auto_compact_count = requests.iter().filter(|req| is_auto_compact(req)).count();
assert_eq!(
@@ -1151,20 +1145,16 @@ async fn auto_compact_runs_after_token_limit_hit() {
.enumerate()
.rev()
.find_map(|(idx, req)| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
(body.contains(POST_AUTO_USER_MSG) && !body_contains_text(body, SUMMARIZATION_PROMPT))
let body = req.text_body();
(body.contains(POST_AUTO_USER_MSG) && !body_contains_text(&body, SUMMARIZATION_PROMPT))
.then_some(idx)
})
.expect("follow-up request missing");
assert_eq!(follow_up_index, 3, "follow-up request should be last");
let body_first = requests[0].body_json::<serde_json::Value>().unwrap();
let body_auto = requests[auto_compact_index]
.body_json::<serde_json::Value>()
.unwrap();
let body_follow_up = requests[follow_up_index]
.body_json::<serde_json::Value>()
.unwrap();
let body_first = requests[0].json_body::<serde_json::Value>();
let body_auto = requests[auto_compact_index].json_body::<serde_json::Value>();
let body_follow_up = requests[follow_up_index].json_body::<serde_json::Value>();
let instructions = body_auto
.get("instructions")
.and_then(|v| v.as_str())
@@ -1375,24 +1365,24 @@ async fn auto_compact_persists_rollout_entries() {
]);
let first_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains(FIRST_AUTO_MSG)
&& !body.contains(SECOND_AUTO_MSG)
&& !body_contains_text(body, SUMMARIZATION_PROMPT)
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, first_matcher, sse1).await;
let second_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains(SECOND_AUTO_MSG)
&& body.contains(FIRST_AUTO_MSG)
&& !body_contains_text(body, SUMMARIZATION_PROMPT)
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, second_matcher, sse2).await;
let third_matcher = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
body_contains_text(body, SUMMARIZATION_PROMPT)
let body = req.text_body();
body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(&server, third_matcher, sse3).await;

View File

@@ -23,6 +23,7 @@ use codex_core::protocol::Op;
use codex_core::protocol::WarningEvent;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_protocol::user_input::UserInput;
use core_test_support::RequestBodyExt;
use core_test_support::load_default_config_for_test;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
@@ -796,8 +797,9 @@ async fn mount_initial_flow(server: &MockServer) {
let sse5 = sse(vec![ev_completed("r5")]);
let match_first = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains("\"text\":\"hello world\"")
&& !body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
&& !body.contains(&format!("\"text\":\"{SUMMARY_TEXT}\""))
&& !body.contains("\"text\":\"AFTER_COMPACT\"")
&& !body.contains("\"text\":\"AFTER_RESUME\"")
@@ -806,13 +808,13 @@ async fn mount_initial_flow(server: &MockServer) {
mount_sse_once_match(server, match_first, sse1).await;
let match_compact = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
body_contains_text(body, SUMMARIZATION_PROMPT) || body.contains(&json_fragment(FIRST_REPLY))
let body = req.text_body();
body_contains_text(body.as_str(), SUMMARIZATION_PROMPT)
};
mount_sse_once_match(server, match_compact, sse2).await;
let match_after_compact = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains("\"text\":\"AFTER_COMPACT\"")
&& !body.contains("\"text\":\"AFTER_RESUME\"")
&& !body.contains("\"text\":\"AFTER_FORK\"")
@@ -820,13 +822,13 @@ async fn mount_initial_flow(server: &MockServer) {
mount_sse_once_match(server, match_after_compact, sse3).await;
let match_after_resume = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains("\"text\":\"AFTER_RESUME\"")
};
mount_sse_once_match(server, match_after_resume, sse4).await;
let match_after_fork = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains("\"text\":\"AFTER_FORK\"")
};
mount_sse_once_match(server, match_after_fork, sse5).await;
@@ -840,13 +842,13 @@ async fn mount_second_compact_flow(server: &MockServer) {
let sse7 = sse(vec![ev_completed("r7")]);
let match_second_compact = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
body.contains("AFTER_FORK")
let body = req.text_body();
body_contains_text(body.as_str(), SUMMARIZATION_PROMPT) && body.contains("AFTER_FORK")
};
mount_sse_once_match(server, match_second_compact, sse6).await;
let match_after_second_resume = |req: &wiremock::Request| {
let body = std::str::from_utf8(&req.body).unwrap_or("");
let body = req.text_body();
body.contains(&format!("\"text\":\"{AFTER_SECOND_RESUME}\""))
};
mount_sse_once_match(server, match_after_second_resume, sse7).await;

View File

@@ -6,6 +6,7 @@ use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::user_input::UserInput;
use core_test_support::RequestBodyExt;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
@@ -54,7 +55,7 @@ async fn codex_returns_json_result(model: String) -> anyhow::Result<()> {
let expected_schema: serde_json::Value = serde_json::from_str(SCHEMA)?;
let match_json_text_param = move |req: &wiremock::Request| {
let body: serde_json::Value = serde_json::from_slice(&req.body).unwrap_or_default();
let body: serde_json::Value = req.json_body();
let Some(text) = body.get("text") else {
return false;
};

View File

@@ -21,6 +21,7 @@ use codex_core::protocol::RolloutItem;
use codex_core::protocol::RolloutLine;
use codex_core::review_format::render_review_output_text;
use codex_protocol::user_input::UserInput;
use core_test_support::RequestBodyExt;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id_from_str;
use core_test_support::responses::get_responses_requests;
@@ -430,7 +431,7 @@ async fn review_uses_custom_review_model_from_config() {
let request = requests
.first()
.expect("expected POST request to /responses");
let body = request.body_json::<serde_json::Value>().unwrap();
let body = request.json_body::<serde_json::Value>();
assert_eq!(body["model"].as_str().unwrap(), "gpt-5.1");
server.verify().await;
@@ -551,7 +552,7 @@ async fn review_input_isolated_from_parent_history() {
let request = requests
.first()
.expect("expected POST request to /responses");
let body = request.body_json::<serde_json::Value>().unwrap();
let body = request.json_body::<serde_json::Value>();
let input = body["input"].as_array().expect("input array");
assert!(
input.len() >= 2,
@@ -676,7 +677,7 @@ async fn review_history_surfaces_in_parent_session() {
// Critically, no messages from the review thread should appear.
let requests = get_responses_requests(&server).await;
assert_eq!(requests.len(), 2);
let body = requests[1].body_json::<serde_json::Value>().unwrap();
let body = requests[1].json_body::<serde_json::Value>();
let input = body["input"].as_array().expect("input array");
// Must include the followup as the last item for this turn

View File

@@ -3,6 +3,7 @@ use codex_core::WireApi;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_protocol::user_input::UserInput;
use core_test_support::body_contains;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
@@ -11,7 +12,6 @@ use core_test_support::wait_for_event;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::body_string_contains;
use wiremock::matchers::method;
use wiremock::matchers::path;
@@ -38,7 +38,7 @@ async fn continue_after_stream_error() {
// so the failing request should only occur once.
Mock::given(method("POST"))
.and(path("/v1/responses"))
.and(body_string_contains("first message"))
.and(body_contains("first message"))
.respond_with(fail)
.up_to_n_times(2)
.mount(&server)
@@ -50,7 +50,7 @@ async fn continue_after_stream_error() {
Mock::given(method("POST"))
.and(path("/v1/responses"))
.and(body_string_contains("follow up"))
.and(body_contains("follow up"))
.respond_with(ok)
.expect(1)
.mount(&server)

View File

@@ -137,26 +137,27 @@ async fn poll_for_token(
}
}
fn print_device_code_prompt(code: &str) {
fn print_device_code_prompt(code: &str, issuer_base_url: &str) {
println!(
"\nWelcome to Codex [v{ANSI_GRAY}{version}{ANSI_RESET}]\n{ANSI_GRAY}OpenAI's command-line coding agent{ANSI_RESET}\n\
\nFollow these steps to sign in with ChatGPT using device code authorization:\n\
\n1. Open this link in your browser and sign in to your account\n {ANSI_BLUE}https://auth.openai.com/codex/device{ANSI_RESET}\n\
\n1. Open this link in your browser and sign in to your account\n {ANSI_BLUE}{issuer_base_url}/codex/device{ANSI_RESET}\n\
\n2. Enter this one-time code {ANSI_GRAY}(expires in 15 minutes){ANSI_RESET}\n {ANSI_BLUE}{code}{ANSI_RESET}\n\
\n{ANSI_GRAY}Device codes are a common phishing target. Never share this code.{ANSI_RESET}\n",
version = env!("CARGO_PKG_VERSION"),
code = code
code = code,
issuer_base_url = issuer_base_url
);
}
/// Full device code login flow.
pub async fn run_device_code_login(opts: ServerOptions) -> std::io::Result<()> {
let client = reqwest::Client::new();
let base_url = opts.issuer.trim_end_matches('/');
let api_base_url = format!("{}/api/accounts", opts.issuer.trim_end_matches('/'));
let issuer_base_url = opts.issuer.trim_end_matches('/');
let api_base_url = format!("{issuer_base_url}/api/accounts");
let uc = request_user_code(&client, &api_base_url, &opts.client_id).await?;
print_device_code_prompt(&uc.user_code);
print_device_code_prompt(&uc.user_code, issuer_base_url);
let code_resp = poll_for_token(
&client,
@@ -171,10 +172,10 @@ pub async fn run_device_code_login(opts: ServerOptions) -> std::io::Result<()> {
code_verifier: code_resp.code_verifier,
code_challenge: code_resp.code_challenge,
};
let redirect_uri = format!("{base_url}/deviceauth/callback");
let redirect_uri = format!("{issuer_base_url}/deviceauth/callback");
let tokens = crate::server::exchange_code_for_tokens(
base_url,
issuer_base_url,
&opts.client_id,
&redirect_uri,
&pkce,

View File

@@ -30,7 +30,7 @@ const SKIP_DIR_SUFFIXES: &[&str] = &[
"/programdata",
];
fn normalize_path_key(p: &Path) -> String {
pub(crate) fn normalize_path_key(p: &Path) -> String {
let n = dunce::canonicalize(p).unwrap_or_else(|_| p.to_path_buf());
n.to_string_lossy().replace('\\', "/").to_ascii_lowercase()
}

View File

@@ -8,6 +8,7 @@ use codex_windows_sandbox::create_process_as_user;
use codex_windows_sandbox::create_readonly_token_with_cap_from;
use codex_windows_sandbox::create_workspace_write_token_with_cap_from;
use codex_windows_sandbox::get_current_token_for_restriction;
use codex_windows_sandbox::hide_current_user_profile_dir;
use codex_windows_sandbox::log_note;
use codex_windows_sandbox::parse_policy;
use codex_windows_sandbox::to_wide;
@@ -91,6 +92,7 @@ pub fn main() -> Result<()> {
}
let req: RunnerRequest = serde_json::from_str(&input).context("parse runner request json")?;
let log_dir = Some(req.codex_home.as_path());
hide_current_user_profile_dir(req.codex_home.as_path());
log_note(
&format!(
"runner start cwd={} cmd={:?} real_codex_home={}",

View File

@@ -0,0 +1,160 @@
#![cfg(target_os = "windows")]
use crate::logging::log_note;
use crate::winutil::format_last_error;
use crate::winutil::to_wide;
use anyhow::anyhow;
use std::ffi::OsStr;
use std::path::Path;
use std::path::PathBuf;
use windows_sys::Win32::Foundation::GetLastError;
use windows_sys::Win32::Storage::FileSystem::GetFileAttributesW;
use windows_sys::Win32::Storage::FileSystem::SetFileAttributesW;
use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_HIDDEN;
use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_SYSTEM;
use windows_sys::Win32::Storage::FileSystem::INVALID_FILE_ATTRIBUTES;
use windows_sys::Win32::System::Registry::RegCloseKey;
use windows_sys::Win32::System::Registry::RegCreateKeyExW;
use windows_sys::Win32::System::Registry::RegSetValueExW;
use windows_sys::Win32::System::Registry::HKEY;
use windows_sys::Win32::System::Registry::HKEY_LOCAL_MACHINE;
use windows_sys::Win32::System::Registry::KEY_WRITE;
use windows_sys::Win32::System::Registry::REG_DWORD;
use windows_sys::Win32::System::Registry::REG_OPTION_NON_VOLATILE;
const USERLIST_KEY_PATH: &str =
r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Winlogon\SpecialAccounts\UserList";
/// Best-effort: marks each of `usernames` as hidden on the Windows sign-in
/// screen via the Winlogon `SpecialAccounts\UserList` registry key.
///
/// A failure to update the registry is logged under `log_base` and otherwise
/// ignored — hiding the accounts must never block sandbox setup.
pub fn hide_newly_created_users(usernames: &[String], log_base: &Path) {
    if usernames.is_empty() {
        return;
    }
    match hide_users_in_winlogon(usernames, log_base) {
        Ok(()) => {}
        Err(err) => log_note(
            &format!("hide users: failed to update Winlogon UserList: {err}"),
            Some(log_base),
        ),
    }
}
/// Best-effort: hides the current sandbox user's profile directory once it exists.
///
/// Windows only creates profile directories when that user first logs in.
/// This intentionally runs in the command-runner (as the sandbox user) because
/// command running is what causes us to log in as a particular sandbox user.
pub fn hide_current_user_profile_dir(log_base: &Path) {
    // USERPROFILE is set by Windows at logon; without it there is nothing to hide.
    let Some(profile) = std::env::var_os("USERPROFILE") else {
        return;
    };
    let profile_dir = PathBuf::from(profile);
    // The directory may not exist yet on the first invocation; a later call
    // (after the profile is materialized) will pick it up.
    if !profile_dir.exists() {
        return;
    }
    match hide_directory(&profile_dir) {
        Ok(true) => {
            // Log only when we actually change attributes, so this stays one-time per profile dir.
            log_note(
                &format!(
                    "hide users: profile dir hidden for current user ({})",
                    profile_dir.display()
                ),
                Some(log_base),
            );
        }
        // Attributes were already set: nothing changed, nothing to report.
        Ok(false) => {}
        Err(err) => {
            // Best-effort: record the failure but never block command execution.
            log_note(
                &format!(
                    "hide users: failed to hide current user profile dir ({}): {err}",
                    profile_dir.display()
                ),
                Some(log_base),
            );
        }
    }
}
/// Writes a `REG_DWORD` value of 0 named after each username under the
/// Winlogon `SpecialAccounts\UserList` key, which tells Windows to omit those
/// accounts from the sign-in screen.
///
/// Individual value-write failures are logged to `log_base` and skipped; only
/// a failure to open/create the key itself (in `create_userlist_key`) is
/// returned as an error.
fn hide_users_in_winlogon(usernames: &[String], log_base: &Path) -> anyhow::Result<()> {
    let key = create_userlist_key()?;
    for username in usernames {
        let name_w = to_wide(OsStr::new(username));
        // 0 = "hide this account" under the UserList convention.
        let value: u32 = 0;
        // SAFETY: `key` is a valid open registry handle, `name_w` is a
        // NUL-terminated wide string, and the data pointer/length describe the
        // 4-byte DWORD `value` which outlives the call.
        let status = unsafe {
            RegSetValueExW(
                key,
                name_w.as_ptr(),
                0,
                REG_DWORD,
                &value as *const u32 as *const u8,
                std::mem::size_of_val(&value) as u32,
            )
        };
        if status != 0 {
            // Best-effort: log and continue with the remaining usernames.
            log_note(
                &format!(
                    "hide users: failed to set UserList value for {username}: {status} ({error})",
                    error = format_last_error(status as i32)
                ),
                Some(log_base),
            );
        }
    }
    // SAFETY: `key` was opened by `create_userlist_key` and is closed exactly once.
    unsafe {
        RegCloseKey(key);
    }
    Ok(())
}
/// Opens (creating it if necessary) the Winlogon `SpecialAccounts\UserList`
/// key under HKEY_LOCAL_MACHINE with write access.
///
/// The caller owns the returned handle and must release it with `RegCloseKey`.
/// Returns an error carrying the Win32 status when the key cannot be created.
fn create_userlist_key() -> anyhow::Result<HKEY> {
    let key_path = to_wide(USERLIST_KEY_PATH);
    let mut key: HKEY = 0;
    // SAFETY: `key_path` is a NUL-terminated wide string and `key` is a valid
    // out-pointer for the duration of the call.
    let status = unsafe {
        RegCreateKeyExW(
            HKEY_LOCAL_MACHINE,
            key_path.as_ptr(),
            0,
            std::ptr::null_mut(),
            REG_OPTION_NON_VOLATILE,
            KEY_WRITE,
            std::ptr::null_mut(),
            &mut key,
            std::ptr::null_mut(),
        )
    };
    if status != 0 {
        return Err(anyhow!(
            "RegCreateKeyExW failed: {status} ({error})",
            error = format_last_error(status as i32)
        ));
    }
    Ok(key)
}
/// Sets HIDDEN|SYSTEM on `path` if needed, returning whether it changed anything.
///
/// Returns `Ok(false)` when both attributes were already present, `Ok(true)`
/// after successfully adding them, and an error if either Win32 call fails.
fn hide_directory(path: &Path) -> anyhow::Result<bool> {
    let wide = to_wide(path);
    // SAFETY: `wide` is a NUL-terminated wide string valid for the call.
    let attrs = unsafe { GetFileAttributesW(wide.as_ptr()) };
    if attrs == INVALID_FILE_ATTRIBUTES {
        // SAFETY: trivially safe FFI call reading the thread's last-error code.
        let err = unsafe { GetLastError() } as i32;
        return Err(anyhow!(
            "GetFileAttributesW failed for {}: {err} ({error})",
            path.display(),
            error = format_last_error(err)
        ));
    }
    let new_attrs = attrs | FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_SYSTEM;
    if new_attrs == attrs {
        // Both bits already set: report "no change" so callers can stay quiet.
        return Ok(false);
    }
    // SAFETY: `wide` remains a valid NUL-terminated wide string for this call.
    let ok = unsafe { SetFileAttributesW(wide.as_ptr(), new_attrs) };
    if ok == 0 {
        // SAFETY: trivially safe FFI call reading the thread's last-error code.
        let err = unsafe { GetLastError() } as i32;
        return Err(anyhow!(
            "SetFileAttributesW failed for {}: {err} ({error})",
            path.display(),
            error = format_last_error(err)
        ));
    }
    Ok(true)
}

View File

@@ -119,9 +119,10 @@ pub fn require_logon_sandbox_creds(
) -> Result<SandboxCreds> {
let sandbox_dir = crate::setup::sandbox_dir(codex_home);
let needed_read = gather_read_roots(command_cwd, policy);
let mut needed_write = gather_write_roots(policy, policy_cwd, command_cwd, env_map);
// Ensure the sandbox directory itself is writable by sandbox users.
needed_write.push(sandbox_dir.clone());
let needed_write = gather_write_roots(policy, policy_cwd, command_cwd, env_map);
// NOTE: Do not add CODEX_HOME/.sandbox to `needed_write`; it must remain non-writable by the
// restricted capability token. The setup helper's `lock_sandbox_dir` is responsible for
// granting the sandbox group access to this directory without granting the capability SID.
let mut setup_reason: Option<String> = None;
let mut _existing_marker: Option<SetupMarker> = None;

View File

@@ -5,7 +5,8 @@ macro_rules! windows_modules {
}
windows_modules!(
acl, allow, audit, cap, dpapi, env, identity, logging, policy, process, token, winutil
acl, allow, audit, cap, dpapi, env, hide_users, identity, logging, policy, process, token,
winutil
);
#[cfg(target_os = "windows")]
@@ -38,6 +39,10 @@ pub use dpapi::unprotect as dpapi_unprotect;
#[cfg(target_os = "windows")]
pub use elevated_impl::run_windows_sandbox_capture as run_windows_sandbox_capture_elevated;
#[cfg(target_os = "windows")]
pub use hide_users::hide_current_user_profile_dir;
#[cfg(target_os = "windows")]
pub use hide_users::hide_newly_created_users;
#[cfg(target_os = "windows")]
pub use identity::require_logon_sandbox_creds;
#[cfg(target_os = "windows")]
pub use logging::log_note;

View File

@@ -9,6 +9,7 @@ use base64::Engine;
use codex_windows_sandbox::convert_string_sid_to_sid;
use codex_windows_sandbox::ensure_allow_mask_aces_with_inheritance;
use codex_windows_sandbox::ensure_allow_write_aces;
use codex_windows_sandbox::hide_newly_created_users;
use codex_windows_sandbox::load_or_create_cap_sids;
use codex_windows_sandbox::log_note;
use codex_windows_sandbox::path_mask_allows;
@@ -448,6 +449,11 @@ fn run_setup_full(payload: &Payload, log: &mut File, sbx_dir: &Path) -> Result<(
&payload.online_username,
log,
)?;
let users = vec![
payload.offline_username.clone(),
payload.online_username.clone(),
];
hide_newly_created_users(&users, sbx_dir);
}
let offline_sid = resolve_sid(&payload.offline_username)?;
let offline_sid_str = string_from_sid_bytes(&offline_sid).map_err(anyhow::Error::msg)?;

View File

@@ -408,15 +408,12 @@ fn build_payload_roots(
read_roots_override: Option<Vec<PathBuf>>,
write_roots_override: Option<Vec<PathBuf>>,
) -> (Vec<PathBuf>, Vec<PathBuf>) {
let sbx_dir = sandbox_dir(codex_home);
let mut write_roots = if let Some(roots) = write_roots_override {
let write_roots = if let Some(roots) = write_roots_override {
canonical_existing(&roots)
} else {
gather_write_roots(policy, policy_cwd, command_cwd, env_map)
};
if !write_roots.contains(&sbx_dir) {
write_roots.push(sbx_dir.clone());
}
let write_roots = filter_sensitive_write_roots(write_roots, codex_home);
let mut read_roots = if let Some(roots) = read_roots_override {
canonical_existing(&roots)
} else {
@@ -426,3 +423,17 @@ fn build_payload_roots(
read_roots.retain(|root| !write_root_set.contains(root));
(read_roots, write_roots)
}
/// Drops write roots that must never receive capability write access.
///
/// CODEX_HOME itself, the CODEX_HOME/.sandbox directory, and anything nested
/// under .sandbox contain sandbox control/state and must remain
/// tamper-resistant, so they are filtered out of the granted roots.
fn filter_sensitive_write_roots(roots: Vec<PathBuf>, codex_home: &Path) -> Vec<PathBuf> {
    let home_key = crate::audit::normalize_path_key(codex_home);
    let sandbox_key = crate::audit::normalize_path_key(&sandbox_dir(codex_home));
    // Trailing slash so the prefix test only matches true descendants.
    let sandbox_prefix = format!("{}/", sandbox_key.trim_end_matches('/'));
    let is_sensitive = |root: &PathBuf| {
        let key = crate::audit::normalize_path_key(root);
        key == home_key || key == sandbox_key || key.starts_with(&sandbox_prefix)
    };
    roots
        .into_iter()
        .filter(|root| !is_sensitive(root))
        .collect()
}