Mirror of https://github.com/openai/codex.git
Synced 2026-02-02 06:57:03 +00:00

Compare commits: shell-proc ... dev/shell- (26 commits)

Commits (SHA1):
- 5db3e5fba4
- f01f2ec9ee
- 980886498c
- e743d251a7
- 788badd221
- fbdedd9a06
- 5916153157
- b46012e483
- 42683dadfb
- 65cb1a1b77
- 50a77dc138
- 557ac63094
- 131c384361
- e2598f5094
- 78b2aeea55
- 082d2fa19a
- 7c7c7567d5
- 625f2208c4
- 5f1fab0e7c
- c07461e6f3
- 8b80a0a269
- a47181e471
- 5beb6167c8
- 917f39ec12
- a2fdfce02a
- 91b16b8682
23 .github/workflows/cla.yml vendored

@@ -16,10 +16,27 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: contributor-assistant/github-action@v2.6.1
# Run on close only if the PR was merged. This will lock the PR to preserve
# the CLA agreement. We don't want to lock PRs that have been closed without
# merging because the contributor may want to respond with additional comments.
# This action has a "lock-pullrequest-aftermerge" option that can be set to false,
# but that would unconditionally skip locking even in cases where the PR was merged.
if: |
github.event_name == 'pull_request_target' ||
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
(
github.event_name == 'pull_request_target' &&
(
github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
(github.event.action == 'closed' && github.event.pull_request.merged == true)
)
) ||
(
github.event_name == 'issue_comment' &&
(
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
)
)
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

2 .github/workflows/codespell.yml vendored

@@ -22,6 +22,6 @@ jobs:
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
with:
ignore_words_file: .codespellignore

10 .github/workflows/rust-ci.yml vendored

@@ -76,7 +76,7 @@ jobs:
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear
version: 1.5.1
@@ -170,7 +170,7 @@ jobs:

# Install and restore sccache cache
- name: Install sccache
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -228,7 +228,7 @@ jobs:

- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-chef
version: 0.1.71
@@ -370,7 +370,7 @@ jobs:
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- name: Install sccache
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -399,7 +399,7 @@ jobs:
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: nextest
version: 0.9.103

65
codex-rs/Cargo.lock
generated
65
codex-rs/Cargo.lock
generated
@@ -237,46 +237,44 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "askama"
|
||||
version = "0.12.1"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b79091df18a97caea757e28cd2d5fda49c6cd4bd01ddffd7ff01ace0c0ad2c28"
|
||||
checksum = "f75363874b771be265f4ffe307ca705ef6f3baa19011c149da8674a87f1b75c4"
|
||||
dependencies = [
|
||||
"askama_derive",
|
||||
"askama_escape",
|
||||
"humansize",
|
||||
"num-traits",
|
||||
"itoa",
|
||||
"percent-encoding",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "askama_derive"
|
||||
version = "0.12.5"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19fe8d6cb13c4714962c072ea496f3392015f0989b1a2847bb4b2d9effd71d83"
|
||||
checksum = "129397200fe83088e8a68407a8e2b1f826cf0086b21ccdb866a722c8bcd3a94f"
|
||||
dependencies = [
|
||||
"askama_parser",
|
||||
"basic-toml",
|
||||
"mime",
|
||||
"mime_guess",
|
||||
"memchr",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustc-hash 2.1.1",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "askama_escape"
|
||||
version = "0.10.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
|
||||
|
||||
[[package]]
|
||||
name = "askama_parser"
|
||||
version = "0.2.1"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "acb1161c6b64d1c3d83108213c2a2533a342ac225aabd0bda218278c2ddb00c0"
|
||||
checksum = "d6ab5630b3d5eaf232620167977f95eb51f3432fc76852328774afbd242d4358"
|
||||
dependencies = [
|
||||
"nom",
|
||||
"memchr",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -981,7 +979,6 @@ dependencies = [
|
||||
"codex-mcp-server",
|
||||
"codex-process-hardening",
|
||||
"codex-protocol",
|
||||
"codex-protocol-ts",
|
||||
"codex-responses-api-proxy",
|
||||
"codex-rmcp-client",
|
||||
"codex-stdio-to-uds",
|
||||
@@ -1365,16 +1362,6 @@ dependencies = [
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-protocol-ts"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"codex-app-server-protocol",
|
||||
"ts-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-responses-api-proxy"
|
||||
version = "0.0.0"
|
||||
@@ -1566,6 +1553,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"dirs-next",
|
||||
"dunce",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -2890,15 +2878,6 @@ version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
|
||||
|
||||
[[package]]
|
||||
name = "humansize"
|
||||
version = "2.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6cb51c9a029ddc91b07a787f1d86b53ccfa49b0e86688c946ebe8d3555685dd7"
|
||||
dependencies = [
|
||||
"libm",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "1.7.0"
|
||||
@@ -3542,12 +3521,6 @@ dependencies = [
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.6"
|
||||
@@ -7803,9 +7776,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zeroize"
|
||||
version = "1.8.1"
|
||||
version = "1.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
|
||||
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
|
||||
dependencies = [
|
||||
"zeroize_derive",
|
||||
]
|
||||
|
||||
@@ -25,7 +25,6 @@ members = [
"ollama",
"process-hardening",
"protocol",
"protocol-ts",
"rmcp-client",
"responses-api-proxy",
"stdio-to-uds",
@@ -75,7 +74,6 @@ codex-ollama = { path = "ollama" }
codex-otel = { path = "otel" }
codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
@@ -97,7 +95,7 @@ allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = "3"
askama = "0.12"
askama = "0.14"
assert_cmd = "2"
assert_matches = "1.5.0"
async-channel = "2.3.1"
@@ -213,7 +211,7 @@ which = "6"
wildmatch = "2.5.0"

wiremock = "0.6"
zeroize = "1.8.1"
zeroize = "1.8.2"

[workspace.lints]
rust = {}

@@ -666,6 +666,8 @@ fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
Ok(files)
}

/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
let mut entries: Vec<String> = Vec::new();
let mut stems: Vec<String> = ts_files_in(out_dir)?

@@ -8,10 +8,11 @@ Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports

## Message Schema

Currently, you can dump a TypeScript version of the schema using `codex generate-ts`. It is specific to the version of Codex you used to run `generate-ts`, so the two are guaranteed to be compatible.
Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.

```
codex generate-ts --out DIR
codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```
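
For orientation, a consumer-side sketch of using the generated output might look like the following; the `./generated` path and the `Thread`/`ThreadStartParams` type names are illustrative assumptions rather than part of this change, since the actual exports are whatever the generated `index.ts` re-exports.

```ts
// Illustrative sketch only: import the generated bindings from the directory
// passed to `--out`. The "./generated" path and the type names below are
// assumptions; check the generated index.ts for the real exports.
import type { Thread, ThreadStartParams } from "./generated/index";

// A params object shaped like the thread/start example below.
const params: ThreadStartParams = {
  model: "gpt-5-codex",
  cwd: "/Users/me/project",
};

export function describeThread(thread: Thread): string {
  // Generated types are plain TypeScript declarations, so they can be used
  // like any other type annotation in a client codebase.
  return `started ${JSON.stringify(thread)} with model ${String(params.model)}`;
}
```
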
## Initialization

@@ -49,15 +50,16 @@ The JSON-RPC API exposes dedicated methods for managing Codex conversations. Thr

### 1) Start or resume a thread

Start a fresh thread when you need a new Codex conversation. Optional fields mirror CLI defaults: set `model`, `modelProvider`, `cwd`, `approvalPolicy`, `sandbox`, or custom `config` values. Instructions can be set via `baseInstructions` and `developerInstructions`:
Start a fresh thread when you need a new Codex conversation.

```json
{ "method": "thread/start", "id": 10, "params": {
// Optionally set config settings. If not specified, will use the user's
// current config settings.
"model": "gpt-5-codex",
"cwd": "/Users/me/project",
"approvalPolicy": "never",
"sandbox": "workspace-write",
"baseInstructions": "You're helping with refactors."
"sandbox": "workspaceWrite",
} }
{ "id": 10, "result": {
"thread": {
@@ -90,7 +92,6 @@ Example:
{ "method": "thread/list", "id": 20, "params": {
"cursor": null,
"limit": 25,
"modelProviders": ["openai"]
} }
{ "id": 20, "result": {
"data": [
@@ -122,18 +123,23 @@ Turns attach user input (text or images) to a thread and trigger Codex generatio
- {"type":"image","url":"https://…png"}
- {"type":"localImage","path":"/tmp/screenshot.png"}

Override knobs apply to the new turn and become the defaults for subsequent turns on the same thread:
You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.

```json
{ "method": "turn/start", "id": 30, "params": {
"threadId": "thr_123",
"input": [ { "type": "text", "text": "Run tests" } ],
// Below are optional config overrides
"cwd": "/Users/me/project",
"approvalPolicy": "untrusted",
"sandboxPolicy": "workspace-write",
"approvalPolicy": "unlessTrusted",
"sandboxPolicy": {
"mode": "workspaceWrite",
"writableRoots": ["/Users/me/project"],
"networkAccess": true
},
"model": "gpt-5-codex",
"effort": "medium",
"summary": "focus-on-test-failures"
"summary": "concise"
} }
{ "id": 30, "result": { "turn": {
"id": "turn_456",
@@ -159,7 +165,7 @@ The server requests cancellations for running subprocesses, then emits a `turn/c

## Auth endpoints

The v2 JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.
The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.

### Quick reference
- `account/read` — fetch current account info; optionally refresh tokens.
@@ -249,5 +255,6 @@ Field notes:

### Dev notes

- `codex generate-ts --out <dir>` emits v2 types under `v2/`.
- `codex app-server generate-ts --out <dir>` emits v2 types under `v2/`.
- `codex app-server generate-json-schema --out <dir>` outputs `codex_app_server_protocol.schemas.json`.
- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.
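
For orientation, a minimal client sketch that exercises the `thread/start` and `turn/start` shapes documented above might look like this; the newline-delimited framing and the `thread.id` field are assumptions here, not something this diff specifies, so treat it as a sketch rather than a reference client.

```ts
// Illustrative sketch only: drive `codex app-server` over stdio using the
// request/response shapes documented above. Newline-delimited JSON framing
// and the `thread.id` field are assumptions, not part of this document.
import { spawn } from "node:child_process";
import { createInterface } from "node:readline";

const server = spawn("codex", ["app-server"], { stdio: ["pipe", "pipe", "inherit"] });
const lines = createInterface({ input: server.stdout! });

function send(message: unknown): void {
  server.stdin!.write(JSON.stringify(message) + "\n");
}

lines.on("line", (line) => {
  const msg = JSON.parse(line);
  if (msg.id === 10 && msg.result?.thread) {
    // After the thread exists, start a turn on it, mirroring the turn/start example.
    send({
      method: "turn/start",
      id: 30,
      params: {
        threadId: msg.result.thread.id,
        input: [{ type: "text", text: "Run tests" }],
      },
    });
  }
});

// Mirrors the thread/start example; omitted fields fall back to the user's config.
send({ method: "thread/start", id: 10, params: { cwd: "/Users/me/project" } });
```
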
@@ -30,7 +30,6 @@ codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }
codex-protocol = { workspace = true }
codex-protocol-ts = { workspace = true }
codex-responses-api-proxy = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-stdio-to-uds = { workspace = true }

@@ -136,6 +136,8 @@ async fn run_command_under_sandbox(
let env_map = env.clone();
let command_vec = command.clone();
let base_dir = config.codex_home.clone();

// Preflight audit is invoked elsewhere at the appropriate times.
let res = tokio::task::spawn_blocking(move || {
run_windows_sandbox_capture(
policy_str,

@@ -1,3 +1,4 @@
|
||||
use clap::Args;
|
||||
use clap::CommandFactory;
|
||||
use clap::Parser;
|
||||
use clap_complete::Shell;
|
||||
@@ -20,16 +21,17 @@ use codex_exec::Cli as ExecCli;
|
||||
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
|
||||
use codex_tui::AppExitInfo;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use codex_tui::updates::UpdateAction;
|
||||
use codex_tui::update_action::UpdateAction;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::path::PathBuf;
|
||||
use supports_color::Stream;
|
||||
|
||||
mod mcp_cmd;
|
||||
#[cfg(not(windows))]
|
||||
mod wsl_paths;
|
||||
|
||||
use crate::mcp_cmd::McpCli;
|
||||
use crate::wsl_paths::normalize_for_wsl;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::features::is_known_feature_key;
|
||||
@@ -81,8 +83,8 @@ enum Subcommand {
|
||||
/// [experimental] Run the Codex MCP server (stdio transport).
|
||||
McpServer,
|
||||
|
||||
/// [experimental] Run the app server.
|
||||
AppServer,
|
||||
/// [experimental] Run the app server or related tooling.
|
||||
AppServer(AppServerCommand),
|
||||
|
||||
/// Generate shell completion scripts.
|
||||
Completion(CompletionCommand),
|
||||
@@ -98,9 +100,6 @@ enum Subcommand {
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Internal: generate TypeScript protocol bindings.
|
||||
#[clap(hide = true)]
|
||||
GenerateTs(GenerateTsCommand),
|
||||
/// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
|
||||
#[clap(name = "cloud", alias = "cloud-tasks")]
|
||||
Cloud(CloudTasksCli),
|
||||
@@ -207,6 +206,22 @@ struct LogoutCommand {
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct AppServerCommand {
|
||||
/// Omit to run the app server; specify a subcommand for tooling.
|
||||
#[command(subcommand)]
|
||||
subcommand: Option<AppServerSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum AppServerSubcommand {
|
||||
/// [experimental] Generate TypeScript bindings for the app server protocol.
|
||||
GenerateTs(GenerateTsCommand),
|
||||
|
||||
/// [experimental] Generate JSON Schema for the app server protocol.
|
||||
GenerateJsonSchema(GenerateJsonSchemaCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateTsCommand {
|
||||
/// Output directory where .ts files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
@@ -217,6 +232,13 @@ struct GenerateTsCommand {
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateJsonSchemaCommand {
|
||||
/// Output directory where the schema bundle will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct StdioToUdsCommand {
|
||||
/// Path to the Unix domain socket to connect to.
|
||||
@@ -269,14 +291,30 @@ fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> {
|
||||
/// Run the update action and print the result.
|
||||
fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
|
||||
println!();
|
||||
let (cmd, args) = action.command_args();
|
||||
let cmd_str = action.command_str();
|
||||
println!("Updating Codex via `{cmd_str}`...");
|
||||
let command_path = normalize_for_wsl(cmd);
|
||||
let normalized_args: Vec<String> = args.iter().map(normalize_for_wsl).collect();
|
||||
let status = std::process::Command::new(&command_path)
|
||||
.args(&normalized_args)
|
||||
.status()?;
|
||||
|
||||
let status = {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// On Windows, run via cmd.exe so .CMD/.BAT are correctly resolved (PATHEXT semantics).
|
||||
std::process::Command::new("cmd")
|
||||
.args(["/C", &cmd_str])
|
||||
.status()?
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
let (cmd, args) = action.command_args();
|
||||
let command_path = crate::wsl_paths::normalize_for_wsl(cmd);
|
||||
let normalized_args: Vec<String> = args
|
||||
.iter()
|
||||
.map(crate::wsl_paths::normalize_for_wsl)
|
||||
.collect();
|
||||
std::process::Command::new(&command_path)
|
||||
.args(&normalized_args)
|
||||
.status()?
|
||||
}
|
||||
};
|
||||
if !status.success() {
|
||||
anyhow::bail!("`{cmd_str}` failed with status {status}");
|
||||
}
|
||||
@@ -393,9 +431,20 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::AppServer(app_server_cli)) => match app_server_cli.subcommand {
|
||||
None => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateTs(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_ts(
|
||||
&gen_cli.out_dir,
|
||||
gen_cli.prettier.as_deref(),
|
||||
)?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateJsonSchema(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_json(&gen_cli.out_dir)?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
@@ -510,9 +559,6 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
tokio::task::spawn_blocking(move || codex_stdio_to_uds::run(socket_path.as_path()))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
|
||||
FeaturesSubcommand::List => {
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
|
||||
@@ -8,6 +8,7 @@ pub mod util;
|
||||
pub use cli::Cli;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use codex_login::AuthManager;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
@@ -56,20 +57,8 @@ async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext>
|
||||
};
|
||||
append_error_log(format!("startup: base_url={base_url} path_style={style}"));
|
||||
|
||||
let auth = match codex_core::config::find_codex_home()
|
||||
.ok()
|
||||
.map(|home| {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
codex_login::AuthManager::new(home, false, store_mode)
|
||||
})
|
||||
.and_then(|am| am.auth())
|
||||
{
|
||||
let auth_manager = util::load_auth_manager().await;
|
||||
let auth = match auth_manager.as_ref().and_then(AuthManager::auth) {
|
||||
Some(auth) => auth,
|
||||
None => {
|
||||
eprintln!(
|
||||
|
||||
@@ -2,6 +2,10 @@ use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_login::AuthManager;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
guard.replace(suffix.to_string());
|
||||
@@ -54,6 +58,18 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub async fn load_auth_manager() -> Option<AuthManager> {
|
||||
// TODO: pass in cli overrides once cloud tasks properly support them.
|
||||
let config = Config::load_with_cli_overrides(Vec::new(), ConfigOverrides::default())
|
||||
.await
|
||||
.ok()?;
|
||||
Some(AuthManager::new(
|
||||
config.codex_home,
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
))
|
||||
}
|
||||
|
||||
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
|
||||
/// and optional `ChatGPT-Account-Id`.
|
||||
pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
@@ -69,31 +85,22 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
USER_AGENT,
|
||||
HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
|
||||
);
|
||||
if let Ok(home) = codex_core::config::find_codex_home() {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
let am = codex_login::AuthManager::new(home, false, store_mode);
|
||||
if let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
if let Some(am) = load_auth_manager().await
|
||||
&& let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
}
|
||||
headers
|
||||
|
||||
@@ -19,8 +19,8 @@ use toml::Value;
pub struct CliConfigOverrides {
/// Override a configuration value that would otherwise be loaded from
/// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
/// nested values. The `value` portion is parsed as JSON. If it fails to
/// parse as JSON, the raw string is used as a literal.
/// nested values. The `value` portion is parsed as TOML. If it fails to
/// parse as TOML, the raw string is used as a literal.
///
/// Examples:
/// - `-c model="o3"`
@@ -59,7 +59,7 @@ impl CliConfigOverrides {
return Err(format!("Empty key in override: {s}"));
}

// Attempt to parse as JSON. If that fails, treat it as a raw
// Attempt to parse as TOML. If that fails, treat it as a raw
// string. This allows convenient usage such as
// `-c model=o3` without the quotes.
let value: Value = match parse_toml_value(value_str) {

@@ -2,8 +2,6 @@ You are Codex, based on GPT-5. You are running as a coding agent in the Codex CL

## General

- The arguments to `shell` will be passed to execvp(). Most terminal commands should be prefixed with ["bash", "-lc"].
- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.
- When searching for text or files, prefer using `rg` or `rg --files` respectively because `rg` is much faster than alternatives like `grep`. (If the `rg` command is not found, then use alternatives.)

## Editing constraints

@@ -6,6 +6,7 @@ use std::sync::atomic::AtomicU64;
|
||||
|
||||
use crate::AuthManager;
|
||||
use crate::client_common::REVIEW_PROMPT;
|
||||
use crate::compact;
|
||||
use crate::features::Feature;
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::mcp::auth::McpAuthStatusEntry;
|
||||
@@ -66,6 +67,8 @@ use crate::error::Result as CodexResult;
|
||||
use crate::exec::StreamOutput;
|
||||
// Removed: legacy executor wiring replaced by ToolOrchestrator flows.
|
||||
// legacy normalize_exec_result no longer used after orchestrator migration
|
||||
use crate::compact::build_compacted_history;
|
||||
use crate::compact::collect_user_messages;
|
||||
use crate::mcp::auth::compute_auth_statuses;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
use crate::model_family::find_family_for_model;
|
||||
@@ -129,10 +132,6 @@ use codex_protocol::user_input::UserInput;
|
||||
use codex_utils_readiness::Readiness;
|
||||
use codex_utils_readiness::ReadinessFlag;
|
||||
|
||||
pub mod compact;
|
||||
use self::compact::build_compacted_history;
|
||||
use self::compact::collect_user_messages;
|
||||
|
||||
/// The high-level interface to the Codex system.
|
||||
/// It operates as a queue pair where you send submissions and receive events.
|
||||
pub struct Codex {
|
||||
@@ -968,7 +967,7 @@ impl Session {
|
||||
}
|
||||
|
||||
/// Append ResponseItems to the in-memory conversation history only.
|
||||
async fn record_into_history(&self, items: &[ResponseItem]) {
|
||||
pub(crate) async fn record_into_history(&self, items: &[ResponseItem]) {
|
||||
let mut state = self.state.lock().await;
|
||||
state.record_items(items.iter());
|
||||
}
|
||||
@@ -1020,7 +1019,7 @@ impl Session {
|
||||
items
|
||||
}
|
||||
|
||||
async fn persist_rollout_items(&self, items: &[RolloutItem]) {
|
||||
pub(crate) async fn persist_rollout_items(&self, items: &[RolloutItem]) {
|
||||
let recorder = {
|
||||
let guard = self.services.rollout.lock().await;
|
||||
guard.clone()
|
||||
@@ -1037,7 +1036,7 @@ impl Session {
|
||||
state.clone_history()
|
||||
}
|
||||
|
||||
async fn update_token_usage_info(
|
||||
pub(crate) async fn update_token_usage_info(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
token_usage: Option<&TokenUsage>,
|
||||
@@ -1054,7 +1053,7 @@ impl Session {
|
||||
self.send_token_count_event(turn_context).await;
|
||||
}
|
||||
|
||||
async fn update_rate_limits(
|
||||
pub(crate) async fn update_rate_limits(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
new_rate_limits: RateLimitSnapshot,
|
||||
@@ -1075,7 +1074,7 @@ impl Session {
|
||||
self.send_event(turn_context, event).await;
|
||||
}
|
||||
|
||||
async fn set_total_tokens_full(&self, turn_context: &TurnContext) {
|
||||
pub(crate) async fn set_total_tokens_full(&self, turn_context: &TurnContext) {
|
||||
let context_window = turn_context.client.get_model_context_window();
|
||||
if let Some(context_window) = context_window {
|
||||
{
|
||||
@@ -1118,7 +1117,11 @@ impl Session {
|
||||
self.send_event(turn_context, event).await;
|
||||
}
|
||||
|
||||
async fn notify_stream_error(&self, turn_context: &TurnContext, message: impl Into<String>) {
|
||||
pub(crate) async fn notify_stream_error(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
message: impl Into<String>,
|
||||
) {
|
||||
let event = EventMsg::StreamError(StreamErrorEvent {
|
||||
message: message.into(),
|
||||
});
|
||||
|
||||
@@ -1,4 +1,38 @@
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;

use crate::bash::parse_shell_lc_plain_commands;
use crate::is_safe_command::is_known_safe_command;

pub fn requires_initial_appoval(
policy: AskForApproval,
sandbox_policy: &SandboxPolicy,
command: &[String],
with_escalated_permissions: bool,
) -> bool {
if is_known_safe_command(command) {
return false;
}
match policy {
AskForApproval::Never | AskForApproval::OnFailure => false,
AskForApproval::OnRequest => {
// In DangerFullAccess, only prompt if the command looks dangerous.
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
return command_might_be_dangerous(command);
}

// In restricted sandboxes (ReadOnly/WorkspaceWrite), do not prompt for
// non‑escalated, non‑dangerous commands — let the sandbox enforce
// restrictions (e.g., block network/write) without a user prompt.
let wants_escalation: bool = with_escalated_permissions;
if wants_escalation {
return true;
}
command_might_be_dangerous(command)
}
AskForApproval::UnlessTrusted => !is_known_safe_command(command),
}
}

pub fn command_might_be_dangerous(command: &[String]) -> bool {
if is_dangerous_to_call_with_exec(command) {

@@ -1,10 +1,10 @@
use std::sync::Arc;

use super::Session;
use super::TurnContext;
use super::get_last_assistant_message_from_turn;
use crate::Prompt;
use crate::client_common::ResponseEvent;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::codex::get_last_assistant_message_from_turn;
use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::protocol::AgentMessageEvent;
@@ -25,7 +25,7 @@ use codex_protocol::user_input::UserInput;
use futures::prelude::*;
use tracing::error;

pub const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;

pub(crate) async fn run_inline_auto_compact_task(
@@ -164,7 +164,7 @@ async fn run_compact_task_inner(
sess.send_event(&turn_context, event).await;

let warning = EventMsg::Warning(WarningEvent {
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.".to_string(),
});
sess.send_event(&turn_context, warning).await;
}
@@ -25,6 +25,8 @@ pub enum ConfigEdit {
|
||||
SetNoticeHideFullAccessWarning(bool),
|
||||
/// Toggle the Windows world-writable directories warning acknowledgement flag.
|
||||
SetNoticeHideWorldWritableWarning(bool),
|
||||
/// Toggle the rate limit model nudge acknowledgement flag.
|
||||
SetNoticeHideRateLimitModelNudge(bool),
|
||||
/// Toggle the Windows onboarding acknowledgement flag.
|
||||
SetWindowsWslSetupAcknowledged(bool),
|
||||
/// Replace the entire `[mcp_servers]` table.
|
||||
@@ -246,6 +248,11 @@ impl ConfigDocument {
|
||||
&[Notice::TABLE_KEY, "hide_world_writable_warning"],
|
||||
value(*acknowledged),
|
||||
)),
|
||||
ConfigEdit::SetNoticeHideRateLimitModelNudge(acknowledged) => Ok(self.write_value(
|
||||
Scope::Global,
|
||||
&[Notice::TABLE_KEY, "hide_rate_limit_model_nudge"],
|
||||
value(*acknowledged),
|
||||
)),
|
||||
ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged) => Ok(self.write_value(
|
||||
Scope::Global,
|
||||
&["windows_wsl_setup_acknowledged"],
|
||||
@@ -486,6 +493,12 @@ impl ConfigEditsBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_hide_rate_limit_model_nudge(mut self, acknowledged: bool) -> Self {
|
||||
self.edits
|
||||
.push(ConfigEdit::SetNoticeHideRateLimitModelNudge(acknowledged));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_windows_wsl_setup_acknowledged(mut self, acknowledged: bool) -> Self {
|
||||
self.edits
|
||||
.push(ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged));
|
||||
@@ -733,6 +746,34 @@ hide_full_access_warning = true
|
||||
assert_eq!(contents, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn blocking_set_hide_rate_limit_model_nudge_preserves_table() {
|
||||
let tmp = tempdir().expect("tmpdir");
|
||||
let codex_home = tmp.path();
|
||||
std::fs::write(
|
||||
codex_home.join(CONFIG_TOML_FILE),
|
||||
r#"[notice]
|
||||
existing = "value"
|
||||
"#,
|
||||
)
|
||||
.expect("seed");
|
||||
|
||||
apply_blocking(
|
||||
codex_home,
|
||||
None,
|
||||
&[ConfigEdit::SetNoticeHideRateLimitModelNudge(true)],
|
||||
)
|
||||
.expect("persist");
|
||||
|
||||
let contents =
|
||||
std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
|
||||
let expected = r#"[notice]
|
||||
existing = "value"
|
||||
hide_rate_limit_model_nudge = true
|
||||
"#;
|
||||
assert_eq!(contents, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn blocking_replace_mcp_servers_round_trips() {
|
||||
let tmp = tempdir().expect("tmpdir");
|
||||
|
||||
@@ -360,6 +360,8 @@ pub struct Notice {
|
||||
pub hide_full_access_warning: Option<bool>,
|
||||
/// Tracks whether the user has acknowledged the Windows world-writable directories warning.
|
||||
pub hide_world_writable_warning: Option<bool>,
|
||||
/// Tracks whether the user opted out of the rate limit model switch reminder.
|
||||
pub hide_rate_limit_model_nudge: Option<bool>,
|
||||
}
|
||||
|
||||
impl Notice {
|
||||
|
||||
@@ -14,6 +14,7 @@ use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::user_instructions::UserInstructions;
|
||||
use crate::user_shell_command::is_user_shell_command_text;
|
||||
|
||||
fn is_session_prefix(text: &str) -> bool {
|
||||
let trimmed = text.trim_start();
|
||||
@@ -31,7 +32,7 @@ fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
for content_item in message.iter() {
|
||||
match content_item {
|
||||
ContentItem::InputText { text } => {
|
||||
if is_session_prefix(text) {
|
||||
if is_session_prefix(text) || is_user_shell_command_text(text) {
|
||||
return None;
|
||||
}
|
||||
content.push(UserInput::Text { text: text.clone() });
|
||||
@@ -197,7 +198,14 @@ mod tests {
|
||||
text: "# AGENTS.md instructions for test_directory\n\n<INSTRUCTIONS>\ntest_text\n</INSTRUCTIONS>".to_string(),
|
||||
}],
|
||||
},
|
||||
];
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<user_shell_command>echo 42</user_shell_command>".to_string(),
|
||||
}],
|
||||
},
|
||||
];
|
||||
|
||||
for item in items {
|
||||
let turn_item = parse_turn_item(&item);
|
||||
|
||||
@@ -727,4 +727,51 @@ mod tests {
|
||||
let output = make_exec_output(exit_code, "", "", "");
|
||||
assert!(is_likely_sandbox_denied(SandboxType::LinuxSeccomp, &output));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[tokio::test]
|
||||
async fn kill_child_process_group_kills_grandchildren_on_timeout() -> Result<()> {
|
||||
let command = vec![
|
||||
"/bin/bash".to_string(),
|
||||
"-c".to_string(),
|
||||
"sleep 60 & echo $!; sleep 60".to_string(),
|
||||
];
|
||||
let env: HashMap<String, String> = std::env::vars().collect();
|
||||
let params = ExecParams {
|
||||
command,
|
||||
cwd: std::env::current_dir()?,
|
||||
timeout_ms: Some(500),
|
||||
env,
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
|
||||
let output = exec(params, SandboxType::None, &SandboxPolicy::ReadOnly, None).await?;
|
||||
assert!(output.timed_out);
|
||||
|
||||
let stdout = output.stdout.from_utf8_lossy().text;
|
||||
let pid_line = stdout.lines().next().unwrap_or("").trim();
|
||||
let pid: i32 = pid_line.parse().map_err(|error| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("Failed to parse pid from stdout '{pid_line}': {error}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
let mut killed = false;
|
||||
for _ in 0..20 {
|
||||
// Use kill(pid, 0) to check if the process is alive.
|
||||
if unsafe { libc::kill(pid, 0) } == -1
|
||||
&& let Some(libc::ESRCH) = std::io::Error::last_os_error().raw_os_error()
|
||||
{
|
||||
killed = true;
|
||||
break;
|
||||
}
|
||||
tokio::time::sleep(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
assert!(killed, "grandchild process with pid {pid} is still alive");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,6 +81,7 @@ mod function_tool;
|
||||
mod state;
|
||||
mod tasks;
|
||||
mod user_notification;
|
||||
mod user_shell_command;
|
||||
pub mod util;
|
||||
|
||||
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
|
||||
@@ -99,11 +100,12 @@ pub use client_common::Prompt;
|
||||
pub use client_common::REVIEW_PROMPT;
|
||||
pub use client_common::ResponseEvent;
|
||||
pub use client_common::ResponseStream;
|
||||
pub use codex::compact::content_items_to_text;
|
||||
pub use codex_protocol::models::ContentItem;
|
||||
pub use codex_protocol::models::LocalShellAction;
|
||||
pub use codex_protocol::models::LocalShellExecAction;
|
||||
pub use codex_protocol::models::LocalShellStatus;
|
||||
pub use codex_protocol::models::ResponseItem;
|
||||
pub use compact::content_items_to_text;
|
||||
pub use event_mapping::parse_turn_item;
|
||||
pub mod compact;
|
||||
pub mod otel_init;
|
||||
|
||||
@@ -49,6 +49,7 @@
(sysctl-name "hw.packages")
(sysctl-name "hw.pagesize_compat")
(sysctl-name "hw.pagesize")
(sysctl-name "hw.physicalcpu")
(sysctl-name "hw.physicalcpu_max")
(sysctl-name "hw.tbfrequency_compat")
(sysctl-name "hw.vectorunit")

@@ -66,8 +66,9 @@ pub(crate) async fn spawn_child_async(

#[cfg(unix)]
unsafe {
#[cfg(target_os = "linux")]
let parent_pid = libc::getpid();
cmd.pre_exec(|| {
cmd.pre_exec(move || {
if libc::setpgid(0, 0) == -1 {
return Err(std::io::Error::last_os_error());
}

@@ -4,7 +4,7 @@ use async_trait::async_trait;
use tokio_util::sync::CancellationToken;

use crate::codex::TurnContext;
use crate::codex::compact;
use crate::compact;
use crate::state::TaskKind;
use codex_protocol::user_input::UserInput;

@@ -1,28 +1,35 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use codex_protocol::models::ShellToolCallParams;
|
||||
use codex_async_utils::CancelErr;
|
||||
use codex_async_utils::OrCancelExt;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::error;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::codex::TurnContext;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::exec::SandboxType;
|
||||
use crate::exec::StdoutStream;
|
||||
use crate::exec::StreamOutput;
|
||||
use crate::exec::execute_exec_env;
|
||||
use crate::exec_env::create_env;
|
||||
use crate::parse_command::parse_command;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::ExecCommandBeginEvent;
|
||||
use crate::protocol::ExecCommandEndEvent;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::sandboxing::ExecEnv;
|
||||
use crate::state::TaskKind;
|
||||
use crate::tools::context::ToolPayload;
|
||||
use crate::tools::parallel::ToolCallRuntime;
|
||||
use crate::tools::router::ToolCall;
|
||||
use crate::tools::router::ToolRouter;
|
||||
use crate::turn_diff_tracker::TurnDiffTracker;
|
||||
use crate::tools::format_exec_output_str;
|
||||
use crate::user_shell_command::user_shell_command_record_item;
|
||||
|
||||
use super::SessionTask;
|
||||
use super::SessionTaskContext;
|
||||
|
||||
const USER_SHELL_TOOL_NAME: &str = "local_shell";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct UserShellCommandTask {
|
||||
command: String,
|
||||
@@ -78,34 +85,126 @@ impl SessionTask for UserShellCommandTask {
|
||||
}
|
||||
};
|
||||
|
||||
let params = ShellToolCallParams {
|
||||
let call_id = Uuid::new_v4().to_string();
|
||||
let raw_command = self.command.clone();
|
||||
|
||||
let parsed_cmd = parse_command(&shell_invocation);
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
|
||||
call_id: call_id.clone(),
|
||||
command: shell_invocation.clone(),
|
||||
cwd: turn_context.cwd.clone(),
|
||||
parsed_cmd,
|
||||
is_user_shell_command: true,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let exec_env = ExecEnv {
|
||||
command: shell_invocation,
|
||||
workdir: None,
|
||||
cwd: turn_context.cwd.clone(),
|
||||
env: create_env(&turn_context.shell_environment_policy),
|
||||
timeout_ms: None,
|
||||
sandbox: SandboxType::None,
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
|
||||
let tool_call = ToolCall {
|
||||
tool_name: USER_SHELL_TOOL_NAME.to_string(),
|
||||
call_id: Uuid::new_v4().to_string(),
|
||||
payload: ToolPayload::LocalShell { params },
|
||||
};
|
||||
let stdout_stream = Some(StdoutStream {
|
||||
sub_id: turn_context.sub_id.clone(),
|
||||
call_id: call_id.clone(),
|
||||
tx_event: session.get_tx_event(),
|
||||
});
|
||||
|
||||
let router = Arc::new(ToolRouter::from_config(&turn_context.tools_config, None));
|
||||
let tracker = Arc::new(Mutex::new(TurnDiffTracker::new()));
|
||||
let runtime = ToolCallRuntime::new(
|
||||
Arc::clone(&router),
|
||||
Arc::clone(&session),
|
||||
Arc::clone(&turn_context),
|
||||
Arc::clone(&tracker),
|
||||
);
|
||||
let sandbox_policy = SandboxPolicy::DangerFullAccess;
|
||||
let exec_result = execute_exec_env(exec_env, &sandbox_policy, stdout_stream)
|
||||
.or_cancel(&cancellation_token)
|
||||
.await;
|
||||
|
||||
if let Err(err) = runtime
|
||||
.handle_tool_call(tool_call, cancellation_token)
|
||||
.await
|
||||
{
|
||||
error!("user shell command failed: {err:?}");
|
||||
match exec_result {
|
||||
Err(CancelErr::Cancelled) => {
|
||||
let aborted_message = "command aborted by user".to_string();
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: -1,
|
||||
stdout: StreamOutput::new(String::new()),
|
||||
stderr: StreamOutput::new(aborted_message.clone()),
|
||||
aggregated_output: StreamOutput::new(aborted_message.clone()),
|
||||
duration: Duration::ZERO,
|
||||
timed_out: false,
|
||||
};
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &exec_output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id,
|
||||
stdout: String::new(),
|
||||
stderr: aborted_message.clone(),
|
||||
aggregated_output: aborted_message.clone(),
|
||||
exit_code: -1,
|
||||
duration: Duration::ZERO,
|
||||
formatted_output: aborted_message,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
Ok(Ok(output)) => {
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id: call_id.clone(),
|
||||
stdout: output.stdout.text.clone(),
|
||||
stderr: output.stderr.text.clone(),
|
||||
aggregated_output: output.aggregated_output.text.clone(),
|
||||
exit_code: output.exit_code,
|
||||
duration: output.duration,
|
||||
formatted_output: format_exec_output_str(&output),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
error!("user shell command failed: {err:?}");
|
||||
let message = format!("execution error: {err:?}");
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: -1,
|
||||
stdout: StreamOutput::new(String::new()),
|
||||
stderr: StreamOutput::new(message.clone()),
|
||||
aggregated_output: StreamOutput::new(message.clone()),
|
||||
duration: Duration::ZERO,
|
||||
timed_out: false,
|
||||
};
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id,
|
||||
stdout: exec_output.stdout.text.clone(),
|
||||
stderr: exec_output.stderr.text.clone(),
|
||||
aggregated_output: exec_output.aggregated_output.text.clone(),
|
||||
exit_code: exec_output.exit_code,
|
||||
duration: exec_output.duration,
|
||||
formatted_output: format_exec_output_str(&exec_output),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &exec_output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::Deserialize;
|
||||
|
||||
@@ -24,6 +26,8 @@ pub struct UnifiedExecHandler;
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ExecCommandArgs {
|
||||
cmd: String,
|
||||
#[serde(default)]
|
||||
workdir: Option<String>,
|
||||
#[serde(default = "default_shell")]
|
||||
shell: String,
|
||||
#[serde(default = "default_login")]
|
||||
@@ -96,6 +100,12 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
"failed to parse exec_command arguments: {err:?}"
|
||||
))
|
||||
})?;
|
||||
let workdir = args
|
||||
.workdir
|
||||
.as_deref()
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(PathBuf::from);
|
||||
let cwd = workdir.clone().unwrap_or_else(|| context.turn.cwd.clone());
|
||||
|
||||
let event_ctx = ToolEventCtx::new(
|
||||
context.session.as_ref(),
|
||||
@@ -103,8 +113,7 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
&context.call_id,
|
||||
None,
|
||||
);
|
||||
let emitter =
|
||||
ToolEmitter::unified_exec(args.cmd.clone(), context.turn.cwd.clone(), true);
|
||||
let emitter = ToolEmitter::unified_exec(args.cmd.clone(), cwd.clone(), true);
|
||||
emitter.emit(event_ctx, ToolEventStage::Begin).await;
|
||||
|
||||
manager
|
||||
@@ -115,6 +124,7 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
login: args.login,
|
||||
yield_time_ms: args.yield_time_ms,
|
||||
max_output_tokens: args.max_output_tokens,
|
||||
workdir,
|
||||
},
|
||||
&context,
|
||||
)
|
||||
|
||||
@@ -4,8 +4,7 @@ Runtime: shell
|
||||
Executes shell requests under the orchestrator: asks for approval when needed,
|
||||
builds a CommandSpec, and runs it under the current SandboxAttempt.
|
||||
*/
|
||||
use crate::command_safety::is_dangerous_command::command_might_be_dangerous;
|
||||
use crate::command_safety::is_safe_command::is_known_safe_command;
|
||||
use crate::command_safety::is_dangerous_command::requires_initial_appoval;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::sandboxing::execute_env;
|
||||
@@ -121,28 +120,12 @@ impl Approvable<ShellRequest> for ShellRuntime {
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> bool {
|
||||
if is_known_safe_command(&req.command) {
|
||||
return false;
|
||||
}
|
||||
match policy {
|
||||
AskForApproval::Never | AskForApproval::OnFailure => false,
|
||||
AskForApproval::OnRequest => {
|
||||
// In DangerFullAccess, only prompt if the command looks dangerous.
|
||||
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
|
||||
return command_might_be_dangerous(&req.command);
|
||||
}
|
||||
|
||||
// In restricted sandboxes (ReadOnly/WorkspaceWrite), do not prompt for
|
||||
// non‑escalated, non‑dangerous commands — let the sandbox enforce
|
||||
// restrictions (e.g., block network/write) without a user prompt.
|
||||
let wants_escalation = req.with_escalated_permissions.unwrap_or(false);
|
||||
if wants_escalation {
|
||||
return true;
|
||||
}
|
||||
command_might_be_dangerous(&req.command)
|
||||
}
|
||||
AskForApproval::UnlessTrusted => !is_known_safe_command(&req.command),
|
||||
}
|
||||
requires_initial_appoval(
|
||||
policy,
|
||||
sandbox_policy,
|
||||
&req.command,
|
||||
req.with_escalated_permissions.unwrap_or(false),
|
||||
)
|
||||
}
|
||||
|
||||
fn wants_escalated_first_attempt(&self, req: &ShellRequest) -> bool {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::command_safety::is_dangerous_command::requires_initial_appoval;
|
||||
/*
|
||||
Runtime: unified exec
|
||||
|
||||
@@ -21,7 +22,9 @@ use crate::tools::sandboxing::with_cached_approval;
|
||||
use crate::unified_exec::UnifiedExecError;
|
||||
use crate::unified_exec::UnifiedExecSession;
|
||||
use crate::unified_exec::UnifiedExecSessionManager;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use futures::future::BoxFuture;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
@@ -106,6 +109,15 @@ impl Approvable<UnifiedExecRequest> for UnifiedExecRuntime<'_> {
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
fn wants_initial_approval(
|
||||
&self,
|
||||
req: &UnifiedExecRequest,
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> bool {
|
||||
requires_initial_appoval(policy, sandbox_policy, &req.command, false)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ToolRuntime<UnifiedExecRequest, UnifiedExecSession> for UnifiedExecRuntime<'a> {
|
||||
|
||||
@@ -138,6 +138,15 @@ fn create_exec_command_tool() -> ToolSpec {
|
||||
description: Some("Shell command to execute.".to_string()),
|
||||
},
|
||||
);
|
||||
properties.insert(
|
||||
"workdir".to_string(),
|
||||
JsonSchema::String {
|
||||
description: Some(
|
||||
"Optional working directory to run the command in; defaults to the turn cwd."
|
||||
.to_string(),
|
||||
),
|
||||
},
|
||||
);
|
||||
properties.insert(
|
||||
"shell".to_string(),
|
||||
JsonSchema::String {
|
||||
@@ -156,8 +165,7 @@ fn create_exec_command_tool() -> ToolSpec {
|
||||
"yield_time_ms".to_string(),
|
||||
JsonSchema::Number {
|
||||
description: Some(
|
||||
"Maximum time in milliseconds to wait for output after writing the input (default: 1000)."
|
||||
.to_string(),
|
||||
"How long to wait (in milliseconds) for output before yielding.".to_string(),
|
||||
),
|
||||
},
|
||||
);
|
||||
@@ -173,8 +181,11 @@ fn create_exec_command_tool() -> ToolSpec {
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "exec_command".to_string(),
|
||||
description:
|
||||
"Runs a command in a PTY, returning output or a session ID for ongoing interaction."
|
||||
.to_string(),
|
||||
concat!(
|
||||
"Runs a command in a PTY, returning output or a session ID for ongoing interaction.\n",
|
||||
"- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary."
|
||||
)
|
||||
.to_string(),
|
||||
strict: false,
|
||||
parameters: JsonSchema::Object {
|
||||
properties,
|
||||
@@ -247,9 +258,7 @@ fn create_shell_tool() -> ToolSpec {
|
||||
properties.insert(
|
||||
"timeout_ms".to_string(),
|
||||
JsonSchema::Number {
|
||||
description: Some(
|
||||
"The timeout for the command in milliseconds (default: 1000).".to_string(),
|
||||
),
|
||||
description: Some("The timeout for the command in milliseconds".to_string()),
|
||||
},
|
||||
);
|
||||
|
||||
@@ -268,7 +277,12 @@ fn create_shell_tool() -> ToolSpec {
|
||||
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "shell".to_string(),
|
||||
description: "Runs a shell command and returns its output.".to_string(),
|
||||
description: concat!(
|
||||
"Runs a shell command and returns its output.\n",
|
||||
"- The value of `command` will be passed to execvp(). Most terminal commands should be prefixed with [`bash`, `-lc`].\n",
|
||||
"- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.",
|
||||
)
|
||||
.to_string(),
|
||||
strict: false,
|
||||
parameters: JsonSchema::Object {
|
||||
properties,
|
||||
@@ -1720,7 +1734,7 @@ mod tests {
|
||||
};
|
||||
assert_eq!(name, "shell");
|
||||
|
||||
let expected = "Runs a shell command and returns its output.";
|
||||
let expected = "Runs a shell command and returns its output.\n- The value of `command` will be passed to execvp(). Most terminal commands should be prefixed with [`bash`, `-lc`].\n- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.";
|
||||
assert_eq!(description, expected);
|
||||
}
|
||||
|
||||
|
||||
@@ -70,6 +70,7 @@ pub(crate) struct ExecCommandRequest<'a> {
|
||||
pub login: bool,
|
||||
pub yield_time_ms: Option<u64>,
|
||||
pub max_output_tokens: Option<usize>,
|
||||
pub workdir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -199,6 +200,7 @@ mod tests {
|
||||
login: true,
|
||||
yield_time_ms,
|
||||
max_output_tokens: None,
|
||||
workdir: None,
|
||||
},
|
||||
&context,
|
||||
)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::Notify;
|
||||
@@ -38,6 +39,10 @@ impl UnifiedExecSessionManager {
|
||||
request: ExecCommandRequest<'_>,
|
||||
context: &UnifiedExecContext,
|
||||
) -> Result<UnifiedExecResponse, UnifiedExecError> {
|
||||
let cwd = request
|
||||
.workdir
|
||||
.clone()
|
||||
.unwrap_or_else(|| context.turn.cwd.clone());
|
||||
let shell_flag = if request.login { "-lc" } else { "-c" };
|
||||
let command = vec![
|
||||
request.shell.to_string(),
|
||||
@@ -45,7 +50,9 @@ impl UnifiedExecSessionManager {
|
||||
request.command.to_string(),
|
||||
];
|
||||
|
||||
let session = self.open_session_with_sandbox(command, context).await?;
|
||||
let session = self
|
||||
.open_session_with_sandbox(command, cwd.clone(), context)
|
||||
.await?;
|
||||
|
||||
let max_tokens = resolve_max_tokens(request.max_output_tokens);
|
||||
let yield_time_ms =
|
||||
@@ -66,7 +73,7 @@ impl UnifiedExecSessionManager {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
self.store_session(session, context, request.command, start)
|
||||
self.store_session(session, context, request.command, cwd.clone(), start)
|
||||
.await,
|
||||
)
|
||||
};
|
||||
@@ -87,6 +94,7 @@ impl UnifiedExecSessionManager {
|
||||
Self::emit_exec_end_from_context(
|
||||
context,
|
||||
request.command.to_string(),
|
||||
cwd,
|
||||
response.output.clone(),
|
||||
exit,
|
||||
response.wall_time,
|
||||
@@ -211,6 +219,7 @@ impl UnifiedExecSessionManager {
|
||||
session: UnifiedExecSession,
|
||||
context: &UnifiedExecContext,
|
||||
command: &str,
|
||||
cwd: PathBuf,
|
||||
started_at: Instant,
|
||||
) -> i32 {
|
||||
let session_id = self
|
||||
@@ -222,7 +231,7 @@ impl UnifiedExecSessionManager {
|
||||
turn_ref: Arc::clone(&context.turn),
|
||||
call_id: context.call_id.clone(),
|
||||
command: command.to_string(),
|
||||
cwd: context.turn.cwd.clone(),
|
||||
cwd,
|
||||
started_at,
|
||||
};
|
||||
self.sessions.lock().await.insert(session_id, entry);
|
||||
@@ -258,6 +267,7 @@ impl UnifiedExecSessionManager {
|
||||
async fn emit_exec_end_from_context(
|
||||
context: &UnifiedExecContext,
|
||||
command: String,
|
||||
cwd: PathBuf,
|
||||
aggregated_output: String,
|
||||
exit_code: i32,
|
||||
duration: Duration,
|
||||
@@ -276,7 +286,7 @@ impl UnifiedExecSessionManager {
|
||||
&context.call_id,
|
||||
None,
|
||||
);
|
||||
let emitter = ToolEmitter::unified_exec(command, context.turn.cwd.clone(), true);
|
||||
let emitter = ToolEmitter::unified_exec(command, cwd, true);
|
||||
emitter
|
||||
.emit(event_ctx, ToolEventStage::Success(output))
|
||||
.await;
|
||||
@@ -300,13 +310,14 @@ impl UnifiedExecSessionManager {
|
||||
pub(super) async fn open_session_with_sandbox(
|
||||
&self,
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
context: &UnifiedExecContext,
|
||||
) -> Result<UnifiedExecSession, UnifiedExecError> {
|
||||
let mut orchestrator = ToolOrchestrator::new();
|
||||
let mut runtime = UnifiedExecRuntime::new(self);
|
||||
let req = UnifiedExecToolRequest::new(
|
||||
command,
|
||||
context.turn.cwd.clone(),
|
||||
cwd,
|
||||
create_env(&context.turn.shell_environment_policy),
|
||||
);
|
||||
let tool_ctx = ToolCtx {
|
||||
|
||||
codex-rs/core/src/user_shell_command.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
use std::time::Duration;

use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;

use crate::exec::ExecToolCallOutput;
use crate::tools::format_exec_output_str;

pub const USER_SHELL_COMMAND_OPEN: &str = "<user_shell_command>";
pub const USER_SHELL_COMMAND_CLOSE: &str = "</user_shell_command>";

pub fn is_user_shell_command_text(text: &str) -> bool {
    let trimmed = text.trim_start();
    let lowered = trimmed.to_ascii_lowercase();
    lowered.starts_with(USER_SHELL_COMMAND_OPEN)
}

fn format_duration_line(duration: Duration) -> String {
    let duration_seconds = duration.as_secs_f64();
    format!("Duration: {duration_seconds:.4} seconds")
}

fn format_user_shell_command_body(command: &str, exec_output: &ExecToolCallOutput) -> String {
    let mut sections = Vec::new();
    sections.push("<command>".to_string());
    sections.push(command.to_string());
    sections.push("</command>".to_string());
    sections.push("<result>".to_string());
    sections.push(format!("Exit code: {}", exec_output.exit_code));
    sections.push(format_duration_line(exec_output.duration));
    sections.push("Output:".to_string());
    sections.push(format_exec_output_str(exec_output));
    sections.push("</result>".to_string());
    sections.join("\n")
}

pub fn format_user_shell_command_record(command: &str, exec_output: &ExecToolCallOutput) -> String {
    let body = format_user_shell_command_body(command, exec_output);
    format!("{USER_SHELL_COMMAND_OPEN}\n{body}\n{USER_SHELL_COMMAND_CLOSE}")
}

pub fn user_shell_command_record_item(
    command: &str,
    exec_output: &ExecToolCallOutput,
) -> ResponseItem {
    ResponseItem::Message {
        id: None,
        role: "user".to_string(),
        content: vec![ContentItem::InputText {
            text: format_user_shell_command_record(command, exec_output),
        }],
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::exec::StreamOutput;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn detects_user_shell_command_text_variants() {
|
||||
assert!(is_user_shell_command_text(
|
||||
"<user_shell_command>\necho hi\n</user_shell_command>"
|
||||
));
|
||||
assert!(!is_user_shell_command_text("echo hi"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn formats_basic_record() {
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: 0,
|
||||
stdout: StreamOutput::new("hi".to_string()),
|
||||
stderr: StreamOutput::new(String::new()),
|
||||
aggregated_output: StreamOutput::new("hi".to_string()),
|
||||
duration: Duration::from_secs(1),
|
||||
timed_out: false,
|
||||
};
|
||||
let item = user_shell_command_record_item("echo hi", &exec_output);
|
||||
let ResponseItem::Message { content, .. } = item else {
|
||||
panic!("expected message");
|
||||
};
|
||||
let [ContentItem::InputText { text }] = content.as_slice() else {
|
||||
panic!("expected input text");
|
||||
};
|
||||
assert_eq!(
|
||||
text,
|
||||
"<user_shell_command>\n<command>\necho hi\n</command>\n<result>\nExit code: 0\nDuration: 1.0000 seconds\nOutput:\nhi\n</result>\n</user_shell_command>"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uses_aggregated_output_over_streams() {
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: 42,
|
||||
stdout: StreamOutput::new("stdout-only".to_string()),
|
||||
stderr: StreamOutput::new("stderr-only".to_string()),
|
||||
aggregated_output: StreamOutput::new("combined output wins".to_string()),
|
||||
duration: Duration::from_millis(120),
|
||||
timed_out: false,
|
||||
};
|
||||
let record = format_user_shell_command_record("false", &exec_output);
|
||||
assert_eq!(
|
||||
record,
|
||||
"<user_shell_command>\n<command>\nfalse\n</command>\n<result>\nExit code: 42\nDuration: 0.1200 seconds\nOutput:\ncombined output wins\n</result>\n</user_shell_command>"
|
||||
);
|
||||
}
|
||||
}
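
Taken together, the helpers in this new module wrap a user-run command and its result in <user_shell_command> tags so it can be stored in conversation history and recognized later. A rough usage sketch (field values are illustrative; it assumes the module's items and the crate's ExecToolCallOutput / StreamOutput types, as exercised by the tests above, are in scope):

use std::time::Duration;

fn record_user_command() -> ResponseItem {
    // Hypothetical call site: wrap a finished user command so it can be appended
    // to history; is_user_shell_command_text() can later spot such records.
    let exec_output = ExecToolCallOutput {
        exit_code: 0,
        stdout: StreamOutput::new("ok".to_string()),
        stderr: StreamOutput::new(String::new()),
        aggregated_output: StreamOutput::new("ok".to_string()),
        duration: Duration::from_millis(250),
        timed_out: false,
    };
    user_shell_command_record_item("cargo check", &exec_output)
}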
|
||||
@@ -61,6 +61,18 @@ impl ResponsesRequest {
|
||||
self.0.body_json().unwrap()
|
||||
}
|
||||
|
||||
/// Returns all `input_text` spans from `message` inputs for the provided role.
|
||||
pub fn message_input_texts(&self, role: &str) -> Vec<String> {
|
||||
self.inputs_of_type("message")
|
||||
.into_iter()
|
||||
.filter(|item| item.get("role").and_then(Value::as_str) == Some(role))
|
||||
.filter_map(|item| item.get("content").and_then(Value::as_array).cloned())
|
||||
.flatten()
|
||||
.filter(|span| span.get("type").and_then(Value::as_str) == Some("input_text"))
|
||||
.filter_map(|span| span.get("text").and_then(Value::as_str).map(str::to_owned))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn input(&self) -> Vec<Value> {
|
||||
self.0.body_json::<Value>().unwrap()["input"]
|
||||
.as_array()
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#![allow(clippy::unwrap_used, clippy::expect_used)]
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::model_family::find_family_for_model;
|
||||
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
@@ -24,6 +25,7 @@ use core_test_support::test_codex::TestCodex;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use pretty_assertions::assert_eq;
|
||||
use regex_lite::Regex;
|
||||
use serde_json::Value;
|
||||
use serde_json::json;
|
||||
use std::env;
|
||||
@@ -71,6 +73,9 @@ enum ActionKind {
|
||||
RunCommand {
|
||||
command: &'static [&'static str],
|
||||
},
|
||||
RunUnifiedExecCommand {
|
||||
command: &'static str,
|
||||
},
|
||||
ApplyPatchFunction {
|
||||
target: TargetPath,
|
||||
content: &'static str,
|
||||
@@ -134,6 +139,17 @@ impl ActionKind {
|
||||
let event = shell_event(call_id, &command, 1_000, with_escalated_permissions)?;
|
||||
Ok((event, Some(command)))
|
||||
}
|
||||
ActionKind::RunUnifiedExecCommand { command } => {
|
||||
let event = exec_command_event(call_id, command, Some(1000))?;
|
||||
Ok((
|
||||
event,
|
||||
Some(vec![
|
||||
"/bin/bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
command.to_string(),
|
||||
]),
|
||||
))
|
||||
}
|
||||
ActionKind::ApplyPatchFunction { target, content } => {
|
||||
let (path, patch_path) = target.resolve_for_patch(test);
|
||||
let _ = fs::remove_file(&path);
|
||||
@@ -183,6 +199,17 @@ fn shell_event(
|
||||
Ok(ev_function_call(call_id, "shell", &args_str))
|
||||
}
|
||||
|
||||
fn exec_command_event(call_id: &str, cmd: &str, yield_time_ms: Option<u64>) -> Result<Value> {
|
||||
let mut args = json!({
|
||||
"cmd": cmd.to_string(),
|
||||
});
|
||||
if let Some(yield_time_ms) = yield_time_ms {
|
||||
args["yield_time_ms"] = json!(yield_time_ms);
|
||||
}
|
||||
let args_str = serde_json::to_string(&args)?;
|
||||
Ok(ev_function_call(call_id, "exec_command", &args_str))
|
||||
}
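
For reference, `exec_command_event` above serializes its arguments to a small JSON object before wrapping them in a function-call event. A tiny standalone sketch of the payload for exec_command_event("call-1", "echo hi", Some(1000)) (assumes only `serde_json`):

use serde_json::json;

fn main() {
    // Equivalent of the arguments string built by the helper; `yield_time_ms`
    // is included only when a value is supplied.
    let args = json!({
        "cmd": "echo hi",
        "yield_time_ms": 1000
    });
    println!("{}", serde_json::to_string(&args).unwrap());
}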
|
||||
|
||||
#[derive(Clone)]
|
||||
enum Expectation {
|
||||
FileCreated {
|
||||
@@ -206,6 +233,9 @@ enum Expectation {
|
||||
CommandSuccess {
|
||||
stdout_contains: &'static str,
|
||||
},
|
||||
CommandFailure {
|
||||
output_contains: &'static str,
|
||||
},
|
||||
}
|
||||
|
||||
impl Expectation {
|
||||
@@ -337,6 +367,19 @@ impl Expectation {
|
||||
result.stdout
|
||||
);
|
||||
}
|
||||
Expectation::CommandFailure { output_contains } => {
|
||||
assert_ne!(
|
||||
result.exit_code,
|
||||
Some(0),
|
||||
"expected non-zero exit for command failure: {}",
|
||||
result.stdout
|
||||
);
|
||||
assert!(
|
||||
result.stdout.contains(output_contains),
|
||||
"command failure stderr missing {output_contains:?}: {}",
|
||||
result.stdout
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -362,7 +405,7 @@ struct ScenarioSpec {
|
||||
sandbox_policy: SandboxPolicy,
|
||||
action: ActionKind,
|
||||
with_escalated_permissions: bool,
|
||||
requires_apply_patch_tool: bool,
|
||||
features: Vec<Feature>,
|
||||
model_override: Option<&'static str>,
|
||||
outcome: Outcome,
|
||||
expectation: Expectation,
|
||||
@@ -410,10 +453,24 @@ fn parse_result(item: &Value) -> CommandResult {
|
||||
let stdout = parsed["output"].as_str().unwrap_or_default().to_string();
|
||||
CommandResult { exit_code, stdout }
|
||||
}
|
||||
Err(_) => CommandResult {
|
||||
exit_code: None,
|
||||
stdout: output_str.to_string(),
|
||||
},
|
||||
Err(_) => {
|
||||
let regex =
|
||||
Regex::new(r"(?s)^.*?Process exited with code (\d+)\n.*?Output:\n(.*)$").unwrap();
|
||||
// parse freeform output
|
||||
if let Some(captures) = regex.captures(output_str) {
|
||||
let exit_code = captures.get(1).unwrap().as_str().parse::<i64>().unwrap();
|
||||
let output = captures.get(2).unwrap().as_str();
|
||||
CommandResult {
|
||||
exit_code: Some(exit_code),
|
||||
stdout: output.to_string(),
|
||||
}
|
||||
} else {
|
||||
CommandResult {
|
||||
exit_code: None,
|
||||
stdout: output_str.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
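
The fallback branch above parses freeform tool output with a regex when the payload is not JSON. A self-contained sketch of that parse on a hypothetical output string (the regex is the one introduced in the change; the sample text is illustrative):

use regex_lite::Regex;

fn main() {
    // Hypothetical freeform output in the shape the fallback expects.
    let output_str = "Process exited with code 2\nSome metadata\nOutput:\nboom";
    let regex =
        Regex::new(r"(?s)^.*?Process exited with code (\d+)\n.*?Output:\n(.*)$").unwrap();
    if let Some(captures) = regex.captures(output_str) {
        let exit_code: i64 = captures.get(1).unwrap().as_str().parse().unwrap();
        let stdout = captures.get(2).unwrap().as_str();
        assert_eq!(exit_code, 2);
        assert_eq!(stdout, "boom");
    }
}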
|
||||
|
||||
@@ -506,7 +563,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "danger-on-request",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileCreated {
|
||||
@@ -523,7 +580,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
response_body: "danger-network-ok",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::NetworkSuccess {
|
||||
@@ -538,7 +595,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
command: &["echo", "trusted-unless"],
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::CommandSuccess {
|
||||
@@ -554,7 +611,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "danger-on-failure",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileCreated {
|
||||
@@ -571,7 +628,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "danger-unless-trusted",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -591,7 +648,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "danger-never",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileCreated {
|
||||
@@ -608,7 +665,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "read-only-approval",
|
||||
},
|
||||
with_escalated_permissions: true,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -627,7 +684,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
command: &["echo", "trusted-read-only"],
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::CommandSuccess {
|
||||
@@ -643,7 +700,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
response_body: "should-not-see",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::NetworkFailure { expect_tag: "ERR:" },
|
||||
@@ -657,7 +714,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "should-not-write",
|
||||
},
|
||||
with_escalated_permissions: true,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Denied,
|
||||
@@ -678,7 +735,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "read-only-on-failure",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -698,7 +755,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
response_body: "read-only-network-ok",
|
||||
},
|
||||
with_escalated_permissions: true,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -717,7 +774,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "shell-apply-patch",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::PatchApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -737,7 +794,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-apply-patch",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::PatchApplied {
|
||||
@@ -754,7 +811,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-patch-danger",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![Feature::ApplyPatchFreeform],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::PatchApplied {
|
||||
@@ -771,7 +828,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-patch-outside",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::PatchApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -791,7 +848,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-patch-outside-denied",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::PatchApproval {
|
||||
decision: ReviewDecision::Denied,
|
||||
@@ -811,7 +868,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "shell-patch-outside",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::PatchApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -831,7 +888,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-patch-unless-trusted",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::PatchApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -851,7 +908,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "function-patch-never",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: true,
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5-codex"),
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileNotCreated {
|
||||
@@ -870,7 +927,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "read-only-unless-trusted",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -890,7 +947,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "read-only-never",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileNotCreated {
|
||||
@@ -910,7 +967,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
command: &["echo", "trusted-never"],
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::CommandSuccess {
|
||||
@@ -926,7 +983,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "workspace-on-request",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileCreated {
|
||||
@@ -943,7 +1000,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
response_body: "workspace-network-blocked",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::NetworkFailure { expect_tag: "ERR:" },
|
||||
@@ -957,7 +1014,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "workspace-on-request-outside",
|
||||
},
|
||||
with_escalated_permissions: true,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -977,7 +1034,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
response_body: "workspace-network-ok",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::NetworkSuccess {
|
||||
@@ -994,7 +1051,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "workspace-on-failure",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -1014,7 +1071,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "workspace-unless-trusted",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Approved,
|
||||
@@ -1034,7 +1091,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
content: "workspace-never",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
requires_apply_patch_tool: false,
|
||||
features: vec![],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileNotCreated {
|
||||
@@ -1046,6 +1103,39 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
},
|
||||
},
|
||||
},
|
||||
ScenarioSpec {
|
||||
name: "unified exec on request no approval for safe command",
|
||||
approval_policy: OnRequest,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
action: ActionKind::RunUnifiedExecCommand {
|
||||
command: "echo \"hello unified exec\"",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![Feature::UnifiedExec],
|
||||
model_override: None,
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::CommandSuccess {
|
||||
stdout_contains: "hello unified exec",
|
||||
},
|
||||
},
|
||||
ScenarioSpec {
|
||||
name: "unified exec on request requires approval unless trusted",
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
action: ActionKind::RunUnifiedExecCommand {
|
||||
command: "git reset --hard",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![Feature::UnifiedExec],
|
||||
model_override: None,
|
||||
outcome: Outcome::ExecApproval {
|
||||
decision: ReviewDecision::Denied,
|
||||
expected_reason: None,
|
||||
},
|
||||
expectation: Expectation::CommandFailure {
|
||||
output_contains: "rejected by user",
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1065,7 +1155,7 @@ async fn run_scenario(scenario: &ScenarioSpec) -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let approval_policy = scenario.approval_policy;
|
||||
let sandbox_policy = scenario.sandbox_policy.clone();
|
||||
let requires_apply_patch_tool = scenario.requires_apply_patch_tool;
|
||||
let features = scenario.features.clone();
|
||||
let model_override = scenario.model_override;
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
@@ -1075,8 +1165,8 @@ async fn run_scenario(scenario: &ScenarioSpec) -> Result<()> {
|
||||
config.model = model.to_string();
|
||||
config.model_family =
|
||||
find_family_for_model(model).expect("model should map to a known family");
|
||||
if requires_apply_patch_tool {
|
||||
config.include_apply_patch_tool = true;
|
||||
for feature in features {
|
||||
config.features.enable(feature);
|
||||
}
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
@@ -54,7 +54,7 @@ const COMPACT_PROMPT_MARKER: &str =
 pub(super) const TEST_COMPACT_PROMPT: &str =
     "You are performing a CONTEXT CHECKPOINT COMPACTION for a tool.\nTest-only compact prompt.";

-pub(super) const COMPACT_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.";
+pub(super) const COMPACT_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.";

 fn auto_summary(summary: &str) -> String {
     summary.to_string()
|
||||
|
||||
@@ -223,6 +223,90 @@ async fn unified_exec_emits_exec_command_begin_event() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn unified_exec_respects_workdir_override() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
skip_if_sandbox!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config.use_experimental_unified_exec_tool = true;
|
||||
config.features.enable(Feature::UnifiedExec);
|
||||
});
|
||||
let TestCodex {
|
||||
codex,
|
||||
cwd,
|
||||
session_configured,
|
||||
..
|
||||
} = builder.build(&server).await?;
|
||||
|
||||
let workdir = cwd.path().join("uexec_workdir_test");
|
||||
std::fs::create_dir_all(&workdir)?;
|
||||
|
||||
let call_id = "uexec-workdir";
|
||||
let args = json!({
|
||||
"cmd": "pwd",
|
||||
"yield_time_ms": 250,
|
||||
"workdir": workdir.to_string_lossy().to_string(),
|
||||
});
|
||||
|
||||
let responses = vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "exec_command", &serde_json::to_string(&args)?),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_response_created("resp-2"),
|
||||
ev_assistant_message("msg-1", "finished"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
];
|
||||
mount_sse_sequence(&server, responses).await;
|
||||
|
||||
let session_model = session_configured.model.clone();
|
||||
|
||||
codex
|
||||
.submit(Op::UserTurn {
|
||||
items: vec![UserInput::Text {
|
||||
text: "run workdir test".into(),
|
||||
}],
|
||||
final_output_json_schema: None,
|
||||
cwd: cwd.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
model: session_model,
|
||||
effort: None,
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await?;
|
||||
|
||||
wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = server.received_requests().await.expect("recorded requests");
|
||||
assert!(!requests.is_empty(), "expected at least one POST request");
|
||||
|
||||
let bodies = requests
|
||||
.iter()
|
||||
.map(|req| req.body_json::<Value>().expect("request json"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let outputs = collect_tool_outputs(&bodies)?;
|
||||
let output = outputs
|
||||
.get(call_id)
|
||||
.expect("missing exec_command workdir output");
|
||||
let output_text = output.output.trim();
|
||||
let output_canonical = std::fs::canonicalize(output_text)?;
|
||||
let expected_canonical = std::fs::canonicalize(&workdir)?;
|
||||
assert_eq!(
|
||||
output_canonical, expected_canonical,
|
||||
"pwd should reflect the requested workdir override"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn unified_exec_emits_exec_command_end_event() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
@@ -2,35 +2,20 @@ use codex_core::ConversationManager;
|
||||
use codex_core::NewConversation;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::ExecCommandEndEvent;
|
||||
use codex_core::protocol::ExecOutputStream;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::TurnAbortReason;
|
||||
use core_test_support::assert_regex_match;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::wait_for_event;
|
||||
use core_test_support::wait_for_event_match;
|
||||
use regex_lite::escape;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn detect_python_executable() -> Option<String> {
|
||||
let candidates = ["python3", "python"];
|
||||
candidates.iter().find_map(|candidate| {
|
||||
Command::new(candidate)
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
.ok()
|
||||
.and_then(|status| status.success().then(|| (*candidate).to_string()))
|
||||
})
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
let Some(python) = detect_python_executable() else {
|
||||
eprintln!("skipping test: python3 not found in PATH");
|
||||
return;
|
||||
};
|
||||
|
||||
// Create a temporary working directory with a known file.
|
||||
let cwd = TempDir::new().unwrap();
|
||||
let file_name = "hello.txt";
|
||||
@@ -55,10 +40,8 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
.await
|
||||
.expect("create new conversation");
|
||||
|
||||
// 1) python should list the file
|
||||
let list_cmd = format!(
|
||||
"{python} -c \"import pathlib; print('\\n'.join(sorted(p.name for p in pathlib.Path('.').iterdir())))\""
|
||||
);
|
||||
// 1) shell command should list the file
|
||||
let list_cmd = "ls".to_string();
|
||||
codex
|
||||
.submit(Op::RunUserShellCommand { command: list_cmd })
|
||||
.await
|
||||
@@ -76,10 +59,8 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
"ls output should include {file_name}, got: {stdout:?}"
|
||||
);
|
||||
|
||||
// 2) python should print the file contents verbatim
|
||||
let cat_cmd = format!(
|
||||
"{python} -c \"import pathlib; print(pathlib.Path('{file_name}').read_text(), end='')\""
|
||||
);
|
||||
// 2) shell command should print the file contents verbatim
|
||||
let cat_cmd = format!("cat {file_name}");
|
||||
codex
|
||||
.submit(Op::RunUserShellCommand { command: cat_cmd })
|
||||
.await
|
||||
@@ -95,7 +76,7 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
};
|
||||
assert_eq!(exit_code, 0);
|
||||
if cfg!(windows) {
|
||||
// Windows' Python writes CRLF line endings; normalize so the assertion remains portable.
|
||||
// Windows shells emit CRLF line endings; normalize so the assertion remains portable.
|
||||
stdout = stdout.replace("\r\n", "\n");
|
||||
}
|
||||
assert_eq!(stdout, contents);
|
||||
@@ -103,10 +84,6 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
|
||||
#[tokio::test]
|
||||
async fn user_shell_cmd_can_be_interrupted() {
|
||||
let Some(python) = detect_python_executable() else {
|
||||
eprintln!("skipping test: python3 not found in PATH");
|
||||
return;
|
||||
};
|
||||
// Set up isolated config and conversation.
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let config = load_default_config_for_test(&codex_home);
|
||||
@@ -121,7 +98,7 @@ async fn user_shell_cmd_can_be_interrupted() {
|
||||
.expect("create new conversation");
|
||||
|
||||
// Start a long-running command and then interrupt it.
|
||||
let sleep_cmd = format!("{python} -c \"import time; time.sleep(5)\"");
|
||||
let sleep_cmd = "sleep 5".to_string();
|
||||
codex
|
||||
.submit(Op::RunUserShellCommand { command: sleep_cmd })
|
||||
.await
|
||||
@@ -138,3 +115,137 @@ async fn user_shell_cmd_can_be_interrupted() {
|
||||
};
|
||||
assert_eq!(ev.reason, TurnAbortReason::Interrupted);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn user_shell_command_history_is_persisted_and_shared_with_model() -> anyhow::Result<()> {
|
||||
let server = responses::start_mock_server().await;
|
||||
let mut builder = core_test_support::test_codex::test_codex();
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
#[cfg(windows)]
|
||||
let command = r#"$val = $env:CODEX_SANDBOX; if ([string]::IsNullOrEmpty($val)) { $val = 'not-set' } ; [System.Console]::Write($val)"#.to_string();
|
||||
#[cfg(not(windows))]
|
||||
let command = r#"sh -c "printf '%s' \"${CODEX_SANDBOX:-not-set}\"""#.to_string();
|
||||
|
||||
test.codex
|
||||
.submit(Op::RunUserShellCommand {
|
||||
command: command.clone(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let begin_event = wait_for_event_match(&test.codex, |ev| match ev {
|
||||
EventMsg::ExecCommandBegin(event) => Some(event.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
assert!(begin_event.is_user_shell_command);
|
||||
let matches_last_arg = begin_event.command.last() == Some(&command);
|
||||
let matches_split = shlex::split(&command).is_some_and(|split| split == begin_event.command);
|
||||
assert!(
|
||||
matches_last_arg || matches_split,
|
||||
"user command begin event should include the original command; got: {:?}",
|
||||
begin_event.command
|
||||
);
|
||||
|
||||
let delta_event = wait_for_event_match(&test.codex, |ev| match ev {
|
||||
EventMsg::ExecCommandOutputDelta(event) => Some(event.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
assert_eq!(delta_event.stream, ExecOutputStream::Stdout);
|
||||
let chunk_text =
|
||||
String::from_utf8(delta_event.chunk.clone()).expect("user command chunk is valid utf-8");
|
||||
assert_eq!(chunk_text.trim(), "not-set");
|
||||
|
||||
let end_event = wait_for_event_match(&test.codex, |ev| match ev {
|
||||
EventMsg::ExecCommandEnd(event) => Some(event.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
assert_eq!(end_event.exit_code, 0);
|
||||
assert_eq!(end_event.stdout.trim(), "not-set");
|
||||
|
||||
let _ = wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let responses = vec![responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
])];
|
||||
let mock = responses::mount_sse_sequence(&server, responses).await;
|
||||
|
||||
test.submit_turn("follow-up after shell command").await?;
|
||||
|
||||
let request = mock.single_request();
|
||||
|
||||
let command_message = request
|
||||
.message_input_texts("user")
|
||||
.into_iter()
|
||||
.find(|text| text.contains("<user_shell_command>"))
|
||||
.expect("command message recorded in request");
|
||||
let command_message = command_message.replace("\r\n", "\n");
|
||||
let escaped_command = escape(&command);
|
||||
let expected_pattern = format!(
|
||||
r"(?m)\A<user_shell_command>\n<command>\n{escaped_command}\n</command>\n<result>\nExit code: 0\nDuration: [0-9]+(?:\.[0-9]+)? seconds\nOutput:\nnot-set\n</result>\n</user_shell_command>\z"
|
||||
);
|
||||
assert_regex_match(&expected_pattern, &command_message);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn user_shell_command_output_is_truncated_in_history() -> anyhow::Result<()> {
|
||||
let server = responses::start_mock_server().await;
|
||||
let mut builder = core_test_support::test_codex::test_codex();
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
#[cfg(windows)]
|
||||
let command = r#"for ($i=1; $i -le 400; $i++) { Write-Output $i }"#.to_string();
|
||||
#[cfg(not(windows))]
|
||||
let command = "seq 1 400".to_string();
|
||||
|
||||
test.codex
|
||||
.submit(Op::RunUserShellCommand {
|
||||
command: command.clone(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let end_event = wait_for_event_match(&test.codex, |ev| match ev {
|
||||
EventMsg::ExecCommandEnd(event) => Some(event.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
assert_eq!(end_event.exit_code, 0);
|
||||
|
||||
let _ = wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let responses = vec![responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
])];
|
||||
let mock = responses::mount_sse_sequence(&server, responses).await;
|
||||
|
||||
test.submit_turn("follow-up after shell command").await?;
|
||||
|
||||
let request = mock.single_request();
|
||||
let command_message = request
|
||||
.message_input_texts("user")
|
||||
.into_iter()
|
||||
.find(|text| text.contains("<user_shell_command>"))
|
||||
.expect("command message recorded in request");
|
||||
let command_message = command_message.replace("\r\n", "\n");
|
||||
|
||||
let head = (1..=128).map(|i| format!("{i}\n")).collect::<String>();
|
||||
let tail = (273..=400).map(|i| format!("{i}\n")).collect::<String>();
|
||||
let truncated_body =
|
||||
format!("Total output lines: 400\n\n{head}\n[... omitted 144 of 400 lines ...]\n\n{tail}");
|
||||
let escaped_command = escape(&command);
|
||||
let escaped_truncated_body = escape(&truncated_body);
|
||||
let expected_pattern = format!(
|
||||
r"(?m)\A<user_shell_command>\n<command>\n{escaped_command}\n</command>\n<result>\nExit code: 0\nDuration: [0-9]+(?:\.[0-9]+)? seconds\nOutput:\n{escaped_truncated_body}\n</result>\n</user_shell_command>\z"
|
||||
);
|
||||
assert_regex_match(&expected_pattern, &command_message);
|
||||
|
||||
Ok(())
|
||||
}
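
The truncation this test expects keeps the first 128 and last 128 lines of the 400-line output and reports the remainder as omitted. A quick sketch of that arithmetic (the 128-line head/tail split is what the assertions above encode; treat it as illustrative rather than a documented constant):

fn main() {
    let total = 400usize;
    let head = 128usize; // lines 1..=128 kept verbatim
    let tail = 128usize; // lines 273..=400 kept verbatim
    let omitted = total - head - tail;
    // Matches the "[... omitted 144 of 400 lines ...]" marker in the expected output.
    assert_eq!(omitted, 144);
    println!("omitted {omitted} of {total} lines");
}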
|
||||
|
||||
@@ -548,7 +548,7 @@ fn warning_event_produces_error_item() {
|
||||
let out = ep.collect_thread_events(&event(
|
||||
"e1",
|
||||
EventMsg::Warning(WarningEvent {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
}),
|
||||
));
|
||||
assert_eq!(
|
||||
@@ -557,7 +557,7 @@ fn warning_event_produces_error_item() {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::Error(ErrorItem {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
}),
|
||||
},
|
||||
})]
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
[package]
|
||||
edition = "2024"
|
||||
name = "codex-protocol-ts"
|
||||
version = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
name = "codex_protocol_ts"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "codex-protocol-ts"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
ts-rs = { workspace = true }
|
||||
@@ -1,10 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd "$(dirname "$0")"/..
|
||||
|
||||
tmpdir=$(mktemp -d)
|
||||
just codex generate-ts --prettier ../node_modules/.bin/prettier --out "$tmpdir"
|
||||
|
||||
echo "wrote output to $tmpdir"
|
||||
@@ -1,135 +0,0 @@
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::export_client_responses;
|
||||
use codex_app_server_protocol::export_server_responses;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use ts_rs::TS;
|
||||
|
||||
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
|
||||
|
||||
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
ensure_dir(out_dir)?;
|
||||
|
||||
// Generate the TS bindings client -> server messages.
|
||||
ClientRequest::export_all_to(out_dir)?;
|
||||
export_client_responses(out_dir)?;
|
||||
ClientNotification::export_all_to(out_dir)?;
|
||||
|
||||
// Generate the TS bindings server -> client messages.
|
||||
ServerRequest::export_all_to(out_dir)?;
|
||||
export_server_responses(out_dir)?;
|
||||
ServerNotification::export_all_to(out_dir)?;
|
||||
|
||||
// Generate index.ts that re-exports all types.
|
||||
generate_index_ts(out_dir)?;
|
||||
|
||||
// Prepend header to each generated .ts file
|
||||
let ts_files = ts_files_in(out_dir)?;
|
||||
for file in &ts_files {
|
||||
prepend_header_if_missing(file)?;
|
||||
}
|
||||
|
||||
// Format with Prettier by passing individual files (no shell globbing)
|
||||
if let Some(prettier_bin) = prettier
|
||||
&& !ts_files.is_empty()
|
||||
{
|
||||
let status = Command::new(prettier_bin)
|
||||
.arg("--write")
|
||||
.arg("--log-level")
|
||||
.arg("warn")
|
||||
.args(ts_files.iter().map(|p| p.as_os_str()))
|
||||
.status()
|
||||
.with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("Prettier failed with status {status}"));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn ensure_dir(dir: &Path) -> Result<()> {
|
||||
fs::create_dir_all(dir)
|
||||
.with_context(|| format!("Failed to create output directory {}", dir.display()))
|
||||
}
|
||||
|
||||
fn prepend_header_if_missing(path: &Path) -> Result<()> {
|
||||
let mut content = String::new();
|
||||
{
|
||||
let mut f = fs::File::open(path)
|
||||
.with_context(|| format!("Failed to open {} for reading", path.display()))?;
|
||||
f.read_to_string(&mut content)
|
||||
.with_context(|| format!("Failed to read {}", path.display()))?;
|
||||
}
|
||||
|
||||
if content.starts_with(HEADER) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut f = fs::File::create(path)
|
||||
.with_context(|| format!("Failed to open {} for writing", path.display()))?;
|
||||
f.write_all(HEADER.as_bytes())
|
||||
.with_context(|| format!("Failed to write header to {}", path.display()))?;
|
||||
f.write_all(content.as_bytes())
|
||||
.with_context(|| format!("Failed to write content to {}", path.display()))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
|
||||
let mut files = Vec::new();
|
||||
for entry in
|
||||
fs::read_dir(dir).with_context(|| format!("Failed to read dir {}", dir.display()))?
|
||||
{
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
files.sort();
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
/// Generate an index.ts file that re-exports all generated types.
|
||||
/// This allows consumers to import all types from a single file.
|
||||
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
|
||||
let mut entries: Vec<String> = Vec::new();
|
||||
let mut stems: Vec<String> = ts_files_in(out_dir)?
|
||||
.into_iter()
|
||||
.filter_map(|p| {
|
||||
let stem = p.file_stem()?.to_string_lossy().into_owned();
|
||||
if stem == "index" { None } else { Some(stem) }
|
||||
})
|
||||
.collect();
|
||||
stems.sort();
|
||||
stems.dedup();
|
||||
|
||||
for name in stems {
|
||||
entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
|
||||
}
|
||||
|
||||
let mut content =
|
||||
String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
|
||||
content.push_str(HEADER);
|
||||
for line in &entries {
|
||||
content.push_str(line);
|
||||
}
|
||||
|
||||
let index_path = out_dir.join("index.ts");
|
||||
let mut f = fs::File::create(&index_path)
|
||||
.with_context(|| format!("Failed to create {}", index_path.display()))?;
|
||||
f.write_all(content.as_bytes())
|
||||
.with_context(|| format!("Failed to write {}", index_path.display()))?;
|
||||
Ok(index_path)
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use clap::Parser;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(about = "Generate TypeScript bindings for the Codex protocol")]
|
||||
struct Args {
|
||||
/// Output directory where .ts files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
|
||||
/// Optional path to the Prettier executable to format generated files
|
||||
#[arg(short = 'p', long = "prettier", value_name = "PRETTIER_BIN")]
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let args = Args::parse();
|
||||
codex_protocol_ts::generate_ts(&args.out_dir, args.prettier.as_deref())
|
||||
}
|
||||
@@ -298,7 +298,7 @@ pub struct ShellToolCallParams {
     pub command: Vec<String>,
     pub workdir: Option<String>,

-    /// Maximum time in milliseconds that the command is allowed to run (defaults to 1_000 ms when omitted).
+    /// This is the maximum time in milliseconds that the command is allowed to run.
     #[serde(alias = "timeout")]
     pub timeout_ms: Option<u64>,
     #[serde(skip_serializing_if = "Option::is_none")]
|
||||
|
||||
@@ -13,7 +13,7 @@ use crate::render::renderable::Renderable;
|
||||
use crate::resume_picker::ResumeSelection;
|
||||
use crate::tui;
|
||||
use crate::tui::TuiEvent;
|
||||
use crate::updates::UpdateAction;
|
||||
use crate::update_action::UpdateAction;
|
||||
use codex_ansi_escape::ansi_escape_line;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ConversationManager;
|
||||
@@ -174,13 +174,14 @@ impl App {
|
||||
skip_world_writable_scan_once: false,
|
||||
};
|
||||
|
||||
// On startup, if Auto mode (workspace-write) is active, warn about world-writable dirs on Windows.
|
||||
// On startup, if Auto mode (workspace-write) or ReadOnly is active, warn about world-writable dirs on Windows.
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let should_check = codex_core::get_platform_sandbox().is_some()
|
||||
&& matches!(
|
||||
app.config.sandbox_policy,
|
||||
codex_core::protocol::SandboxPolicy::WorkspaceWrite { .. }
|
||||
| codex_core::protocol::SandboxPolicy::ReadOnly
|
||||
)
|
||||
&& !app
|
||||
.config
|
||||
@@ -191,7 +192,8 @@ impl App {
|
||||
let cwd = app.config.cwd.clone();
|
||||
let env_map: std::collections::HashMap<String, String> = std::env::vars().collect();
|
||||
let tx = app.app_event_tx.clone();
|
||||
Self::spawn_world_writable_scan(cwd, env_map, tx, false);
|
||||
let logs_base_dir = app.config.codex_home.clone();
|
||||
Self::spawn_world_writable_scan(cwd, env_map, logs_base_dir, tx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,7 +203,7 @@ impl App {
|
||||
tui,
|
||||
AppEvent::InsertHistoryCell(Box::new(UpdateAvailableHistoryCell::new(
|
||||
latest_version,
|
||||
crate::updates::get_update_action(),
|
||||
crate::update_action::get_update_action(),
|
||||
))),
|
||||
)
|
||||
.await?;
|
||||
@@ -385,9 +387,18 @@ impl App {
|
||||
AppEvent::OpenFullAccessConfirmation { preset } => {
|
||||
self.chat_widget.open_full_access_confirmation(preset);
|
||||
}
|
||||
AppEvent::OpenWorldWritableWarningConfirmation { preset } => {
|
||||
self.chat_widget
|
||||
.open_world_writable_warning_confirmation(preset);
|
||||
AppEvent::OpenWorldWritableWarningConfirmation {
|
||||
preset,
|
||||
sample_paths,
|
||||
extra_count,
|
||||
failed_scan,
|
||||
} => {
|
||||
self.chat_widget.open_world_writable_warning_confirmation(
|
||||
preset,
|
||||
sample_paths,
|
||||
extra_count,
|
||||
failed_scan,
|
||||
);
|
||||
}
|
||||
AppEvent::OpenFeedbackNote {
|
||||
category,
|
||||
@@ -448,14 +459,15 @@ impl App {
|
||||
}
|
||||
AppEvent::UpdateSandboxPolicy(policy) => {
|
||||
#[cfg(target_os = "windows")]
|
||||
let policy_is_workspace_write = matches!(
|
||||
let policy_is_workspace_write_or_ro = matches!(
|
||||
policy,
|
||||
codex_core::protocol::SandboxPolicy::WorkspaceWrite { .. }
|
||||
| codex_core::protocol::SandboxPolicy::ReadOnly
|
||||
);
|
||||
|
||||
self.chat_widget.set_sandbox_policy(policy);
|
||||
|
||||
// If sandbox policy becomes workspace-write, run the Windows world-writable scan.
|
||||
// If sandbox policy becomes workspace-write or read-only, run the Windows world-writable scan.
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// One-shot suppression if the user just confirmed continue.
|
||||
@@ -465,14 +477,15 @@ impl App {
|
||||
}
|
||||
|
||||
let should_check = codex_core::get_platform_sandbox().is_some()
|
||||
&& policy_is_workspace_write
|
||||
&& policy_is_workspace_write_or_ro
|
||||
&& !self.chat_widget.world_writable_warning_hidden();
|
||||
if should_check {
|
||||
let cwd = self.config.cwd.clone();
|
||||
let env_map: std::collections::HashMap<String, String> =
|
||||
std::env::vars().collect();
|
||||
let tx = self.app_event_tx.clone();
|
||||
Self::spawn_world_writable_scan(cwd, env_map, tx, false);
|
||||
let logs_base_dir = self.config.codex_home.clone();
|
||||
Self::spawn_world_writable_scan(cwd, env_map, logs_base_dir, tx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -486,6 +499,9 @@ impl App {
|
||||
self.chat_widget
|
||||
.set_world_writable_warning_acknowledged(ack);
|
||||
}
|
||||
AppEvent::UpdateRateLimitSwitchPromptHidden(hidden) => {
|
||||
self.chat_widget.set_rate_limit_switch_prompt_hidden(hidden);
|
||||
}
|
||||
AppEvent::PersistFullAccessWarningAcknowledged => {
|
||||
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
|
||||
.set_hide_full_access_warning(true)
|
||||
@@ -516,6 +532,21 @@ impl App {
|
||||
));
|
||||
}
|
||||
}
|
||||
AppEvent::PersistRateLimitSwitchPromptHidden => {
|
||||
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
|
||||
.set_hide_rate_limit_model_nudge(true)
|
||||
.apply()
|
||||
.await
|
||||
{
|
||||
tracing::error!(
|
||||
error = %err,
|
||||
"failed to persist rate limit switch prompt preference"
|
||||
);
|
||||
self.chat_widget.add_error_message(format!(
|
||||
"Failed to save rate limit reminder preference: {err}"
|
||||
));
|
||||
}
|
||||
}
|
||||
AppEvent::OpenApprovalsPopup => {
|
||||
self.chat_widget.open_approvals_popup();
|
||||
}
|
||||
@@ -624,23 +655,50 @@ impl App {
|
||||
fn spawn_world_writable_scan(
|
||||
cwd: PathBuf,
|
||||
env_map: std::collections::HashMap<String, String>,
|
||||
logs_base_dir: PathBuf,
|
||||
tx: AppEventSender,
|
||||
apply_preset_on_continue: bool,
|
||||
) {
|
||||
#[inline]
|
||||
fn normalize_windows_path_for_display(p: &std::path::Path) -> String {
|
||||
let canon = dunce::canonicalize(p).unwrap_or_else(|_| p.to_path_buf());
|
||||
canon.display().to_string().replace('/', "\\")
|
||||
}
|
||||
tokio::task::spawn_blocking(move || {
|
||||
if codex_windows_sandbox::preflight_audit_everyone_writable(&cwd, &env_map).is_err() {
|
||||
if apply_preset_on_continue {
|
||||
if let Some(preset) = codex_common::approval_presets::builtin_approval_presets()
|
||||
.into_iter()
|
||||
.find(|p| p.id == "auto")
|
||||
{
|
||||
tx.send(AppEvent::OpenWorldWritableWarningConfirmation {
|
||||
preset: Some(preset),
|
||||
});
|
||||
}
|
||||
let result = codex_windows_sandbox::preflight_audit_everyone_writable(
|
||||
&cwd,
|
||||
&env_map,
|
||||
Some(logs_base_dir.as_path()),
|
||||
);
|
||||
if let Ok(ref paths) = result
|
||||
&& !paths.is_empty()
|
||||
{
|
||||
let as_strings: Vec<String> = paths
|
||||
.iter()
|
||||
.map(|p| normalize_windows_path_for_display(p))
|
||||
.collect();
|
||||
let sample_paths: Vec<String> = as_strings.iter().take(3).cloned().collect();
|
||||
let extra_count = if as_strings.len() > sample_paths.len() {
|
||||
as_strings.len() - sample_paths.len()
|
||||
} else {
|
||||
tx.send(AppEvent::OpenWorldWritableWarningConfirmation { preset: None });
|
||||
}
|
||||
0
|
||||
};
|
||||
|
||||
tx.send(AppEvent::OpenWorldWritableWarningConfirmation {
|
||||
preset: None,
|
||||
sample_paths,
|
||||
extra_count,
|
||||
failed_scan: false,
|
||||
});
|
||||
} else if result.is_err() {
|
||||
// Scan failed: still warn, but with no examples and mark as failed.
|
||||
let sample_paths: Vec<String> = Vec::new();
|
||||
let extra_count = 0usize;
|
||||
tx.send(AppEvent::OpenWorldWritableWarningConfirmation {
|
||||
preset: None,
|
||||
sample_paths,
|
||||
extra_count,
|
||||
failed_scan: true,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -79,6 +79,12 @@ pub(crate) enum AppEvent {
|
||||
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
|
||||
OpenWorldWritableWarningConfirmation {
|
||||
preset: Option<ApprovalPreset>,
|
||||
/// Up to 3 sample world-writable directories to display in the warning.
|
||||
sample_paths: Vec<String>,
|
||||
/// If there are more than `sample_paths`, this carries the remaining count.
|
||||
extra_count: usize,
|
||||
/// True when the scan failed (e.g. ACL query error) and protections could not be verified.
|
||||
failed_scan: bool,
|
||||
},
|
||||
|
||||
/// Show Windows Subsystem for Linux setup instructions for auto mode.
|
||||
@@ -98,6 +104,9 @@ pub(crate) enum AppEvent {
|
||||
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
|
||||
UpdateWorldWritableWarningAcknowledged(bool),
|
||||
|
||||
/// Update whether the rate limit switch prompt has been acknowledged for the session.
|
||||
UpdateRateLimitSwitchPromptHidden(bool),
|
||||
|
||||
/// Persist the acknowledgement flag for the full access warning prompt.
|
||||
PersistFullAccessWarningAcknowledged,
|
||||
|
||||
@@ -105,6 +114,9 @@ pub(crate) enum AppEvent {
|
||||
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
|
||||
PersistWorldWritableWarningAcknowledged,
|
||||
|
||||
/// Persist the acknowledgement flag for the rate limit switch prompt.
|
||||
PersistRateLimitSwitchPromptHidden,
|
||||
|
||||
/// Skip the next world-writable scan (one-shot) after a user-confirmed continue.
|
||||
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
|
||||
SkipNextWorldWritableScan,
|
||||
|
||||
@@ -117,7 +117,9 @@ impl ApprovalOverlay {
|
||||
.iter()
|
||||
.map(|opt| SelectionItem {
|
||||
name: opt.label.clone(),
|
||||
display_shortcut: opt.display_shortcut,
|
||||
display_shortcut: opt
|
||||
.display_shortcut
|
||||
.or_else(|| opt.additional_shortcuts.first().copied()),
|
||||
dismiss_on_select: false,
|
||||
..Default::default()
|
||||
})
|
||||
|
||||
@@ -3,6 +3,7 @@ use ratatui::layout::Rect;
// Note: Table-based layout previously used Constraint; the manual renderer
// below no longer requires it.
use ratatui::style::Color;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;
@@ -96,8 +97,9 @@ fn build_full_line(row: &GenericDisplayRow, desc_col: usize) -> Line<'static> {
    let this_name_width = Line::from(name_spans.clone()).width();
    let mut full_spans: Vec<Span> = name_spans;
    if let Some(display_shortcut) = row.display_shortcut {
        full_spans.push(" ".into());
        full_spans.push(" (".into());
        full_spans.push(display_shortcut.into());
        full_spans.push(")".into());
    }
    if let Some(desc) = row.description.as_ref() {
        let gap = desc_col.saturating_sub(this_name_width);
@@ -179,8 +181,9 @@ pub(crate) fn render_rows(
        );
        if Some(i) == state.selected_idx {
            // Match previous behavior: cyan + bold for the selected row.
            // Reset the style first to avoid inheriting dim from keyboard shortcuts.
            full_line.spans.iter_mut().for_each(|span| {
                span.style = span.style.fg(Color::Cyan).bold();
                span.style = Style::default().fg(Color::Cyan).bold();
            });
        }

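The new comment about resetting the style is the substance of this hunk: building on the span's existing style keeps modifiers such as DIM that the shortcut suffix already carries, while starting from `Style::default()` discards them. A minimal sketch of the difference, assuming current ratatui `Style`/`Stylize` APIs:

```rust
use ratatui::style::{Color, Modifier, Style, Stylize};

fn main() {
    // A span style as left behind by the dim "(esc)" shortcut suffix.
    let dim = Style::default().add_modifier(Modifier::DIM);

    // Old behavior: layering cyan + bold on top keeps the DIM modifier.
    let patched = dim.fg(Color::Cyan).bold();
    assert!(patched.add_modifier.contains(Modifier::DIM));

    // New behavior: a fresh Style carries only cyan + bold.
    let reset = Style::default().fg(Color::Cyan).bold();
    assert!(!reset.add_modifier.contains(Modifier::DIM));
}
```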
@@ -522,6 +522,7 @@ impl ChatWidget {
            .unwrap_or(false);

        if high_usage
            && !self.rate_limit_switch_prompt_hidden()
            && self.config.model != NUDGE_MODEL_SLUG
            && !matches!(
                self.rate_limit_switch_prompt,
@@ -1710,7 +1711,18 @@ impl ChatWidget {
            .find(|preset| preset.model == NUDGE_MODEL_SLUG)
    }

    fn rate_limit_switch_prompt_hidden(&self) -> bool {
        self.config
            .notices
            .hide_rate_limit_model_nudge
            .unwrap_or(false)
    }

    fn maybe_show_pending_rate_limit_prompt(&mut self) {
        if self.rate_limit_switch_prompt_hidden() {
            self.rate_limit_switch_prompt = RateLimitSwitchPromptState::Idle;
            return;
        }
        if !matches!(
            self.rate_limit_switch_prompt,
            RateLimitSwitchPromptState::Pending
@@ -1744,6 +1756,10 @@ impl ChatWidget {
        })];

        let keep_actions: Vec<SelectionAction> = Vec::new();
        let never_actions: Vec<SelectionAction> = vec![Box::new(|tx| {
            tx.send(AppEvent::UpdateRateLimitSwitchPromptHidden(true));
            tx.send(AppEvent::PersistRateLimitSwitchPromptHidden);
        })];
        let description = if preset.description.is_empty() {
            Some("Uses fewer credits for upcoming turns.".to_string())
        } else {
@@ -1769,6 +1785,17 @@ impl ChatWidget {
                dismiss_on_select: true,
                ..Default::default()
            },
            SelectionItem {
                name: "Keep current model (never show again)".to_string(),
                description: Some(
                    "Hide future rate limit reminders about switching models.".to_string(),
                ),
                selected_description: None,
                is_current: false,
                actions: never_actions,
                dismiss_on_select: true,
                ..Default::default()
            },
        ];

        self.bottom_pane.show_selection_view(SelectionViewParams {
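Taken together, the checks above gate the nudge on four conditions. Distilled into a standalone predicate (names here are descriptive stand-ins, not code from the patch):

```rust
/// Illustrative only: when the "switch model" nudge should be offered.
fn should_offer_model_nudge(
    high_usage: bool,    // rate-limit usage crossed the threshold
    nudge_hidden: bool,  // notices.hide_rate_limit_model_nudge == Some(true)
    current_model: &str,
    nudge_model: &str,   // NUDGE_MODEL_SLUG
    already_shown: bool, // prompt state is no longer eligible
) -> bool {
    high_usage && !nudge_hidden && current_model != nudge_model && !already_shown
}
```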
@@ -2045,9 +2072,48 @@ impl ChatWidget {
            && self.windows_world_writable_flagged()
        {
            let preset_clone = preset.clone();
            // Compute sample paths for the warning popup.
            let mut env_map: std::collections::HashMap<String, String> =
                std::collections::HashMap::new();
            for (k, v) in std::env::vars() {
                env_map.insert(k, v);
            }
            let (sample_paths, extra_count, failed_scan) =
                match codex_windows_sandbox::preflight_audit_everyone_writable(
                    &self.config.cwd,
                    &env_map,
                    Some(self.config.codex_home.as_path()),
                ) {
                    Ok(paths) if !paths.is_empty() => {
                        fn normalize_windows_path_for_display(
                            p: &std::path::Path,
                        ) -> String {
                            let canon = dunce::canonicalize(p)
                                .unwrap_or_else(|_| p.to_path_buf());
                            canon.display().to_string().replace('/', "\\")
                        }
                        let as_strings: Vec<String> = paths
                            .iter()
                            .map(|p| normalize_windows_path_for_display(p))
                            .collect();
                        let samples: Vec<String> =
                            as_strings.iter().take(3).cloned().collect();
                        let extra = if as_strings.len() > samples.len() {
                            as_strings.len() - samples.len()
                        } else {
                            0
                        };
                        (samples, extra, false)
                    }
                    Err(_) => (Vec::new(), 0, true),
                    _ => (Vec::new(), 0, false),
                };
            vec![Box::new(move |tx| {
                tx.send(AppEvent::OpenWorldWritableWarningConfirmation {
                    preset: Some(preset_clone.clone()),
                    sample_paths: sample_paths.clone(),
                    extra_count,
                    failed_scan,
                });
            })]
        } else {
@@ -2106,8 +2172,12 @@ impl ChatWidget {
        for (k, v) in std::env::vars() {
            env_map.insert(k, v);
        }
        match codex_windows_sandbox::preflight_audit_everyone_writable(&self.config.cwd, &env_map) {
            Ok(()) => false,
        match codex_windows_sandbox::preflight_audit_everyone_writable(
            &self.config.cwd,
            &env_map,
            Some(self.config.codex_home.as_path()),
        ) {
            Ok(paths) => !paths.is_empty(),
            Err(_) => true,
        }
    }
@@ -2180,31 +2250,66 @@ impl ChatWidget {
|
||||
pub(crate) fn open_world_writable_warning_confirmation(
|
||||
&mut self,
|
||||
preset: Option<ApprovalPreset>,
|
||||
sample_paths: Vec<String>,
|
||||
extra_count: usize,
|
||||
failed_scan: bool,
|
||||
) {
|
||||
let (approval, sandbox) = match &preset {
|
||||
Some(p) => (Some(p.approval), Some(p.sandbox.clone())),
|
||||
None => (None, None),
|
||||
};
|
||||
let mut header_children: Vec<Box<dyn Renderable>> = Vec::new();
|
||||
let title_line = Line::from("Auto mode has unprotected directories").bold();
|
||||
let info_line = Line::from(vec![
|
||||
"Some important directories on this system are world-writable. ".into(),
|
||||
"The Windows sandbox cannot protect writes to these locations in Auto mode."
|
||||
let mode_label = match self.config.sandbox_policy {
|
||||
SandboxPolicy::WorkspaceWrite { .. } => "Auto mode",
|
||||
SandboxPolicy::ReadOnly => "Read-Only mode",
|
||||
_ => "Auto mode",
|
||||
};
|
||||
let title_line = Line::from("Unprotected directories found").bold();
|
||||
let info_line = if failed_scan {
|
||||
Line::from(vec![
|
||||
"We couldn't complete the world-writable scan, so protections cannot be verified. "
|
||||
.into(),
|
||||
format!("The Windows sandbox cannot guarantee protection in {mode_label}.")
|
||||
.fg(Color::Red),
|
||||
])
|
||||
} else {
|
||||
Line::from(vec![
|
||||
"Some important directories on this system are world-writable. ".into(),
|
||||
format!(
|
||||
"The Windows sandbox cannot protect writes to these locations in {mode_label}."
|
||||
)
|
||||
.fg(Color::Red),
|
||||
]);
|
||||
])
|
||||
};
|
||||
header_children.push(Box::new(title_line));
|
||||
header_children.push(Box::new(
|
||||
Paragraph::new(vec![info_line]).wrap(Wrap { trim: false }),
|
||||
));
|
||||
|
||||
if !sample_paths.is_empty() {
|
||||
// Show up to three examples and optionally an "and X more" line.
|
||||
let mut lines: Vec<Line> = Vec::new();
|
||||
lines.push(Line::from("Examples:").bold());
|
||||
for p in &sample_paths {
|
||||
lines.push(Line::from(format!(" - {p}")));
|
||||
}
|
||||
if extra_count > 0 {
|
||||
lines.push(Line::from(format!("and {extra_count} more")));
|
||||
}
|
||||
header_children.push(Box::new(Paragraph::new(lines).wrap(Wrap { trim: false })));
|
||||
}
|
||||
let header = ColumnRenderable::with(header_children);
|
||||
|
||||
// Build actions ensuring acknowledgement happens before applying the new sandbox policy,
|
||||
// so downstream policy-change hooks don't re-trigger the warning.
|
||||
let mut accept_actions: Vec<SelectionAction> = Vec::new();
|
||||
// Suppress the immediate re-scan once after user confirms continue.
|
||||
accept_actions.push(Box::new(|tx| {
|
||||
tx.send(AppEvent::SkipNextWorldWritableScan);
|
||||
}));
|
||||
// Suppress the immediate re-scan only when a preset will be applied (i.e., via /approvals),
|
||||
// to avoid duplicate warnings from the ensuing policy change.
|
||||
if preset.is_some() {
|
||||
accept_actions.push(Box::new(|tx| {
|
||||
tx.send(AppEvent::SkipNextWorldWritableScan);
|
||||
}));
|
||||
}
|
||||
if let (Some(approval), Some(sandbox)) = (approval, sandbox.clone()) {
|
||||
accept_actions.extend(Self::approval_preset_actions(approval, sandbox));
|
||||
}
|
||||
@@ -2218,36 +2323,21 @@ impl ChatWidget {
|
||||
accept_and_remember_actions.extend(Self::approval_preset_actions(approval, sandbox));
|
||||
}
|
||||
|
||||
let deny_actions: Vec<SelectionAction> = if preset.is_some() {
|
||||
vec![Box::new(|tx| {
|
||||
tx.send(AppEvent::OpenApprovalsPopup);
|
||||
})]
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let items = vec![
|
||||
SelectionItem {
|
||||
name: "Continue".to_string(),
|
||||
description: Some("Apply Auto mode for this session".to_string()),
|
||||
description: Some(format!("Apply {mode_label} for this session")),
|
||||
actions: accept_actions,
|
||||
dismiss_on_select: true,
|
||||
..Default::default()
|
||||
},
|
||||
SelectionItem {
|
||||
name: "Continue and don't warn again".to_string(),
|
||||
description: Some("Enable Auto mode and remember this choice".to_string()),
|
||||
description: Some(format!("Enable {mode_label} and remember this choice")),
|
||||
actions: accept_and_remember_actions,
|
||||
dismiss_on_select: true,
|
||||
..Default::default()
|
||||
},
|
||||
SelectionItem {
|
||||
name: "Cancel".to_string(),
|
||||
description: Some("Go back without enabling Auto mode".to_string()),
|
||||
actions: deny_actions,
|
||||
dismiss_on_select: true,
|
||||
..Default::default()
|
||||
},
|
||||
];
|
||||
|
||||
self.bottom_pane.show_selection_view(SelectionViewParams {
|
||||
@@ -2262,6 +2352,9 @@ impl ChatWidget {
|
||||
pub(crate) fn open_world_writable_warning_confirmation(
|
||||
&mut self,
|
||||
_preset: Option<ApprovalPreset>,
|
||||
_sample_paths: Vec<String>,
|
||||
_extra_count: usize,
|
||||
_failed_scan: bool,
|
||||
) {
|
||||
}
|
||||
|
||||
@@ -2320,6 +2413,13 @@ impl ChatWidget {
|
||||
self.config.notices.hide_world_writable_warning = Some(acknowledged);
|
||||
}
|
||||
|
||||
pub(crate) fn set_rate_limit_switch_prompt_hidden(&mut self, hidden: bool) {
|
||||
self.config.notices.hide_rate_limit_model_nudge = Some(hidden);
|
||||
if hidden {
|
||||
self.rate_limit_switch_prompt = RateLimitSwitchPromptState::Idle;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
|
||||
pub(crate) fn world_writable_warning_hidden(&self) -> bool {
|
||||
self.config
|
||||
|
||||
@@ -9,8 +9,8 @@ expression: terminal.backend().vt100().screen().contents()
|
||||
|
||||
$ echo hello world
|
||||
|
||||
› 1. Yes, proceed
|
||||
2. Yes, and don't ask again for this command
|
||||
3. No, and tell Codex what to do differently esc
|
||||
› 1. Yes, proceed (y)
|
||||
2. Yes, and don't ask again for this command (a)
|
||||
3. No, and tell Codex what to do differently (esc)
|
||||
|
||||
Press enter to confirm or esc to cancel
|
||||
|
||||
@@ -6,8 +6,8 @@ expression: terminal.backend().vt100().screen().contents()
|
||||
|
||||
$ echo hello world
|
||||
|
||||
› 1. Yes, proceed
|
||||
2. Yes, and don't ask again for this command
|
||||
3. No, and tell Codex what to do differently esc
|
||||
› 1. Yes, proceed (y)
|
||||
2. Yes, and don't ask again for this command (a)
|
||||
3. No, and tell Codex what to do differently (esc)
|
||||
|
||||
Press enter to confirm or esc to cancel
|
||||
|
||||
@@ -11,7 +11,7 @@ expression: terminal.backend().vt100().screen().contents()
|
||||
1 +hello
|
||||
2 +world
|
||||
|
||||
› 1. Yes, proceed
|
||||
2. No, and tell Codex what to do differently esc
|
||||
› 1. Yes, proceed (y)
|
||||
2. No, and tell Codex what to do differently (esc)
|
||||
|
||||
Press enter to confirm or esc to cancel
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
---
|
||||
source: tui/src/chatwidget/tests.rs
|
||||
assertion_line: 409
|
||||
expression: "format!(\"{buf:?}\")"
|
||||
---
|
||||
Buffer {
|
||||
@@ -15,9 +14,9 @@ Buffer {
|
||||
" ",
|
||||
" $ echo hello world ",
|
||||
" ",
|
||||
"› 1. Yes, proceed ",
|
||||
" 2. Yes, and don't ask again for this command ",
|
||||
" 3. No, and tell Codex what to do differently esc ",
|
||||
"› 1. Yes, proceed (y) ",
|
||||
" 2. Yes, and don't ask again for this command (a) ",
|
||||
" 3. No, and tell Codex what to do differently (esc) ",
|
||||
" ",
|
||||
" Press enter to confirm or esc to cancel ",
|
||||
],
|
||||
@@ -30,9 +29,11 @@ Buffer {
|
||||
x: 2, y: 5, fg: Reset, bg: Reset, underline: Reset, modifier: ITALIC,
|
||||
x: 7, y: 5, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 0, y: 9, fg: Cyan, bg: Reset, underline: Reset, modifier: BOLD,
|
||||
x: 17, y: 9, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 47, y: 11, fg: Reset, bg: Reset, underline: Reset, modifier: DIM,
|
||||
x: 50, y: 11, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 21, y: 9, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 48, y: 10, fg: Reset, bg: Reset, underline: Reset, modifier: DIM,
|
||||
x: 49, y: 10, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 48, y: 11, fg: Reset, bg: Reset, underline: Reset, modifier: DIM,
|
||||
x: 51, y: 11, fg: Reset, bg: Reset, underline: Reset, modifier: NONE,
|
||||
x: 2, y: 13, fg: Reset, bg: Reset, underline: Reset, modifier: DIM,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
---
|
||||
source: tui/src/chatwidget/tests.rs
|
||||
assertion_line: 500
|
||||
expression: popup
|
||||
---
|
||||
Approaching rate limits
|
||||
Switch to gpt-5-codex-mini for lower credit usage?
|
||||
|
||||
› 1. Switch to gpt-5-codex-mini Optimized for codex. Cheaper, faster, but
|
||||
less capable.
|
||||
› 1. Switch to gpt-5-codex-mini Optimized for codex. Cheaper,
|
||||
faster, but less capable.
|
||||
2. Keep current model
|
||||
3. Keep current model (never show again) Hide future rate limit reminders
|
||||
about switching models.
|
||||
|
||||
Press enter to confirm or esc to go back
|
||||
|
||||
@@ -12,8 +12,8 @@ expression: terminal.backend()
|
||||
" "
|
||||
" $ echo 'hello world' "
|
||||
" "
|
||||
"› 1. Yes, proceed "
|
||||
" 2. Yes, and don't ask again for this command "
|
||||
" 3. No, and tell Codex what to do differently esc "
|
||||
"› 1. Yes, proceed (y) "
|
||||
" 2. Yes, and don't ask again for this command (a) "
|
||||
" 3. No, and tell Codex what to do differently (esc) "
|
||||
" "
|
||||
" Press enter to confirm or esc to cancel "
|
||||
|
||||
@@ -60,7 +60,7 @@ use tempfile::tempdir;
use tokio::sync::mpsc::error::TryRecvError;
use tokio::sync::mpsc::unbounded_channel;

const TEST_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.";
const TEST_WARNING_MESSAGE: &str = "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.";

fn test_config() -> Config {
    // Use base defaults to avoid depending on host state.
@@ -448,6 +448,22 @@ fn rate_limit_switch_prompt_shows_once_per_session() {
    ));
}

#[test]
fn rate_limit_switch_prompt_respects_hidden_notice() {
    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
    let (mut chat, _, _) = make_chatwidget_manual();
    chat.config.model = "gpt-5".to_string();
    chat.auth_manager = AuthManager::from_auth_for_testing(auth);
    chat.config.notices.hide_rate_limit_model_nudge = Some(true);

    chat.on_rate_limit_snapshot(Some(snapshot(95.0)));

    assert!(matches!(
        chat.rate_limit_switch_prompt,
        RateLimitSwitchPromptState::Idle
    ));
}

#[test]
fn rate_limit_switch_prompt_defers_until_task_complete() {
    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();

@@ -317,7 +317,13 @@ impl ExecCell {
            Some(false) => "•".red().bold(),
            None => spinner(call.start_time),
        };
        let title = if self.is_active() { "Running" } else { "Ran" };
        let title = if self.is_active() {
            "Running"
        } else if call.is_user_shell_command {
            "You ran"
        } else {
            "Ran"
        };

        let mut header_line =
            Line::from(vec![bullet.clone(), " ".into(), title.bold(), " ".into()]);

@@ -16,7 +16,7 @@ use crate::style::user_message_style;
use crate::text_formatting::format_and_truncate_tool_result;
use crate::text_formatting::truncate_text;
use crate::ui_consts::LIVE_PREFIX_COLS;
use crate::updates::UpdateAction;
use crate::update_action::UpdateAction;
use crate::version::CODEX_CLI_VERSION;
use crate::wrapping::RtOptions;
use crate::wrapping::word_wrap_line;

@@ -71,8 +71,9 @@ mod terminal_palette;
mod text_formatting;
mod tui;
mod ui_consts;
pub mod update_action;
mod update_prompt;
pub mod updates;
mod updates;
mod version;

mod wrapping;
@@ -353,6 +354,16 @@ async fn run_ratatui_app(
        &mut tui,
    )
    .await?;
    if onboarding_result.should_exit {
        restore();
        session_log::log_session_end();
        let _ = tui.terminal.clear();
        return Ok(AppExitInfo {
            token_usage: codex_core::protocol::TokenUsage::default(),
            conversation_id: None,
            update_action: None,
        });
    }
    if onboarding_result.windows_install_selected {
        restore();
        session_log::log_session_end();

@@ -57,6 +57,7 @@ pub(crate) struct OnboardingScreen {
|
||||
steps: Vec<Step>,
|
||||
is_done: bool,
|
||||
windows_install_selected: bool,
|
||||
should_exit: bool,
|
||||
}
|
||||
|
||||
pub(crate) struct OnboardingScreenArgs {
|
||||
@@ -71,6 +72,7 @@ pub(crate) struct OnboardingScreenArgs {
|
||||
pub(crate) struct OnboardingResult {
|
||||
pub directory_trust_decision: Option<TrustDirectorySelection>,
|
||||
pub windows_install_selected: bool,
|
||||
pub should_exit: bool,
|
||||
}
|
||||
|
||||
impl OnboardingScreen {
|
||||
@@ -137,6 +139,7 @@ impl OnboardingScreen {
|
||||
steps,
|
||||
is_done: false,
|
||||
windows_install_selected: false,
|
||||
should_exit: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -200,6 +203,10 @@ impl OnboardingScreen {
|
||||
pub fn windows_install_selected(&self) -> bool {
|
||||
self.windows_install_selected
|
||||
}
|
||||
|
||||
pub fn should_exit(&self) -> bool {
|
||||
self.should_exit
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyboardHandler for OnboardingScreen {
|
||||
@@ -222,9 +229,12 @@ impl KeyboardHandler for OnboardingScreen {
|
||||
kind: KeyEventKind::Press,
|
||||
..
|
||||
} => {
|
||||
if !self.is_auth_in_progress() {
|
||||
self.is_done = true;
|
||||
if self.is_auth_in_progress() {
|
||||
// If the user cancels the auth menu, exit the app rather than
|
||||
// leave the user at a prompt in an unauthed state.
|
||||
self.should_exit = true;
|
||||
}
|
||||
self.is_done = true;
|
||||
}
|
||||
_ => {
|
||||
if let Some(Step::Welcome(widget)) = self
|
||||
@@ -442,5 +452,6 @@ pub(crate) async fn run_onboarding_app(
|
||||
Ok(OnboardingResult {
|
||||
directory_trust_decision: onboarding_screen.directory_trust_decision(),
|
||||
windows_install_selected: onboarding_screen.windows_install_selected(),
|
||||
should_exit: onboarding_screen.should_exit(),
|
||||
})
|
||||
}
|
||||
|
||||
101
codex-rs/tui/src/update_action.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
/// Update action the CLI should perform after the TUI exits.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum UpdateAction {
|
||||
/// Update via `npm install -g @openai/codex@latest`.
|
||||
NpmGlobalLatest,
|
||||
/// Update via `bun install -g @openai/codex@latest`.
|
||||
BunGlobalLatest,
|
||||
/// Update via `brew upgrade codex`.
|
||||
BrewUpgrade,
|
||||
}
|
||||
|
||||
impl UpdateAction {
|
||||
/// Returns the list of command-line arguments for invoking the update.
|
||||
pub fn command_args(self) -> (&'static str, &'static [&'static str]) {
|
||||
match self {
|
||||
UpdateAction::NpmGlobalLatest => ("npm", &["install", "-g", "@openai/codex"]),
|
||||
UpdateAction::BunGlobalLatest => ("bun", &["install", "-g", "@openai/codex"]),
|
||||
UpdateAction::BrewUpgrade => ("brew", &["upgrade", "codex"]),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns string representation of the command-line arguments for invoking the update.
|
||||
pub fn command_str(self) -> String {
|
||||
let (command, args) = self.command_args();
|
||||
shlex::try_join(std::iter::once(command).chain(args.iter().copied()))
|
||||
.unwrap_or_else(|_| format!("{command} {}", args.join(" ")))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
pub(crate) fn get_update_action() -> Option<UpdateAction> {
|
||||
let exe = std::env::current_exe().unwrap_or_default();
|
||||
let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();
|
||||
let managed_by_bun = std::env::var_os("CODEX_MANAGED_BY_BUN").is_some();
|
||||
|
||||
detect_update_action(
|
||||
cfg!(target_os = "macos"),
|
||||
&exe,
|
||||
managed_by_npm,
|
||||
managed_by_bun,
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(any(not(debug_assertions), test))]
|
||||
fn detect_update_action(
|
||||
is_macos: bool,
|
||||
current_exe: &std::path::Path,
|
||||
managed_by_npm: bool,
|
||||
managed_by_bun: bool,
|
||||
) -> Option<UpdateAction> {
|
||||
if managed_by_npm {
|
||||
Some(UpdateAction::NpmGlobalLatest)
|
||||
} else if managed_by_bun {
|
||||
Some(UpdateAction::BunGlobalLatest)
|
||||
} else if is_macos
|
||||
&& (current_exe.starts_with("/opt/homebrew") || current_exe.starts_with("/usr/local"))
|
||||
{
|
||||
Some(UpdateAction::BrewUpgrade)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn detects_update_action_without_env_mutation() {
|
||||
assert_eq!(
|
||||
detect_update_action(false, std::path::Path::new("/any/path"), false, false),
|
||||
None
|
||||
);
|
||||
assert_eq!(
|
||||
detect_update_action(false, std::path::Path::new("/any/path"), true, false),
|
||||
Some(UpdateAction::NpmGlobalLatest)
|
||||
);
|
||||
assert_eq!(
|
||||
detect_update_action(false, std::path::Path::new("/any/path"), false, true),
|
||||
Some(UpdateAction::BunGlobalLatest)
|
||||
);
|
||||
assert_eq!(
|
||||
detect_update_action(
|
||||
true,
|
||||
std::path::Path::new("/opt/homebrew/bin/codex"),
|
||||
false,
|
||||
false
|
||||
),
|
||||
Some(UpdateAction::BrewUpgrade)
|
||||
);
|
||||
assert_eq!(
|
||||
detect_update_action(
|
||||
true,
|
||||
std::path::Path::new("/usr/local/bin/codex"),
|
||||
false,
|
||||
false
|
||||
),
|
||||
Some(UpdateAction::BrewUpgrade)
|
||||
);
|
||||
}
|
||||
}
|
||||
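How the CLI consumes the returned action after the TUI exits is outside this diff; below is a minimal sketch of one plausible caller, using `command_args()` with `std::process::Command`. The wiring is an assumption for illustration, not code from the patch.

```rust
use std::process::Command;

/// Hypothetical caller: run the suggested upgrade once the TUI has exited.
fn run_update(action: UpdateAction) -> std::io::Result<()> {
    let (program, args) = action.command_args();
    let status = Command::new(program).args(args).status()?;
    if !status.success() {
        eprintln!("`{}` exited with {status}", action.command_str());
    }
    Ok(())
}
```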
@@ -10,8 +10,8 @@ use crate::selection_list::selection_option_row;
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use crate::update_action::UpdateAction;
use crate::updates;
use crate::updates::UpdateAction;
use codex_core::config::Config;
use color_eyre::Result;
use crossterm::event::KeyCode;
@@ -39,7 +39,7 @@ pub(crate) async fn run_update_prompt_if_needed(
    let Some(latest_version) = updates::get_upgrade_version_for_popup(config) else {
        return Ok(UpdatePromptOutcome::Continue);
    };
    let Some(update_action) = crate::updates::get_update_action() else {
    let Some(update_action) = crate::update_action::get_update_action() else {
        return Ok(UpdatePromptOutcome::Continue);
    };

@@ -1,14 +1,17 @@
|
||||
#![cfg(not(debug_assertions))]
|
||||
|
||||
use crate::update_action;
|
||||
use crate::update_action::UpdateAction;
|
||||
use chrono::DateTime;
|
||||
use chrono::Duration;
|
||||
use chrono::Utc;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::default_client::create_client;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::default_client::create_client;
|
||||
|
||||
use crate::version::CODEX_CLI_VERSION;
|
||||
|
||||
pub fn get_upgrade_version(config: &Config) -> Option<String> {
|
||||
@@ -47,14 +50,17 @@ struct VersionInfo {
|
||||
dismissed_version: Option<String>,
|
||||
}
|
||||
|
||||
const VERSION_FILENAME: &str = "version.json";
|
||||
// We use the latest version from the cask if installation is via homebrew - homebrew does not immediately pick up the latest release and can lag behind.
|
||||
const HOMEBREW_CASK_URL: &str =
|
||||
"https://raw.githubusercontent.com/Homebrew/homebrew-cask/HEAD/Casks/c/codex.rb";
|
||||
const LATEST_RELEASE_URL: &str = "https://api.github.com/repos/openai/codex/releases/latest";
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
struct ReleaseInfo {
|
||||
tag_name: String,
|
||||
}
|
||||
|
||||
const VERSION_FILENAME: &str = "version.json";
|
||||
const LATEST_RELEASE_URL: &str = "https://api.github.com/repos/openai/codex/releases/latest";
|
||||
|
||||
fn version_filepath(config: &Config) -> PathBuf {
|
||||
config.codex_home.join(VERSION_FILENAME)
|
||||
}
|
||||
@@ -65,23 +71,35 @@ fn read_version_info(version_file: &Path) -> anyhow::Result<VersionInfo> {
|
||||
}
|
||||
|
||||
async fn check_for_update(version_file: &Path) -> anyhow::Result<()> {
|
||||
let ReleaseInfo {
|
||||
tag_name: latest_tag_name,
|
||||
} = create_client()
|
||||
.get(LATEST_RELEASE_URL)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<ReleaseInfo>()
|
||||
.await?;
|
||||
let latest_version = match update_action::get_update_action() {
|
||||
Some(UpdateAction::BrewUpgrade) => {
|
||||
let cask_contents = create_client()
|
||||
.get(HOMEBREW_CASK_URL)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.text()
|
||||
.await?;
|
||||
extract_version_from_cask(&cask_contents)?
|
||||
}
|
||||
_ => {
|
||||
let ReleaseInfo {
|
||||
tag_name: latest_tag_name,
|
||||
} = create_client()
|
||||
.get(LATEST_RELEASE_URL)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<ReleaseInfo>()
|
||||
.await?;
|
||||
extract_version_from_latest_tag(&latest_tag_name)?
|
||||
}
|
||||
};
|
||||
|
||||
// Preserve any previously dismissed version if present.
|
||||
let prev_info = read_version_info(version_file).ok();
|
||||
let info = VersionInfo {
|
||||
latest_version: latest_tag_name
|
||||
.strip_prefix("rust-v")
|
||||
.ok_or_else(|| anyhow::anyhow!("Failed to parse latest tag name '{latest_tag_name}'"))?
|
||||
.into(),
|
||||
latest_version,
|
||||
last_checked_at: Utc::now(),
|
||||
dismissed_version: prev_info.and_then(|p| p.dismissed_version),
|
||||
};
|
||||
@@ -101,6 +119,25 @@ fn is_newer(latest: &str, current: &str) -> Option<bool> {
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_version_from_cask(cask_contents: &str) -> anyhow::Result<String> {
|
||||
cask_contents
|
||||
.lines()
|
||||
.find_map(|line| {
|
||||
let line = line.trim();
|
||||
line.strip_prefix("version \"")
|
||||
.and_then(|rest| rest.strip_suffix('"'))
|
||||
.map(ToString::to_string)
|
||||
})
|
||||
.ok_or_else(|| anyhow::anyhow!("Failed to find version in Homebrew cask file"))
|
||||
}
|
||||
|
||||
fn extract_version_from_latest_tag(latest_tag_name: &str) -> anyhow::Result<String> {
|
||||
latest_tag_name
|
||||
.strip_prefix("rust-v")
|
||||
.map(str::to_owned)
|
||||
.ok_or_else(|| anyhow::anyhow!("Failed to parse latest tag name '{latest_tag_name}'"))
|
||||
}
|
||||
|
||||
/// Returns the latest version to show in a popup, if it should be shown.
|
||||
/// This respects the user's dismissal choice for the current latest version.
|
||||
pub fn get_upgrade_version_for_popup(config: &Config) -> Option<String> {
|
||||
@@ -140,57 +177,36 @@ fn parse_version(v: &str) -> Option<(u64, u64, u64)> {
|
||||
Some((maj, min, pat))
|
||||
}
|
||||
|
||||
/// Update action the CLI should perform after the TUI exits.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum UpdateAction {
|
||||
/// Update via `npm install -g @openai/codex@latest`.
|
||||
NpmGlobalLatest,
|
||||
/// Update via `bun install -g @openai/codex@latest`.
|
||||
BunGlobalLatest,
|
||||
/// Update via `brew upgrade codex`.
|
||||
BrewUpgrade,
|
||||
}
|
||||
|
||||
#[cfg(any(not(debug_assertions), test))]
|
||||
pub(crate) fn get_update_action() -> Option<UpdateAction> {
|
||||
let exe = std::env::current_exe().unwrap_or_default();
|
||||
let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();
|
||||
let managed_by_bun = std::env::var_os("CODEX_MANAGED_BY_BUN").is_some();
|
||||
if managed_by_npm {
|
||||
Some(UpdateAction::NpmGlobalLatest)
|
||||
} else if managed_by_bun {
|
||||
Some(UpdateAction::BunGlobalLatest)
|
||||
} else if cfg!(target_os = "macos")
|
||||
&& (exe.starts_with("/opt/homebrew") || exe.starts_with("/usr/local"))
|
||||
{
|
||||
Some(UpdateAction::BrewUpgrade)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl UpdateAction {
|
||||
/// Returns the list of command-line arguments for invoking the update.
|
||||
pub fn command_args(self) -> (&'static str, &'static [&'static str]) {
|
||||
match self {
|
||||
UpdateAction::NpmGlobalLatest => ("npm", &["install", "-g", "@openai/codex@latest"]),
|
||||
UpdateAction::BunGlobalLatest => ("bun", &["install", "-g", "@openai/codex@latest"]),
|
||||
UpdateAction::BrewUpgrade => ("brew", &["upgrade", "--cask", "codex"]),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns string representation of the command-line arguments for invoking the update.
|
||||
pub fn command_str(self) -> String {
|
||||
let (command, args) = self.command_args();
|
||||
let args_str = args.join(" ");
|
||||
format!("{command} {args_str}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parses_version_from_cask_contents() {
|
||||
let cask = r#"
|
||||
cask "codex" do
|
||||
version "0.55.0"
|
||||
end
|
||||
"#;
|
||||
assert_eq!(
|
||||
extract_version_from_cask(cask).expect("failed to parse version"),
|
||||
"0.55.0"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extracts_version_from_latest_tag() {
|
||||
assert_eq!(
|
||||
extract_version_from_latest_tag("rust-v1.5.0").expect("failed to parse version"),
|
||||
"1.5.0"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn latest_tag_without_prefix_is_invalid() {
|
||||
assert!(extract_version_from_latest_tag("v1.5.0").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prerelease_version_is_not_considered_newer() {
|
||||
assert_eq!(is_newer("0.11.0-beta.1", "0.11.0"), None);
|
||||
@@ -210,24 +226,4 @@ mod tests {
|
||||
assert_eq!(parse_version(" 1.2.3 \n"), Some((1, 2, 3)));
|
||||
assert_eq!(is_newer(" 1.2.3 ", "1.2.2"), Some(true));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_update_action() {
|
||||
let prev = std::env::var_os("CODEX_MANAGED_BY_NPM");
|
||||
|
||||
// First: no npm var -> expect None (we do not run from brew in CI)
|
||||
unsafe { std::env::remove_var("CODEX_MANAGED_BY_NPM") };
|
||||
assert_eq!(get_update_action(), None);
|
||||
|
||||
// Then: with npm var -> expect NpmGlobalLatest
|
||||
unsafe { std::env::set_var("CODEX_MANAGED_BY_NPM", "1") };
|
||||
assert_eq!(get_update_action(), Some(UpdateAction::NpmGlobalLatest));
|
||||
|
||||
// Restore prior value to avoid leaking state
|
||||
if let Some(v) = prev {
|
||||
unsafe { std::env::set_var("CODEX_MANAGED_BY_NPM", v) };
|
||||
} else {
|
||||
unsafe { std::env::remove_var("CODEX_MANAGED_BY_NPM") };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
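For context on the surrounding tests: `parse_version` accepts only a plain `major.minor.patch` triple, so a pre-release suffix makes the comparison undecidable and `is_newer` yields `None`. A rough stand-in for the two helpers (simplified; the real implementations are not fully shown in this hunk):

```rust
// Simplified stand-ins for parse_version / is_newer, matching the tests above.
fn parse_version(v: &str) -> Option<(u64, u64, u64)> {
    let mut parts = v.trim().split('.');
    let maj = parts.next()?.parse().ok()?;
    let min = parts.next()?.parse().ok()?;
    let pat = parts.next()?.parse().ok()?; // "0-beta" fails to parse -> None
    parts.next().is_none().then_some((maj, min, pat))
}

fn is_newer(latest: &str, current: &str) -> Option<bool> {
    Some(parse_version(latest)? > parse_version(current)?)
}
```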
@@ -11,6 +11,7 @@ path = "src/lib.rs"
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
dunce = "1.0"
[dependencies.rand]
version = "0.8"
default-features = false

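The new `dunce` dependency backs the `dunce::canonicalize` calls used elsewhere in this change (path de-duplication and display): unlike `std::fs::canonicalize`, it yields a plain `C:\...` path on Windows rather than a `\\?\C:\...` verbatim path, which would read poorly in the warning popup. A small illustrative sketch:

```rust
use std::path::Path;

/// Canonicalize for display; fall back to the original path on error.
fn display_path(p: &Path) -> String {
    // dunce avoids the `\\?\` verbatim prefix that std::fs::canonicalize
    // produces on Windows.
    dunce::canonicalize(p)
        .unwrap_or_else(|_| p.to_path_buf())
        .display()
        .to_string()
}
```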
@@ -1,6 +1,5 @@
|
||||
use crate::token::world_sid;
|
||||
use crate::winutil::to_wide;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::c_void;
|
||||
@@ -38,6 +37,22 @@ use windows_sys::Win32::Security::ACCESS_ALLOWED_ACE;
|
||||
use windows_sys::Win32::Security::ACE_HEADER;
|
||||
use windows_sys::Win32::Security::EqualSid;
|
||||
|
||||
// Preflight scan limits
|
||||
const MAX_ITEMS_PER_DIR: i32 = 1000;
|
||||
const AUDIT_TIME_LIMIT_SECS: i64 = 2;
|
||||
const MAX_CHECKED_LIMIT: i32 = 50000;
|
||||
// Case-insensitive suffixes (normalized to forward slashes) to skip during one-level child scan
|
||||
const SKIP_DIR_SUFFIXES: &[&str] = &[
|
||||
"/windows/installer",
|
||||
"/windows/registration",
|
||||
"/programdata",
|
||||
];
|
||||
|
||||
fn normalize_path_key(p: &Path) -> String {
|
||||
let n = dunce::canonicalize(p).unwrap_or_else(|_| p.to_path_buf());
|
||||
n.to_string_lossy().replace('\\', "/").to_ascii_lowercase()
|
||||
}
|
||||
|
||||
fn unique_push(set: &mut HashSet<PathBuf>, out: &mut Vec<PathBuf>, p: PathBuf) {
|
||||
if let Ok(abs) = p.canonicalize() {
|
||||
if set.insert(abs.clone()) {
|
||||
@@ -77,11 +92,7 @@ fn gather_candidates(cwd: &Path, env: &std::collections::HashMap<String, String>
|
||||
}
|
||||
}
|
||||
// 5) Core system roots last
|
||||
for p in [
|
||||
PathBuf::from("C:/"),
|
||||
PathBuf::from("C:/Windows"),
|
||||
PathBuf::from("C:/ProgramData"),
|
||||
] {
|
||||
for p in [PathBuf::from("C:/"), PathBuf::from("C:/Windows")] {
|
||||
unique_push(&mut set, &mut out, p);
|
||||
}
|
||||
out
|
||||
@@ -164,14 +175,18 @@ unsafe fn path_has_world_write_allow(path: &Path) -> Result<bool> {
|
||||
pub fn audit_everyone_writable(
|
||||
cwd: &Path,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
) -> Result<()> {
|
||||
logs_base_dir: Option<&Path>,
|
||||
) -> Result<Vec<PathBuf>> {
|
||||
let start = Instant::now();
|
||||
let mut flagged: Vec<PathBuf> = Vec::new();
|
||||
let mut seen: HashSet<String> = HashSet::new();
|
||||
let mut checked = 0usize;
|
||||
// Fast path: check CWD immediate children first so workspace issues are caught early.
|
||||
if let Ok(read) = std::fs::read_dir(cwd) {
|
||||
for ent in read.flatten().take(250) {
|
||||
if start.elapsed() > Duration::from_secs(5) || checked > 5000 {
|
||||
for ent in read.flatten().take(MAX_ITEMS_PER_DIR as usize) {
|
||||
if start.elapsed() > Duration::from_secs(AUDIT_TIME_LIMIT_SECS as u64)
|
||||
|| checked > MAX_CHECKED_LIMIT as usize
|
||||
{
|
||||
break;
|
||||
}
|
||||
let ft = match ent.file_type() {
|
||||
@@ -185,26 +200,32 @@ pub fn audit_everyone_writable(
|
||||
checked += 1;
|
||||
let has = unsafe { path_has_world_write_allow(&p)? };
|
||||
if has {
|
||||
flagged.push(p);
|
||||
let key = normalize_path_key(&p);
|
||||
if seen.insert(key) { flagged.push(p); }
|
||||
}
|
||||
}
|
||||
}
|
||||
// Continue with broader candidate sweep
|
||||
let candidates = gather_candidates(cwd, env);
|
||||
for root in candidates {
|
||||
if start.elapsed() > Duration::from_secs(5) || checked > 5000 {
|
||||
if start.elapsed() > Duration::from_secs(AUDIT_TIME_LIMIT_SECS as u64)
|
||||
|| checked > MAX_CHECKED_LIMIT as usize
|
||||
{
|
||||
break;
|
||||
}
|
||||
checked += 1;
|
||||
let has_root = unsafe { path_has_world_write_allow(&root)? };
|
||||
if has_root {
|
||||
flagged.push(root.clone());
|
||||
let key = normalize_path_key(&root);
|
||||
if seen.insert(key) { flagged.push(root.clone()); }
|
||||
}
|
||||
// one level down best-effort
|
||||
if let Ok(read) = std::fs::read_dir(&root) {
|
||||
for ent in read.flatten().take(250) {
|
||||
for ent in read.flatten().take(MAX_ITEMS_PER_DIR as usize) {
|
||||
let p = ent.path();
|
||||
if start.elapsed() > Duration::from_secs(5) || checked > 5000 {
|
||||
if start.elapsed() > Duration::from_secs(AUDIT_TIME_LIMIT_SECS as u64)
|
||||
|| checked > MAX_CHECKED_LIMIT as usize
|
||||
{
|
||||
break;
|
||||
}
|
||||
// Skip reparse points (symlinks/junctions) to avoid auditing link ACLs
|
||||
@@ -215,11 +236,16 @@ pub fn audit_everyone_writable(
|
||||
if ft.is_symlink() {
|
||||
continue;
|
||||
}
|
||||
// Skip noisy/irrelevant Windows system subdirectories
|
||||
let pl = p.to_string_lossy().to_ascii_lowercase();
|
||||
let norm = pl.replace('\\', "/");
|
||||
if SKIP_DIR_SUFFIXES.iter().any(|s| norm.ends_with(s)) { continue; }
|
||||
if ft.is_dir() {
|
||||
checked += 1;
|
||||
let has_child = unsafe { path_has_world_write_allow(&p)? };
|
||||
if has_child {
|
||||
flagged.push(p);
|
||||
let key = normalize_path_key(&p);
|
||||
if seen.insert(key) { flagged.push(p); }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -236,25 +262,18 @@ pub fn audit_everyone_writable(
|
||||
"AUDIT: world-writable scan FAILED; checked={checked}; duration_ms={elapsed_ms}; flagged:{}",
|
||||
list
|
||||
),
|
||||
Some(cwd),
|
||||
logs_base_dir,
|
||||
);
|
||||
let mut list_err = String::new();
|
||||
for p in flagged {
|
||||
list_err.push_str(&format!("\n - {}", p.display()));
|
||||
}
|
||||
return Err(anyhow!(
|
||||
"Refusing to run: found directories writable by Everyone: {}",
|
||||
list_err
|
||||
));
|
||||
return Ok(flagged);
|
||||
}
|
||||
// Log success once if nothing flagged
|
||||
crate::logging::log_note(
|
||||
&format!(
|
||||
"AUDIT: world-writable scan OK; checked={checked}; duration_ms={elapsed_ms}"
|
||||
),
|
||||
Some(cwd),
|
||||
logs_base_dir,
|
||||
);
|
||||
Ok(())
|
||||
Ok(Vec::new())
|
||||
}
|
||||
// Fast mask-based check: does the DACL contain any ACCESS_ALLOWED ACE for
|
||||
// Everyone that includes generic or specific write bits? Skips inherit-only
|
||||
|
||||
@@ -171,8 +171,9 @@ mod windows_impl {
    pub fn preflight_audit_everyone_writable(
        cwd: &Path,
        env_map: &HashMap<String, String>,
    ) -> Result<()> {
        audit::audit_everyone_writable(cwd, env_map)
        logs_base_dir: Option<&Path>,
    ) -> Result<Vec<PathBuf>> {
        audit::audit_everyone_writable(cwd, env_map, logs_base_dir)
    }

    pub fn run_windows_sandbox_capture(
@@ -436,7 +437,8 @@ mod stub {
    pub fn preflight_audit_everyone_writable(
        _cwd: &Path,
        _env_map: &HashMap<String, String>,
    ) -> Result<()> {
        _logs_base_dir: Option<&Path>,
    ) -> Result<Vec<std::path::PathBuf>> {
        bail!("Windows sandbox is only available on Windows")
    }

@@ -347,11 +347,13 @@ Use the optional `[tools]` table to toggle built-in tools that the agent may cal

```toml
[tools]
web_search = true # allow Codex to issue first-party web searches without prompting you
web_search = true # allow Codex to issue first-party web searches without prompting you (deprecated)
view_image = false # disable image uploads (they're enabled by default)
```

`web_search` is also recognized under the legacy name `web_search_request`. The `view_image` toggle is useful when you want to include screenshots or diagrams from your repo without pasting them manually. Codex still respects sandboxing: it can only attach files inside the workspace roots you allow.
`web_search` is deprecated; use the `web_search_request` feature flag instead.

The `view_image` toggle is useful when you want to include screenshots or diagrams from your repo without pasting them manually. Codex still respects sandboxing: it can only attach files inside the workspace roots you allow.

### approval_presets

@@ -917,6 +919,7 @@ Valid values:
| `sandbox_workspace_write.exclude_slash_tmp` | boolean | Exclude `/tmp` from writable roots (default: false). |
| `notify` | array<string> | External program for notifications. |
| `instructions` | string | Currently ignored; use `experimental_instructions_file` or `AGENTS.md`. |
| `features.<feature-flag>` | boolean | See [feature flags](#feature-flags) for details |
| `mcp_servers.<id>.command` | string | MCP server launcher command (stdio servers only). |
| `mcp_servers.<id>.args` | array<string> | MCP server args (stdio servers only). |
| `mcp_servers.<id>.env` | map<string,string> | MCP server env vars (stdio servers only). |
@@ -956,7 +959,7 @@ Valid values:
| `experimental_instructions_file` | string (path) | Replace built‑in instructions (experimental). |
| `experimental_use_exec_command_tool` | boolean | Use experimental exec command tool. |
| `projects.<path>.trust_level` | string | Mark project/worktree as trusted (only `"trusted"` is recognized). |
| `tools.web_search` | boolean | Enable web search tool (alias: `web_search_request`) (default: false). |
| `tools.web_search` | boolean | Enable web search tool (deprecated) (default: false). |
| `tools.view_image` | boolean | Enable or disable the `view_image` tool so Codex can attach local image files from the workspace (default: true). |
| `forced_login_method` | `chatgpt` \| `api` | Only allow Codex to be used with ChatGPT or API keys. |
| `forced_chatgpt_workspace_id` | string (uuid) | Only allow Codex to be used with the specified ChatGPT workspace. |

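For the feature-flag form the updated text points to, the `config.toml` spelling would look roughly like this (a sketch based on the `features.<feature-flag>` row above; confirm the exact flag name against your Codex version):

```toml
[features]
web_search_request = true  # replaces the deprecated [tools] web_search toggle
```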
@@ -159,6 +159,7 @@ windows_wsl_setup_acknowledged = false
# In-product notices (mostly set automatically by Codex).
[notice]
# hide_full_access_warning = true
# hide_rate_limit_model_nudge = true

################################################################################
# Authentication & Login

@@ -33,7 +33,7 @@ const nextTurn = await thread.run("Implement the fix");

### Streaming responses

`run()` buffers events until the turn finishes. To react to intermediate progress—tool calls, streaming responses, and file diffs—use `runStreamed()` instead, which returns an async generator of structured events.
`run()` buffers events until the turn finishes. To react to intermediate progress—tool calls, streaming responses, and file change notifications—use `runStreamed()` instead, which returns an async generator of structured events.

```typescript
const { events } = await thread.runStreamed("Diagnose the test failure and propose a fix");
