mirror of https://github.com/openai/codex.git
synced 2026-02-02 15:03:38 +00:00

Compare commits: fix_compac...fix/image- (120 commits)
Commits (SHA1):

8fb94fb11b, 7aab45e060, bcd64c7e72, c124f24354, c7e4e6d0ee, 88abbf58ce,
71f838389b, 0533bd2e7c, 6af83d86ff, e2e1b65da6, 817d1508bc, f8af4f5c8d,
a4be4d78b9, 00c1de0c56, 190e7eb104, 061862a0e2, c72b2ad766, 80783a7bb9,
ed77d2d977, abccd3e367, 0f4fd33ddd, e258f0f044, a6b9471548, 3059373e06,
0b4527146e, 6745b12427, f59978ed3d, 3ab6028e80, 892eaff46d, 8e291a1706,
aee321f62b, ed32da04d7, 8ae3949072, 273819aaae, 4cd6b01494, dd59b16a17,
bac7acaa7c, 3c90728a29, 34c5a9eaa9, f522aafb7f, fd0673e457, 00b1e130b3,
53cadb4df6, db7eb9a7ce, cdd106b930, 404cae7d40, 682d05512f, 5cd8803998,
26f314904a, da82153a8d, 4bd68e4d9e, 1b10a3a1b2, ad9a289951, a517f6f55b,
789e65b9d2, 42d5c35020, ab95eaa356, 7fc01c6e9b, df15a2f6ef, ef806456e4,
bd6ab8c665, d2bae07687, 9c09094583, 7e4ab31488, 32d50bda94, 740b4a95f4,
c37469b5ba, c782f8c68d, 7d6e318f87, 58159383c4, 5c680c6587, 39a2446716,
9c903c4716, 5e4f3bbb0b, c84fc83222, 8044b55335, 846960ae3d, 049a61bcfc,
cda6db6ccf, 73a1787eb8, 0e8d937a3f, 3282e86a60, 540abfa05e, d87f87e25b,
d01f91ecec, 0170860ef2, 2d9ee9dbe9, 3ed728790b, 3e071c4c95, c127062b40,
1d9b27387b, 4f46360aa4, 2287d2afde, d6a9e38575, c81e1477ae, 11c019d6c5,
a182c1315c, 98c6dfa537, 0e08dd6055, 41900e9d0f, c1bde2a4ef, 6b0c486861,
44ceaf085b, c03e31ecf5, 6915ba2100, 50f53e7071, 40fba1bb4c, bdda762deb,
da5492694b, a5d48a775b, 78f2785595, fc1723f131, ed5b0bfeb3, 4b01f0f50a,
0139f6780c, 86ba270926, c146585cdb, 5fa7844ad7, 84c9b574f9, 272e13dd90
.github/workflows/rust-ci.yml (vendored, 2 changes)
@@ -201,7 +201,7 @@ jobs:
        # Tests take too long for release builds to run them on every PR.
        if: ${{ matrix.profile != 'release' }}
        continue-on-error: true
-       run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }}
+       run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
        env:
          RUST_BACKTRACE: 1
.github/workflows/rust-release.yml (vendored, 73 changes)
@@ -58,9 +58,9 @@ jobs:
      fail-fast: false
      matrix:
        include:
-          - runner: macos-14
+          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
-          - runner: macos-14
+          - runner: macos-15-xlarge
            target: x86_64-apple-darwin
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
@@ -100,7 +100,7 @@ jobs:
      - name: Cargo build
        run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy

-      - if: ${{ matrix.runner == 'macos-14' }}
+      - if: ${{ matrix.runner == 'macos-15-xlarge' }}
        name: Configure Apple code signing
        shell: bash
        env:
@@ -185,7 +185,7 @@ jobs:
          echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
          echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

-      - if: ${{ matrix.runner == 'macos-14' }}
+      - if: ${{ matrix.runner == 'macos-15-xlarge' }}
        name: Sign macOS binaries
        shell: bash
        run: |
@@ -206,6 +206,69 @@ jobs:
            codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
          done

+      - if: ${{ matrix.runner == 'macos-15-xlarge' }}
+        name: Notarize macOS binaries
+        shell: bash
+        env:
+          APPLE_NOTARIZATION_KEY_P8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
+          APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+          APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+        run: |
+          set -euo pipefail
+
+          for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
+            if [[ -z "${!var:-}" ]]; then
+              echo "$var is required for notarization"
+              exit 1
+            fi
+          done
+
+          notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
+          echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
+          cleanup_notary() {
+            rm -f "$notary_key_path"
+          }
+          trap cleanup_notary EXIT
+
+          notarize_binary() {
+            local binary="$1"
+            local source_path="target/${{ matrix.target }}/release/${binary}"
+            local archive_path="${RUNNER_TEMP}/${binary}.zip"
+
+            if [[ ! -f "$source_path" ]]; then
+              echo "Binary $source_path not found"
+              exit 1
+            fi
+
+            rm -f "$archive_path"
+            ditto -c -k --keepParent "$source_path" "$archive_path"
+
+            submission_json=$(xcrun notarytool submit "$archive_path" \
+              --key "$notary_key_path" \
+              --key-id "$APPLE_NOTARIZATION_KEY_ID" \
+              --issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
+              --output-format json \
+              --wait)
+
+            status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
+            submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')
+
+            if [[ -z "$submission_id" ]]; then
+              echo "Failed to retrieve submission ID for $binary"
+              exit 1
+            fi
+
+            echo "::notice title=Notarization::$binary submission ${submission_id} completed with status ${status}"
+
+            if [[ "$status" != "Accepted" ]]; then
+              echo "Notarization failed for ${binary} (submission ${submission_id}, status ${status})"
+              exit 1
+            fi
+          }
+
+          notarize_binary "codex"
+          notarize_binary "codex-responses-api-proxy"
+
      - name: Stage artifacts
        shell: bash
        run: |
@@ -265,7 +328,7 @@ jobs:
          done

      - name: Remove signing keychain
-        if: ${{ always() && matrix.runner == 'macos-14' }}
+        if: ${{ always() && matrix.runner == 'macos-15-xlarge' }}
        shell: bash
        env:
          APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
.vscode/settings.json (vendored, 1 change)
@@ -3,6 +3,7 @@
  "rust-analyzer.check.command": "clippy",
  "rust-analyzer.check.extraArgs": ["--all-features", "--tests"],
  "rust-analyzer.rustfmt.extraArgs": ["--config", "imports_granularity=Item"],
+ "rust-analyzer.cargo.targetDir": "${workspaceFolder}/codex-rs/target/rust-analyzer",
  "[rust]": {
    "editor.defaultFormatter": "rust-lang.rust-analyzer",
    "editor.formatOnSave": true,
@@ -11,7 +11,9 @@ In the codex-rs folder where the rust code lives:
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- Do not use unsigned integers even if the number cannot be negative.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.

Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:
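The Clippy conventions named in the AGENTS.md hunk above are easiest to see side by side. A minimal illustrative sketch (not part of the diff; the function, data, and output are invented for the example):

```rust
fn main() {
    demo(&[1, 2, 3], true, None);
}

fn demo(nums: &[i32], verbose: bool, filter: Option<&str>) {
    // collapsible_if: one `if` with `&&` rather than nested `if` blocks.
    if verbose && filter.is_none() {
        // "Do not use unsigned integers": keep the count signed.
        let count = nums.len() as i64;
        // uninlined_format_args: inline the variable into the format string.
        println!("processing {count} items");
    }
    // redundant_closure_for_method_calls: a method path instead of the
    // closure `|n| n.to_string()`.
    let labels: Vec<String> = nums.iter().map(ToString::to_string).collect();
    println!("{labels:?}");
}
```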
@@ -1,4 +1,4 @@
-<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
+<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>

<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
</br>
@@ -24,7 +24,7 @@ npm install -g @openai/codex
Alternatively, if you use Homebrew:

```shell
-brew install codex
+brew install --cask codex
```

Then simply run `codex` to get started:
@@ -33,6 +33,8 @@ Then simply run `codex` to get started:
codex
```

+If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-update-codex-isnt-upgrading-me).
+
<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -75,6 +77,7 @@ Codex CLI supports a rich set of configuration options, with preferences stored
- [CLI usage](./docs/getting-started.md#cli-usage)
- [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
- [Example prompts](./docs/getting-started.md#example-prompts)
+- [Custom prompts](./docs/prompts.md)
- [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [Configuration](./docs/config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
@@ -4,7 +4,7 @@
header = """
# Changelog

-You can install any of these versions: `npm install -g codex@version`
+You can install any of these versions: `npm install -g @openai/codex@<version>`
"""

body = """
codex-rs/.gitignore (vendored, 1 change)
@@ -1,4 +1,5 @@
/target/
+/target-*/

# Recommended value of CARGO_TARGET_DIR when using Docker as explained in .devcontainer/README.md.
/target-amd64/
codex-rs/Cargo.lock (generated, 352 changes)
@@ -182,7 +182,10 @@ version = "0.0.0"
dependencies = [
 "anyhow",
 "assert_cmd",
 "base64",
 "chrono",
 "codex-app-server-protocol",
 "codex-core",
 "serde",
 "serde_json",
 "tokio",
@@ -834,8 +837,10 @@ dependencies = [
 "app_test_support",
 "assert_cmd",
 "base64",
 "chrono",
 "codex-app-server-protocol",
 "codex-arg0",
 "codex-backend-client",
 "codex-common",
 "codex-core",
 "codex-file-search",
@@ -843,10 +848,12 @@ dependencies = [
 "codex-protocol",
 "codex-utils-json-to-toml",
 "core_test_support",
 "opentelemetry-appender-tracing",
 "os_info",
 "pretty_assertions",
 "serde",
 "serde_json",
 "serial_test",
 "tempfile",
 "tokio",
 "toml",
@@ -861,9 +868,11 @@ name = "codex-app-server-protocol"
version = "0.0.0"
dependencies = [
+ "anyhow",
+ "clap",
 "codex-protocol",
 "paste",
 "pretty_assertions",
 "schemars 0.8.22",
 "serde",
 "serde_json",
 "strum_macros 0.27.2",
@@ -899,12 +908,24 @@ dependencies = [
 "tokio",
]

+[[package]]
+name = "codex-async-utils"
+version = "0.0.0"
+dependencies = [
+ "async-trait",
+ "pretty_assertions",
+ "tokio",
+ "tokio-util",
+]
+
[[package]]
name = "codex-backend-client"
version = "0.0.0"
dependencies = [
 "anyhow",
 "codex-backend-openapi-models",
 "codex-core",
 "codex-protocol",
 "pretty_assertions",
 "reqwest",
 "serde",
@@ -917,6 +938,7 @@ version = "0.0.0"
dependencies = [
 "serde",
 "serde_json",
 "serde_with",
]

[[package]]
@@ -958,6 +980,7 @@ dependencies = [
 "codex-protocol-ts",
 "codex-responses-api-proxy",
 "codex-rmcp-client",
+ "codex-stdio-to-uds",
 "codex-tui",
 "ctor 0.5.0",
 "owo-colors",
@@ -1037,12 +1060,14 @@ dependencies = [
 "chrono",
 "codex-app-server-protocol",
 "codex-apply-patch",
+ "codex-async-utils",
 "codex-file-search",
- "codex-mcp-client",
 "codex-otel",
 "codex-protocol",
 "codex-rmcp-client",
+ "codex-utils-pty",
 "codex-utils-string",
+ "codex-utils-tokenizer",
 "core-foundation 0.9.4",
 "core_test_support",
 "dirs",
@@ -1051,6 +1076,7 @@ dependencies = [
 "escargot",
 "eventsource-stream",
 "futures",
+ "http",
 "indexmap 2.10.0",
 "landlock",
 "libc",
@@ -1058,7 +1084,6 @@ dependencies = [
 "mcp-types",
 "openssl-sys",
 "os_info",
- "portable-pty",
 "predicates",
 "pretty_assertions",
 "rand 0.9.2",
@@ -1073,6 +1098,7 @@ dependencies = [
 "similar",
 "strum_macros 0.27.2",
 "tempfile",
+ "test-log",
 "thiserror 2.0.16",
 "time",
 "tokio",
@@ -1144,6 +1170,17 @@ dependencies = [
 "tempfile",
]

+[[package]]
+name = "codex-feedback"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "codex-protocol",
+ "pretty_assertions",
+ "sentry",
+ "tracing-subscriber",
+]
+
[[package]]
name = "codex-file-search"
version = "0.0.0"
@@ -1214,19 +1251,6 @@ dependencies = [
 "wiremock",
]

-[[package]]
-name = "codex-mcp-client"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "mcp-types",
- "serde",
- "serde_json",
- "tokio",
- "tracing",
- "tracing-subscriber",
-]
-
[[package]]
name = "codex-mcp-server"
version = "0.0.0"
@@ -1308,6 +1332,7 @@ dependencies = [
 "icu_locale_core",
 "mcp-types",
+ "mime_guess",
 "schemars 0.8.22",
 "serde",
 "serde_json",
 "serde_with",
@@ -1354,6 +1379,7 @@ dependencies = [
 "axum",
 "codex-protocol",
 "dirs",
 "escargot",
 "futures",
 "keyring",
 "mcp-types",
@@ -1363,6 +1389,7 @@ dependencies = [
 "rmcp",
 "serde",
 "serde_json",
 "serial_test",
 "sha2",
 "tempfile",
 "tiny_http",
@@ -1372,6 +1399,17 @@ dependencies = [
 "webbrowser",
]

+[[package]]
+name = "codex-stdio-to-uds"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "assert_cmd",
+ "pretty_assertions",
+ "tempfile",
+ "uds_windows",
+]
+
[[package]]
name = "codex-tui"
version = "0.0.0"
@@ -1388,6 +1426,7 @@ dependencies = [
 "codex-arg0",
 "codex-common",
 "codex-core",
+ "codex-feedback",
 "codex-file-search",
 "codex-git-tooling",
 "codex-login",
@@ -1405,12 +1444,12 @@ dependencies = [
 "libc",
 "mcp-types",
 "opentelemetry-appender-tracing",
- "path-clean",
 "pathdiff",
 "pretty_assertions",
 "pulldown-cmark",
 "rand 0.9.2",
 "ratatui",
+ "ratatui-macros",
 "regex-lite",
 "serde",
 "serde_json",
@@ -1422,6 +1461,7 @@ dependencies = [
 "textwrap 0.16.2",
 "tokio",
 "tokio-stream",
 "toml",
 "tracing",
 "tracing-appender",
 "tracing-subscriber",
@@ -1442,6 +1482,15 @@ dependencies = [
 "toml",
]

+[[package]]
+name = "codex-utils-pty"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "portable-pty",
+ "tokio",
+]
+
[[package]]
name = "codex-utils-readiness"
version = "0.0.0"
@@ -1457,6 +1506,16 @@ dependencies = [
name = "codex-utils-string"
version = "0.0.0"

+[[package]]
+name = "codex-utils-tokenizer"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "pretty_assertions",
+ "thiserror 2.0.16",
+ "tiktoken-rs",
+]
+
[[package]]
name = "color-eyre"
version = "0.6.5"
@@ -1577,6 +1636,7 @@ dependencies = [
 "anyhow",
 "assert_cmd",
 "codex-core",
 "codex-protocol",
 "notify",
 "regex-lite",
 "serde_json",
@@ -1823,6 +1883,16 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b"

+[[package]]
+name = "debugid"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d"
+dependencies = [
+ "serde",
+ "uuid",
+]
+
[[package]]
name = "debugserver-types"
version = "0.5.0"
@@ -2244,6 +2314,17 @@ dependencies = [
 "once_cell",
]

+[[package]]
+name = "fancy-regex"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
+dependencies = [
+ "bit-set",
+ "regex-automata",
+ "regex-syntax 0.8.5",
+]
+
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -2301,6 +2382,18 @@ dependencies = [
 "winapi",
]

+[[package]]
+name = "findshlibs"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64"
+dependencies = [
+ "cc",
+ "lazy_static",
+ "libc",
+ "winapi",
+]
+
[[package]]
name = "fixed_decimal"
version = "0.7.0"
@@ -2669,6 +2762,17 @@ dependencies = [
 "windows-sys 0.59.0",
]

+[[package]]
+name = "hostname"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a56f203cd1c76362b69e3863fd987520ac36cf70a8c92627449b2f64a8cf7d65"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "windows-link 0.1.3",
+]
+
[[package]]
name = "http"
version = "1.3.1"
@@ -3507,6 +3611,7 @@ checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
name = "mcp-types"
version = "0.0.0"
dependencies = [
 "schemars 0.8.22",
 "serde",
 "serde_json",
 "ts-rs",
@@ -4192,12 +4297,6 @@ dependencies = [
 "path-dedot",
]

-[[package]]
-name = "path-clean"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17359afc20d7ab31fdb42bb844c8b3bb1dabd7dcf7e68428492da7f16966fcef"
-
[[package]]
name = "path-dedot"
version = "3.1.1"
@@ -4543,7 +4642,7 @@ dependencies = [
 "pin-project-lite",
 "quinn-proto",
 "quinn-udp",
- "rustc-hash",
+ "rustc-hash 2.1.1",
 "rustls",
 "socket2 0.6.0",
 "thiserror 2.0.16",
@@ -4563,7 +4662,7 @@ dependencies = [
 "lru-slab",
 "rand 0.9.2",
 "ring",
- "rustc-hash",
+ "rustc-hash 2.1.1",
 "rustls",
 "rustls-pki-types",
 "slab",
@@ -4691,6 +4790,15 @@ dependencies = [
 "unicode-width 0.2.1",
]

+[[package]]
+name = "ratatui-macros"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fef540f80dbe8a0773266fa6077788ceb65ef624cdbf36e131aaf90b4a52df4"
+dependencies = [
+ "ratatui",
+]
+
[[package]]
name = "redox_syscall"
version = "0.5.15"
@@ -4848,9 +4956,9 @@ dependencies = [

[[package]]
name = "rmcp"
-version = "0.8.1"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f35acda8f89fca5fd8c96cae3c6d5b4c38ea0072df4c8030915f3b5ff469c1c"
+checksum = "1fdad1258f7259fdc0f2dfc266939c82c3b5d1fd72bcde274d600cdc27e60243"
dependencies = [
 "base64",
 "bytes",
@@ -4882,9 +4990,9 @@ dependencies = [

[[package]]
name = "rmcp-macros"
-version = "0.8.1"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9f1d5220aaa23b79c3d02e18f7a554403b3ccea544bbb6c69d6bcb3e854a274"
+checksum = "ede0589a208cc7ce81d1be68aa7e74b917fcd03c81528408bab0457e187dcd9b"
dependencies = [
 "darling 0.21.3",
 "proc-macro2",
@@ -4899,12 +5007,27 @@ version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"

+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"

+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
[[package]]
name = "rustix"
version = "0.38.44"
@@ -5219,6 +5342,120 @@ dependencies = [
 "libc",
]

+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
+[[package]]
+name = "sentry"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5484316556650182f03b43d4c746ce0e3e48074a21e2f51244b648b6542e1066"
+dependencies = [
+ "httpdate",
+ "native-tls",
+ "reqwest",
+ "sentry-backtrace",
+ "sentry-contexts",
+ "sentry-core",
+ "sentry-debug-images",
+ "sentry-panic",
+ "sentry-tracing",
+ "tokio",
+ "ureq",
+]
+
+[[package]]
+name = "sentry-backtrace"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40aa225bb41e2ec9d7c90886834367f560efc1af028f1c5478a6cce6a59c463a"
+dependencies = [
+ "backtrace",
+ "once_cell",
+ "regex",
+ "sentry-core",
+]
+
+[[package]]
+name = "sentry-contexts"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a8dd746da3d16cb8c39751619cefd4fcdbd6df9610f3310fd646b55f6e39910"
+dependencies = [
+ "hostname",
+ "libc",
+ "os_info",
+ "rustc_version",
+ "sentry-core",
+ "uname",
+]
+
+[[package]]
+name = "sentry-core"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "161283cfe8e99c8f6f236a402b9ccf726b201f365988b5bb637ebca0abbd4a30"
+dependencies = [
+ "once_cell",
+ "rand 0.8.5",
+ "sentry-types",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "sentry-debug-images"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc6b25e945fcaa5e97c43faee0267eebda9f18d4b09a251775d8fef1086238a"
+dependencies = [
+ "findshlibs",
+ "once_cell",
+ "sentry-core",
+]
+
+[[package]]
+name = "sentry-panic"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc74f229c7186dd971a9491ffcbe7883544aa064d1589bd30b83fb856cd22d63"
+dependencies = [
+ "sentry-backtrace",
+ "sentry-core",
+]
+
+[[package]]
+name = "sentry-tracing"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd3c5faf2103cd01eeda779ea439b68c4ee15adcdb16600836e97feafab362ec"
+dependencies = [
+ "sentry-backtrace",
+ "sentry-core",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "sentry-types"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d68cdf6bc41b8ff3ae2a9c4671e97426dcdd154cc1d4b6b72813f285d6b163f"
+dependencies = [
+ "debugid",
+ "hex",
+ "rand 0.8.5",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+ "time",
+ "url",
+ "uuid",
+]
+
[[package]]
name = "serde"
version = "1.0.226"
@@ -5851,6 +6088,28 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"

+[[package]]
+name = "test-log"
+version = "0.2.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b"
+dependencies = [
+ "env_logger",
+ "test-log-macros",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "test-log-macros"
+version = "0.2.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.104",
+]
+
[[package]]
name = "textwrap"
version = "0.11.0"
@@ -5934,6 +6193,21 @@ dependencies = [
 "zune-jpeg",
]

+[[package]]
+name = "tiktoken-rs"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25563eeba904d770acf527e8b370fe9a5547bacd20ff84a0b6c3bc41288e5625"
+dependencies = [
+ "anyhow",
+ "base64",
+ "bstr",
+ "fancy-regex",
+ "lazy_static",
+ "regex",
+ "rustc-hash 1.1.0",
+]
+
[[package]]
name = "time"
version = "0.3.44"
@@ -6426,6 +6700,15 @@ dependencies = [
 "winapi",
]

+[[package]]
+name = "uname"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "unicase"
version = "2.8.1"
@@ -6485,6 +6768,19 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

+[[package]]
+name = "ureq"
+version = "2.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d"
+dependencies = [
+ "base64",
+ "log",
+ "native-tls",
+ "once_cell",
+ "url",
+]
+
[[package]]
name = "url"
version = "2.5.4"
@@ -2,10 +2,12 @@
members = [
 "backend-client",
 "ansi-escape",
+ "async-utils",
 "app-server",
 "app-server-protocol",
 "apply-patch",
 "arg0",
+ "feedback",
 "codex-backend-openapi-models",
 "cloud-tasks",
 "cloud-tasks-client",
@@ -18,7 +20,6 @@ members = [
 "git-tooling",
 "linux-sandbox",
 "login",
- "mcp-client",
 "mcp-server",
 "mcp-types",
 "ollama",
@@ -27,12 +28,15 @@ members = [
 "protocol-ts",
 "rmcp-client",
 "responses-api-proxy",
+ "stdio-to-uds",
 "otel",
 "tui",
 "git-apply",
 "utils/json-to-toml",
 "utils/readiness",
+ "utils/pty",
 "utils/string",
+ "utils/tokenizer",
]
resolver = "2"

@@ -52,15 +56,17 @@ codex-app-server = { path = "app-server" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-apply-patch = { path = "apply-patch" }
codex-arg0 = { path = "arg0" }
+codex-async-utils = { path = "async-utils" }
+codex-backend-client = { path = "backend-client" }
codex-chatgpt = { path = "chatgpt" }
codex-common = { path = "common" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
+codex-feedback = { path = "feedback" }
codex-file-search = { path = "file-search" }
codex-git-tooling = { path = "git-tooling" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-login = { path = "login" }
-codex-mcp-client = { path = "mcp-client" }
codex-mcp-server = { path = "mcp-server" }
codex-ollama = { path = "ollama" }
codex-otel = { path = "otel" }
@@ -69,10 +75,13 @@ codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
+codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
+codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
+codex-utils-tokenizer = { path = "utils/tokenizer" }
core_test_support = { path = "core/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }
@@ -83,8 +92,8 @@ ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = "3"
askama = "0.12"
-assert_matches = "1.5.0"
assert_cmd = "2"
+assert_matches = "1.5.0"
async-channel = "2.3.1"
async-stream = "0.3.6"
async-trait = "0.1.89"
@@ -107,6 +116,7 @@ env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
+http = "1.3.1"
icu_decimal = "2.0.0"
icu_locale_core = "2.0.0"
ignore = "0.4.23"
@@ -134,7 +144,6 @@ os_info = "3.12.0"
owo-colors = "4.2.0"
paste = "1.0.15"
path-absolutize = "3.1.1"
-path-clean = "1.0.1"
pathdiff = "0.2"
portable-pty = "0.9.0"
predicates = "3"
@@ -142,11 +151,13 @@ pretty_assertions = "1.4.1"
pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
+ratatui-macros = "0.6.0"
regex-lite = "0.1.7"
reqwest = "0.12"
-rmcp = { version = "0.8.0", default-features = false }
+rmcp = { version = "0.8.3", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
+sentry = "0.34.0"
serde = "1"
serde_json = "1"
serde_with = "3.14"
@@ -161,6 +172,7 @@ strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
+test-log = "0.2.18"
textwrap = "0.16.2"
thiserror = "2.0.16"
time = "0.3"
@@ -180,6 +192,7 @@ tree-sitter = "0.25.10"
tree-sitter-bash = "0.25"
tree-sitter-highlight = "0.25.10"
ts-rs = "11"
+uds_windows = "1.1.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
url = "2"
@@ -234,7 +247,7 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
-ignored = ["openssl-sys", "codex-utils-readiness"]
+ignored = ["openssl-sys", "codex-utils-readiness", "codex-utils-tokenizer"]

[profile.release]
lto = "fat"
@@ -245,6 +258,11 @@ strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1

+[profile.ci-test]
+debug = 1 # Reduce debug symbol size
+inherits = "test"
+opt-level = 0
+
[patch.crates-io]
# Uncomment to debug local changes.
# ratatui = { path = "../../ratatui" }
@@ -11,7 +11,12 @@ npm i -g @openai/codex
codex
```

-You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
+You can also install via Homebrew (`brew install --cask codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

+## Documentation quickstart
+
+- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
+- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).
+
## What's new in the Rust CLI

@@ -47,30 +52,6 @@ You can enable notifications by configuring a script that is run whenever the ag

To run Codex non-interactively, run `codex exec PROMPT` (you can also pass the prompt via `stdin`) and Codex will work on your task until it decides that it is done and exits. Output is printed to the terminal directly. You can set the `RUST_LOG` environment variable to see more about what's going on.

-### Use `@` for file search
-
-Typing `@` triggers a fuzzy-filename search over the workspace root. Use up/down to select among the results and Tab or Enter to replace the `@` with the selected path. You can use Esc to cancel the search.
-
-### Esc–Esc to edit a previous message
-
-When the chat composer is empty, press Esc to prime “backtrack” mode. Press Esc again to open a transcript preview highlighting the last user message; press Esc repeatedly to step to older user messages. Press Enter to confirm and Codex will fork the conversation from that point, trim the visible transcript accordingly, and pre‑fill the composer with the selected user message so you can edit and resubmit it.
-
-In the transcript preview, the footer shows an `Esc edit prev` hint while editing is active.
-
-### `--cd`/`-C` flag
-
-Sometimes it is not convenient to `cd` to the directory you want Codex to use as the "working root" before running Codex. Fortunately, `codex` supports a `--cd` option so you can specify whatever folder you want. You can confirm that Codex is honoring `--cd` by double-checking the **workdir** it reports in the TUI at the start of a new session.
-
-### Shell completions
-
-Generate shell completion scripts via:
-
-```shell
-codex completion bash
-codex completion zsh
-codex completion fish
-```
-
### Experimenting with the Codex Sandbox

To test to see what happens when a command is run under the sandbox provided by Codex, we provide the following subcommands in Codex CLI:
@@ -11,8 +11,11 @@ path = "src/lib.rs"
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
paste = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
codex-rs/app-server-protocol/src/bin/export.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use anyhow::Result;
use clap::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(
    about = "Generate TypeScript bindings and JSON Schemas for the Codex app-server protocol"
)]
struct Args {
    /// Output directory where generated files will be written
    #[arg(short = 'o', long = "out", value_name = "DIR")]
    out_dir: PathBuf,

    /// Optional Prettier executable path to format generated TypeScript files
    #[arg(short = 'p', long = "prettier", value_name = "PRETTIER_BIN")]
    prettier: Option<PathBuf>,
}

fn main() -> Result<()> {
    let args = Args::parse();
    codex_app_server_protocol::generate_types(&args.out_dir, args.prettier.as_deref())
}
codex-rs/app-server-protocol/src/export.rs (new file, 422 lines)
@@ -0,0 +1,422 @@
use crate::ClientNotification;
use crate::ClientRequest;
use crate::ServerNotification;
use crate::ServerRequest;
use crate::export_client_response_schemas;
use crate::export_client_responses;
use crate::export_server_response_schemas;
use crate::export_server_responses;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use schemars::JsonSchema;
use schemars::schema::RootSchema;
use schemars::schema_for;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fs;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use ts_rs::ExportError;
use ts_rs::TS;

const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";

macro_rules! for_each_schema_type {
    ($macro:ident) => {
        $macro!(crate::RequestId);
        $macro!(crate::JSONRPCMessage);
        $macro!(crate::JSONRPCRequest);
        $macro!(crate::JSONRPCNotification);
        $macro!(crate::JSONRPCResponse);
        $macro!(crate::JSONRPCError);
        $macro!(crate::JSONRPCErrorError);
        $macro!(crate::AddConversationListenerParams);
        $macro!(crate::AddConversationSubscriptionResponse);
        $macro!(crate::ApplyPatchApprovalParams);
        $macro!(crate::ApplyPatchApprovalResponse);
        $macro!(crate::ArchiveConversationParams);
        $macro!(crate::ArchiveConversationResponse);
        $macro!(crate::AuthMode);
        $macro!(crate::AuthStatusChangeNotification);
        $macro!(crate::CancelLoginChatGptParams);
        $macro!(crate::CancelLoginChatGptResponse);
        $macro!(crate::ClientInfo);
        $macro!(crate::ClientNotification);
        $macro!(crate::ClientRequest);
        $macro!(crate::ConversationSummary);
        $macro!(crate::ExecCommandApprovalParams);
        $macro!(crate::ExecCommandApprovalResponse);
        $macro!(crate::ExecOneOffCommandParams);
        $macro!(crate::ExecOneOffCommandResponse);
        $macro!(crate::FuzzyFileSearchParams);
        $macro!(crate::FuzzyFileSearchResponse);
        $macro!(crate::FuzzyFileSearchResult);
        $macro!(crate::GetAuthStatusParams);
        $macro!(crate::GetAuthStatusResponse);
        $macro!(crate::GetUserAgentResponse);
        $macro!(crate::GetUserSavedConfigResponse);
        $macro!(crate::GitDiffToRemoteParams);
        $macro!(crate::GitDiffToRemoteResponse);
        $macro!(crate::GitSha);
        $macro!(crate::InitializeParams);
        $macro!(crate::InitializeResponse);
        $macro!(crate::InputItem);
        $macro!(crate::InterruptConversationParams);
        $macro!(crate::InterruptConversationResponse);
        $macro!(crate::ListConversationsParams);
        $macro!(crate::ListConversationsResponse);
        $macro!(crate::LoginApiKeyParams);
        $macro!(crate::LoginApiKeyResponse);
        $macro!(crate::LoginChatGptCompleteNotification);
        $macro!(crate::LoginChatGptResponse);
        $macro!(crate::LogoutChatGptParams);
        $macro!(crate::LogoutChatGptResponse);
        $macro!(crate::NewConversationParams);
        $macro!(crate::NewConversationResponse);
        $macro!(crate::Profile);
        $macro!(crate::RemoveConversationListenerParams);
        $macro!(crate::RemoveConversationSubscriptionResponse);
        $macro!(crate::ResumeConversationParams);
        $macro!(crate::ResumeConversationResponse);
        $macro!(crate::SandboxSettings);
        $macro!(crate::SendUserMessageParams);
        $macro!(crate::SendUserMessageResponse);
        $macro!(crate::SendUserTurnParams);
        $macro!(crate::SendUserTurnResponse);
        $macro!(crate::ServerNotification);
        $macro!(crate::ServerRequest);
        $macro!(crate::SessionConfiguredNotification);
        $macro!(crate::SetDefaultModelParams);
        $macro!(crate::SetDefaultModelResponse);
        $macro!(crate::Tools);
        $macro!(crate::UserInfoResponse);
        $macro!(crate::UserSavedConfig);
        $macro!(codex_protocol::protocol::EventMsg);
        $macro!(codex_protocol::protocol::FileChange);
        $macro!(codex_protocol::parse_command::ParsedCommand);
        $macro!(codex_protocol::protocol::SandboxPolicy);
    };
}

fn export_ts_with_context<F>(label: &str, export: F) -> Result<()>
where
    F: FnOnce() -> std::result::Result<(), ExportError>,
{
    match export() {
        Ok(()) => Ok(()),
        Err(ExportError::CannotBeExported(ty)) => Err(anyhow!(
            "failed to export {label}: dependency {ty} cannot be exported"
        )),
        Err(err) => Err(err.into()),
    }
}

pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts(out_dir, prettier)?;
    generate_json(out_dir)?;
    Ok(())
}

pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    ensure_dir(out_dir)?;

    export_ts_with_context("ClientRequest", || ClientRequest::export_all_to(out_dir))?;
    export_ts_with_context("client responses", || export_client_responses(out_dir))?;
    export_ts_with_context("ClientNotification", || {
        ClientNotification::export_all_to(out_dir)
    })?;

    export_ts_with_context("ServerRequest", || ServerRequest::export_all_to(out_dir))?;
    export_ts_with_context("server responses", || export_server_responses(out_dir))?;
    export_ts_with_context("ServerNotification", || {
        ServerNotification::export_all_to(out_dir)
    })?;

    generate_index_ts(out_dir)?;

    let ts_files = ts_files_in(out_dir)?;
    for file in &ts_files {
        prepend_header_if_missing(file)?;
    }

    if let Some(prettier_bin) = prettier
        && !ts_files.is_empty()
    {
        let status = Command::new(prettier_bin)
            .arg("--write")
            .args(ts_files.iter().map(|p| p.as_os_str()))
            .status()
            .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
        if !status.success() {
            return Err(anyhow!("Prettier failed with status {status}"));
        }
    }

    Ok(())
}

pub fn generate_json(out_dir: &Path) -> Result<()> {
    ensure_dir(out_dir)?;
    let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();

    macro_rules! add_schema {
        ($ty:path) => {{
            let name = type_basename(stringify!($ty));
            let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
            bundle.insert(name, schema);
        }};
    }

    for_each_schema_type!(add_schema);

    export_client_response_schemas(out_dir)?;
    export_server_response_schemas(out_dir)?;

    let mut definitions = Map::new();

    const SPECIAL_DEFINITIONS: &[&str] = &[
        "ClientNotification",
        "ClientRequest",
        "EventMsg",
        "FileChange",
        "InputItem",
        "ParsedCommand",
        "SandboxPolicy",
        "ServerNotification",
        "ServerRequest",
    ];

    for (name, schema) in bundle {
        let mut schema_value = serde_json::to_value(schema)?;
        if let Value::Object(ref mut obj) = schema_value {
            if let Some(defs) = obj.remove("definitions")
                && let Value::Object(defs_obj) = defs
            {
                for (def_name, def_schema) in defs_obj {
                    if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                        definitions.insert(def_name, def_schema);
                    }
                }
            }

            if let Some(Value::Array(one_of)) = obj.get_mut("oneOf") {
                for variant in one_of.iter_mut() {
                    if let Some(variant_name) = variant_definition_name(&name, variant)
                        && let Value::Object(variant_obj) = variant
                    {
                        variant_obj.insert("title".into(), Value::String(variant_name));
                    }
                }
            }
        }
        definitions.insert(name, schema_value);
    }

    let mut root = Map::new();
    root.insert(
        "$schema".to_string(),
        Value::String("http://json-schema.org/draft-07/schema#".into()),
    );
    root.insert(
        "title".to_string(),
        Value::String("CodexAppServerProtocol".into()),
    );
    root.insert("type".to_string(), Value::String("object".into()));
    root.insert("definitions".to_string(), Value::Object(definitions));

    write_pretty_json(
        out_dir.join("codex_app_server_protocol.schemas.json"),
        &Value::Object(root),
    )?;

    Ok(())
}

fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
where
    T: JsonSchema,
{
    let file_stem = name.trim();
    let schema = schema_for!(T);
    write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema)
        .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    Ok(schema)
}

pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<()>
where
    T: JsonSchema,
{
    write_json_schema_with_return::<T>(out_dir, name).map(|_| ())
}

fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
    let json = serde_json::to_vec_pretty(value)
        .with_context(|| format!("Failed to serialize JSON schema to {}", path.display()))?;
    fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
    Ok(())
}

fn type_basename(type_path: &str) -> String {
    type_path
        .rsplit_once("::")
        .map(|(_, name)| name)
        .unwrap_or(type_path)
        .trim()
        .to_string()
}

fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
    if let Some(props) = variant.get("properties").and_then(Value::as_object) {
        if let Some(method_literal) = literal_from_property(props, "method") {
            let pascal = to_pascal_case(method_literal);
            return Some(match base {
                "ClientRequest" | "ServerRequest" => format!("{pascal}Request"),
                "ClientNotification" | "ServerNotification" => format!("{pascal}Notification"),
                _ => format!("{pascal}{base}"),
            });
        }

        if let Some(type_literal) = literal_from_property(props, "type") {
            let pascal = to_pascal_case(type_literal);
            return Some(match base {
                "EventMsg" => format!("{pascal}EventMsg"),
                _ => format!("{pascal}{base}"),
            });
        }

        if let Some(mode_literal) = literal_from_property(props, "mode") {
            let pascal = to_pascal_case(mode_literal);
            return Some(match base {
                "SandboxPolicy" => format!("{pascal}SandboxPolicy"),
                _ => format!("{pascal}{base}"),
            });
        }

        if props.len() == 1
            && let Some(key) = props.keys().next()
        {
            let pascal = to_pascal_case(key);
            return Some(format!("{pascal}{base}"));
        }
    }

    if let Some(required) = variant.get("required").and_then(Value::as_array)
        && required.len() == 1
        && let Some(key) = required[0].as_str()
    {
        let pascal = to_pascal_case(key);
        return Some(format!("{pascal}{base}"));
    }

    None
}

fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
    props
        .get(key)
        .and_then(|value| value.get("enum"))
        .and_then(Value::as_array)
        .and_then(|arr| arr.first())
        .and_then(Value::as_str)
}

fn to_pascal_case(input: &str) -> String {
    let mut result = String::new();
    let mut capitalize_next = true;

    for c in input.chars() {
        if c == '_' || c == '-' {
            capitalize_next = true;
            continue;
        }

        if capitalize_next {
            result.extend(c.to_uppercase());
            capitalize_next = false;
        } else {
            result.push(c);
        }
    }

    result
}

fn ensure_dir(dir: &Path) -> Result<()> {
    fs::create_dir_all(dir)
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}

fn prepend_header_if_missing(path: &Path) -> Result<()> {
    let mut content = String::new();
    {
        let mut f = fs::File::open(path)
            .with_context(|| format!("Failed to open {} for reading", path.display()))?;
        f.read_to_string(&mut content)
            .with_context(|| format!("Failed to read {}", path.display()))?;
    }

    if content.starts_with(HEADER) {
        return Ok(());
    }

    let mut f = fs::File::create(path)
        .with_context(|| format!("Failed to open {} for writing", path.display()))?;
    f.write_all(HEADER.as_bytes())
        .with_context(|| format!("Failed to write header to {}", path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write content to {}", path.display()))?;
    Ok(())
}

fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    for entry in
        fs::read_dir(dir).with_context(|| format!("Failed to read dir {}", dir.display()))?
    {
        let entry = entry?;
        let path = entry.path();
        if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}

fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    let mut stems: Vec<String> = ts_files_in(out_dir)?
        .into_iter()
        .filter_map(|p| {
            let stem = p.file_stem()?.to_string_lossy().into_owned();
            if stem == "index" { None } else { Some(stem) }
        })
        .collect();
    stems.sort();
    stems.dedup();

    for name in stems {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }

    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
    for line in &entries {
        content.push_str(line);
    }

    let index_path = out_dir.join("index.ts");
    let mut f = fs::File::create(&index_path)
        .with_context(|| format!("Failed to create {}", index_path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}
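The naming helpers at the end of export.rs are pure functions, so their behavior is easy to pin down with small tests. A hedged sketch (not part of the diff) of tests that could sit alongside them; the `newConversation` method literal is an assumed example input:

```rust
// Illustrative test sketch, not from the diff. `to_pascal_case` and
// `variant_definition_name` are the private helpers defined above.
#[cfg(test)]
mod export_helper_tests {
    use super::to_pascal_case;
    use super::variant_definition_name;
    use serde_json::json;

    #[test]
    fn pascal_case_capitalizes_after_separators() {
        assert_eq!(to_pascal_case("login_chat_gpt"), "LoginChatGpt");
        assert_eq!(to_pascal_case("exec-command"), "ExecCommand");
        // Only `_` and `-` act as separators; other characters pass through,
        // so camelCase input just gets its first letter raised.
        assert_eq!(to_pascal_case("sendUserTurn"), "SendUserTurn");
    }

    #[test]
    fn variant_title_comes_from_method_literal() {
        // A `oneOf` variant whose `method` property is a single enum literal
        // (an assumed example; real method names come from the protocol).
        let variant = json!({
            "properties": { "method": { "enum": ["newConversation"] } }
        });
        assert_eq!(
            variant_definition_name("ClientRequest", &variant),
            Some("NewConversationRequest".to_string())
        );
    }
}
```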
@@ -1,13 +1,14 @@
//! We do not do true JSON-RPC 2.0, as we neither send nor expect the
//! "jsonrpc": "2.0" field.

+use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

pub const JSONRPC_VERSION: &str = "2.0";

-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, JsonSchema, TS)]
#[serde(untagged)]
pub enum RequestId {
    String(String),
@@ -18,7 +19,7 @@ pub enum RequestId {
pub type Result = serde_json::Value;

/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
#[serde(untagged)]
pub enum JSONRPCMessage {
    Request(JSONRPCRequest),
@@ -28,7 +29,7 @@ pub enum JSONRPCMessage {
}

/// A request that expects a response.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
@@ -37,7 +38,7 @@ pub struct JSONRPCRequest {
}

/// A notification which does not expect a response.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
@@ -45,20 +46,20 @@ pub struct JSONRPCNotification {
}

/// A successful (non-error) response to a request.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCResponse {
    pub id: RequestId,
    pub result: Result,
}

/// A response to a request that indicates an error occurred.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
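To make the "no `jsonrpc` field" point in the module doc comment concrete, here is a hedged sketch (not from the diff) of a message on the wire; the `params` payload shape is invented for illustration:

```rust
// Hypothetical wire-format sketch; the payload contents are invented.
use serde_json::json;

fn main() -> serde_json::Result<()> {
    let request = json!({
        "id": 1,
        "method": "initialize",
        "params": { "clientInfo": { "name": "example-client" } }
    });
    // The point of the module doc comment above: there is no
    // "jsonrpc": "2.0" key anywhere in the message.
    assert!(request.get("jsonrpc").is_none());
    println!("{}", serde_json::to_string(&request)?);
    Ok(())
}
```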
@@ -1,5 +1,9 @@
+mod export;
mod jsonrpc_lite;
mod protocol;

+pub use export::generate_json;
+pub use export::generate_ts;
+pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::*;
@@ -5,6 +5,8 @@ use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
use crate::RequestId;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::account::Account;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
@@ -13,17 +15,20 @@ use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxCommandAssessment;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use paste::paste;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, TS)]
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
|
||||
#[ts(type = "string")]
|
||||
pub struct GitSha(pub String);
|
||||
|
||||
@@ -33,7 +38,7 @@ impl GitSha {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, TS)]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
@@ -55,7 +60,7 @@ macro_rules! client_request_definitions {
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
@@ -77,10 +82,56 @@ macro_rules! client_request_definitions {
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<()> {
|
||||
$(
|
||||
crate::export::write_json_schema::<$response>(out_dir, stringify!($response))?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
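// A rough sketch of what one macro entry expands to, based on the variants
// visible in this diff (the exact generated code may differ):
//
//     #[serde(rename = "model/list")]
//     ListModels { request_id: RequestId, params: ListModelsParams },
//
// plus one `write_json_schema::<ListModelsResponse>(...)` call in
// `export_client_response_schemas` per declared `response:` type.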

client_request_definitions! {
    /// NEW APIs
    #[serde(rename = "model/list")]
    #[ts(rename = "model/list")]
    ListModels {
        params: ListModelsParams,
        response: ListModelsResponse,
    },

    #[serde(rename = "account/login")]
    #[ts(rename = "account/login")]
    LoginAccount {
        params: LoginAccountParams,
        response: LoginAccountResponse,
    },

    #[serde(rename = "account/logout")]
    #[ts(rename = "account/logout")]
    LogoutAccount {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: LogoutAccountResponse,
    },

    #[serde(rename = "account/rateLimits/read")]
    #[ts(rename = "account/rateLimits/read")]
    GetAccountRateLimits {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: GetAccountRateLimitsResponse,
    },

    #[serde(rename = "account/read")]
    #[ts(rename = "account/read")]
    GetAccount {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: GetAccountResponse,
    },

    /// DEPRECATED APIs below
    Initialize {
        params: InitializeParams,
        response: InitializeResponse,
@@ -174,13 +225,13 @@ client_request_definitions! {
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
@@ -189,13 +240,13 @@ pub struct ClientInfo {
    pub version: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
    /// Optional override for the model name (e.g. "o3", "o4-mini").
@@ -229,16 +280,12 @@ pub struct NewConversationParams {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub base_instructions: Option<String>,

    /// Whether to include the plan tool in the conversation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_plan_tool: Option<bool>,

    /// Whether to include the apply patch tool in the conversation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_apply_patch_tool: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
@@ -249,7 +296,7 @@ pub struct NewConversationResponse {
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ConversationId,
@@ -258,7 +305,7 @@ pub struct ResumeConversationResponse {
    pub initial_messages: Option<Vec<EventMsg>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsParams {
    /// Optional page size; defaults to a reasonable server-side value.
@@ -269,7 +316,7 @@ pub struct ListConversationsParams {
    pub cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ConversationId,
@@ -280,7 +327,7 @@ pub struct ConversationSummary {
    pub timestamp: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsResponse {
    pub items: Vec<ConversationSummary>,
@@ -290,7 +337,80 @@ pub struct ListConversationsResponse {
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListModelsParams {
    /// Optional page size; defaults to a reasonable server-side value.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub page_size: Option<usize>,
    /// Opaque pagination cursor returned by a previous call.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Model {
    pub id: String,
    pub model: String,
    pub display_name: String,
    pub description: String,
    pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
    pub default_reasoning_effort: ReasoningEffort,
    // Only one model should be marked as default.
    pub is_default: bool,
}
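// Illustrative serialized form of `Model` under the camelCase rename (all
// field values here are hypothetical):
//   { "id": "gpt-5", "model": "gpt-5", "displayName": "GPT-5",
//     "description": "...", "supportedReasoningEfforts": [...],
//     "defaultReasoningEffort": "medium", "isDefault": true }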

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ReasoningEffortOption {
    pub reasoning_effort: ReasoningEffort,
    pub description: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListModelsResponse {
    pub items: Vec<Model>,
    /// Opaque cursor to pass to the next call to continue after the last item.
    /// If `None`, there are no more items to return.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type")]
#[ts(tag = "type")]
pub enum LoginAccountParams {
    #[serde(rename = "apiKey")]
    #[ts(rename = "apiKey")]
    ApiKey {
        #[serde(rename = "apiKey")]
        #[ts(rename = "apiKey")]
        api_key: String,
    },
    #[serde(rename = "chatgpt")]
    #[ts(rename = "chatgpt")]
    ChatGpt,
}
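// Wire shapes for the tagged enum above, as exercised by the serialization
// tests later in this diff:
//   { "type": "apiKey", "apiKey": "secret" }
//   { "type": "chatgpt" }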

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginAccountResponse {
    /// Only set if the login method is ChatGPT.
    #[schemars(with = "String")]
    pub login_id: Option<Uuid>,

    /// URL the client should open in a browser to initiate the OAuth flow.
    /// Only set if the login method is ChatGPT.
    pub auth_url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutAccountResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    /// Absolute path to the rollout JSONL file.
@@ -300,78 +420,81 @@ pub struct ResumeConversationParams {
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

/// The [`ConversationId`] must match the `rollout_path`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    /// URL the client should open in a browser to initiate the OAuth flow.
    pub auth_url: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteResponse {
    pub sha: GitSha,
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
    #[schemars(with = "String")]
    pub login_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteParams {
    pub cwd: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptParams {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusParams {
    /// If true, include the current auth token (if available) in the response.
@@ -382,7 +505,7 @@ pub struct GetAuthStatusParams {
    pub refresh_token: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandParams {
    /// Command argv to execute.
@@ -398,7 +521,7 @@ pub struct ExecOneOffCommandParams {
    pub sandbox_policy: Option<SandboxPolicy>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
@@ -406,7 +529,19 @@ pub struct ExecOneOffCommandResponse {
    pub stderr: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAccountRateLimitsResponse {
    pub rate_limits: RateLimitSnapshot,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(transparent)]
#[ts(export)]
#[ts(type = "Account | null")]
pub struct GetAccountResponse(#[ts(type = "Account | null")] pub Option<Account>);
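// With `#[serde(transparent)]`, this newtype serializes as its inner value:
// the JSON payload is either an `Account` object or `null`, matching the
// `Account | null` TypeScript type declared above.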

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusResponse {
    #[serde(skip_serializing_if = "Option::is_none")]
@@ -421,13 +556,13 @@ pub struct GetAuthStatusResponse {
    pub requires_openai_auth: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserAgentResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    /// Note: `alleged_user_email` is not currently verified. We read it from
@@ -437,13 +572,13 @@ pub struct UserInfoResponse {
    pub alleged_user_email: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    /// If set to None, this means `model` should be cleared in config.toml.
@@ -455,14 +590,14 @@ pub struct SetDefaultModelParams {
    pub reasoning_effort: Option<ReasoningEffort>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}

/// UserSavedConfig contains a subset of the config. It is meant to expose mcp
/// client-configurable settings that can be specified in the NewConversation
/// and SendUserTurn requests.
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, TS)]
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserSavedConfig {
    /// Approvals
@@ -473,6 +608,11 @@ pub struct UserSavedConfig {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sandbox_settings: Option<SandboxSettings>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub forced_chatgpt_workspace_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub forced_login_method: Option<ForcedLoginMethod>,

    /// Model-specific configuration
    #[serde(skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
@@ -495,7 +635,7 @@ pub struct UserSavedConfig {
}

/// MCP representation of a [`codex_core::config_profile::ConfigProfile`].
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, TS)]
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Profile {
    pub model: Option<String>,
@@ -509,7 +649,7 @@ pub struct Profile {
    pub chatgpt_base_url: Option<String>,
}
/// MCP representation of a [`codex_core::config::ToolsToml`].
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, TS)]
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Tools {
    #[serde(skip_serializing_if = "Option::is_none")]
@@ -519,7 +659,7 @@ pub struct Tools {
}

/// MCP representation of a [`codex_core::config_types::SandboxWorkspaceWrite`].
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, TS)]
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    #[serde(default)]
@@ -532,14 +672,14 @@ pub struct SandboxSettings {
    pub exclude_slash_tmp: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ConversationId,
@@ -553,39 +693,42 @@ pub struct SendUserTurnParams {
    pub summary: ReasoningSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ConversationId,
}

#[derive(Serialize, Deserialize, Debug, Clone, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationResponse {
    pub abort_reason: TurnAbortReason,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ConversationId,
    #[serde(default)]
    pub experimental_raw_events: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationListenerParams {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
@@ -617,7 +760,7 @@ macro_rules! server_request_definitions {
) => {
    paste! {
    /// Request initiated from the server and sent to the client.
    #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
    #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
    #[serde(tag = "method", rename_all = "camelCase")]
    pub enum ServerRequest {
        $(
@@ -630,7 +773,7 @@ macro_rules! server_request_definitions {
    )*
    }

    #[derive(Debug, Clone, PartialEq)]
    #[derive(Debug, Clone, PartialEq, JsonSchema)]
    pub enum ServerRequestPayload {
        $( $variant([<$variant Params>]), )*
    }
@@ -652,6 +795,15 @@ macro_rules! server_request_definitions {
    }
    Ok(())
    }

    pub fn export_server_response_schemas(
        out_dir: &::std::path::Path,
    ) -> ::anyhow::Result<()> {
        paste! {
            $(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?;)*
        }
        Ok(())
    }
    };
}
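// `paste!` concatenates identifiers at macro-expansion time: for a variant
// such as `ExecCommandApproval`, `[<$variant Params>]` resolves to
// `ExecCommandApprovalParams` and `[<$variant Response>]` to
// `ExecCommandApprovalResponse`, so each request is tied to its param and
// response types by naming convention.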

@@ -670,7 +822,7 @@ server_request_definitions! {
    ExecCommandApproval,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
@@ -687,7 +839,7 @@ pub struct ApplyPatchApprovalParams {
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
@@ -698,20 +850,22 @@ pub struct ExecCommandApprovalParams {
    pub cwd: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub risk: Option<SandboxCommandAssessment>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchParams {
@@ -723,7 +877,7 @@ pub struct FuzzyFileSearchParams {
}

/// Superset of [`codex_file_search::FileMatch`]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResult {
    pub root: String,
    pub path: String,
@@ -733,21 +887,22 @@ pub struct FuzzyFileSearchResult {
    pub indices: Option<Vec<u32>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResponse {
    pub files: Vec<FuzzyFileSearchResult>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptCompleteNotification {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub success: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    /// Name left as session_id instead of conversation_id for backwards compatibility.
@@ -775,7 +930,7 @@ pub struct SessionConfiguredNotification {
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AuthStatusChangeNotification {
    /// Current authentication method; omitted if signed out.
@@ -784,10 +939,17 @@ pub struct AuthStatusChangeNotification {
}

/// Notification sent from the server to the client.
#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)]
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ServerNotification {
    /// NEW NOTIFICATIONS
    #[serde(rename = "account/rateLimits/updated")]
    #[ts(rename = "account/rateLimits/updated")]
    #[strum(serialize = "account/rateLimits/updated")]
    AccountRateLimitsUpdated(RateLimitSnapshot),

    /// DEPRECATED NOTIFICATIONS below
    /// Authentication status changed
    AuthStatusChange(AuthStatusChangeNotification),

@@ -801,6 +963,7 @@ pub enum ServerNotification {
impl ServerNotification {
    pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
        match self {
            ServerNotification::AccountRateLimitsUpdated(params) => serde_json::to_value(params),
            ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
            ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
            ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
@@ -817,7 +980,7 @@ impl TryFrom<JSONRPCNotification> for ServerNotification {
}

/// Notification sent from the client to the server.
#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)]
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ClientNotification {
@@ -843,7 +1006,6 @@ mod tests {
            sandbox: None,
            config: None,
            base_instructions: None,
            include_plan_tool: None,
            include_apply_patch_tool: None,
        },
    };
@@ -906,6 +1068,7 @@ mod tests {
        command: vec!["echo".to_string(), "hello".to_string()],
        cwd: PathBuf::from("/tmp"),
        reason: Some("because tests".to_string()),
        risk: None,
        parsed_cmd: vec![ParsedCommand::Unknown {
            cmd: "echo hello".to_string(),
        }],
@@ -940,4 +1103,110 @@ mod tests {
        assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
        Ok(())
    }

    #[test]
    fn serialize_get_account_rate_limits() -> Result<()> {
        let request = ClientRequest::GetAccountRateLimits {
            request_id: RequestId::Integer(1),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/rateLimits/read",
                "id": 1,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_api_key() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(2),
            params: LoginAccountParams::ApiKey {
                api_key: "secret".to_string(),
            },
        };
        assert_eq!(
            json!({
                "method": "account/login",
                "id": 2,
                "params": {
                    "type": "apiKey",
                    "apiKey": "secret"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_chatgpt() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(3),
            params: LoginAccountParams::ChatGpt,
        };
        assert_eq!(
            json!({
                "method": "account/login",
                "id": 3,
                "params": {
                    "type": "chatgpt"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_logout() -> Result<()> {
        let request = ClientRequest::LogoutAccount {
            request_id: RequestId::Integer(4),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/logout",
                "id": 4,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account() -> Result<()> {
        let request = ClientRequest::GetAccount {
            request_id: RequestId::Integer(5),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/read",
                "id": 5,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_list_models() -> Result<()> {
        let request = ClientRequest::ListModels {
            request_id: RequestId::Integer(6),
            params: ListModelsParams::default(),
        };
        assert_eq!(
            json!({
                "method": "model/list",
                "id": 6,
                "params": {}
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }
}

@@ -19,11 +19,13 @@ anyhow = { workspace = true }
codex-arg0 = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
codex-file-search = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
@@ -35,6 +37,7 @@ tokio = { workspace = true, features = [
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
opentelemetry-appender-tracing = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
@@ -44,6 +47,7 @@ base64 = { workspace = true }
core_test_support = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
wiremock = { workspace = true }

@@ -1,6 +1,7 @@
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::fuzzy_file_search::run_fuzzy_file_search;
use crate::models::supported_models;
use crate::outgoing_message::OutgoingMessageSender;
use crate::outgoing_message::OutgoingNotification;
use codex_app_server_protocol::AddConversationListenerParams;
@@ -9,6 +10,7 @@ use codex_app_server_protocol::ApplyPatchApprovalParams;
use codex_app_server_protocol::ApplyPatchApprovalResponse;
use codex_app_server_protocol::ArchiveConversationParams;
use codex_app_server_protocol::ArchiveConversationResponse;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::AuthStatusChangeNotification;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ConversationSummary;
@@ -18,6 +20,7 @@ use codex_app_server_protocol::ExecOneOffCommandParams;
use codex_app_server_protocol::ExecOneOffCommandResponse;
use codex_app_server_protocol::FuzzyFileSearchParams;
use codex_app_server_protocol::FuzzyFileSearchResponse;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::GetUserAgentResponse;
use codex_app_server_protocol::GetUserSavedConfigResponse;
use codex_app_server_protocol::GitDiffToRemoteResponse;
@@ -27,6 +30,8 @@ use codex_app_server_protocol::InterruptConversationResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListConversationsResponse;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::ListModelsResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
@@ -49,6 +54,7 @@ use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
use codex_app_server_protocol::UserInfoResponse;
use codex_app_server_protocol::UserSavedConfig;
use codex_backend_client::Client as BackendClient;
use codex_core::AuthManager;
use codex_core::CodexConversation;
use codex_core::ConversationManager;
@@ -77,17 +83,18 @@ use codex_core::protocol::ApplyPatchApprovalRequestEvent;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::ExecApprovalRequestEvent;
use codex_core::protocol::InputItem as CoreInputItem;
use codex_core::protocol::Op;
use codex_core::protocol::ReviewDecision;
use codex_login::ServerOptions as LoginServerOptions;
use codex_login::ShutdownHandle;
use codex_login::run_login_server;
use codex_protocol::ConversationId;
use codex_protocol::models::ContentItem;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InputMessageKind;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
use codex_protocol::user_input::UserInput as CoreInputItem;
use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::ffi::OsStr;
@@ -106,7 +113,6 @@ use uuid::Uuid;

// Duration before a ChatGPT login attempt is abandoned.
const LOGIN_CHATGPT_TIMEOUT: Duration = Duration::from_secs(10 * 60);

struct ActiveLogin {
    shutdown_handle: ShutdownHandle,
    login_id: Uuid,
@@ -167,6 +173,30 @@ impl CodexMessageProcessor {
            ClientRequest::ListConversations { request_id, params } => {
                self.handle_list_conversations(request_id, params).await;
            }
            ClientRequest::ListModels { request_id, params } => {
                self.list_models(request_id, params).await;
            }
            ClientRequest::LoginAccount {
                request_id,
                params: _,
            } => {
                self.send_unimplemented_error(request_id, "account/login")
                    .await;
            }
            ClientRequest::LogoutAccount {
                request_id,
                params: _,
            } => {
                self.send_unimplemented_error(request_id, "account/logout")
                    .await;
            }
            ClientRequest::GetAccount {
                request_id,
                params: _,
            } => {
                self.send_unimplemented_error(request_id, "account/read")
                    .await;
            }
            ClientRequest::ResumeConversation { request_id, params } => {
                self.handle_resume_conversation(request_id, params).await;
            }
@@ -239,10 +269,38 @@ impl CodexMessageProcessor {
            ClientRequest::ExecOneOffCommand { request_id, params } => {
                self.exec_one_off_command(request_id, params).await;
            }
            ClientRequest::GetAccountRateLimits {
                request_id,
                params: _,
            } => {
                self.get_account_rate_limits(request_id).await;
            }
        }
    }

    async fn send_unimplemented_error(&self, request_id: RequestId, method: &str) {
        let error = JSONRPCErrorError {
            code: INTERNAL_ERROR_CODE,
            message: format!("{method} is not implemented yet"),
            data: None,
        };
        self.outgoing.send_error(request_id, error).await;
    }

    async fn login_api_key(&mut self, request_id: RequestId, params: LoginApiKeyParams) {
        if matches!(
            self.config.forced_login_method,
            Some(ForcedLoginMethod::Chatgpt)
        ) {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "API key login is disabled. Use ChatGPT login instead.".to_string(),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
            return;
        }

        {
            let mut guard = self.active_login.lock().await;
            if let Some(active) = guard.take() {
@@ -278,9 +336,23 @@ impl CodexMessageProcessor {
    async fn login_chatgpt(&mut self, request_id: RequestId) {
        let config = self.config.as_ref();

        if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "ChatGPT login is disabled. Use API key login instead.".to_string(),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
            return;
        }

        let opts = LoginServerOptions {
            open_browser: false,
            ..LoginServerOptions::new(config.codex_home.clone(), CLIENT_ID.to_string())
            ..LoginServerOptions::new(
                config.codex_home.clone(),
                CLIENT_ID.to_string(),
                config.forced_chatgpt_workspace_id.clone(),
            )
        };

        enum LoginChatGptReply {
@@ -499,6 +571,53 @@ impl CodexMessageProcessor {
        self.outgoing.send_response(request_id, response).await;
    }

    async fn get_account_rate_limits(&self, request_id: RequestId) {
        match self.fetch_account_rate_limits().await {
            Ok(rate_limits) => {
                let response = GetAccountRateLimitsResponse { rate_limits };
                self.outgoing.send_response(request_id, response).await;
            }
            Err(error) => {
                self.outgoing.send_error(request_id, error).await;
            }
        }
    }

    async fn fetch_account_rate_limits(&self) -> Result<RateLimitSnapshot, JSONRPCErrorError> {
        let Some(auth) = self.auth_manager.auth() else {
            return Err(JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "codex account authentication required to read rate limits".to_string(),
                data: None,
            });
        };

        if auth.mode != AuthMode::ChatGPT {
            return Err(JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "chatgpt authentication required to read rate limits".to_string(),
                data: None,
            });
        }

        let client = BackendClient::from_auth(self.config.chatgpt_base_url.clone(), &auth)
            .await
            .map_err(|err| JSONRPCErrorError {
                code: INTERNAL_ERROR_CODE,
                message: format!("failed to construct backend client: {err}"),
                data: None,
            })?;

        client
            .get_rate_limits()
            .await
            .map_err(|err| JSONRPCErrorError {
                code: INTERNAL_ERROR_CODE,
                message: format!("failed to fetch codex rate limits: {err}"),
                data: None,
            })
    }

    async fn get_user_saved_config(&self, request_id: RequestId) {
        let toml_value = match load_config_as_toml(&self.config.codex_home).await {
            Ok(val) => val,
@@ -603,6 +722,7 @@ impl CodexMessageProcessor {
            env,
            with_escalated_permissions: None,
            justification: None,
            arg0: None,
        };

        let effective_policy = params
@@ -745,6 +865,58 @@ impl CodexMessageProcessor {
        self.outgoing.send_response(request_id, response).await;
    }

    async fn list_models(&self, request_id: RequestId, params: ListModelsParams) {
        let ListModelsParams { page_size, cursor } = params;
        let models = supported_models();
        let total = models.len();

        if total == 0 {
            let response = ListModelsResponse {
                items: Vec::new(),
                next_cursor: None,
            };
            self.outgoing.send_response(request_id, response).await;
            return;
        }

        let effective_page_size = page_size.unwrap_or(total).max(1).min(total);
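        // Equivalent to clamping the requested page size into `1..=total`:
        // e.g. with 5 models, `page_size: Some(2)` yields pages of 2, and
        // `page_size: None` returns all 5 in one page.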
        let start = match cursor {
            Some(cursor) => match cursor.parse::<usize>() {
                Ok(idx) => idx,
                Err(_) => {
                    let error = JSONRPCErrorError {
                        code: INVALID_REQUEST_ERROR_CODE,
                        message: format!("invalid cursor: {cursor}"),
                        data: None,
                    };
                    self.outgoing.send_error(request_id, error).await;
                    return;
                }
            },
            None => 0,
        };

        if start > total {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: format!("cursor {start} exceeds total models {total}"),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
            return;
        }

        let end = start.saturating_add(effective_page_size).min(total);
        let items = models[start..end].to_vec();
        let next_cursor = if end < total {
            Some(end.to_string())
        } else {
            None
        };
        let response = ListModelsResponse { items, next_cursor };
        self.outgoing.send_response(request_id, response).await;
    }
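    // Example paging sequence under the cursor scheme above (assuming three
    // models and `pageSize: 2`): the first `model/list` call returns items
    // 0..2 with `nextCursor: "2"`; passing `cursor: "2"` returns the last
    // item and omits `nextCursor`.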

    async fn handle_resume_conversation(
        &self,
        request_id: RequestId,
@@ -797,18 +969,9 @@ impl CodexMessageProcessor {
            },
        ))
        .await;
        let initial_messages = session_configured.initial_messages.map(|msgs| {
            msgs.into_iter()
                .filter(|event| {
                    // Don't send non-plain user messages (like user instructions
                    // or environment context) back so they don't get rendered.
                    if let EventMsg::UserMessage(user_message) = event {
                        return matches!(user_message.kind, Some(InputMessageKind::Plain));
                    }
                    true
                })
                .collect()
        });
        let initial_messages = session_configured
            .initial_messages
            .map(|msgs| msgs.into_iter().collect());

        // Reply with conversation id + model and initial messages (when present)
        let response = codex_app_server_protocol::ResumeConversationResponse {
@@ -1093,7 +1256,10 @@ impl CodexMessageProcessor {
        request_id: RequestId,
        params: AddConversationListenerParams,
    ) {
        let AddConversationListenerParams { conversation_id } = params;
        let AddConversationListenerParams {
            conversation_id,
            experimental_raw_events,
        } = params;
        let Ok(conversation) = self
            .conversation_manager
            .get_conversation(conversation_id)
@@ -1130,6 +1296,11 @@ impl CodexMessageProcessor {
            }
        };

        if let EventMsg::RawResponseItem(_) = &event.msg
            && !experimental_raw_events {
            continue;
        }

        // For now, we send a notification for every event,
        // JSON-serializing the `Event` as-is, but these should
        // be migrated to be variants of `ServerNotification`
@@ -1284,6 +1455,7 @@ async fn apply_bespoke_event_handling(
            command,
            cwd,
            reason,
            risk,
            parsed_cmd,
        }) => {
            let params = ExecCommandApprovalParams {
@@ -1292,6 +1464,7 @@ async fn apply_bespoke_event_handling(
                command,
                cwd,
                reason,
                risk,
                parsed_cmd,
            };
            let rx = outgoing
@@ -1303,6 +1476,15 @@ async fn apply_bespoke_event_handling(
                on_exec_approval_response(event_id, rx, conversation).await;
            });
        }
        EventMsg::TokenCount(token_count_event) => {
            if let Some(rate_limits) = token_count_event.rate_limits {
                outgoing
                    .send_server_notification(ServerNotification::AccountRateLimitsUpdated(
                        rate_limits,
                    ))
                    .await;
            }
        }
        // If this is a TurnAborted, reply to any pending interrupt requests.
        EventMsg::TurnAborted(turn_aborted_event) => {
            let pending = {
@@ -1335,7 +1517,6 @@ async fn derive_config_from_params(
        sandbox: sandbox_mode,
        config: cli_overrides,
        base_instructions,
        include_plan_tool,
        include_apply_patch_tool,
    } = params;
    let overrides = ConfigOverrides {
@@ -1348,11 +1529,12 @@ async fn derive_config_from_params(
        model_provider: None,
        codex_linux_sandbox_exe,
        base_instructions,
        include_plan_tool,
        include_apply_patch_tool,
        include_view_image_tool: None,
        show_raw_agent_reasoning: None,
        tools_web_search_request: None,
        experimental_sandbox_command_assessment: None,
        additional_writable_roots: Vec::new(),
    };

    let cli_overrides = cli_overrides
@@ -1454,18 +1636,8 @@ fn extract_conversation_summary(
    let preview = head
        .iter()
        .filter_map(|value| serde_json::from_value::<ResponseItem>(value.clone()).ok())
        .find_map(|item| match item {
            ResponseItem::Message { content, .. } => {
                content.into_iter().find_map(|content| match content {
                    ContentItem::InputText { text } => {
                        match InputMessageKind::from(("user", &text)) {
                            InputMessageKind::Plain => Some(text),
                            _ => None,
                        }
                    }
                    _ => None,
                })
            }
        .find_map(|item| match codex_core::parse_turn_item(&item) {
            Some(TurnItem::UserMessage(user)) => Some(user.message()),
            _ => None,
        })?;

@@ -46,6 +46,7 @@ pub(crate) async fn run_fuzzy_file_search(
        threads,
        cancel_flag,
        COMPUTE_INDICES,
        true,
    ) {
        Ok(res) => Ok((root, res)),
        Err(err) => Err((root, err)),

@@ -1,13 +1,16 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]

use std::io::ErrorKind;
use std::io::Result as IoResult;
use std::path::PathBuf;

use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use std::io::ErrorKind;
use std::io::Result as IoResult;
use std::path::PathBuf;

use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::JSONRPCMessage;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
@@ -18,15 +21,15 @@ use tracing::debug;
use tracing::error;
use tracing::info;
use tracing_subscriber::EnvFilter;

use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use tracing_subscriber::Layer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;

mod codex_message_processor;
mod error_code;
mod fuzzy_file_search;
mod message_processor;
mod models;
mod outgoing_message;

/// Size of the bounded channels used to communicate between tasks. The value
@@ -38,13 +41,6 @@ pub async fn run_main(
    codex_linux_sandbox_exe: Option<PathBuf>,
    cli_config_overrides: CliConfigOverrides,
) -> IoResult<()> {
    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(EnvFilter::from_default_env())
        .init();

    // Set up channels.
    let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
    let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
@@ -86,6 +82,29 @@ pub async fn run_main(
        std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
    })?;

    let otel =
        codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
            std::io::Error::new(
                ErrorKind::InvalidData,
                format!("error loading otel config: {e}"),
            )
        })?;

    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.
    let stderr_fmt = tracing_subscriber::fmt::layer()
        .with_writer(std::io::stderr)
        .with_filter(EnvFilter::from_default_env());

    let _ = tracing_subscriber::registry()
        .with(stderr_fmt)
        .with(otel.as_ref().map(|provider| {
            OpenTelemetryTracingBridge::new(&provider.logger).with_filter(
                tracing_subscriber::filter::filter_fn(codex_core::otel_init::codex_export_filter),
            )
        }))
        .try_init();

    // Task: process incoming messages.
    let processor_handle = tokio::spawn({
        let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);

codex-rs/app-server/src/models.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
use codex_app_server_protocol::Model;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_common::model_presets::ModelPreset;
use codex_common::model_presets::ReasoningEffortPreset;
use codex_common::model_presets::builtin_model_presets;

pub fn supported_models() -> Vec<Model> {
    builtin_model_presets(None)
        .into_iter()
        .map(model_from_preset)
        .collect()
}

fn model_from_preset(preset: ModelPreset) -> Model {
    Model {
        id: preset.id.to_string(),
        model: preset.model.to_string(),
        display_name: preset.display_name.to_string(),
        description: preset.description.to_string(),
        supported_reasoning_efforts: reasoning_efforts_from_preset(
            preset.supported_reasoning_efforts,
        ),
        default_reasoning_effort: preset.default_reasoning_effort,
        is_default: preset.is_default,
    }
}

fn reasoning_efforts_from_preset(
    efforts: &'static [ReasoningEffortPreset],
) -> Vec<ReasoningEffortOption> {
    efforts
        .iter()
        .map(|preset| ReasoningEffortOption {
            reasoning_effort: preset.effort,
            description: preset.description.to_string(),
        })
        .collect()
}

@@ -142,6 +142,8 @@ pub(crate) struct OutgoingError {
#[cfg(test)]
mod tests {
    use codex_app_server_protocol::LoginChatGptCompleteNotification;
    use codex_protocol::protocol::RateLimitSnapshot;
    use codex_protocol::protocol::RateLimitWindow;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use uuid::Uuid;
@@ -171,4 +173,34 @@ mod tests {
            "ensure the strum macros serialize the method field correctly"
        );
    }

    #[test]
    fn verify_account_rate_limits_notification_serialization() {
        let notification = ServerNotification::AccountRateLimitsUpdated(RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 25.0,
                window_minutes: Some(15),
                resets_at: Some(123),
            }),
            secondary: None,
        });

        let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
        assert_eq!(
            json!({
                "method": "account/rateLimits/updated",
                "params": {
                    "primary": {
                        "used_percent": 25.0,
                        "window_minutes": 15,
                        "resets_at": 123,
                    },
                    "secondary": null,
                },
            }),
            serde_json::to_value(jsonrpc_notification)
                .expect("ensure the notification serializes correctly"),
            "ensure the notification serializes correctly"
        );
    }
}

@@ -9,7 +9,10 @@ path = "lib.rs"
[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [

131  codex-rs/app-server/tests/common/auth_fixtures.rs  Normal file
@@ -0,0 +1,131 @@
use std::path::Path;

use anyhow::Context;
use anyhow::Result;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::DateTime;
use chrono::Utc;
use codex_core::auth::AuthDotJson;
use codex_core::auth::get_auth_file;
use codex_core::auth::write_auth_json;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_id_token;
use serde_json::json;

/// Builder for writing a fake ChatGPT auth.json in tests.
#[derive(Debug, Clone)]
pub struct ChatGptAuthFixture {
    access_token: String,
    refresh_token: String,
    account_id: Option<String>,
    claims: ChatGptIdTokenClaims,
    last_refresh: Option<Option<DateTime<Utc>>>,
}

impl ChatGptAuthFixture {
    pub fn new(access_token: impl Into<String>) -> Self {
        Self {
            access_token: access_token.into(),
            refresh_token: "refresh-token".to_string(),
            account_id: None,
            claims: ChatGptIdTokenClaims::default(),
            last_refresh: None,
        }
    }

    pub fn refresh_token(mut self, refresh_token: impl Into<String>) -> Self {
        self.refresh_token = refresh_token.into();
        self
    }

    pub fn account_id(mut self, account_id: impl Into<String>) -> Self {
        self.account_id = Some(account_id.into());
        self
    }

    pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
        self.claims.plan_type = Some(plan_type.into());
        self
    }

    pub fn email(mut self, email: impl Into<String>) -> Self {
        self.claims.email = Some(email.into());
        self
    }

    pub fn last_refresh(mut self, last_refresh: Option<DateTime<Utc>>) -> Self {
        self.last_refresh = Some(last_refresh);
        self
    }

    pub fn claims(mut self, claims: ChatGptIdTokenClaims) -> Self {
        self.claims = claims;
        self
    }
}

#[derive(Debug, Clone, Default)]
pub struct ChatGptIdTokenClaims {
    pub email: Option<String>,
    pub plan_type: Option<String>,
}

impl ChatGptIdTokenClaims {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn email(mut self, email: impl Into<String>) -> Self {
        self.email = Some(email.into());
        self
    }

    pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
        self.plan_type = Some(plan_type.into());
        self
    }
}

pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
    let header = json!({ "alg": "none", "typ": "JWT" });
    let mut payload = serde_json::Map::new();
    if let Some(email) = &claims.email {
        payload.insert("email".to_string(), json!(email));
    }
    if let Some(plan_type) = &claims.plan_type {
        payload.insert(
            "https://api.openai.com/auth".to_string(),
            json!({ "chatgpt_plan_type": plan_type }),
        );
    }
    let payload = serde_json::Value::Object(payload);

    let header_b64 =
        URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header).context("serialize jwt header")?);
    let payload_b64 =
        URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload).context("serialize jwt payload")?);
    let signature_b64 = URL_SAFE_NO_PAD.encode(b"signature");
    Ok(format!("{header_b64}.{payload_b64}.{signature_b64}"))
}

pub fn write_chatgpt_auth(codex_home: &Path, fixture: ChatGptAuthFixture) -> Result<()> {
    let id_token_raw = encode_id_token(&fixture.claims)?;
    let id_token = parse_id_token(&id_token_raw).context("parse id token")?;
    let tokens = TokenData {
        id_token,
        access_token: fixture.access_token,
        refresh_token: fixture.refresh_token,
        account_id: fixture.account_id,
    };

    let last_refresh = fixture.last_refresh.unwrap_or_else(|| Some(Utc::now()));

    let auth = AuthDotJson {
        openai_api_key: None,
        tokens: Some(tokens),
        last_refresh,
    };

    write_auth_json(&get_auth_file(codex_home), &auth).context("write auth.json")
}
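Note: in practice a test writes the whole fixture in one expression, as the rate-limit suite later in this diff does; a minimal sketch (temp-dir setup assumed):

    // Produces CODEX_HOME/auth.json whose id_token is an unsigned ("alg": "none") JWT.
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .plan_type("pro"),
    )?;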
@@ -1,7 +1,12 @@
mod auth_fixtures;
mod mcp_process;
mod mock_model_server;
mod responses;

pub use auth_fixtures::ChatGptAuthFixture;
pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_chat_completions_server;
@@ -21,6 +21,7 @@ use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InterruptConversationParams;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RemoveConversationListenerParams;
@@ -236,6 +237,11 @@ impl McpProcess {
        self.send_request("getUserAgent", None).await
    }

    /// Send an `account/rateLimits/read` JSON-RPC request.
    pub async fn send_get_account_rate_limits_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("account/rateLimits/read", None).await
    }

    /// Send a `userInfo` JSON-RPC request.
    pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("userInfo", None).await
@@ -259,6 +265,15 @@ impl McpProcess {
        self.send_request("listConversations", params).await
    }

    /// Send a `model/list` JSON-RPC request.
    pub async fn send_list_models_request(
        &mut self,
        params: ListModelsParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("model/list", params).await
    }

    /// Send a `resumeConversation` JSON-RPC request.
    pub async fn send_resume_conversation_request(
        &mut self,
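Note: for orientation, the JSON-RPC message that send_list_models_request implies on the wire — illustrative only; the id is arbitrary and the params field casing is assumed to follow ListModelsParams's serde derive, which this diff does not show:

    // Hypothetical wire shape for a model/list request.
    let request = serde_json::json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "model/list",
        "params": { "page_size": 1, "cursor": null },
    });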
@@ -5,6 +5,7 @@ use app_test_support::to_response;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::GetAuthStatusResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
@@ -57,6 +58,19 @@ sandbox_mode = "danger-full-access"
    )
}

fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_login_method = "{forced_method}"
"#
    );
    std::fs::write(config_toml, contents)
}

async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
    let request_id = mcp
        .send_login_api_key_request(LoginApiKeyParams {
@@ -221,3 +235,38 @@ async fn get_auth_status_with_api_key_no_include_token() {
    assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
    assert!(status.auth_token.is_none(), "token must be omitted");
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_api_key_rejected_when_forced_chatgpt() {
    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
    create_config_toml_forced_login(codex_home.path(), "chatgpt")
        .unwrap_or_else(|err| panic!("write config.toml: {err}"));

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let request_id = mcp
        .send_login_api_key_request(LoginApiKeyParams {
            api_key: "sk-test-key".to_string(),
        })
        .await
        .expect("send loginApiKey");

    let err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("loginApiKey error timeout")
    .expect("loginApiKey error");

    assert_eq!(
        err.error.message,
        "API key login is disabled. Use ChatGPT login instead."
    );
}
@@ -30,7 +30,6 @@ use codex_protocol::config_types::SandboxMode;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::InputMessageKind;
use pretty_assertions::assert_eq;
use std::env;
use tempfile::TempDir;
@@ -104,7 +103,10 @@ async fn test_codex_jsonrpc_conversation_flow() {

    // 2) addConversationListener
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await
        .expect("send addConversationListener");
    let add_listener_resp: JSONRPCResponse = timeout(
@@ -253,7 +255,10 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {

    // 2) addConversationListener
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await
        .expect("send addConversationListener");
    let _: AddConversationSubscriptionResponse =
@@ -312,6 +317,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
            ],
            cwd: working_directory.clone(),
            reason: None,
            risk: None,
            parsed_cmd: vec![ParsedCommand::Unknown {
                cmd: "python3 -c 'print(42)'".to_string()
            }],
@@ -459,7 +465,10 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
        .expect("deserialize newConversation response");

    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await
        .expect("send addConversationListener");
    timeout(
@@ -528,43 +537,6 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
        .expect("sendUserTurn 2 timeout")
        .expect("sendUserTurn 2 resp");

    let mut env_message: Option<String> = None;
    let second_cwd_str = second_cwd.to_string_lossy().into_owned();
    for _ in 0..10 {
        let notification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("codex/event/user_message"),
        )
        .await
        .expect("user_message timeout")
        .expect("user_message notification");
        let params = notification
            .params
            .clone()
            .expect("user_message should include params");
        let event: Event = serde_json::from_value(params).expect("deserialize user_message event");
        if let EventMsg::UserMessage(user) = event.msg
            && matches!(user.kind, Some(InputMessageKind::EnvironmentContext))
            && user.message.contains(&second_cwd_str)
        {
            env_message = Some(user.message);
            break;
        }
    }
    let env_message = env_message.expect("expected environment context update");
    assert!(
        env_message.contains("<sandbox_mode>danger-full-access</sandbox_mode>"),
        "env context should reflect new sandbox mode: {env_message}"
    );
    assert!(
        env_message.contains("<network_access>enabled</network_access>"),
        "env context should enable network access for danger-full-access policy: {env_message}"
    );
    assert!(
        env_message.contains(&second_cwd_str),
        "env context should include updated cwd: {env_message}"
    );

    let exec_begin_notification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
@@ -11,6 +11,7 @@ use codex_app_server_protocol::SandboxSettings;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_core::protocol::AskForApproval;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -33,6 +34,8 @@ model_reasoning_summary = "detailed"
model_reasoning_effort = "high"
model_verbosity = "medium"
profile = "test"
forced_chatgpt_workspace_id = "12345678-0000-0000-0000-000000000000"
forced_login_method = "chatgpt"

[sandbox_workspace_write]
writable_roots = ["/tmp"]
@@ -92,6 +95,8 @@ async fn get_config_toml_parses_all_fields() {
            exclude_tmpdir_env_var: Some(true),
            exclude_slash_tmp: Some(true),
        }),
        forced_chatgpt_workspace_id: Some("12345678-0000-0000-0000-000000000000".into()),
        forced_login_method: Some(ForcedLoginMethod::Chatgpt),
        model: Some("gpt-5-codex".into()),
        model_reasoning_effort: Some(ReasoningEffort::High),
        model_reasoning_summary: Some(ReasoningSummary::Detailed),
@@ -149,6 +154,8 @@ async fn get_config_toml_empty() {
        approval_policy: None,
        sandbox_mode: None,
        sandbox_settings: None,
        forced_chatgpt_workspace_id: None,
        forced_login_method: None,
        model: None,
        model_reasoning_effort: None,
        model_reasoning_summary: None,
@@ -67,7 +67,10 @@ async fn test_conversation_create_and_send_message_ok() {

    // Add a listener so we receive notifications for this conversation (not strictly required for this test).
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await
        .expect("send addConversationListener");
    let _sub: AddConversationSubscriptionResponse =
@@ -88,7 +88,10 @@ async fn shell_command_interruption() -> anyhow::Result<()> {

    // 2) addConversationListener
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await?;
    let _add_listener_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
@@ -7,11 +7,13 @@ use codex_app_server_protocol::CancelLoginChatGptParams;
use codex_app_server_protocol::CancelLoginChatGptResponse;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::GetAuthStatusResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::LogoutChatGptResponse;
use codex_app_server_protocol::RequestId;
use codex_login::login_with_api_key;
use serial_test::serial;
use tempfile::TempDir;
use tokio::time::timeout;

@@ -93,6 +95,8 @@ async fn logout_chatgpt_removes_auth() {
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_and_cancel_chatgpt() {
    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
    create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
@@ -144,3 +148,99 @@ async fn login_and_cancel_chatgpt() {
        eprintln!("warning: did not observe login_chat_gpt_complete notification after cancel");
    }
}

fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_login_method = "{forced_method}"
"#
    );
    std::fs::write(config_toml, contents)
}

fn create_config_toml_forced_workspace(
    codex_home: &Path,
    workspace_id: &str,
) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_chatgpt_workspace_id = "{workspace_id}"
"#
    );
    std::fs::write(config_toml, contents)
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_chatgpt_rejected_when_forced_api() {
    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
    create_config_toml_forced_login(codex_home.path(), "api")
        .unwrap_or_else(|err| panic!("write config.toml: {err}"));

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let request_id = mcp
        .send_login_chat_gpt_request()
        .await
        .expect("send loginChatGpt");
    let err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("loginChatGpt error timeout")
    .expect("loginChatGpt error");

    assert_eq!(
        err.error.message,
        "ChatGPT login is disabled. Use API key login instead."
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_chatgpt_includes_forced_workspace_query_param() {
    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
    create_config_toml_forced_workspace(codex_home.path(), "ws-forced")
        .unwrap_or_else(|err| panic!("write config.toml: {err}"));

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let request_id = mcp
        .send_login_chat_gpt_request()
        .await
        .expect("send loginChatGpt");
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("loginChatGpt timeout")
    .expect("loginChatGpt response");

    let login: LoginChatGptResponse = to_response(resp).expect("deserialize login resp");
    assert!(
        login.auth_url.contains("allowed_workspace_id=ws-forced"),
        "auth URL should include forced workspace"
    );
}
@@ -7,6 +7,8 @@ mod fuzzy_file_search;
mod interrupt;
mod list_resume;
mod login;
mod model_list;
mod rate_limits;
mod send_message;
mod set_default_model;
mod user_agent;
183  codex-rs/app-server/tests/suite/model_list.rs  Normal file
@@ -0,0 +1,183 @@
use std::time::Duration;

use anyhow::Result;
use anyhow::anyhow;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::ListModelsResponse;
use codex_app_server_protocol::Model;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_app_server_protocol::RequestId;
use codex_protocol::config_types::ReasoningEffort;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_list_models_request(ListModelsParams {
            page_size: Some(100),
            cursor: None,
        })
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let ListModelsResponse { items, next_cursor } = to_response::<ListModelsResponse>(response)?;

    let expected_models = vec![
        Model {
            id: "gpt-5-codex".to_string(),
            model: "gpt-5-codex".to_string(),
            display_name: "gpt-5-codex".to_string(),
            description: "Optimized for coding tasks with many tools.".to_string(),
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Low,
                    description: "Fastest responses with limited reasoning".to_string(),
                },
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Medium,
                    description: "Dynamically adjusts reasoning based on the task".to_string(),
                },
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems"
                        .to_string(),
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            is_default: true,
        },
        Model {
            id: "gpt-5".to_string(),
            model: "gpt-5".to_string(),
            display_name: "gpt-5".to_string(),
            description: "Broad world knowledge with strong general reasoning.".to_string(),
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Minimal,
                    description: "Fastest responses with little reasoning".to_string(),
                },
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Low,
                    description: "Balances speed with some reasoning; useful for straightforward \
                                  queries and short explanations"
                        .to_string(),
                },
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Medium,
                    description: "Provides a solid balance of reasoning depth and latency for \
                                  general-purpose tasks"
                        .to_string(),
                },
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems"
                        .to_string(),
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            is_default: false,
        },
    ];

    assert_eq!(items, expected_models);
    assert!(next_cursor.is_none());
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_pagination_works() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let first_request = mcp
        .send_list_models_request(ListModelsParams {
            page_size: Some(1),
            cursor: None,
        })
        .await?;

    let first_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(first_request)),
    )
    .await??;

    let ListModelsResponse {
        items: first_items,
        next_cursor: first_cursor,
    } = to_response::<ListModelsResponse>(first_response)?;

    assert_eq!(first_items.len(), 1);
    assert_eq!(first_items[0].id, "gpt-5-codex");
    let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;

    let second_request = mcp
        .send_list_models_request(ListModelsParams {
            page_size: Some(1),
            cursor: Some(next_cursor.clone()),
        })
        .await?;

    let second_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(second_request)),
    )
    .await??;

    let ListModelsResponse {
        items: second_items,
        next_cursor: second_cursor,
    } = to_response::<ListModelsResponse>(second_response)?;

    assert_eq!(second_items.len(), 1);
    assert_eq!(second_items[0].id, "gpt-5");
    assert!(second_cursor.is_none());
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_rejects_invalid_cursor() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_list_models_request(ListModelsParams {
            page_size: None,
            cursor: Some("invalid".to_string()),
        })
        .await?;

    let error: JSONRPCError = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await??;

    assert_eq!(error.id, RequestId::Integer(request_id));
    assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
    assert_eq!(error.error.message, "invalid cursor: invalid");
    Ok(())
}
215  codex-rs/app-server/tests/suite/rate_limits.rs  Normal file
@@ -0,0 +1,215 @@
use anyhow::Context;
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::RequestId;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_account_rate_limits_requires_auth() -> Result<()> {
    let codex_home = TempDir::new().context("create codex home tempdir")?;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
        .await
        .context("spawn mcp process")?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .context("initialize timeout")?
        .context("initialize request")?;

    let request_id = mcp
        .send_get_account_rate_limits_request()
        .await
        .context("send account/rateLimits/read")?;

    let error: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await
    .context("account/rateLimits/read timeout")?
    .context("account/rateLimits/read error")?;

    assert_eq!(error.id, RequestId::Integer(request_id));
    assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
    assert_eq!(
        error.error.message,
        "codex account authentication required to read rate limits"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
    let codex_home = TempDir::new().context("create codex home tempdir")?;

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .context("spawn mcp process")?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .context("initialize timeout")?
        .context("initialize request")?;

    login_with_api_key(&mut mcp, "sk-test-key").await?;

    let request_id = mcp
        .send_get_account_rate_limits_request()
        .await
        .context("send account/rateLimits/read")?;

    let error: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await
    .context("account/rateLimits/read timeout")?
    .context("account/rateLimits/read error")?;

    assert_eq!(error.id, RequestId::Integer(request_id));
    assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
    assert_eq!(
        error.error.message,
        "chatgpt authentication required to read rate limits"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
    let codex_home = TempDir::new().context("create codex home tempdir")?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .plan_type("pro"),
    )
    .context("write chatgpt auth")?;

    let server = MockServer::start().await;
    let server_url = server.uri();
    write_chatgpt_base_url(codex_home.path(), &server_url).context("write chatgpt base url")?;

    let primary_reset_timestamp = chrono::DateTime::parse_from_rfc3339("2025-01-01T00:02:00Z")
        .expect("parse primary reset timestamp")
        .timestamp();
    let secondary_reset_timestamp = chrono::DateTime::parse_from_rfc3339("2025-01-01T01:00:00Z")
        .expect("parse secondary reset timestamp")
        .timestamp();
    let response_body = json!({
        "plan_type": "pro",
        "rate_limit": {
            "allowed": true,
            "limit_reached": false,
            "primary_window": {
                "used_percent": 42,
                "limit_window_seconds": 3600,
                "reset_after_seconds": 120,
                "reset_at": primary_reset_timestamp,
            },
            "secondary_window": {
                "used_percent": 5,
                "limit_window_seconds": 86400,
                "reset_after_seconds": 43200,
                "reset_at": secondary_reset_timestamp,
            }
        }
    });

    Mock::given(method("GET"))
        .and(path("/api/codex/usage"))
        .and(header("authorization", "Bearer chatgpt-token"))
        .and(header("chatgpt-account-id", "account-123"))
        .respond_with(ResponseTemplate::new(200).set_body_json(response_body))
        .mount(&server)
        .await;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
        .await
        .context("spawn mcp process")?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .context("initialize timeout")?
        .context("initialize request")?;

    let request_id = mcp
        .send_get_account_rate_limits_request()
        .await
        .context("send account/rateLimits/read")?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .context("account/rateLimits/read timeout")?
    .context("account/rateLimits/read response")?;

    let received: GetAccountRateLimitsResponse =
        to_response(response).context("deserialize rate limit response")?;

    let expected = GetAccountRateLimitsResponse {
        rate_limits: RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 42.0,
                window_minutes: Some(60),
                resets_at: Some(primary_reset_timestamp),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 5.0,
                window_minutes: Some(1440),
                resets_at: Some(secondary_reset_timestamp),
            }),
        },
    };
    assert_eq!(received, expected);

    Ok(())
}

async fn login_with_api_key(mcp: &mut McpProcess, api_key: &str) -> Result<()> {
    let request_id = mcp
        .send_login_api_key_request(LoginApiKeyParams {
            api_key: api_key.to_string(),
        })
        .await
        .context("send loginApiKey")?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .context("loginApiKey timeout")?
    .context("loginApiKey response")?;

    Ok(())
}

fn write_chatgpt_base_url(codex_home: &Path, base_url: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(config_toml, format!("chatgpt_base_url = \"{base_url}\"\n"))
}
@@ -15,6 +15,8 @@ use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_protocol::ConversationId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -62,7 +64,10 @@ async fn test_send_message_success() {

    // 2) addConversationListener
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await
        .expect("send addConversationListener");
    let add_listener_resp: JSONRPCResponse = timeout(
@@ -124,6 +129,105 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
            .expect("should have conversationId"),
        &serde_json::Value::String(conversation_id.to_string())
    );

    let raw_attempt = tokio::time::timeout(
        std::time::Duration::from_millis(200),
        mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
    )
    .await;
    assert!(
        raw_attempt.is_err(),
        "unexpected raw item notification when not opted in"
    );
}

#[tokio::test]
async fn test_send_message_raw_notifications_opt_in() {
    let responses = vec![
        create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
    ];
    let server = create_mock_chat_completions_server(responses).await;

    let codex_home = TempDir::new().expect("create temp dir");
    create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timed out")
        .expect("init failed");

    let new_conv_id = mcp
        .send_new_conversation_request(NewConversationParams::default())
        .await
        .expect("send newConversation");
    let new_conv_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
    )
    .await
    .expect("newConversation timeout")
    .expect("newConversation resp");
    let NewConversationResponse {
        conversation_id, ..
    } = to_response::<_>(new_conv_resp).expect("deserialize newConversation response");

    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: true,
        })
        .await
        .expect("send addConversationListener");
    let add_listener_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
    )
    .await
    .expect("addConversationListener timeout")
    .expect("addConversationListener resp");
    let AddConversationSubscriptionResponse { subscription_id: _ } =
        to_response::<_>(add_listener_resp).expect("deserialize addConversationListener response");

    let send_id = mcp
        .send_send_user_message_request(SendUserMessageParams {
            conversation_id,
            items: vec![InputItem::Text {
                text: "Hello".to_string(),
            }],
        })
        .await
        .expect("send sendUserMessage");

    let instructions = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_instructions_message(&instructions);

    let environment = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_environment_message(&environment);

    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
    )
    .await
    .expect("sendUserMessage response timeout")
    .expect("sendUserMessage response error");
    let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)
        .expect("deserialize sendUserMessage response");

    let user_message = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_user_message(&user_message, "Hello");

    let assistant_message = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_assistant_message(&assistant_message, "Done");

    let _ = tokio::time::timeout(
        std::time::Duration::from_millis(250),
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await;
}

#[tokio::test]
@@ -184,3 +288,108 @@ stream_max_retries = 0
        ),
    )
}

#[expect(clippy::expect_used)]
async fn read_raw_response_item(
    mcp: &mut McpProcess,
    conversation_id: ConversationId,
) -> ResponseItem {
    let raw_notification: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
    )
    .await
    .expect("codex/event/raw_response_item notification timeout")
    .expect("codex/event/raw_response_item notification resp");

    let serde_json::Value::Object(params) = raw_notification
        .params
        .expect("codex/event/raw_response_item should have params")
    else {
        panic!("codex/event/raw_response_item should have params");
    };

    let conversation_id_value = params
        .get("conversationId")
        .and_then(|value| value.as_str())
        .expect("raw response item should include conversationId");

    assert_eq!(
        conversation_id_value,
        conversation_id.to_string(),
        "raw response item conversation mismatch"
    );

    let msg_value = params
        .get("msg")
        .cloned()
        .expect("raw response item should include msg payload");

    serde_json::from_value(msg_value).expect("deserialize raw response item")
}

fn assert_instructions_message(item: &ResponseItem) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            assert!(
                texts
                    .iter()
                    .any(|text| text.contains("<user_instructions>")),
                "expected instructions message, got {texts:?}"
            );
        }
        other => panic!("expected instructions message, got {other:?}"),
    }
}

fn assert_environment_message(item: &ResponseItem) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            assert!(
                texts
                    .iter()
                    .any(|text| text.contains("<environment_context>")),
                "expected environment context message, got {texts:?}"
            );
        }
        other => panic!("expected environment message, got {other:?}"),
    }
}

fn assert_user_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            assert_eq!(texts, vec![expected_text]);
        }
        other => panic!("expected user message, got {other:?}"),
    }
}

fn assert_assistant_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "assistant");
            let texts = content_texts(content);
            assert_eq!(texts, vec![expected_text]);
        }
        other => panic!("expected assistant message, got {other:?}"),
    }
}

fn content_texts(content: &[ContentItem]) -> Vec<&str> {
    content
        .iter()
        .filter_map(|item| match item {
            ContentItem::InputText { text } | ContentItem::OutputText { text } => {
                Some(text.as_str())
            }
            _ => None,
        })
        .collect()
}
@@ -1,20 +1,13 @@
use std::time::Duration;

use anyhow::Context;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::UserInfoResponse;
use codex_core::auth::AuthDotJson;
use codex_core::auth::get_auth_file;
use codex_core::auth::write_auth_json;
use codex_core::token_data::IdTokenInfo;
use codex_core::token_data::TokenData;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;

@@ -24,22 +17,13 @@ const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(10);
async fn user_info_returns_email_from_auth_json() {
    let codex_home = TempDir::new().expect("create tempdir");

    let auth_path = get_auth_file(codex_home.path());
    let mut id_token = IdTokenInfo::default();
    id_token.email = Some("user@example.com".to_string());
    id_token.raw_jwt = encode_id_token_with_email("user@example.com").expect("encode id token");

    let auth = AuthDotJson {
        openai_api_key: None,
        tokens: Some(TokenData {
            id_token,
            access_token: "access".to_string(),
            refresh_token: "refresh".to_string(),
            account_id: None,
        }),
        last_refresh: None,
    };
    write_auth_json(&auth_path, &auth).expect("write auth.json");
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("access")
            .refresh_token("refresh")
            .email("user@example.com"),
    )
    .expect("write chatgpt auth");

    let mut mcp = McpProcess::new(codex_home.path())
        .await
@@ -65,14 +49,3 @@ async fn user_info_returns_email_from_auth_json() {

    assert_eq!(received, expected);
}

fn encode_id_token_with_email(email: &str) -> anyhow::Result<String> {
    let header_b64 = URL_SAFE_NO_PAD.encode(
        serde_json::to_vec(&json!({ "alg": "none", "typ": "JWT" }))
            .context("serialize jwt header")?,
    );
    let payload =
        serde_json::to_vec(&json!({ "email": email })).context("serialize jwt payload")?;
    let payload_b64 = URL_SAFE_NO_PAD.encode(payload);
    Ok(format!("{header_b64}.{payload_b64}.signature"))
}
@@ -1 +1,3 @@
mod cli;
#[cfg(not(target_os = "windows"))]
mod tool;
257  codex-rs/apply-patch/tests/suite/tool.rs  Normal file
@@ -0,0 +1,257 @@
use assert_cmd::Command;
use pretty_assertions::assert_eq;
use std::fs;
use std::path::Path;
use tempfile::tempdir;

fn run_apply_patch_in_dir(dir: &Path, patch: &str) -> anyhow::Result<assert_cmd::assert::Assert> {
    let mut cmd = Command::cargo_bin("apply_patch")?;
    cmd.current_dir(dir);
    Ok(cmd.arg(patch).assert())
}

fn apply_patch_command(dir: &Path) -> anyhow::Result<Command> {
    let mut cmd = Command::cargo_bin("apply_patch")?;
    cmd.current_dir(dir);
    Ok(cmd)
}

#[test]
fn test_apply_patch_cli_applies_multiple_operations() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let modify_path = tmp.path().join("modify.txt");
    let delete_path = tmp.path().join("delete.txt");

    fs::write(&modify_path, "line1\nline2\n")?;
    fs::write(&delete_path, "obsolete\n")?;

    let patch = "*** Begin Patch\n*** Add File: nested/new.txt\n+created\n*** Delete File: delete.txt\n*** Update File: modify.txt\n@@\n-line2\n+changed\n*** End Patch";

    run_apply_patch_in_dir(tmp.path(), patch)?.success().stdout(
        "Success. Updated the following files:\nA nested/new.txt\nM modify.txt\nD delete.txt\n",
    );

    assert_eq!(
        fs::read_to_string(tmp.path().join("nested/new.txt"))?,
        "created\n"
    );
    assert_eq!(fs::read_to_string(&modify_path)?, "line1\nchanged\n");
    assert!(!delete_path.exists());

    Ok(())
}

#[test]
fn test_apply_patch_cli_applies_multiple_chunks() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let target_path = tmp.path().join("multi.txt");
    fs::write(&target_path, "line1\nline2\nline3\nline4\n")?;

    let patch = "*** Begin Patch\n*** Update File: multi.txt\n@@\n-line2\n+changed2\n@@\n-line4\n+changed4\n*** End Patch";

    run_apply_patch_in_dir(tmp.path(), patch)?
        .success()
        .stdout("Success. Updated the following files:\nM multi.txt\n");

    assert_eq!(
        fs::read_to_string(&target_path)?,
        "line1\nchanged2\nline3\nchanged4\n"
    );

    Ok(())
}

#[test]
fn test_apply_patch_cli_moves_file_to_new_directory() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let original_path = tmp.path().join("old/name.txt");
    let new_path = tmp.path().join("renamed/dir/name.txt");
    fs::create_dir_all(original_path.parent().expect("parent should exist"))?;
    fs::write(&original_path, "old content\n")?;

    let patch = "*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-old content\n+new content\n*** End Patch";

    run_apply_patch_in_dir(tmp.path(), patch)?
        .success()
        .stdout("Success. Updated the following files:\nM renamed/dir/name.txt\n");

    assert!(!original_path.exists());
    assert_eq!(fs::read_to_string(&new_path)?, "new content\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_rejects_empty_patch() -> anyhow::Result<()> {
    let tmp = tempdir()?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** End Patch")
        .assert()
        .failure()
        .stderr("No files were modified.\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_reports_missing_context() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let target_path = tmp.path().join("modify.txt");
    fs::write(&target_path, "line1\nline2\n")?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Update File: modify.txt\n@@\n-missing\n+changed\n*** End Patch")
        .assert()
        .failure()
        .stderr("Failed to find expected lines in modify.txt:\nmissing\n");
    assert_eq!(fs::read_to_string(&target_path)?, "line1\nline2\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_rejects_missing_file_delete() -> anyhow::Result<()> {
    let tmp = tempdir()?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Delete File: missing.txt\n*** End Patch")
        .assert()
        .failure()
        .stderr("Failed to delete file missing.txt\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_rejects_empty_update_hunk() -> anyhow::Result<()> {
    let tmp = tempdir()?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Update File: foo.txt\n*** End Patch")
        .assert()
        .failure()
        .stderr("Invalid patch hunk on line 2: Update file hunk for path 'foo.txt' is empty\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_requires_existing_file_for_update() -> anyhow::Result<()> {
    let tmp = tempdir()?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch")
        .assert()
        .failure()
        .stderr(
            "Failed to read file to update missing.txt: No such file or directory (os error 2)\n",
        );

    Ok(())
}

#[test]
fn test_apply_patch_cli_move_overwrites_existing_destination() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let original_path = tmp.path().join("old/name.txt");
    let destination = tmp.path().join("renamed/dir/name.txt");
    fs::create_dir_all(original_path.parent().expect("parent should exist"))?;
    fs::create_dir_all(destination.parent().expect("parent should exist"))?;
    fs::write(&original_path, "from\n")?;
    fs::write(&destination, "existing\n")?;

    run_apply_patch_in_dir(
        tmp.path(),
        "*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-from\n+new\n*** End Patch",
    )?
    .success()
    .stdout("Success. Updated the following files:\nM renamed/dir/name.txt\n");

    assert!(!original_path.exists());
    assert_eq!(fs::read_to_string(&destination)?, "new\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_add_overwrites_existing_file() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let path = tmp.path().join("duplicate.txt");
    fs::write(&path, "old content\n")?;

    run_apply_patch_in_dir(
        tmp.path(),
        "*** Begin Patch\n*** Add File: duplicate.txt\n+new content\n*** End Patch",
    )?
    .success()
    .stdout("Success. Updated the following files:\nA duplicate.txt\n");

    assert_eq!(fs::read_to_string(&path)?, "new content\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_delete_directory_fails() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    fs::create_dir(tmp.path().join("dir"))?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Delete File: dir\n*** End Patch")
        .assert()
        .failure()
        .stderr("Failed to delete file dir\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_rejects_invalid_hunk_header() -> anyhow::Result<()> {
    let tmp = tempdir()?;

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Frobnicate File: foo\n*** End Patch")
        .assert()
        .failure()
        .stderr("Invalid patch hunk on line 2: '*** Frobnicate File: foo' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_updates_file_appends_trailing_newline() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let target_path = tmp.path().join("no_newline.txt");
    fs::write(&target_path, "no newline at end")?;

    run_apply_patch_in_dir(
        tmp.path(),
        "*** Begin Patch\n*** Update File: no_newline.txt\n@@\n-no newline at end\n+first line\n+second line\n*** End Patch",
    )?
    .success()
    .stdout("Success. Updated the following files:\nM no_newline.txt\n");

    let contents = fs::read_to_string(&target_path)?;
    assert!(contents.ends_with('\n'));
    assert_eq!(contents, "first line\nsecond line\n");

    Ok(())
}

#[test]
fn test_apply_patch_cli_failure_after_partial_success_leaves_changes() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let new_file = tmp.path().join("created.txt");

    apply_patch_command(tmp.path())?
        .arg("*** Begin Patch\n*** Add File: created.txt\n+hello\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch")
        .assert()
        .failure()
        .stdout("")
        .stderr("Failed to read file to update missing.txt: No such file or directory (os error 2)\n");

    assert_eq!(fs::read_to_string(&new_file)?, "hello\n");

    Ok(())
}
15  codex-rs/async-utils/Cargo.toml  Normal file
@@ -0,0 +1,15 @@
[package]
edition.workspace = true
name = "codex-async-utils"
version.workspace = true

[lints]
workspace = true

[dependencies]
async-trait.workspace = true
tokio = { workspace = true, features = ["macros", "rt", "rt-multi-thread", "time"] }
tokio-util.workspace = true

[dev-dependencies]
pretty_assertions.workspace = true
86  codex-rs/async-utils/src/lib.rs  Normal file
@@ -0,0 +1,86 @@
use async_trait::async_trait;
use std::future::Future;
use tokio_util::sync::CancellationToken;

#[derive(Debug, PartialEq, Eq)]
pub enum CancelErr {
    Cancelled,
}

#[async_trait]
pub trait OrCancelExt: Sized {
    type Output;

    async fn or_cancel(self, token: &CancellationToken) -> Result<Self::Output, CancelErr>;
}

#[async_trait]
impl<F> OrCancelExt for F
where
    F: Future + Send,
    F::Output: Send,
{
    type Output = F::Output;

    async fn or_cancel(self, token: &CancellationToken) -> Result<Self::Output, CancelErr> {
        tokio::select! {
            _ = token.cancelled() => Err(CancelErr::Cancelled),
            res = self => Ok(res),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::time::Duration;
    use tokio::task;
    use tokio::time::sleep;

    #[tokio::test]
    async fn returns_ok_when_future_completes_first() {
        let token = CancellationToken::new();
        let value = async { 42 };

        let result = value.or_cancel(&token).await;

        assert_eq!(Ok(42), result);
    }

    #[tokio::test]
    async fn returns_err_when_token_cancelled_first() {
        let token = CancellationToken::new();
        let token_clone = token.clone();

        let cancel_handle = task::spawn(async move {
            sleep(Duration::from_millis(10)).await;
            token_clone.cancel();
        });

        let result = async {
            sleep(Duration::from_millis(100)).await;
            7
        }
        .or_cancel(&token)
        .await;

        cancel_handle.await.expect("cancel task panicked");
        assert_eq!(Err(CancelErr::Cancelled), result);
    }

    #[tokio::test]
    async fn returns_err_when_token_already_cancelled() {
        let token = CancellationToken::new();
        token.cancel();

        let result = async {
            sleep(Duration::from_millis(50)).await;
            5
        }
        .or_cancel(&token)
        .await;

        assert_eq!(Err(CancelErr::Cancelled), result);
    }
}
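
// A minimal usage sketch of the extension trait above (assumes the crate is
// depended on as `codex_async_utils`):
//
//     use codex_async_utils::CancelErr;
//     use codex_async_utils::OrCancelExt;
//     use tokio_util::sync::CancellationToken;
//
//     async fn compute_unless_cancelled(token: &CancellationToken) -> Result<u64, CancelErr> {
//         // Any `Future + Send` works; cancellation wins the select if it fires first.
//         async { 21 * 2 }.or_cancel(token).await
//     }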
@@ -13,6 +13,8 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
codex-protocol = { workspace = true }
codex-core = { workspace = true }

[dev-dependencies]
pretty_assertions = "1"

@@ -1,7 +1,13 @@
use crate::types::CodeTaskDetailsResponse;
use crate::types::PaginatedListTaskListItem;
use crate::types::RateLimitStatusPayload;
use crate::types::RateLimitWindowSnapshot;
use crate::types::TurnAttemptsSiblingTurnsResponse;
use anyhow::Result;
use codex_core::auth::CodexAuth;
use codex_core::default_client::get_codex_user_agent;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow;
use reqwest::header::AUTHORIZATION;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
@@ -64,6 +70,17 @@ impl Client {
        })
    }

    pub async fn from_auth(base_url: impl Into<String>, auth: &CodexAuth) -> Result<Self> {
        let token = auth.get_token().await.map_err(anyhow::Error::from)?;
        let mut client = Self::new(base_url)?
            .with_user_agent(get_codex_user_agent())
            .with_bearer_token(token);
        if let Some(account_id) = auth.get_account_id() {
            client = client.with_chatgpt_account_id(account_id);
        }
        Ok(client)
    }

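    // Hypothetical call site for `from_auth` (base URL illustrative, not taken from this diff):
    //     let client = Client::from_auth("https://chatgpt.com/backend-api", &auth).await?;
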
    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        self.bearer_token = Some(token.into());
        self
@@ -138,6 +155,17 @@ impl Client {
        }
    }

    pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/usage", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/usage", self.base_url),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        let payload: RateLimitStatusPayload = self.decode_json(&url, &ct, &body)?;
        Ok(Self::rate_limit_snapshot_from_payload(payload))
    }

    pub async fn list_tasks(
        &self,
        limit: Option<i32>,
@@ -241,4 +269,49 @@ impl Client {
            Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
        }
    }

    // rate limit helpers
    fn rate_limit_snapshot_from_payload(payload: RateLimitStatusPayload) -> RateLimitSnapshot {
        let Some(details) = payload
            .rate_limit
            .and_then(|inner| inner.map(|boxed| *boxed))
        else {
            return RateLimitSnapshot {
                primary: None,
                secondary: None,
            };
        };

        RateLimitSnapshot {
            primary: Self::map_rate_limit_window(details.primary_window),
            secondary: Self::map_rate_limit_window(details.secondary_window),
        }
    }

    fn map_rate_limit_window(
        window: Option<Option<Box<RateLimitWindowSnapshot>>>,
    ) -> Option<RateLimitWindow> {
        let snapshot = match window {
            Some(Some(snapshot)) => *snapshot,
            _ => return None,
        };

        let used_percent = f64::from(snapshot.used_percent);
        let window_minutes = Self::window_minutes_from_seconds(snapshot.limit_window_seconds);
        let resets_at = Some(i64::from(snapshot.reset_at));
        Some(RateLimitWindow {
            used_percent,
            window_minutes,
            resets_at,
        })
    }

    fn window_minutes_from_seconds(seconds: i32) -> Option<i64> {
        if seconds <= 0 {
            return None;
        }

        let seconds_i64 = i64::from(seconds);
        Some((seconds_i64 + 59) / 60)
    }
}

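// Sanity sketch for `window_minutes_from_seconds` (illustrative values, not from the diff):
//     window_minutes_from_seconds(0)  == None     // non-positive input: no window
//     window_minutes_from_seconds(60) == Some(1)
//     window_minutes_from_seconds(61) == Some(2)  // the `+ 59` rounds up to whole minutes
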
@@ -1,4 +1,8 @@
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::PlanType;
pub use codex_backend_openapi_models::models::RateLimitStatusDetails;
pub use codex_backend_openapi_models::models::RateLimitStatusPayload;
pub use codex_backend_openapi_models::models::RateLimitWindowSnapshot;
pub use codex_backend_openapi_models::models::TaskListItem;

use serde::Deserialize;

@@ -19,8 +19,10 @@ anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
clap_complete = { workspace = true }
codex-app-server = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-arg0 = { workspace = true }
codex-chatgpt = { workspace = true }
codex-cloud-tasks = { path = "../cloud-tasks" }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-exec = { workspace = true }
@@ -28,12 +30,11 @@ codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-protocol-ts = { workspace = true }
codex-responses-api-proxy = { workspace = true }
codex-tui = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-cloud-tasks = { path = "../cloud-tasks" }
codex-stdio-to-uds = { workspace = true }
codex-tui = { workspace = true }
ctor = { workspace = true }
owo-colors = { workspace = true }
serde_json = { workspace = true }
@@ -47,8 +48,8 @@ tokio = { workspace = true, features = [
] }

[dev-dependencies]
assert_matches = { workspace = true }
assert_cmd = { workspace = true }
assert_matches = { workspace = true }
predicates = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

@@ -9,12 +9,20 @@ use codex_core::config::ConfigOverrides;
use codex_login::ServerOptions;
use codex_login::run_device_code_login;
use codex_login::run_login_server;
use codex_protocol::config_types::ForcedLoginMethod;
use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;

pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
    let opts = ServerOptions::new(codex_home, CLIENT_ID.to_string());
pub async fn login_with_chatgpt(
    codex_home: PathBuf,
    forced_chatgpt_workspace_id: Option<String>,
) -> std::io::Result<()> {
    let opts = ServerOptions::new(
        codex_home,
        CLIENT_ID.to_string(),
        forced_chatgpt_workspace_id,
    );
    let server = run_login_server(opts)?;

    eprintln!(
@@ -28,7 +36,14 @@ pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;

    match login_with_chatgpt(config.codex_home).await {
    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
        eprintln!("ChatGPT login is disabled. Use API key login instead.");
        std::process::exit(1);
    }

    let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();

    match login_with_chatgpt(config.codex_home, forced_chatgpt_workspace_id).await {
        Ok(_) => {
            eprintln!("Successfully logged in");
            std::process::exit(0);
@@ -46,6 +61,11 @@ pub async fn run_login_with_api_key(
) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;

    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Chatgpt)) {
        eprintln!("API key login is disabled. Use ChatGPT login instead.");
        std::process::exit(1);
    }

    match login_with_api_key(&config.codex_home, &api_key) {
        Ok(_) => {
            eprintln!("Successfully logged in");
@@ -92,9 +112,15 @@ pub async fn run_login_with_device_code(
    client_id: Option<String>,
) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;
    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
        eprintln!("ChatGPT login is disabled. Use API key login instead.");
        std::process::exit(1);
    }
    let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
    let mut opts = ServerOptions::new(
        config.codex_home,
        client_id.unwrap_or(CLIENT_ID.to_string()),
        forced_chatgpt_workspace_id,
    );
    if let Some(iss) = issuer_base_url {
        opts.issuer = iss;

@@ -19,6 +19,7 @@ use codex_exec::Cli as ExecCli;
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
use codex_tui::AppExitInfo;
use codex_tui::Cli as TuiCli;
use codex_tui::updates::UpdateAction;
use owo_colors::OwoColorize;
use std::path::PathBuf;
use supports_color::Stream;
@@ -41,7 +42,8 @@ use codex_core::config::ConfigOverrides;
    // The executable is sometimes invoked via a platform‑specific name like
    // `codex-x86_64-unknown-linux-musl`, but the help output should always use
    // the generic `codex` command name that users run.
    bin_name = "codex"
    bin_name = "codex",
    override_usage = "codex [OPTIONS] [PROMPT]\n       codex [OPTIONS] <COMMAND> [ARGS]"
)]
struct MultitoolCli {
    #[clap(flatten)]
@@ -103,6 +105,10 @@ enum Subcommand {
    #[clap(hide = true)]
    ResponsesApiProxy(ResponsesApiProxyArgs),

    /// Internal: relay stdio to a Unix domain socket.
    #[clap(hide = true, name = "stdio-to-uds")]
    StdioToUds(StdioToUdsCommand),

    /// Inspect feature flags.
    Features(FeaturesCli),
}
@@ -204,10 +210,18 @@ struct GenerateTsCommand {
    prettier: Option<PathBuf>,
}

#[derive(Debug, Parser)]
struct StdioToUdsCommand {
    /// Path to the Unix domain socket to connect to.
    #[arg(value_name = "SOCKET_PATH")]
    socket_path: PathBuf,
}
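
// Hypothetical invocation of the hidden subcommand defined above (socket path illustrative):
//     codex stdio-to-uds /tmp/codex.sock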

fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<String> {
    let AppExitInfo {
        token_usage,
        conversation_id,
        ..
    } = exit_info;

    if token_usage.is_zero() {
@@ -232,11 +246,32 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<Stri
    lines
}

fn print_exit_messages(exit_info: AppExitInfo) {
/// Handle the app exit and print the results. Optionally run the update action.
fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> {
    let update_action = exit_info.update_action;
    let color_enabled = supports_color::on(Stream::Stdout).is_some();
    for line in format_exit_messages(exit_info, color_enabled) {
        println!("{line}");
    }
    if let Some(action) = update_action {
        run_update_action(action)?;
    }
    Ok(())
}

/// Run the update action and print the result.
fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
    println!();
    let (cmd, args) = action.command_args();
    let cmd_str = action.command_str();
    println!("Updating Codex via `{cmd_str}`...");
    let status = std::process::Command::new(cmd).args(args).status()?;
    if !status.success() {
        anyhow::bail!("`{cmd_str}` failed with status {status}");
    }
    println!();
    println!("🎉 Update ran successfully! Please restart Codex.");
    Ok(())
}

#[derive(Debug, Default, Parser, Clone)]
@@ -321,7 +356,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                root_config_overrides.clone(),
            );
            let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
            print_exit_messages(exit_info);
            handle_app_exit(exit_info)?;
        }
        Some(Subcommand::Exec(mut exec_cli)) => {
            prepend_config_flags(
@@ -354,7 +389,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                config_overrides,
            );
            let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
            print_exit_messages(exit_info);
            handle_app_exit(exit_info)?;
        }
        Some(Subcommand::Login(mut login_cli)) => {
            prepend_config_flags(
@@ -439,6 +474,11 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
                .await??;
        }
        Some(Subcommand::StdioToUds(cmd)) => {
            let socket_path = cmd.socket_path;
            tokio::task::spawn_blocking(move || codex_stdio_to_uds::run(socket_path.as_path()))
                .await??;
        }
        Some(Subcommand::GenerateTs(gen_cli)) => {
            codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
        }
@@ -540,6 +580,9 @@ fn merge_resume_cli_flags(interactive: &mut TuiCli, resume_cli: TuiCli) {
    if !resume_cli.images.is_empty() {
        interactive.images = resume_cli.images;
    }
    if !resume_cli.add_dir.is_empty() {
        interactive.add_dir.extend(resume_cli.add_dir);
    }
    if let Some(prompt) = resume_cli.prompt {
        interactive.prompt = Some(prompt);
    }
@@ -595,6 +638,7 @@ mod tests {
            conversation_id: conversation
                .map(ConversationId::from_string)
                .map(Result::unwrap),
            update_action: None,
        }
    }

@@ -603,6 +647,7 @@ mod tests {
        let exit_info = AppExitInfo {
            token_usage: TokenUsage::default(),
            conversation_id: None,
            update_action: None,
        };
        let lines = format_exit_messages(exit_info, false);
        assert!(lines.is_empty());

@@ -6,6 +6,7 @@ use anyhow::anyhow;
use anyhow::bail;
use clap::ArgGroup;
use codex_common::CliConfigOverrides;
use codex_common::format_env_display::format_env_display;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::find_codex_home;
@@ -149,6 +150,10 @@ pub struct RemoveArgs {
pub struct LoginArgs {
    /// Name of the MCP server to authenticate with oauth.
    pub name: String,

    /// Comma-separated list of OAuth scopes to request.
    #[arg(long, value_delimiter = ',', value_name = "SCOPE,SCOPE")]
    pub scopes: Vec<String>,
}

#[derive(Debug, clap::Parser)]
@@ -227,6 +232,8 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
            command: command_bin,
            args: command_args,
            env: env_map,
            env_vars: Vec::new(),
            cwd: None,
        }
    }
    AddMcpTransportArgs {
@@ -239,6 +246,8 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
    } => McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var,
        http_headers: None,
        env_http_headers: None,
    },
    AddMcpTransportArgs { .. } => bail!("exactly one of --command or --url must be provided"),
};
@@ -248,6 +257,8 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
    enabled: true,
    startup_timeout_sec: None,
    tool_timeout_sec: None,
    enabled_tools: None,
    disabled_tools: None,
};

servers.insert(name.clone(), new_entry);
@@ -260,12 +271,36 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
    if let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var: None,
        http_headers,
        env_http_headers,
    } = transport
        && matches!(supports_oauth_login(&url).await, Ok(true))
    {
        println!("Detected OAuth support. Starting OAuth flow…");
        perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
        println!("Successfully logged in.");
        match supports_oauth_login(&url).await {
            Ok(true) => {
                if !config.features.enabled(Feature::RmcpClient) {
                    println!(
                        "MCP server supports login. Add `experimental_use_rmcp_client = true` \
                        to your config.toml and run `codex mcp login {name}` to login."
                    );
                } else {
                    println!("Detected OAuth support. Starting OAuth flow…");
                    perform_oauth_login(
                        &name,
                        &url,
                        config.mcp_oauth_credentials_store_mode,
                        http_headers.clone(),
                        env_http_headers.clone(),
                        &Vec::new(),
                    )
                    .await?;
                    println!("Successfully logged in.");
                }
            }
            Ok(false) => {}
            Err(_) => println!(
                "MCP server may or may not require login. Run `codex mcp login {name}` to login."
            ),
        }
    }

    Ok(())
@@ -311,18 +346,31 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
    );
}

    let LoginArgs { name } = login_args;
    let LoginArgs { name, scopes } = login_args;

    let Some(server) = config.mcp_servers.get(&name) else {
        bail!("No MCP server named '{name}' found.");
    };

    let url = match &server.transport {
        McpServerTransportConfig::StreamableHttp { url, .. } => url.clone(),
    let (url, http_headers, env_http_headers) = match &server.transport {
        McpServerTransportConfig::StreamableHttp {
            url,
            http_headers,
            env_http_headers,
            ..
        } => (url.clone(), http_headers.clone(), env_http_headers.clone()),
        _ => bail!("OAuth login is only supported for streamable HTTP servers."),
    };

    perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
    perform_oauth_login(
        &name,
        &url,
        config.mcp_oauth_credentials_store_mode,
        http_headers,
        env_http_headers,
        &scopes,
    )
    .await?;
    println!("Successfully logged in to MCP server '{name}'.");
    Ok(())
}
@@ -374,23 +422,35 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
        .map(|(name, cfg)| {
            let auth_status = auth_statuses
                .get(name.as_str())
                .copied()
                .map(|entry| entry.auth_status)
                .unwrap_or(McpAuthStatus::Unsupported);
            let transport = match &cfg.transport {
                McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                McpServerTransportConfig::Stdio {
                    command,
                    args,
                    env,
                    env_vars,
                    cwd,
                } => serde_json::json!({
                    "type": "stdio",
                    "command": command,
                    "args": args,
                    "env": env,
                    "env_vars": env_vars,
                    "cwd": cwd,
                }),
                McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var,
                    http_headers,
                    env_http_headers,
                } => {
                    serde_json::json!({
                        "type": "streamable_http",
                        "url": url,
                        "bearer_token_env_var": bearer_token_env_var,
                        "http_headers": http_headers,
                        "env_http_headers": env_http_headers,
                    })
                }
            };
@@ -419,30 +479,29 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
        return Ok(());
    }

    let mut stdio_rows: Vec<[String; 6]> = Vec::new();
    let mut stdio_rows: Vec<[String; 7]> = Vec::new();
    let mut http_rows: Vec<[String; 5]> = Vec::new();

    for (name, cfg) in entries {
        match &cfg.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
            McpServerTransportConfig::Stdio {
                command,
                args,
                env,
                env_vars,
                cwd,
            } => {
                let args_display = if args.is_empty() {
                    "-".to_string()
                } else {
                    args.join(" ")
                };
                let env_display = match env.as_ref() {
                    None => "-".to_string(),
                    Some(map) if map.is_empty() => "-".to_string(),
                    Some(map) => {
                        let mut pairs: Vec<_> = map.iter().collect();
                        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                        pairs
                            .into_iter()
                            .map(|(k, v)| format!("{k}={v}"))
                            .collect::<Vec<_>>()
                            .join(", ")
                    }
                };
                let env_display = format_env_display(env.as_ref(), env_vars);
                let cwd_display = cwd
                    .as_ref()
                    .map(|path| path.display().to_string())
                    .filter(|value| !value.is_empty())
                    .unwrap_or_else(|| "-".to_string());
                let status = if cfg.enabled {
                    "enabled".to_string()
                } else {
@@ -450,7 +509,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                };
                let auth_status = auth_statuses
                    .get(name.as_str())
                    .copied()
                    .map(|entry| entry.auth_status)
                    .unwrap_or(McpAuthStatus::Unsupported)
                    .to_string();
                stdio_rows.push([
@@ -458,6 +517,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                    command.clone(),
                    args_display,
                    env_display,
                    cwd_display,
                    status,
                    auth_status,
                ]);
@@ -465,6 +525,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var,
                ..
            } => {
                let status = if cfg.enabled {
                    "enabled".to_string()
@@ -473,13 +534,15 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                };
                let auth_status = auth_statuses
                    .get(name.as_str())
                    .copied()
                    .map(|entry| entry.auth_status)
                    .unwrap_or(McpAuthStatus::Unsupported)
                    .to_string();
                let bearer_token_display =
                    bearer_token_env_var.as_deref().unwrap_or("-").to_string();
                http_rows.push([
                    name.clone(),
                    url.clone(),
                    bearer_token_env_var.clone().unwrap_or("-".to_string()),
                    bearer_token_display,
                    status,
                    auth_status,
                ]);
@@ -493,6 +556,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
        "Command".len(),
        "Args".len(),
        "Env".len(),
        "Cwd".len(),
        "Status".len(),
        "Auth".len(),
    ];
@@ -503,36 +567,40 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
    }

    println!(
        "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
        "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {cwd:<cwd_w$} {status:<status_w$} {auth:<auth_w$}",
        name = "Name",
        command = "Command",
        args = "Args",
        env = "Env",
        cwd = "Cwd",
        status = "Status",
        auth = "Auth",
        name_w = widths[0],
        cmd_w = widths[1],
        args_w = widths[2],
        env_w = widths[3],
        status_w = widths[4],
        auth_w = widths[5],
        cwd_w = widths[4],
        status_w = widths[5],
        auth_w = widths[6],
    );

    for row in &stdio_rows {
        println!(
            "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
            "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {cwd:<cwd_w$} {status:<status_w$} {auth:<auth_w$}",
            name = row[0].as_str(),
            command = row[1].as_str(),
            args = row[2].as_str(),
            env = row[3].as_str(),
            status = row[4].as_str(),
            auth = row[5].as_str(),
            cwd = row[4].as_str(),
            status = row[5].as_str(),
            auth = row[6].as_str(),
            name_w = widths[0],
            cmd_w = widths[1],
            args_w = widths[2],
            env_w = widths[3],
            status_w = widths[4],
            auth_w = widths[5],
            cwd_w = widths[4],
            status_w = widths[5],
            auth_w = widths[6],
        );
    }
}
@@ -601,25 +669,39 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re

    if get_args.json {
        let transport = match &server.transport {
            McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
            McpServerTransportConfig::Stdio {
                command,
                args,
                env,
                env_vars,
                cwd,
            } => serde_json::json!({
                "type": "stdio",
                "command": command,
                "args": args,
                "env": env,
                "env_vars": env_vars,
                "cwd": cwd,
            }),
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var,
                http_headers,
                env_http_headers,
            } => serde_json::json!({
                "type": "streamable_http",
                "url": url,
                "bearer_token_env_var": bearer_token_env_var,
                "http_headers": http_headers,
                "env_http_headers": env_http_headers,
            }),
        };
        let output = serde_json::to_string_pretty(&serde_json::json!({
            "name": get_args.name,
            "enabled": server.enabled,
            "transport": transport,
            "enabled_tools": server.enabled_tools.clone(),
            "disabled_tools": server.disabled_tools.clone(),
            "startup_timeout_sec": server
                .startup_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
@@ -631,10 +713,36 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
        return Ok(());
    }

    if !server.enabled {
        println!("{} (disabled)", get_args.name);
        return Ok(());
    }

    println!("{}", get_args.name);
    println!(" enabled: {}", server.enabled);
    let format_tool_list = |tools: &Option<Vec<String>>| -> String {
        match tools {
            Some(list) if list.is_empty() => "[]".to_string(),
            Some(list) => list.join(", "),
            None => "-".to_string(),
        }
    };
    if server.enabled_tools.is_some() {
        let enabled_tools_display = format_tool_list(&server.enabled_tools);
        println!(" enabled_tools: {enabled_tools_display}");
    }
    if server.disabled_tools.is_some() {
        let disabled_tools_display = format_tool_list(&server.disabled_tools);
        println!(" disabled_tools: {disabled_tools_display}");
    }
    match &server.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
        McpServerTransportConfig::Stdio {
            command,
            args,
            env,
            env_vars,
            cwd,
        } => {
            println!(" transport: stdio");
            println!(" command: {command}");
            let args_display = if args.is_empty() {
@@ -643,29 +751,51 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
                args.join(" ")
            };
            println!(" args: {args_display}");
            let env_display = match env.as_ref() {
                None => "-".to_string(),
                Some(map) if map.is_empty() => "-".to_string(),
                Some(map) => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
            };
            let cwd_display = cwd
                .as_ref()
                .map(|path| path.display().to_string())
                .filter(|value| !value.is_empty())
                .unwrap_or_else(|| "-".to_string());
            println!(" cwd: {cwd_display}");
            let env_display = format_env_display(env.as_ref(), env_vars);
            println!(" env: {env_display}");
        }
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers,
        } => {
            println!(" transport: streamable_http");
            println!(" url: {url}");
            let env_var = bearer_token_env_var.as_deref().unwrap_or("-");
            println!(" bearer_token_env_var: {env_var}");
            let bearer_token_display = bearer_token_env_var.as_deref().unwrap_or("-");
            println!(" bearer_token_env_var: {bearer_token_display}");
            let headers_display = match http_headers {
                Some(map) if !map.is_empty() => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, _)| format!("{k}=*****"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
                _ => "-".to_string(),
            };
            println!(" http_headers: {headers_display}");
            let env_headers_display = match env_http_headers {
                Some(map) if !map.is_empty() => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, var)| format!("{k}={var}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
                _ => "-".to_string(),
            };
            println!(" env_http_headers: {env_headers_display}");
        }
    }
    if let Some(timeout) = server.startup_timeout_sec {

@@ -28,10 +28,18 @@ async fn add_and_remove_server_updates_global_config() -> Result<()> {
    assert_eq!(servers.len(), 1);
    let docs = servers.get("docs").expect("server should exist");
    match &docs.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
        McpServerTransportConfig::Stdio {
            command,
            args,
            env,
            env_vars,
            cwd,
        } => {
            assert_eq!(command, "echo");
            assert_eq!(args, &vec!["hello".to_string()]);
            assert!(env.is_none());
            assert!(env_vars.is_empty());
            assert!(cwd.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }
@@ -112,9 +120,13 @@ async fn add_streamable_http_without_manual_token() -> Result<()> {
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers,
        } => {
            assert_eq!(url, "https://example.com/mcp");
            assert!(bearer_token_env_var.is_none());
            assert!(http_headers.is_none());
            assert!(env_http_headers.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }
@@ -150,9 +162,13 @@ async fn add_streamable_http_with_custom_env_var() -> Result<()> {
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers,
        } => {
            assert_eq!(url, "https://example.com/issues");
            assert_eq!(bearer_token_env_var.as_deref(), Some("GITHUB_TOKEN"));
            assert!(http_headers.is_none());
            assert!(env_http_headers.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }

@@ -1,6 +1,9 @@
use std::path::Path;

use anyhow::Result;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::write_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use predicates::prelude::PredicateBooleanExt;
use predicates::str::contains;
use pretty_assertions::assert_eq;
@@ -27,8 +30,8 @@ fn list_shows_empty_state() -> Result<()> {
    Ok(())
}

#[test]
fn list_and_get_render_expected_output() -> Result<()> {
#[tokio::test]
async fn list_and_get_render_expected_output() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut add = codex_command(codex_home.path())?;
@@ -46,6 +49,18 @@ fn list_and_get_render_expected_output() -> Result<()> {
        .assert()
        .success();

    let mut servers = load_global_mcp_servers(codex_home.path()).await?;
    let docs_entry = servers
        .get_mut("docs")
        .expect("docs server should exist after add");
    match &mut docs_entry.transport {
        McpServerTransportConfig::Stdio { env_vars, .. } => {
            *env_vars = vec!["APP_TOKEN".to_string(), "WORKSPACE_ID".to_string()];
        }
        other => panic!("unexpected transport: {other:?}"),
    }
    write_global_mcp_servers(codex_home.path(), &servers)?;

    let mut list_cmd = codex_command(codex_home.path())?;
    let list_output = list_cmd.args(["mcp", "list"]).output()?;
    assert!(list_output.status.success());
@@ -53,7 +68,9 @@ fn list_and_get_render_expected_output() -> Result<()> {
    assert!(stdout.contains("Name"));
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("docs-server"));
    assert!(stdout.contains("TOKEN=secret"));
    assert!(stdout.contains("TOKEN=*****"));
    assert!(stdout.contains("APP_TOKEN=*****"));
    assert!(stdout.contains("WORKSPACE_ID=*****"));
    assert!(stdout.contains("Status"));
    assert!(stdout.contains("Auth"));
    assert!(stdout.contains("enabled"));
@@ -79,7 +96,12 @@ fn list_and_get_render_expected_output() -> Result<()> {
            ],
            "env": {
                "TOKEN": "secret"
            }
            },
            "env_vars": [
                "APP_TOKEN",
                "WORKSPACE_ID"
            ],
            "cwd": null
        },
        "startup_timeout_sec": null,
        "tool_timeout_sec": null,
@@ -97,7 +119,9 @@ fn list_and_get_render_expected_output() -> Result<()> {
    assert!(stdout.contains("transport: stdio"));
    assert!(stdout.contains("command: docs-server"));
    assert!(stdout.contains("args: --port 4000"));
    assert!(stdout.contains("env: TOKEN=secret"));
    assert!(stdout.contains("env: TOKEN=*****"));
    assert!(stdout.contains("APP_TOKEN=*****"));
    assert!(stdout.contains("WORKSPACE_ID=*****"));
    assert!(stdout.contains("enabled: true"));
    assert!(stdout.contains("remove: codex mcp remove docs"));

@@ -110,3 +134,28 @@ fn list_and_get_render_expected_output() -> Result<()> {

    Ok(())
}

#[tokio::test]
async fn get_disabled_server_shows_single_line() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut add = codex_command(codex_home.path())?;
    add.args(["mcp", "add", "docs", "--", "docs-server"])
        .assert()
        .success();

    let mut servers = load_global_mcp_servers(codex_home.path()).await?;
    let docs = servers
        .get_mut("docs")
        .expect("docs server should exist after add");
    docs.enabled = false;
    write_global_mcp_servers(codex_home.path(), &servers)?;

    let mut get_cmd = codex_command(codex_home.path())?;
    let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
    assert!(get_output.status.success());
    let stdout = String::from_utf8(get_output.stdout)?;
    assert_eq!(stdout.trim_end(), "docs (disabled)");

    Ok(())
}

@@ -7,3 +7,7 @@ disallowed-methods = [
{ path = "ratatui::style::Stylize::black", reason = "Avoid hardcoding black; prefer default fg or dim/bold. Exception: Disable this rule if rendering over a hardcoded ANSI background." },
{ path = "ratatui::style::Stylize::yellow", reason = "Avoid yellow; prefer other colors in `tui/styles.md`." },
]

# Increase the size threshold for result_large_err to accommodate
# richer error variants.
large-error-threshold = 256

@@ -15,3 +15,4 @@ path = "src/lib.rs"
[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"

@@ -3,6 +3,7 @@
// Currently exports only the types referenced by the workspace.
// This process will change.

// Cloud Tasks
pub mod code_task_details_response;
pub use self::code_task_details_response::CodeTaskDetailsResponse;

@@ -20,3 +21,14 @@ pub use self::task_list_item::TaskListItem;

pub mod paginated_list_task_list_item_;
pub use self::paginated_list_task_list_item_::PaginatedListTaskListItem;

// Rate Limits
pub mod rate_limit_status_payload;
pub use self::rate_limit_status_payload::PlanType;
pub use self::rate_limit_status_payload::RateLimitStatusPayload;

pub mod rate_limit_status_details;
pub use self::rate_limit_status_details::RateLimitStatusDetails;

pub mod rate_limit_window_snapshot;
pub use self::rate_limit_window_snapshot::RateLimitWindowSnapshot;

@@ -0,0 +1,46 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct RateLimitStatusDetails {
    #[serde(rename = "allowed")]
    pub allowed: bool,
    #[serde(rename = "limit_reached")]
    pub limit_reached: bool,
    #[serde(
        rename = "primary_window",
        default,
        with = "::serde_with::rust::double_option",
        skip_serializing_if = "Option::is_none"
    )]
    pub primary_window: Option<Option<Box<models::RateLimitWindowSnapshot>>>,
    #[serde(
        rename = "secondary_window",
        default,
        with = "::serde_with::rust::double_option",
        skip_serializing_if = "Option::is_none"
    )]
    pub secondary_window: Option<Option<Box<models::RateLimitWindowSnapshot>>>,
}

impl RateLimitStatusDetails {
    pub fn new(allowed: bool, limit_reached: bool) -> RateLimitStatusDetails {
        RateLimitStatusDetails {
            allowed,
            limit_reached,
            primary_window: None,
            secondary_window: None,
        }
    }
}
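
// Sketch of the three JSON shapes that `double_option` distinguishes here
// (assumed serde_with behavior, matching the attributes above):
//     field absent              -> primary_window == None
//     "primary_window": null    -> primary_window == Some(None)
//     "primary_window": { ... } -> primary_window == Some(Some(Box<RateLimitWindowSnapshot>))
// This is why the backend client only maps the `Some(Some(snapshot))` case to a window.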
@@ -0,0 +1,65 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct RateLimitStatusPayload {
    #[serde(rename = "plan_type")]
    pub plan_type: PlanType,
    #[serde(
        rename = "rate_limit",
        default,
        with = "::serde_with::rust::double_option",
        skip_serializing_if = "Option::is_none"
    )]
    pub rate_limit: Option<Option<Box<models::RateLimitStatusDetails>>>,
}

impl RateLimitStatusPayload {
    pub fn new(plan_type: PlanType) -> RateLimitStatusPayload {
        RateLimitStatusPayload {
            plan_type,
            rate_limit: None,
        }
    }
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum PlanType {
    #[serde(rename = "free")]
    Free,
    #[serde(rename = "go")]
    Go,
    #[serde(rename = "plus")]
    Plus,
    #[serde(rename = "pro")]
    Pro,
    #[serde(rename = "team")]
    Team,
    #[serde(rename = "business")]
    Business,
    #[serde(rename = "education")]
    Education,
    #[serde(rename = "quorum")]
    Quorum,
    #[serde(rename = "enterprise")]
    Enterprise,
    #[serde(rename = "edu")]
    Edu,
}

impl Default for PlanType {
    fn default() -> PlanType {
        Self::Free
    }
}
@@ -0,0 +1,40 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct RateLimitWindowSnapshot {
    #[serde(rename = "used_percent")]
    pub used_percent: i32,
    #[serde(rename = "limit_window_seconds")]
    pub limit_window_seconds: i32,
    #[serde(rename = "reset_after_seconds")]
    pub reset_after_seconds: i32,
    #[serde(rename = "reset_at")]
    pub reset_at: i32,
}

impl RateLimitWindowSnapshot {
    pub fn new(
        used_percent: i32,
        limit_window_seconds: i32,
        reset_after_seconds: i32,
        reset_at: i32,
    ) -> RateLimitWindowSnapshot {
        RateLimitWindowSnapshot {
            used_percent,
            limit_window_seconds,
            reset_after_seconds,
            reset_at,
        }
    }
}
codex-rs/common/src/format_env_display.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use std::collections::HashMap;

/// Renders env map keys and declared env var names for display, masking every
/// value as `*****`; returns "-" when there is nothing to show.
pub fn format_env_display(env: Option<&HashMap<String, String>>, env_vars: &[String]) -> String {
    let mut parts: Vec<String> = Vec::new();

    if let Some(map) = env {
        let mut pairs: Vec<_> = map.iter().collect();
        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
        parts.extend(pairs.into_iter().map(|(key, _)| format!("{key}=*****")));
    }

    if !env_vars.is_empty() {
        parts.extend(env_vars.iter().map(|var| format!("{var}=*****")));
    }

    if parts.is_empty() {
        "-".to_string()
    } else {
        parts.join(", ")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn returns_dash_when_empty() {
        assert_eq!(format_env_display(None, &[]), "-");

        let empty_map = HashMap::new();
        assert_eq!(format_env_display(Some(&empty_map), &[]), "-");
    }

    #[test]
    fn formats_sorted_env_pairs() {
        let mut env = HashMap::new();
        env.insert("B".to_string(), "two".to_string());
        env.insert("A".to_string(), "one".to_string());

        assert_eq!(format_env_display(Some(&env), &[]), "A=*****, B=*****");
    }

    #[test]
    fn formats_env_vars_as_masked_pairs() {
        let vars = vec!["TOKEN".to_string(), "PATH".to_string()];

        assert_eq!(format_env_display(None, &vars), "TOKEN=*****, PATH=*****");
    }

    #[test]
    fn combines_env_pairs_and_vars() {
        let mut env = HashMap::new();
        env.insert("HOME".to_string(), "/tmp".to_string());
        let vars = vec!["TOKEN".to_string()];

        assert_eq!(
            format_env_display(Some(&env), &vars),
            "HOME=*****, TOKEN=*****"
        );
    }
}
@@ -13,6 +13,9 @@ mod sandbox_mode_cli_arg;
#[cfg(feature = "cli")]
pub use sandbox_mode_cli_arg::SandboxModeCliArg;

#[cfg(feature = "cli")]
pub mod format_env_display;

#[cfg(any(feature = "cli", test))]
mod config_override;

@@ -1,73 +1,96 @@
use codex_app_server_protocol::AuthMode;
use codex_core::protocol_config_types::ReasoningEffort;

/// A simple preset pairing a model slug with a reasoning effort.
/// A reasoning effort option that can be surfaced for a model.
#[derive(Debug, Clone, Copy)]
pub struct ReasoningEffortPreset {
    /// Effort level that the model supports.
    pub effort: ReasoningEffort,
    /// Short human description shown next to the effort in UIs.
    pub description: &'static str,
}

/// Metadata describing a Codex-supported model.
#[derive(Debug, Clone, Copy)]
pub struct ModelPreset {
    /// Stable identifier for the preset.
    pub id: &'static str,
    /// Display label shown in UIs.
    pub label: &'static str,
    /// Short human description shown next to the label in UIs.
    pub description: &'static str,
    /// Model slug (e.g., "gpt-5").
    pub model: &'static str,
    /// Reasoning effort to apply for this preset.
    pub effort: Option<ReasoningEffort>,
    /// Display name shown in UIs.
    pub display_name: &'static str,
    /// Short human description shown in UIs.
    pub description: &'static str,
    /// Reasoning effort applied when none is explicitly chosen.
    pub default_reasoning_effort: ReasoningEffort,
    /// Supported reasoning effort options.
    pub supported_reasoning_efforts: &'static [ReasoningEffortPreset],
    /// Whether this is the default model for new users.
    pub is_default: bool,
}

const PRESETS: &[ModelPreset] = &[
    ModelPreset {
        id: "gpt-5-codex-low",
        label: "gpt-5-codex low",
        description: "Fastest responses with limited reasoning",
        id: "gpt-5-codex",
        model: "gpt-5-codex",
        effort: Some(ReasoningEffort::Low),
        display_name: "gpt-5-codex",
        description: "Optimized for coding tasks with many tools.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Low,
                description: "Fastest responses with limited reasoning",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Dynamically adjusts reasoning based on the task",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: true,
    },
    ModelPreset {
        id: "gpt-5-codex-medium",
        label: "gpt-5-codex medium",
        description: "Dynamically adjusts reasoning based on the task",
        model: "gpt-5-codex",
        effort: Some(ReasoningEffort::Medium),
    },
    ModelPreset {
        id: "gpt-5-codex-high",
        label: "gpt-5-codex high",
        description: "Maximizes reasoning depth for complex or ambiguous problems",
        model: "gpt-5-codex",
        effort: Some(ReasoningEffort::High),
    },
    ModelPreset {
        id: "gpt-5-minimal",
        label: "gpt-5 minimal",
        description: "Fastest responses with little reasoning",
        id: "gpt-5",
        model: "gpt-5",
        effort: Some(ReasoningEffort::Minimal),
    },
    ModelPreset {
        id: "gpt-5-low",
        label: "gpt-5 low",
        description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
        model: "gpt-5",
        effort: Some(ReasoningEffort::Low),
    },
    ModelPreset {
        id: "gpt-5-medium",
        label: "gpt-5 medium",
        description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
        model: "gpt-5",
        effort: Some(ReasoningEffort::Medium),
    },
    ModelPreset {
        id: "gpt-5-high",
        label: "gpt-5 high",
        description: "Maximizes reasoning depth for complex or ambiguous problems",
        model: "gpt-5",
        effort: Some(ReasoningEffort::High),
        display_name: "gpt-5",
        description: "Broad world knowledge with strong general reasoning.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Minimal,
                description: "Fastest responses with little reasoning",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Low,
                description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: false,
    },
];

pub fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    PRESETS.to_vec()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn only_one_default_model_is_configured() {
        let default_models = PRESETS.iter().filter(|preset| preset.is_default).count();
        assert_eq!(default_models, 1);
    }
}

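A minimal lookup sketch against the preset table above (all names come from this diff):

    // Resolve the default model slug from the presets defined above.
    fn default_model_slug() -> &'static str {
        builtin_model_presets(None)
            .into_iter()
            .find(|preset| preset.is_default)
            .map(|preset| preset.model)
            .expect("exactly one preset is marked is_default")
    }
    // With the table above, default_model_slug() == "gpt-5-codex".
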
@@ -3,4 +3,4 @@
This file has moved. Please see the latest configuration documentation here:

- Full config docs: [docs/config.md](../docs/config.md)
- MCP servers section: [docs/config.md#mcp_servers](../docs/config.md#mcp_servers)
- MCP servers section: [docs/config.md#connecting-to-mcp-servers](../docs/config.md#connecting-to-mcp-servers)

@@ -22,21 +22,23 @@ chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-file-search = { workspace = true }
codex-mcp-client = { workspace = true }
codex-otel = { workspace = true, features = ["otel"] }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-async-utils = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-tokenizer = { workspace = true }
dirs = { workspace = true }
dunce = { workspace = true }
env-flags = { workspace = true }
eventsource-stream = { workspace = true }
futures = { workspace = true }
http = { workspace = true }
indexmap = { workspace = true }
libc = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
portable-pty = { workspace = true }
rand = { workspace = true }
regex-lite = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
@@ -47,6 +49,7 @@ shlex = { workspace = true }
similar = { workspace = true }
strum_macros = { workspace = true }
tempfile = { workspace = true }
test-log = { workspace = true }
thiserror = { workspace = true }
time = { workspace = true, features = [
"formatting",

@@ -4,7 +4,7 @@ This crate implements the business logic for Codex. It is designed to be used by

## Dependencies

Note that `codex-core` makes some assumptions about certain helper utilities being available in the environment. Currently, this
Note that `codex-core` makes some assumptions about certain helper utilities being available in the environment. Currently, this support matrix is:

### macOS

@@ -36,7 +36,6 @@ pub(crate) struct ApplyPatchExec {
pub(crate) async fn apply_patch(
    sess: &Session,
    turn_context: &TurnContext,
    sub_id: &str,
    call_id: &str,
    action: ApplyPatchAction,
) -> InternalApplyPatchInvocation {
@@ -62,7 +61,7 @@ pub(crate) async fn apply_patch(
    // that similar patches can be auto-approved in the future during
    // this session.
    let rx_approve = sess
        .request_patch_approval(sub_id.to_owned(), call_id.to_owned(), &action, None, None)
        .request_patch_approval(turn_context, call_id.to_owned(), &action, None, None)
        .await;
    match rx_approve.await.unwrap_or_default() {
        ReviewDecision::Approved | ReviewDecision::ApprovedForSession => {

@@ -2,6 +2,8 @@ use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
#[cfg(test)]
use serial_test::serial;
use std::env;
use std::fs::File;
use std::fs::OpenOptions;
@@ -16,7 +18,10 @@ use std::sync::Mutex;
use std::time::Duration;

use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;

use crate::config::Config;
use crate::default_client::CodexHttpClient;
use crate::token_data::PlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_id_token;
@@ -28,7 +33,7 @@ pub struct CodexAuth {
    pub(crate) api_key: Option<String>,
    pub(crate) auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
    pub(crate) auth_file: PathBuf,
    pub(crate) client: reqwest::Client,
    pub(crate) client: CodexHttpClient,
}

impl PartialEq for CodexAuth {
@@ -39,6 +44,8 @@ impl PartialEq for CodexAuth {

impl CodexAuth {
    pub async fn refresh_token(&self) -> Result<String, std::io::Error> {
        tracing::info!("Refreshing token");

        let token_data = self
            .get_current_token_data()
            .ok_or(std::io::Error::other("Token data is not available."))?;
@@ -135,6 +142,10 @@ impl CodexAuth {
        self.get_current_token_data().and_then(|t| t.account_id)
    }

    pub fn get_account_email(&self) -> Option<String> {
        self.get_current_token_data().and_then(|t| t.id_token.email)
    }

    pub(crate) fn get_plan_type(&self) -> Option<PlanType> {
        self.get_current_token_data()
            .and_then(|t| t.id_token.chatgpt_plan_type)
@@ -172,7 +183,7 @@ impl CodexAuth {
        }
    }

    fn from_api_key_with_client(api_key: &str, client: reqwest::Client) -> Self {
    fn from_api_key_with_client(api_key: &str, client: CodexHttpClient) -> Self {
        Self {
            api_key: Some(api_key.to_owned()),
            mode: AuthMode::ApiKey,
@@ -229,6 +240,74 @@ pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<(
    write_auth_json(&get_auth_file(codex_home), &auth_dot_json)
}

pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
    let Some(auth) = load_auth(&config.codex_home, true)? else {
        return Ok(());
    };

    if let Some(required_method) = config.forced_login_method {
        let method_violation = match (required_method, auth.mode) {
            (ForcedLoginMethod::Api, AuthMode::ApiKey) => None,
            (ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT) => None,
            (ForcedLoginMethod::Api, AuthMode::ChatGPT) => Some(
                "API key login is required, but ChatGPT is currently being used. Logging out."
                    .to_string(),
            ),
            (ForcedLoginMethod::Chatgpt, AuthMode::ApiKey) => Some(
                "ChatGPT login is required, but an API key is currently being used. Logging out."
                    .to_string(),
            ),
        };

        if let Some(message) = method_violation {
            return logout_with_message(&config.codex_home, message);
        }
    }

    if let Some(expected_account_id) = config.forced_chatgpt_workspace_id.as_deref() {
        if auth.mode != AuthMode::ChatGPT {
            return Ok(());
        }

        let token_data = match auth.get_token_data().await {
            Ok(data) => data,
            Err(err) => {
                return logout_with_message(
                    &config.codex_home,
                    format!(
                        "Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
                    ),
                );
            }
        };

        // workspace is the external identifier for account id.
        let chatgpt_account_id = token_data.id_token.chatgpt_account_id.as_deref();
        if chatgpt_account_id != Some(expected_account_id) {
            let message = match chatgpt_account_id {
                Some(actual) => format!(
                    "Login is restricted to workspace {expected_account_id}, but current credentials belong to {actual}. Logging out."
                ),
                None => format!(
                    "Login is restricted to workspace {expected_account_id}, but current credentials lack a workspace identifier. Logging out."
                ),
            };
            return logout_with_message(&config.codex_home, message);
        }
    }

    Ok(())
}

fn logout_with_message(codex_home: &Path, message: String) -> std::io::Result<()> {
    match logout(codex_home) {
        Ok(_) => Err(std::io::Error::other(message)),
        Err(err) => Err(std::io::Error::other(format!(
            "{message}. Failed to remove auth.json: {err}"
        ))),
    }
}

fn load_auth(
    codex_home: &Path,
    enable_codex_api_key_env: bool,
@@ -245,9 +324,8 @@ fn load_auth(
    let client = crate::default_client::create_client();
    let auth_dot_json = match try_read_auth_json(&auth_file) {
        Ok(auth) => auth,
        Err(e) => {
            return Err(e);
        }
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
        Err(err) => return Err(err),
    };

    let AuthDotJson {
@@ -325,7 +403,7 @@ async fn update_tokens(

async fn try_refresh_token(
    refresh_token: String,
    client: &reqwest::Client,
    client: &CodexHttpClient,
) -> std::io::Result<RefreshResponse> {
    let refresh_request = RefreshRequest {
        client_id: CLIENT_ID,
@@ -399,17 +477,19 @@ struct CachedAuth {
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::Config;
    use crate::config::ConfigOverrides;
    use crate::config::ConfigToml;
    use crate::token_data::IdTokenInfo;
    use crate::token_data::KnownPlan;
    use crate::token_data::PlanType;
    use base64::Engine;
    use codex_protocol::config_types::ForcedLoginMethod;
    use pretty_assertions::assert_eq;
    use serde::Serialize;
    use serde_json::json;
    use tempfile::tempdir;

const LAST_REFRESH: &str = "2025-08-06T20:41:36.232376Z";
|
||||
|
||||
#[tokio::test]
|
||||
async fn roundtrip_auth_dot_json() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
@@ -417,6 +497,7 @@ mod tests {
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "pro".to_string(),
|
||||
chatgpt_account_id: None,
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
@@ -456,13 +537,22 @@ mod tests {
|
||||
assert!(auth.tokens.is_none(), "tokens should be cleared");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_auth_json_returns_none() {
|
||||
let dir = tempdir().unwrap();
|
||||
let auth = CodexAuth::from_codex_home(dir.path()).expect("call should succeed");
|
||||
assert_eq!(auth, None);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial(codex_api_key)]
|
||||
async fn pro_account_with_no_api_key_uses_chatgpt_auth() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
let fake_jwt = write_auth_file(
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "pro".to_string(),
|
||||
chatgpt_account_id: None,
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
@@ -480,6 +570,10 @@ mod tests {
|
||||
|
||||
let guard = auth_dot_json.lock().unwrap();
|
||||
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
|
||||
let last_refresh = auth_dot_json
|
||||
.last_refresh
|
||||
.expect("last_refresh should be recorded");
|
||||
|
||||
assert_eq!(
|
||||
&AuthDotJson {
|
||||
openai_api_key: None,
|
||||
@@ -487,23 +581,21 @@ mod tests {
|
||||
id_token: IdTokenInfo {
|
||||
email: Some("user@example.com".to_string()),
|
||||
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
|
||||
chatgpt_account_id: None,
|
||||
raw_jwt: fake_jwt,
|
||||
},
|
||||
access_token: "test-access-token".to_string(),
|
||||
refresh_token: "test-refresh-token".to_string(),
|
||||
account_id: None,
|
||||
}),
|
||||
last_refresh: Some(
|
||||
DateTime::parse_from_rfc3339(LAST_REFRESH)
|
||||
.unwrap()
|
||||
.with_timezone(&Utc)
|
||||
),
|
||||
last_refresh: Some(last_refresh),
|
||||
},
|
||||
auth_dot_json
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial(codex_api_key)]
|
||||
async fn loads_api_key_from_auth_json() {
|
||||
let dir = tempdir().unwrap();
|
||||
let auth_file = dir.path().join("auth.json");
|
||||
@@ -539,6 +631,7 @@ mod tests {
|
||||
struct AuthFileParams {
|
||||
openai_api_key: Option<String>,
|
||||
chatgpt_plan_type: String,
|
||||
chatgpt_account_id: Option<String>,
|
||||
}
|
||||
|
||||
fn write_auth_file(params: AuthFileParams, codex_home: &Path) -> std::io::Result<String> {
|
||||
@@ -553,15 +646,21 @@ mod tests {
|
||||
alg: "none",
|
||||
typ: "JWT",
|
||||
};
|
||||
let mut auth_payload = serde_json::json!({
|
||||
"chatgpt_plan_type": params.chatgpt_plan_type,
|
||||
"chatgpt_user_id": "user-12345",
|
||||
"user_id": "user-12345",
|
||||
});
|
||||
|
||||
if let Some(chatgpt_account_id) = params.chatgpt_account_id {
|
||||
let org_value = serde_json::Value::String(chatgpt_account_id);
|
||||
auth_payload["chatgpt_account_id"] = org_value;
|
||||
}
|
||||
|
||||
let payload = serde_json::json!({
|
||||
"email": "user@example.com",
|
||||
"email_verified": true,
|
||||
"https://api.openai.com/auth": {
|
||||
"chatgpt_account_id": "bc3618e3-489d-4d49-9362-1561dc53ba53",
|
||||
"chatgpt_plan_type": params.chatgpt_plan_type,
|
||||
"chatgpt_user_id": "user-12345",
|
||||
"user_id": "user-12345",
|
||||
}
|
||||
"https://api.openai.com/auth": auth_payload,
|
||||
});
|
||||
let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
|
||||
let header_b64 = b64(&serde_json::to_vec(&header)?);
|
||||
@@ -576,12 +675,161 @@ mod tests {
|
||||
"access_token": "test-access-token",
|
||||
"refresh_token": "test-refresh-token"
|
||||
},
|
||||
"last_refresh": LAST_REFRESH,
|
||||
"last_refresh": Utc::now(),
|
||||
});
|
||||
let auth_json = serde_json::to_string_pretty(&auth_json_data)?;
|
||||
std::fs::write(auth_file, auth_json)?;
|
||||
Ok(fake_jwt)
|
||||
}
|
||||
|
||||
fn build_config(
|
||||
codex_home: &Path,
|
||||
forced_login_method: Option<ForcedLoginMethod>,
|
||||
forced_chatgpt_workspace_id: Option<String>,
|
||||
) -> Config {
|
||||
let mut config = Config::load_from_base_config_with_overrides(
|
||||
ConfigToml::default(),
|
||||
ConfigOverrides::default(),
|
||||
codex_home.to_path_buf(),
|
||||
)
|
||||
.expect("config should load");
|
||||
config.forced_login_method = forced_login_method;
|
||||
config.forced_chatgpt_workspace_id = forced_chatgpt_workspace_id;
|
||||
config
|
||||
}
|
||||
|
||||
/// Use sparingly.
|
||||
/// TODO (gpeal): replace this with an injectable env var provider.
|
||||
#[cfg(test)]
|
||||
struct EnvVarGuard {
|
||||
key: &'static str,
|
||||
original: Option<std::ffi::OsString>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl EnvVarGuard {
|
||||
fn set(key: &'static str, value: &str) -> Self {
|
||||
let original = env::var_os(key);
|
||||
unsafe {
|
||||
env::set_var(key, value);
|
||||
}
|
||||
Self { key, original }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl Drop for EnvVarGuard {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
match &self.original {
|
||||
Some(value) => env::set_var(self.key, value),
|
||||
None => env::remove_var(self.key),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn enforce_login_restrictions_logs_out_for_method_mismatch() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
|
||||
|
||||
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
|
||||
|
||||
let err = super::enforce_login_restrictions(&config)
|
||||
.await
|
||||
.expect_err("expected method mismatch to error");
|
||||
assert!(err.to_string().contains("ChatGPT login is required"));
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should be removed on mismatch"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial(codex_api_key)]
|
||||
async fn enforce_login_restrictions_logs_out_for_workspace_mismatch() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
let _jwt = write_auth_file(
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "pro".to_string(),
|
||||
chatgpt_account_id: Some("org_another_org".to_string()),
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
.expect("failed to write auth file");
|
||||
|
||||
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
|
||||
|
||||
let err = super::enforce_login_restrictions(&config)
|
||||
.await
|
||||
.expect_err("expected workspace mismatch to error");
|
||||
assert!(err.to_string().contains("workspace org_mine"));
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should be removed on mismatch"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial(codex_api_key)]
|
||||
async fn enforce_login_restrictions_allows_matching_workspace() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
let _jwt = write_auth_file(
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "pro".to_string(),
|
||||
chatgpt_account_id: Some("org_mine".to_string()),
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
.expect("failed to write auth file");
|
||||
|
||||
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
|
||||
|
||||
super::enforce_login_restrictions(&config)
|
||||
.await
|
||||
.expect("matching workspace should succeed");
|
||||
assert!(
|
||||
codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should remain when restrictions pass"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn enforce_login_restrictions_allows_api_key_if_login_method_not_set_but_forced_chatgpt_workspace_id_is_set()
|
||||
{
|
||||
let codex_home = tempdir().unwrap();
|
||||
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
|
||||
|
||||
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
|
||||
|
||||
super::enforce_login_restrictions(&config)
|
||||
.await
|
||||
.expect("matching workspace should succeed");
|
||||
assert!(
|
||||
codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should remain when restrictions pass"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial(codex_api_key)]
|
||||
async fn enforce_login_restrictions_blocks_env_api_key_when_chatgpt_required() {
|
||||
let _guard = EnvVarGuard::set(CODEX_API_KEY_ENV_VAR, "sk-env");
|
||||
let codex_home = tempdir().unwrap();
|
||||
|
||||
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
|
||||
|
||||
let err = super::enforce_login_restrictions(&config)
|
||||
.await
|
||||
.expect_err("environment API key should not satisfy forced ChatGPT login");
|
||||
assert!(
|
||||
err.to_string()
|
||||
.contains("ChatGPT login is required, but an API key is currently being used.")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Central manager providing a single source of truth for auth.json derived
|
||||
@@ -671,7 +919,10 @@ impl AuthManager {
|
||||
self.reload();
|
||||
Ok(Some(token))
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to refresh token: {}", e);
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
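The enforcement path above is all-or-nothing: any mismatch between the configured restriction and the stored credentials deletes `auth.json` and surfaces the reason as a `std::io::Error`. A minimal caller sketch, assuming a `Config` has already been loaded; the `startup` wrapper is illustrative, and only `enforce_login_restrictions` comes from the diff:

```rust
// Hypothetical startup hook: fail fast if the stored credentials violate the
// configured restrictions, so the caller can prompt for a fresh login.
async fn startup(config: &Config) -> std::io::Result<()> {
    // On a forced-method or forced-workspace mismatch this removes auth.json
    // and returns Err carrying the human-readable reason built above.
    enforce_login_restrictions(config).await
}
```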
@@ -5,13 +5,13 @@ use tree_sitter_bash::LANGUAGE as BASH;

/// Parse the provided bash source using tree-sitter-bash, returning a Tree on
/// success or None if parsing failed.
pub fn try_parse_bash(bash_lc_arg: &str) -> Option<Tree> {
pub fn try_parse_shell(shell_lc_arg: &str) -> Option<Tree> {
    let lang = BASH.into();
    let mut parser = Parser::new();
    #[expect(clippy::expect_used)]
    parser.set_language(&lang).expect("load bash grammar");
    let old_tree: Option<&Tree> = None;
    parser.parse(bash_lc_arg, old_tree)
    parser.parse(shell_lc_arg, old_tree)
}

/// Parse a script which may contain multiple simple commands joined only by
@@ -88,18 +88,19 @@ pub fn try_parse_word_only_commands_sequence(tree: &Tree, src: &str) -> Option<V
    Some(commands)
}

/// Returns the sequence of plain commands within a `bash -lc "..."` invocation
/// when the script only contains word-only commands joined by safe operators.
pub fn parse_bash_lc_plain_commands(command: &[String]) -> Option<Vec<Vec<String>>> {
    let [bash, flag, script] = command else {
/// Returns the sequence of plain commands within a `bash -lc "..."` or
/// `zsh -lc "..."` invocation when the script only contains word-only commands
/// joined by safe operators.
pub fn parse_shell_lc_plain_commands(command: &[String]) -> Option<Vec<Vec<String>>> {
    let [shell, flag, script] = command else {
        return None;
    };

    if bash != "bash" || flag != "-lc" {
    if flag != "-lc" || !(shell == "bash" || shell == "zsh") {
        return None;
    }

    let tree = try_parse_bash(script)?;
    let tree = try_parse_shell(script)?;
    try_parse_word_only_commands_sequence(&tree, script)
}

@@ -154,7 +155,7 @@ mod tests {
    use super::*;

    fn parse_seq(src: &str) -> Option<Vec<Vec<String>>> {
        let tree = try_parse_bash(src)?;
        let tree = try_parse_shell(src)?;
        try_parse_word_only_commands_sequence(&tree, src)
    }

@@ -234,4 +235,11 @@ mod tests {
    fn rejects_trailing_operator_parse_error() {
        assert!(parse_seq("ls &&").is_none());
    }

    #[test]
    fn parse_zsh_lc_plain_commands() {
        let command = vec!["zsh".to_string(), "-lc".to_string(), "ls".to_string()];
        let parsed = parse_shell_lc_plain_commands(&command).unwrap();
        assert_eq!(parsed, vec![vec!["ls".to_string()]]);
    }
}

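Taken together, the rename means `zsh -lc` scripts are now decomposed exactly like `bash -lc` scripts (both are parsed with the bash grammar). A minimal usage sketch mirroring the test above; the expected value follows from the diff, the surrounding harness is illustrative:

```rust
// Sketch: a zsh invocation now decomposes into plain commands just like bash.
let command = vec![
    "zsh".to_string(),
    "-lc".to_string(),
    "echo hello".to_string(),
];
let parsed = parse_shell_lc_plain_commands(&command);
assert_eq!(
    parsed,
    Some(vec![vec!["echo".to_string(), "hello".to_string()]])
);
```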
@@ -4,12 +4,15 @@ use crate::ModelProviderInfo;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::default_client::CodexHttpClient;
use crate::error::CodexErr;
use crate::error::ConnectionFailedError;
use crate::error::ResponseStreamFailed;
use crate::error::Result;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use crate::model_family::ModelFamily;
use crate::openai_tools::create_tools_json_for_chat_completions_api;
use crate::tools::spec::create_tools_json_for_chat_completions_api;
use crate::util::backoff;
use bytes::Bytes;
use codex_otel::otel_event_manager::OtelEventManager;
@@ -34,7 +37,7 @@ use tracing::trace;
pub(crate) async fn stream_chat_completions(
    prompt: &Prompt,
    model_family: &ModelFamily,
    client: &reqwest::Client,
    client: &CodexHttpClient,
    provider: &ModelProviderInfo,
    otel_event_manager: &OtelEventManager,
) -> Result<ResponseStream> {
@@ -102,10 +105,10 @@ pub(crate) async fn stream_chat_completions(
        } = item
        {
            let mut text = String::new();
            for c in items {
                match c {
                    ReasoningItemContent::ReasoningText { text: t }
                    | ReasoningItemContent::Text { text: t } => text.push_str(t),
            for entry in items {
                match entry {
                    ReasoningItemContent::ReasoningText { text: segment }
                    | ReasoningItemContent::Text { text: segment } => text.push_str(segment),
                }
            }
            if text.trim().is_empty() {
@@ -309,7 +312,12 @@ pub(crate) async fn stream_chat_completions(
        match res {
            Ok(resp) if resp.status().is_success() => {
                let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(1600);
                let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
                let stream = resp.bytes_stream().map_err(|e| {
                    CodexErr::ResponseStreamFailed(ResponseStreamFailed {
                        source: e,
                        request_id: None,
                    })
                });
                tokio::spawn(process_chat_sse(
                    stream,
                    tx_event,
@@ -349,7 +357,9 @@ pub(crate) async fn stream_chat_completions(
            }
            Err(e) => {
                if attempt > max_retries {
                    return Err(e.into());
                    return Err(CodexErr::ConnectionFailed(ConnectionFailedError {
                        source: e,
                    }));
                }
                let delay = backoff(attempt);
                tokio::time::sleep(delay).await;

@@ -1,15 +1,18 @@
use std::io::BufRead;
use std::path::Path;
use std::sync::Arc;
use std::sync::OnceLock;
use std::time::Duration;

use crate::AuthManager;
use crate::auth::CodexAuth;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use bytes::Bytes;
use chrono::DateTime;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::models::ResponseItem;
use eventsource_stream::Eventsource;
use futures::prelude::*;
use regex_lite::Regex;
@@ -25,6 +28,8 @@ use tracing::debug;
use tracing::trace;
use tracing::warn;

use crate::AuthManager;
use crate::auth::CodexAuth;
use crate::chat_completions::AggregateStreamExt;
use crate::chat_completions::stream_chat_completions;
use crate::client_common::Prompt;
@@ -34,27 +39,27 @@ use crate::client_common::ResponsesApiRequest;
use crate::client_common::create_reasoning_param_for_request;
use crate::client_common::create_text_param_for_request;
use crate::config::Config;
use crate::default_client::CodexHttpClient;
use crate::default_client::create_client;
use crate::error::CodexErr;
use crate::error::ConnectionFailedError;
use crate::error::ResponseStreamFailed;
use crate::error::Result;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use crate::error::UsageLimitReachedError;
use crate::flags::CODEX_RS_SSE_FIXTURE;
use crate::model_family::ModelFamily;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;
use crate::openai_model_info::get_model_info;
use crate::openai_tools::create_tools_json_for_responses_api;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::state::TaskKind;
use crate::token_data::PlanType;
use crate::tools::spec::create_tools_json_for_responses_api;
use crate::util::backoff;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::models::ResponseItem;
use std::sync::Arc;

#[derive(Debug, Deserialize)]
struct ErrorResponse {
@@ -69,7 +74,7 @@ struct Error {

    // Optional fields available on "usage_limit_reached" and "usage_not_included" errors
    plan_type: Option<PlanType>,
    resets_in_seconds: Option<u64>,
    resets_at: Option<i64>,
}

#[derive(Debug, Clone)]
@@ -77,7 +82,7 @@ pub struct ModelClient {
    config: Arc<Config>,
    auth_manager: Option<Arc<AuthManager>>,
    otel_event_manager: OtelEventManager,
    client: reqwest::Client,
    client: CodexHttpClient,
    provider: ModelProviderInfo,
    conversation_id: ConversationId,
    effort: Option<ReasoningEffortConfig>,
@@ -108,10 +113,12 @@ impl ModelClient {
        }
    }

    pub fn get_model_context_window(&self) -> Option<u64> {
    pub fn get_model_context_window(&self) -> Option<i64> {
        let pct = self.config.model_family.effective_context_window_percent;
        self.config
            .model_context_window
            .or_else(|| get_model_info(&self.config.model_family).map(|info| info.context_window))
            .map(|w| w.saturating_mul(pct) / 100)
    }

    pub fn get_auto_compact_token_limit(&self) -> Option<i64> {
@@ -127,6 +134,14 @@ impl ModelClient {
        self.stream_with_task_kind(prompt, TaskKind::Regular).await
    }

    pub fn config(&self) -> Arc<Config> {
        Arc::clone(&self.config)
    }

    pub fn provider(&self) -> &ModelProviderInfo {
        &self.provider
    }

    pub(crate) async fn stream_with_task_kind(
        &self,
        prompt: &Prompt,
@@ -294,6 +309,7 @@ impl ModelClient {
            "POST to {}: {:?}",
            self.provider.get_full_url(&auth),
            serde_json::to_string(payload_json)
                .unwrap_or("<unable to serialize payload>".to_string())
        );

        let mut req_builder = self
@@ -329,12 +345,6 @@ impl ModelClient {
                .headers()
                .get("cf-ray")
                .map(|v| v.to_str().unwrap_or_default().to_string());

            trace!(
                "Response status: {}, cf-ray: {:?}",
                resp.status(),
                request_id
            );
        }

        match res {
@@ -351,7 +361,12 @@ impl ModelClient {
                }

                // spawn task to process SSE
                let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
                let stream = resp.bytes_stream().map_err(move |e| {
                    CodexErr::ResponseStreamFailed(ResponseStreamFailed {
                        source: e,
                        request_id: request_id.clone(),
                    })
                });
                tokio::spawn(process_sse(
                    stream,
                    tx_event,
@@ -412,10 +427,12 @@ impl ModelClient {
                    let plan_type = error
                        .plan_type
                        .or_else(|| auth.as_ref().and_then(CodexAuth::get_plan_type));
                    let resets_in_seconds = error.resets_in_seconds;
                    let resets_at = error
                        .resets_at
                        .and_then(|seconds| DateTime::<Utc>::from_timestamp(seconds, 0));
                    let codex_err = CodexErr::UsageLimitReached(UsageLimitReachedError {
                        plan_type,
                        resets_in_seconds,
                        resets_at,
                        rate_limits: rate_limit_snapshot,
                    });
                    return Err(StreamAttemptError::Fatal(codex_err));
@@ -431,7 +448,9 @@ impl ModelClient {
                    request_id,
                })
            }
            Err(e) => Err(StreamAttemptError::RetryableTransportError(e.into())),
            Err(e) => Err(StreamAttemptError::RetryableTransportError(
                CodexErr::ConnectionFailed(ConnectionFailedError { source: e }),
            )),
        }
    }

@@ -529,11 +548,11 @@ struct ResponseCompleted {

#[derive(Debug, Deserialize)]
struct ResponseCompletedUsage {
    input_tokens: u64,
    input_tokens: i64,
    input_tokens_details: Option<ResponseCompletedInputTokensDetails>,
    output_tokens: u64,
    output_tokens: i64,
    output_tokens_details: Option<ResponseCompletedOutputTokensDetails>,
    total_tokens: u64,
    total_tokens: i64,
}

impl From<ResponseCompletedUsage> for TokenUsage {
@@ -556,12 +575,12 @@ impl From<ResponseCompletedUsage> for TokenUsage {

#[derive(Debug, Deserialize)]
struct ResponseCompletedInputTokensDetails {
    cached_tokens: u64,
    cached_tokens: i64,
}

#[derive(Debug, Deserialize)]
struct ResponseCompletedOutputTokensDetails {
    reasoning_tokens: u64,
    reasoning_tokens: i64,
}

fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) {
@@ -596,14 +615,14 @@ fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option<RateLimitSnapshot> {
        headers,
        "x-codex-primary-used-percent",
        "x-codex-primary-window-minutes",
        "x-codex-primary-reset-after-seconds",
        "x-codex-primary-reset-at",
    );

    let secondary = parse_rate_limit_window(
        headers,
        "x-codex-secondary-used-percent",
        "x-codex-secondary-window-minutes",
        "x-codex-secondary-reset-after-seconds",
        "x-codex-secondary-reset-at",
    );

    Some(RateLimitSnapshot { primary, secondary })
@@ -613,22 +632,22 @@ fn parse_rate_limit_window(
    headers: &HeaderMap,
    used_percent_header: &str,
    window_minutes_header: &str,
    resets_header: &str,
    resets_at_header: &str,
) -> Option<RateLimitWindow> {
    let used_percent: Option<f64> = parse_header_f64(headers, used_percent_header);

    used_percent.and_then(|used_percent| {
        let window_minutes = parse_header_u64(headers, window_minutes_header);
        let resets_in_seconds = parse_header_u64(headers, resets_header);
        let window_minutes = parse_header_i64(headers, window_minutes_header);
        let resets_at = parse_header_i64(headers, resets_at_header);

        let has_data = used_percent != 0.0
            || window_minutes.is_some_and(|minutes| minutes != 0)
            || resets_in_seconds.is_some_and(|seconds| seconds != 0);
            || resets_at.is_some();

        has_data.then_some(RateLimitWindow {
            used_percent,
            window_minutes,
            resets_in_seconds,
            resets_at,
        })
    })
}
@@ -640,8 +659,8 @@ fn parse_header_f64(headers: &HeaderMap, name: &str) -> Option<f64> {
        .filter(|v| v.is_finite())
}

fn parse_header_u64(headers: &HeaderMap, name: &str) -> Option<u64> {
    parse_header_str(headers, name)?.parse::<u64>().ok()
fn parse_header_i64(headers: &HeaderMap, name: &str) -> Option<i64> {
    parse_header_str(headers, name)?.parse::<i64>().ok()
}

fn parse_header_str<'a>(headers: &'a HeaderMap, name: &str) -> Option<&'a str> {
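The rate-limit headers switch from a relative `reset-after-seconds` value to an absolute `reset-at` unix timestamp, parsed as a signed 64-bit integer. A small sketch of what the parser now accepts; the header names come from the diff, while the harness and the `HeaderMap` import path (re-exported by reqwest from the `http` crate) are assumptions:

```rust
use reqwest::header::{HeaderMap, HeaderValue};

fn sketch() {
    let mut headers = HeaderMap::new();
    headers.insert("x-codex-primary-used-percent", HeaderValue::from_static("42.5"));
    headers.insert("x-codex-primary-window-minutes", HeaderValue::from_static("60"));
    // Absolute unix timestamp (2024-01-01T00:00:00Z), not "seconds from now".
    headers.insert("x-codex-primary-reset-at", HeaderValue::from_static("1704067200"));

    // parse_rate_limit_snapshot(&headers) should now yield a primary window with
    // used_percent = 42.5, window_minutes = Some(60), resets_at = Some(1704067200),
    // and no secondary window since its headers are absent.
}
```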
@@ -1030,6 +1049,7 @@ mod tests {
            "test",
            "test",
            None,
            Some("test@test.com".to_string()),
            Some(AuthMode::ChatGPT),
            false,
            "test".to_string(),
@@ -1077,6 +1097,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1140,6 +1161,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1176,6 +1198,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1214,6 +1237,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1248,6 +1272,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1351,6 +1376,7 @@ mod tests {
            base_url: Some("https://test.com".to_string()),
            env_key: Some("TEST_API_KEY".to_string()),
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
@@ -1380,7 +1406,7 @@ mod tests {
            message: Some("Rate limit reached for gpt-5 in organization org- on tokens per min (TPM): Limit 1, Used 1, Requested 19304. Please try again in 28ms. Visit https://platform.openai.com/account/rate-limits to learn more.".to_string()),
            code: Some("rate_limit_exceeded".to_string()),
            plan_type: None,
            resets_in_seconds: None
            resets_at: None
        };

        let delay = try_parse_retry_after(&err);
@@ -1394,20 +1420,20 @@ mod tests {
            message: Some("Rate limit reached for gpt-5 in organization <ORG> on tokens per min (TPM): Limit 30000, Used 6899, Requested 24050. Please try again in 1.898s. Visit https://platform.openai.com/account/rate-limits to learn more.".to_string()),
            code: Some("rate_limit_exceeded".to_string()),
            plan_type: None,
            resets_in_seconds: None
            resets_at: None
        };
        let delay = try_parse_retry_after(&err);
        assert_eq!(delay, Some(Duration::from_secs_f64(1.898)));
    }

    #[test]
    fn error_response_deserializes_old_schema_known_plan_type_and_serializes_back() {
    fn error_response_deserializes_schema_known_plan_type_and_serializes_back() {
        use crate::token_data::KnownPlan;
        use crate::token_data::PlanType;

        let json = r#"{"error":{"type":"usage_limit_reached","plan_type":"pro","resets_in_seconds":3600}}"#;
        let resp: ErrorResponse =
            serde_json::from_str(json).expect("should deserialize old schema");
        let json =
            r#"{"error":{"type":"usage_limit_reached","plan_type":"pro","resets_at":1704067200}}"#;
        let resp: ErrorResponse = serde_json::from_str(json).expect("should deserialize schema");

        assert_matches!(resp.error.plan_type, Some(PlanType::Known(KnownPlan::Pro)));

@@ -1416,13 +1442,12 @@ mod tests {
    }

    #[test]
    fn error_response_deserializes_old_schema_unknown_plan_type_and_serializes_back() {
    fn error_response_deserializes_schema_unknown_plan_type_and_serializes_back() {
        use crate::token_data::PlanType;

        let json =
            r#"{"error":{"type":"usage_limit_reached","plan_type":"vip","resets_in_seconds":60}}"#;
        let resp: ErrorResponse =
            serde_json::from_str(json).expect("should deserialize old schema");
            r#"{"error":{"type":"usage_limit_reached","plan_type":"vip","resets_at":1704067260}}"#;
        let resp: ErrorResponse = serde_json::from_str(json).expect("should deserialize schema");

        assert_matches!(resp.error.plan_type, Some(PlanType::Unknown(ref s)) if s == "vip");

@@ -281,7 +281,7 @@ pub(crate) struct ResponsesApiRequest<'a> {
}

pub(crate) mod tools {
    use crate::openai_tools::JsonSchema;
    use crate::tools::spec::JsonSchema;
    use serde::Deserialize;
    use serde::Serialize;

File diff suppressed because it is too large
@@ -10,21 +10,21 @@ use crate::error::Result as CodexResult;
use crate::protocol::AgentMessageEvent;
use crate::protocol::CompactedItem;
use crate::protocol::ErrorEvent;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use crate::protocol::InputItem;
use crate::protocol::InputMessageKind;
use crate::protocol::TaskStartedEvent;
use crate::protocol::TurnContextItem;
use crate::state::TaskKind;
use crate::truncate::truncate_middle;
use crate::util::backoff;
use askama::Template;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::user_input::UserInput;
use futures::prelude::*;
use tracing::error;

pub const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;
@@ -40,46 +40,39 @@ pub(crate) async fn run_inline_auto_compact_task(
    sess: Arc<Session>,
    turn_context: Arc<TurnContext>,
) {
    let sub_id = sess.next_internal_sub_id();
    let input = vec![InputItem::Text {
    let input = vec![UserInput::Text {
        text: SUMMARIZATION_PROMPT.to_string(),
    }];
    run_compact_task_inner(sess, turn_context, sub_id, input).await;
    run_compact_task_inner(sess, turn_context, input).await;
}

pub(crate) async fn run_compact_task(
    sess: Arc<Session>,
    turn_context: Arc<TurnContext>,
    sub_id: String,
    input: Vec<InputItem>,
    input: Vec<UserInput>,
) -> Option<String> {
    let start_event = Event {
        id: sub_id.clone(),
        msg: EventMsg::TaskStarted(TaskStartedEvent {
            model_context_window: turn_context.client.get_model_context_window(),
        }),
    };
    sess.send_event(start_event).await;
    run_compact_task_inner(sess.clone(), turn_context, sub_id.clone(), input).await;
    let start_event = EventMsg::TaskStarted(TaskStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
    });
    sess.send_event(&turn_context, start_event).await;
    run_compact_task_inner(sess.clone(), turn_context, input).await;
    None
}

async fn run_compact_task_inner(
    sess: Arc<Session>,
    turn_context: Arc<TurnContext>,
    sub_id: String,
    input: Vec<InputItem>,
    input: Vec<UserInput>,
) {
    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
    // Track the items we append for this compact prompt so trimming does not drop them.
    let extra_items: Vec<ResponseItem> = vec![initial_input_for_turn.clone().into()];
    let mut turn_input = sess.turn_input_with_history(extra_items.clone()).await;

    let mut history = sess.clone_history().await;
    history.record_items(&[initial_input_for_turn.into()]);

    let mut truncated_count = 0usize;
    let mut trimmed_tails: Vec<Vec<ResponseItem>> = Vec::new();

    let max_retries = turn_context.client.get_provider().stream_max_retries();
    let mut context_retries = 0;
    let mut stream_retries = 0;
    let mut retries = 0;

    let rollout_item = RolloutItem::TurnContext(TurnContextItem {
        cwd: turn_context.cwd.clone(),
@@ -92,18 +85,18 @@ async fn run_compact_task_inner(
    sess.persist_rollout_items(&[rollout_item]).await;

    loop {
        let turn_input = history.get_history();
        let prompt = Prompt {
            input: turn_input.clone(),
            ..Default::default()
        };
        let attempt_result =
            drain_to_completed(&sess, turn_context.as_ref(), &sub_id, &prompt).await;
        let attempt_result = drain_to_completed(&sess, turn_context.as_ref(), &prompt).await;

        match attempt_result {
            Ok(()) => {
                if truncated_count > 0 {
                    sess.notify_background_event(
                        &sub_id,
                        turn_context.as_ref(),
                        format!(
                            "Trimmed {truncated_count} older conversation item(s) before compacting so the prompt fits the model context window."
                        ),
@@ -116,63 +109,39 @@ async fn run_compact_task_inner(
                return;
            }
            Err(e @ CodexErr::ContextWindowExceeded) => {
                // Drop the most recent user turn (its message plus ensuing traffic) and retry.
                if turn_input.len() > extra_items.len() {
                    let history_len = turn_input.len() - extra_items.len();
                    let mut prompt_items = turn_input.split_off(history_len);
                    let trimmed = trim_recent_history_to_previous_user_message(&mut turn_input);
                    turn_input.append(&mut prompt_items);
                    if !trimmed.is_empty() {
                        truncated_count += trimmed.len();
                        trimmed_tails.push(trimmed);
                if context_retries >= max_retries {
                    sess.set_total_tokens_full(&sub_id, turn_context.as_ref())
                        .await;
                    let event = Event {
                        id: sub_id.clone(),
                        msg: EventMsg::Error(ErrorEvent {
                            message: e.to_string(),
                        }),
                    };
                    sess.send_event(event).await;
                    return;
                }
                    context_retries += 1;
                    stream_retries = 0;
                    // Keep stream retry budget untouched; we trimmed context successfully.
                    continue;
                }
                if turn_input.len() > 1 {
                    // Trim from the beginning to preserve cache (prefix-based) and keep recent messages intact.
                    error!(
                        "Context window exceeded while compacting; removing oldest history item. Error: {e}"
                    );
                    history.remove_first_item();
                    truncated_count += 1;
                    retries = 0;
                    continue;
                }
                sess.set_total_tokens_full(&sub_id, turn_context.as_ref())
                    .await;
                let event = Event {
                    id: sub_id.clone(),
                    msg: EventMsg::Error(ErrorEvent {
                        message: e.to_string(),
                    }),
                };
                sess.send_event(event).await;
                sess.set_total_tokens_full(turn_context.as_ref()).await;
                let event = EventMsg::Error(ErrorEvent {
                    message: e.to_string(),
                });
                sess.send_event(&turn_context, event).await;
                return;
            }
            Err(e) => {
                if stream_retries < max_retries {
                    stream_retries += 1;
                    let delay = backoff(stream_retries);
                if retries < max_retries {
                    retries += 1;
                    let delay = backoff(retries);
                    sess.notify_stream_error(
                        &sub_id,
                        format!("Re-connecting... {stream_retries}/{max_retries}"),
                        turn_context.as_ref(),
                        format!("Re-connecting... {retries}/{max_retries}"),
                    )
                    .await;
                    tokio::time::sleep(delay).await;
                    continue;
                } else {
                    let event = Event {
                        id: sub_id.clone(),
                        msg: EventMsg::Error(ErrorEvent {
                            message: e.to_string(),
                        }),
                    };
                    sess.send_event(event).await;
                    let event = EventMsg::Error(ErrorEvent {
                        message: e.to_string(),
                    });
                    sess.send_event(&turn_context, event).await;
                    return;
                }
            }
@@ -183,10 +152,7 @@ async fn run_compact_task_inner(
    let summary_text = get_last_assistant_message_from_turn(&history_snapshot).unwrap_or_default();
    let user_messages = collect_user_messages(&history_snapshot);
    let initial_context = sess.build_initial_context(turn_context.as_ref());
    let mut new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
    for mut trimmed in trimmed_tails.into_iter().rev() {
        new_history.append(&mut trimmed);
    }
    let new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
    sess.replace_history(new_history).await;

    let rollout_item = RolloutItem::Compacted(CompactedItem {
@@ -194,34 +160,10 @@ async fn run_compact_task_inner(
    });
    sess.persist_rollout_items(&[rollout_item]).await;

    let event = Event {
        id: sub_id.clone(),
        msg: EventMsg::AgentMessage(AgentMessageEvent {
            message: "Compact task completed".to_string(),
        }),
    };
    sess.send_event(event).await;
}

/// Trim conversation history back to the previous user message boundary, removing that user turn.
///
/// Returns the removed items in their original order so they can be restored later.
fn trim_recent_history_to_previous_user_message(
    turn_input: &mut Vec<ResponseItem>,
) -> Vec<ResponseItem> {
    if turn_input.is_empty() {
        return Vec::new();
    }
    if let Some(last_user_index) = turn_input.iter().rposition(|item| {
        matches!(
            item,
            ResponseItem::Message { role, .. } if role == "user"
        )
    }) {
        turn_input.split_off(last_user_index)
    } else {
        std::mem::take(turn_input)
    }
    let event = EventMsg::AgentMessage(AgentMessageEvent {
        message: "Compact task completed".to_string(),
    });
    sess.send_event(&turn_context, event).await;
}

pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
@@ -246,29 +188,32 @@ pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
pub(crate) fn collect_user_messages(items: &[ResponseItem]) -> Vec<String> {
    items
        .iter()
        .filter_map(|item| match item {
            ResponseItem::Message { role, content, .. } if role == "user" => {
                content_items_to_text(content)
            }
        .filter_map(|item| match crate::event_mapping::parse_turn_item(item) {
            Some(TurnItem::UserMessage(user)) => Some(user.message()),
            _ => None,
        })
        .filter(|text| !is_session_prefix_message(text))
        .collect()
}

pub fn is_session_prefix_message(text: &str) -> bool {
    matches!(
        InputMessageKind::from(("user", text)),
        InputMessageKind::UserInstructions | InputMessageKind::EnvironmentContext
    )
}

pub(crate) fn build_compacted_history(
    initial_context: Vec<ResponseItem>,
    user_messages: &[String],
    summary_text: &str,
) -> Vec<ResponseItem> {
    let mut history = initial_context;
    build_compacted_history_with_limit(
        initial_context,
        user_messages,
        summary_text,
        COMPACT_USER_MESSAGE_MAX_TOKENS * 4,
    )
}

fn build_compacted_history_with_limit(
    mut history: Vec<ResponseItem>,
    user_messages: &[String],
    summary_text: &str,
    max_bytes: usize,
) -> Vec<ResponseItem> {
    let mut user_messages_text = if user_messages.is_empty() {
        "(none)".to_string()
    } else {
@@ -276,7 +221,6 @@ pub(crate) fn build_compacted_history(
    };
    // Truncate the concatenated prior user messages so the bridge message
    // stays well under the context window (approx. 4 bytes/token).
    let max_bytes = COMPACT_USER_MESSAGE_MAX_TOKENS * 4;
    if user_messages_text.len() > max_bytes {
        user_messages_text = truncate_middle(&user_messages_text, max_bytes).0;
    }
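The default byte budget that `build_compacted_history` passes to the new `_with_limit` variant follows directly from the comment above: roughly 4 bytes per token applied to `COMPACT_USER_MESSAGE_MAX_TOKENS`. A quick sketch of the arithmetic; the constant value is from the file, the standalone harness is illustrative:

```rust
fn main() {
    const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;
    // Approximate 4 bytes per token: the concatenated prior user messages are
    // capped at 80_000 bytes before being middle-truncated.
    let max_bytes = COMPACT_USER_MESSAGE_MAX_TOKENS * 4;
    assert_eq!(max_bytes, 80_000);
}
```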
@@ -303,7 +247,6 @@ pub(crate) fn build_compacted_history(
async fn drain_to_completed(
    sess: &Session,
    turn_context: &TurnContext,
    sub_id: &str,
    prompt: &Prompt,
) -> CodexResult<()> {
    let mut stream = turn_context
@@ -324,10 +267,10 @@ async fn drain_to_completed(
                sess.record_into_history(std::slice::from_ref(&item)).await;
            }
            Ok(ResponseEvent::RateLimits(snapshot)) => {
                sess.update_rate_limits(sub_id, snapshot).await;
                sess.update_rate_limits(turn_context, snapshot).await;
            }
            Ok(ResponseEvent::Completed { token_usage, .. }) => {
                sess.update_token_usage_info(sub_id, turn_context, token_usage.as_ref())
                sess.update_token_usage_info(turn_context, token_usage.as_ref())
                    .await;
                return Ok(());
            }
@@ -385,21 +328,16 @@ mod tests {
            ResponseItem::Message {
                id: Some("user".to_string()),
                role: "user".to_string(),
                content: vec![
                    ContentItem::InputText {
                        text: "first".to_string(),
                    },
                    ContentItem::OutputText {
                        text: "second".to_string(),
                    },
                ],
                content: vec![ContentItem::InputText {
                    text: "first".to_string(),
                }],
            },
            ResponseItem::Other,
        ];

        let collected = collect_user_messages(&items);

        assert_eq!(vec!["first\nsecond".to_string()], collected);
        assert_eq!(vec!["first".to_string()], collected);
    }

    #[test]
@@ -435,11 +373,16 @@ mod tests {

    #[test]
    fn build_compacted_history_truncates_overlong_user_messages() {
        // Prepare a very large prior user message so the aggregated
        // `user_messages_text` exceeds the truncation threshold used by
        // `build_compacted_history` (80k bytes).
        let big = "X".repeat(200_000);
        let history = build_compacted_history(Vec::new(), std::slice::from_ref(&big), "SUMMARY");
        // Use a small truncation limit so the test remains fast while still validating
        // that oversized user content is truncated.
        let max_bytes = 128;
        let big = "X".repeat(max_bytes + 50);
        let history = super::build_compacted_history_with_limit(
            Vec::new(),
            std::slice::from_ref(&big),
            "SUMMARY",
            max_bytes,
        );

        // Expect exactly one bridge message added to history (plus any initial context we provided, which is none).
        assert_eq!(history.len(), 1);

@@ -1,4 +1,4 @@
use crate::bash::parse_bash_lc_plain_commands;
use crate::bash::parse_shell_lc_plain_commands;

pub fn command_might_be_dangerous(command: &[String]) -> bool {
    if is_dangerous_to_call_with_exec(command) {
@@ -6,7 +6,7 @@ pub fn command_might_be_dangerous(command: &[String]) -> bool {
    }

    // Support `bash -lc "<script>"` where any part of the script might contain a dangerous command.
    if let Some(all_commands) = parse_bash_lc_plain_commands(command)
    if let Some(all_commands) = parse_shell_lc_plain_commands(command)
        && all_commands
            .iter()
            .any(|cmd| is_dangerous_to_call_with_exec(cmd))
@@ -57,6 +57,15 @@ mod tests {
        ])));
    }

    #[test]
    fn zsh_git_reset_is_dangerous() {
        assert!(command_might_be_dangerous(&vec_str(&[
            "zsh",
            "-lc",
            "git reset --hard"
        ])));
    }

    #[test]
    fn git_status_is_not_dangerous() {
        assert!(!command_might_be_dangerous(&vec_str(&["git", "status"])));

@@ -1,15 +1,25 @@
use crate::bash::parse_bash_lc_plain_commands;
use crate::bash::parse_shell_lc_plain_commands;

pub fn is_known_safe_command(command: &[String]) -> bool {
    let command: Vec<String> = command
        .iter()
        .map(|s| {
            if s == "zsh" {
                "bash".to_string()
            } else {
                s.clone()
            }
        })
        .collect();
    #[cfg(target_os = "windows")]
    {
        use super::windows_safe_commands::is_safe_command_windows;
        if is_safe_command_windows(command) {
        if is_safe_command_windows(&command) {
            return true;
        }
    }

    if is_safe_to_call_with_exec(command) {
    if is_safe_to_call_with_exec(&command) {
        return true;
    }

@@ -19,7 +29,7 @@ pub fn is_known_safe_command(command: &[String]) -> bool {
    // introduce side effects ( "&&", "||", ";", and "|" ). If every
    // individual command in the script is itself a known‑safe command, then
    // the composite expression is considered safe.
    if let Some(all_commands) = parse_bash_lc_plain_commands(command)
    if let Some(all_commands) = parse_shell_lc_plain_commands(&command)
        && !all_commands.is_empty()
        && all_commands
            .iter()
@@ -31,9 +41,14 @@ pub fn is_known_safe_command(command: &[String]) -> bool {
}

fn is_safe_to_call_with_exec(command: &[String]) -> bool {
    let cmd0 = command.first().map(String::as_str);
    let Some(cmd0) = command.first().map(String::as_str) else {
        return false;
    };

    match cmd0 {
    match std::path::Path::new(&cmd0)
        .file_name()
        .and_then(|osstr| osstr.to_str())
    {
        #[rustfmt::skip]
        Some(
            "cat" |
@@ -103,13 +118,12 @@ fn is_safe_to_call_with_exec(command: &[String]) -> bool {
        // Rust
        Some("cargo") if command.get(1).map(String::as_str) == Some("check") => true,

        // Special-case `sed -n {N|M,N}p FILE`
        // Special-case `sed -n {N|M,N}p`
        Some("sed")
            if {
                command.len() == 4
                command.len() <= 4
                    && command.get(1).map(String::as_str) == Some("-n")
                    && is_valid_sed_n_arg(command.get(2).map(String::as_str))
                    && command.get(3).map(String::is_empty) == Some(false)
            } =>
        {
            true
@@ -187,6 +201,11 @@ mod tests {
        ])));
    }

    #[test]
    fn zsh_lc_safe_command_sequence() {
        assert!(is_known_safe_command(&vec_str(&["zsh", "-lc", "ls"])));
    }

    #[test]
    fn unknown_or_partial() {
        assert!(!is_safe_to_call_with_exec(&vec_str(&["foo"])));

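Net effect of the safety refactor: `zsh` argv entries are rewritten to `bash` up front, and the exec allow-list now matches on the program's basename rather than the raw argv[0]. A hedged sketch of the resulting behavior; the entry point comes from the diff, while the `argv` helper is illustrative and the full allow-list is elided above:

```rust
fn argv(parts: &[&str]) -> Vec<String> {
    parts.iter().map(|s| s.to_string()).collect()
}

fn sketch() {
    // zsh is normalized to bash before any checks run, so this parses and passes.
    assert!(is_known_safe_command(&argv(&["zsh", "-lc", "ls"])));
    // Basename matching means an absolute path to an allow-listed binary
    // should also classify as safe.
    assert!(is_known_safe_command(&argv(&["/bin/cat", "Cargo.toml"])));
    // Unknown programs stay unsafe, as do empty argv vectors.
    assert!(!is_known_safe_command(&argv(&["foo"])));
    assert!(!is_known_safe_command(&argv(&[])));
}
```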
File diff suppressed because it is too large

@@ -4,6 +4,7 @@ use std::path::PathBuf;
use crate::protocol::AskForApproval;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;

/// Collection of common configuration options that a user can define as a unit
@@ -15,18 +16,19 @@ pub struct ConfigProfile {
    /// [`ModelProviderInfo`] to use.
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
    pub experimental_instructions_file: Option<PathBuf>,
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
    pub experimental_sandbox_command_assessment: Option<bool>,
    pub tools_web_search: Option<bool>,
    pub tools_view_image: Option<bool>,
    /// Optional feature toggles scoped to this profile.

@@ -35,6 +35,14 @@ pub struct McpServerConfig {
    /// Default timeout for MCP tool calls initiated via this server.
    #[serde(default, with = "option_duration_secs")]
    pub tool_timeout_sec: Option<Duration>,

    /// Explicit allow-list of tools exposed from this server. When set, only these tools will be registered.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled_tools: Option<Vec<String>>,

    /// Explicit deny-list of tools. These tools will be removed after applying `enabled_tools`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disabled_tools: Option<Vec<String>>,
}

impl<'de> Deserialize<'de> for McpServerConfig {
@@ -42,18 +50,28 @@ impl<'de> Deserialize<'de> for McpServerConfig {
    where
        D: Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[derive(Deserialize, Clone)]
        struct RawMcpServerConfig {
            // stdio
            command: Option<String>,
            #[serde(default)]
            args: Option<Vec<String>>,
            #[serde(default)]
            env: Option<HashMap<String, String>>,
            #[serde(default)]
            env_vars: Option<Vec<String>>,
            #[serde(default)]
            cwd: Option<PathBuf>,
            http_headers: Option<HashMap<String, String>>,
            #[serde(default)]
            env_http_headers: Option<HashMap<String, String>>,

            // streamable_http
            url: Option<String>,
            bearer_token: Option<String>,
            bearer_token_env_var: Option<String>,

            // shared
            #[serde(default)]
            startup_timeout_sec: Option<f64>,
            #[serde(default)]
@@ -62,9 +80,13 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            tool_timeout_sec: Option<Duration>,
            #[serde(default)]
            enabled: Option<bool>,
            #[serde(default)]
            enabled_tools: Option<Vec<String>>,
            #[serde(default)]
            disabled_tools: Option<Vec<String>>,
        }

        let raw = RawMcpServerConfig::deserialize(deserializer)?;
        let mut raw = RawMcpServerConfig::deserialize(deserializer)?;

        let startup_timeout_sec = match (raw.startup_timeout_sec, raw.startup_timeout_ms) {
            (Some(sec), _) => {
@@ -74,6 +96,10 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            (None, Some(ms)) => Some(Duration::from_millis(ms)),
            (None, None) => None,
        };
        let tool_timeout_sec = raw.tool_timeout_sec;
        let enabled = raw.enabled.unwrap_or_else(default_enabled);
        let enabled_tools = raw.enabled_tools.clone();
        let disabled_tools = raw.disabled_tools.clone();

        fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
        where
@@ -87,53 +113,46 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            )))
        }

        let transport = match raw {
            RawMcpServerConfig {
                command: Some(command),
                args,
                env,
                url,
                bearer_token_env_var,
                ..
            } => {
                throw_if_set("stdio", "url", url.as_ref())?;
                throw_if_set(
                    "stdio",
                    "bearer_token_env_var",
                    bearer_token_env_var.as_ref(),
                )?;
                McpServerTransportConfig::Stdio {
                    command,
                    args: args.unwrap_or_default(),
                    env,
                }
            }
            RawMcpServerConfig {
                url: Some(url),
                bearer_token,
                bearer_token_env_var,
        let transport = if let Some(command) = raw.command.clone() {
            throw_if_set("stdio", "url", raw.url.as_ref())?;
            throw_if_set(
                "stdio",
                "bearer_token_env_var",
                raw.bearer_token_env_var.as_ref(),
            )?;
            throw_if_set("stdio", "bearer_token", raw.bearer_token.as_ref())?;
            throw_if_set("stdio", "http_headers", raw.http_headers.as_ref())?;
            throw_if_set("stdio", "env_http_headers", raw.env_http_headers.as_ref())?;
            McpServerTransportConfig::Stdio {
                command,
                args,
                env,
                ..
            } => {
                throw_if_set("streamable_http", "command", command.as_ref())?;
                throw_if_set("streamable_http", "args", args.as_ref())?;
                throw_if_set("streamable_http", "env", env.as_ref())?;
                throw_if_set("streamable_http", "bearer_token", bearer_token.as_ref())?;
                McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var,
                }
                args: raw.args.clone().unwrap_or_default(),
                env: raw.env.clone(),
                env_vars: raw.env_vars.clone().unwrap_or_default(),
                cwd: raw.cwd.take(),
            }
            _ => return Err(SerdeError::custom("invalid transport")),
        } else if let Some(url) = raw.url.clone() {
            throw_if_set("streamable_http", "args", raw.args.as_ref())?;
            throw_if_set("streamable_http", "env", raw.env.as_ref())?;
            throw_if_set("streamable_http", "env_vars", raw.env_vars.as_ref())?;
            throw_if_set("streamable_http", "cwd", raw.cwd.as_ref())?;
            throw_if_set("streamable_http", "bearer_token", raw.bearer_token.as_ref())?;
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var: raw.bearer_token_env_var.clone(),
                http_headers: raw.http_headers.clone(),
                env_http_headers: raw.env_http_headers.take(),
            }
        } else {
            return Err(SerdeError::custom("invalid transport"));
        };

        Ok(Self {
            transport,
            startup_timeout_sec,
            tool_timeout_sec: raw.tool_timeout_sec,
            enabled: raw.enabled.unwrap_or_else(default_enabled),
            tool_timeout_sec,
            enabled,
            enabled_tools,
            disabled_tools,
        })
    }
}
@@ -152,6 +171,10 @@ pub enum McpServerTransportConfig {
        args: Vec<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        env: Option<HashMap<String, String>>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        env_vars: Vec<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        cwd: Option<PathBuf>,
    },
    /// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http
    StreamableHttp {
@@ -161,6 +184,12 @@ pub enum McpServerTransportConfig {
        /// The actual secret value must be provided via the environment.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        bearer_token_env_var: Option<String>,
        /// Additional HTTP headers to include in requests to this server.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        http_headers: Option<HashMap<String, String>>,
        /// HTTP headers where the value is sourced from an environment variable.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        env_http_headers: Option<HashMap<String, String>>,
    },
}

@@ -322,6 +351,20 @@ pub struct Tui {
|
||||
pub notifications: Notifications,
|
||||
}
|
||||
|
||||
/// Settings for notices we display to users via the tui and app-server clients
|
||||
/// (primarily the Codex IDE extension). NOTE: these are different from
|
||||
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
|
||||
pub struct Notice {
|
||||
/// Tracks whether the user has acknowledged the full access warning prompt.
|
||||
pub hide_full_access_warning: Option<bool>,
|
||||
}
|
||||
|
||||
impl Notice {
|
||||
/// used by set_hide_full_access_warning until we refactor config updates
|
||||
pub(crate) const TABLE_KEY: &'static str = "notice";
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
|
||||
pub struct SandboxWorkspaceWrite {
|
||||
#[serde(default)]
|
||||
@@ -468,10 +511,14 @@ mod tests {
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec![],
|
||||
env: None
|
||||
env: None,
|
||||
env_vars: Vec::new(),
|
||||
cwd: None,
|
||||
}
|
||||
);
|
||||
assert!(cfg.enabled);
|
||||
assert!(cfg.enabled_tools.is_none());
|
||||
assert!(cfg.disabled_tools.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -489,7 +536,9 @@ mod tests {
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string(), "world".to_string()],
|
||||
env: None
|
||||
env: None,
|
||||
env_vars: Vec::new(),
|
||||
cwd: None,
|
||||
}
|
||||
);
|
||||
assert!(cfg.enabled);
|
||||
@@ -511,12 +560,58 @@ mod tests {
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string(), "world".to_string()],
|
||||
env: Some(HashMap::from([("FOO".to_string(), "BAR".to_string())]))
|
||||
env: Some(HashMap::from([("FOO".to_string(), "BAR".to_string())])),
|
||||
env_vars: Vec::new(),
|
||||
cwd: None,
|
||||
}
|
||||
);
|
||||
assert!(cfg.enabled);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_stdio_command_server_config_with_env_vars() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
env_vars = ["FOO", "BAR"]
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize command config with env_vars");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec![],
|
||||
env: None,
|
||||
env_vars: vec!["FOO".to_string(), "BAR".to_string()],
|
||||
cwd: None,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_stdio_command_server_config_with_cwd() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
cwd = "/tmp"
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize command config with cwd");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec![],
|
||||
env: None,
|
||||
env_vars: Vec::new(),
|
||||
cwd: Some(PathBuf::from("/tmp")),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_disabled_server_config() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
@@ -543,7 +638,9 @@ mod tests {
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token_env_var: None
|
||||
bearer_token_env_var: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
}
|
||||
);
|
||||
assert!(cfg.enabled);
|
||||
@@ -563,12 +660,54 @@ mod tests {
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token_env_var: Some("GITHUB_TOKEN".to_string())
|
||||
bearer_token_env_var: Some("GITHUB_TOKEN".to_string()),
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
}
|
||||
);
|
||||
assert!(cfg.enabled);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_streamable_http_server_config_with_headers() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
url = "https://example.com/mcp"
|
||||
http_headers = { "X-Foo" = "bar" }
|
||||
env_http_headers = { "X-Token" = "TOKEN_ENV" }
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize http config with headers");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token_env_var: None,
|
||||
http_headers: Some(HashMap::from([("X-Foo".to_string(), "bar".to_string())])),
|
||||
env_http_headers: Some(HashMap::from([(
|
||||
"X-Token".to_string(),
|
||||
"TOKEN_ENV".to_string()
|
||||
)])),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_server_config_with_tool_filters() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
enabled_tools = ["allowed"]
|
||||
disabled_tools = ["blocked"]
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize tool filters");
|
||||
|
||||
assert_eq!(cfg.enabled_tools, Some(vec!["allowed".to_string()]));
|
||||
assert_eq!(cfg.disabled_tools, Some(vec!["blocked".to_string()]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_command_and_url() {
|
||||
toml::from_str::<McpServerConfig>(
|
||||
@@ -591,6 +730,25 @@ mod tests {
|
||||
.expect_err("should reject env for http transport");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_headers_for_stdio() {
|
||||
toml::from_str::<McpServerConfig>(
|
||||
r#"
|
||||
command = "echo"
|
||||
http_headers = { "X-Foo" = "bar" }
|
||||
"#,
|
||||
)
|
||||
.expect_err("should reject http_headers for stdio transport");
|
||||
|
||||
toml::from_str::<McpServerConfig>(
|
||||
r#"
|
||||
command = "echo"
|
||||
env_http_headers = { "X-Foo" = "BAR_ENV" }
|
||||
"#,
|
||||
)
|
||||
.expect_err("should reject env_http_headers for stdio transport");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_inline_bearer_token_field() {
|
||||
let err = toml::from_str::<McpServerConfig>(
|
||||
|
||||
@@ -1,20 +1,36 @@
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::TokenUsage;
use codex_protocol::protocol::TokenUsageInfo;
use tracing::error;

/// Transcript of conversation history
#[derive(Debug, Clone, Default)]
pub(crate) struct ConversationHistory {
    /// The oldest items are at the beginning of the vector.
    items: Vec<ResponseItem>,
    token_info: Option<TokenUsageInfo>,
}

impl ConversationHistory {
    pub(crate) fn new() -> Self {
        Self { items: Vec::new() }
        Self {
            items: Vec::new(),
            token_info: TokenUsageInfo::new_or_append(&None, &None, None),
        }
    }

    /// Returns a clone of the contents in the transcript.
    pub(crate) fn contents(&self) -> Vec<ResponseItem> {
        self.items.clone()
    pub(crate) fn token_info(&self) -> Option<TokenUsageInfo> {
        self.token_info.clone()
    }

    pub(crate) fn set_token_usage_full(&mut self, context_window: i64) {
        match &mut self.token_info {
            Some(info) => info.fill_to_context_window(context_window),
            None => {
                self.token_info = Some(TokenUsageInfo::full_context_window(context_window));
            }
        }
    }

    /// `items` is ordered from oldest to newest.
@@ -32,9 +48,299 @@ impl ConversationHistory {
        }
    }

    pub(crate) fn get_history(&mut self) -> Vec<ResponseItem> {
        self.normalize_history();
        self.contents()
    }

    pub(crate) fn remove_first_item(&mut self) {
        if !self.items.is_empty() {
            // Remove the oldest item (front of the list). Items are ordered from
            // oldest → newest, so index 0 is the first entry recorded.
            let removed = self.items.remove(0);
            // If the removed item participates in a call/output pair, also remove
            // its corresponding counterpart to keep the invariants intact without
            // running a full normalization pass.
            self.remove_corresponding_for(&removed);
        }
    }

    /// This function enforces a couple of invariants on the in-memory history:
    /// 1. every call (function/custom) has a corresponding output entry
    /// 2. every output has a corresponding call entry
    fn normalize_history(&mut self) {
        // all function/tool calls must have a corresponding output
        self.ensure_call_outputs_present();

        // all outputs must have a corresponding function/tool call
        self.remove_orphan_outputs();
    }

    /// Returns a clone of the contents in the transcript.
    fn contents(&self) -> Vec<ResponseItem> {
        self.items.clone()
    }

    fn ensure_call_outputs_present(&mut self) {
        // Collect synthetic outputs to insert immediately after their calls.
        // Store the insertion position (index of call) alongside the item so
        // we can insert in reverse order and avoid index shifting.
        let mut missing_outputs_to_insert: Vec<(usize, ResponseItem)> = Vec::new();

        for (idx, item) in self.items.iter().enumerate() {
            match item {
                ResponseItem::FunctionCall { call_id, .. } => {
                    let has_output = self.items.iter().any(|i| match i {
                        ResponseItem::FunctionCallOutput {
                            call_id: existing, ..
                        } => existing == call_id,
                        _ => false,
                    });

                    if !has_output {
                        error_or_panic(format!(
                            "Function call output is missing for call id: {call_id}"
                        ));
                        missing_outputs_to_insert.push((
                            idx,
                            ResponseItem::FunctionCallOutput {
                                call_id: call_id.clone(),
                                output: FunctionCallOutputPayload {
                                    content: "aborted".to_string(),
                                    success: None,
                                },
                            },
                        ));
                    }
                }
                ResponseItem::CustomToolCall { call_id, .. } => {
                    let has_output = self.items.iter().any(|i| match i {
                        ResponseItem::CustomToolCallOutput {
                            call_id: existing, ..
                        } => existing == call_id,
                        _ => false,
                    });

                    if !has_output {
                        error_or_panic(format!(
                            "Custom tool call output is missing for call id: {call_id}"
                        ));
                        missing_outputs_to_insert.push((
                            idx,
                            ResponseItem::CustomToolCallOutput {
                                call_id: call_id.clone(),
                                output: "aborted".to_string(),
                            },
                        ));
                    }
                }
                // LocalShellCall is represented in upstream streams by a FunctionCallOutput
                ResponseItem::LocalShellCall { call_id, .. } => {
                    if let Some(call_id) = call_id.as_ref() {
                        let has_output = self.items.iter().any(|i| match i {
                            ResponseItem::FunctionCallOutput {
                                call_id: existing, ..
                            } => existing == call_id,
                            _ => false,
                        });

                        if !has_output {
                            error_or_panic(format!(
                                "Local shell call output is missing for call id: {call_id}"
                            ));
                            missing_outputs_to_insert.push((
                                idx,
                                ResponseItem::FunctionCallOutput {
                                    call_id: call_id.clone(),
                                    output: FunctionCallOutputPayload {
                                        content: "aborted".to_string(),
                                        success: None,
                                    },
                                },
                            ));
                        }
                    }
                }
                ResponseItem::Reasoning { .. }
                | ResponseItem::WebSearchCall { .. }
                | ResponseItem::FunctionCallOutput { .. }
                | ResponseItem::CustomToolCallOutput { .. }
                | ResponseItem::Other
                | ResponseItem::Message { .. } => {
                    // nothing to do for these variants
                }
            }
        }

        if !missing_outputs_to_insert.is_empty() {
            // Insert from the end to avoid shifting subsequent indices.
            missing_outputs_to_insert.sort_by_key(|(i, _)| *i);
            for (idx, item) in missing_outputs_to_insert.into_iter().rev() {
                let insert_pos = idx + 1; // place immediately after the call
                if insert_pos <= self.items.len() {
                    self.items.insert(insert_pos, item);
                } else {
                    self.items.push(item);
                }
            }
        }
    }

    fn remove_orphan_outputs(&mut self) {
        // Work on a snapshot to avoid borrowing `self.items` while mutating it.
        let snapshot = self.items.clone();
        let mut orphan_output_call_ids: std::collections::HashSet<String> =
            std::collections::HashSet::new();

        for item in &snapshot {
            match item {
                ResponseItem::FunctionCallOutput { call_id, .. } => {
                    let has_call = snapshot.iter().any(|i| match i {
                        ResponseItem::FunctionCall {
                            call_id: existing, ..
                        } => existing == call_id,
                        ResponseItem::LocalShellCall {
                            call_id: Some(existing),
                            ..
                        } => existing == call_id,
                        _ => false,
                    });

                    if !has_call {
                        error_or_panic(format!("Function call is missing for call id: {call_id}"));
                        orphan_output_call_ids.insert(call_id.clone());
                    }
                }
                ResponseItem::CustomToolCallOutput { call_id, .. } => {
                    let has_call = snapshot.iter().any(|i| match i {
                        ResponseItem::CustomToolCall {
                            call_id: existing, ..
                        } => existing == call_id,
                        _ => false,
                    });

                    if !has_call {
                        error_or_panic(format!(
                            "Custom tool call is missing for call id: {call_id}"
                        ));
                        orphan_output_call_ids.insert(call_id.clone());
                    }
                }
                ResponseItem::FunctionCall { .. }
                | ResponseItem::CustomToolCall { .. }
                | ResponseItem::LocalShellCall { .. }
                | ResponseItem::Reasoning { .. }
                | ResponseItem::WebSearchCall { .. }
                | ResponseItem::Other
                | ResponseItem::Message { .. } => {
                    // nothing to do for these variants
                }
            }
        }

        if !orphan_output_call_ids.is_empty() {
            let ids = orphan_output_call_ids;
            self.items.retain(|i| match i {
                ResponseItem::FunctionCallOutput { call_id, .. }
                | ResponseItem::CustomToolCallOutput { call_id, .. } => !ids.contains(call_id),
                _ => true,
            });
        }
    }

    pub(crate) fn replace(&mut self, items: Vec<ResponseItem>) {
        self.items = items;
    }

    /// Removes the corresponding paired item for the provided `item`, if any.
    ///
    /// Pairs:
    /// - FunctionCall <-> FunctionCallOutput
    /// - CustomToolCall <-> CustomToolCallOutput
    /// - LocalShellCall(call_id: Some) <-> FunctionCallOutput
    fn remove_corresponding_for(&mut self, item: &ResponseItem) {
        match item {
            ResponseItem::FunctionCall { call_id, .. } => {
                self.remove_first_matching(|i| match i {
                    ResponseItem::FunctionCallOutput {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });
            }
            ResponseItem::CustomToolCall { call_id, .. } => {
                self.remove_first_matching(|i| match i {
                    ResponseItem::CustomToolCallOutput {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });
            }
            ResponseItem::LocalShellCall {
                call_id: Some(call_id),
                ..
            } => {
                self.remove_first_matching(|i| match i {
                    ResponseItem::FunctionCallOutput {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });
            }
            ResponseItem::FunctionCallOutput { call_id, .. } => {
                self.remove_first_matching(|i| match i {
                    ResponseItem::FunctionCall {
                        call_id: existing, ..
                    } => existing == call_id,
                    ResponseItem::LocalShellCall {
                        call_id: Some(existing),
                        ..
                    } => existing == call_id,
                    _ => false,
                });
            }
            ResponseItem::CustomToolCallOutput { call_id, .. } => {
                self.remove_first_matching(|i| match i {
                    ResponseItem::CustomToolCall {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });
            }
            _ => {}
        }
    }

    /// Remove the first item matching the predicate.
    fn remove_first_matching<F>(&mut self, predicate: F)
    where
        F: FnMut(&ResponseItem) -> bool,
    {
        if let Some(pos) = self.items.iter().position(predicate) {
            self.items.remove(pos);
        }
    }

    pub(crate) fn update_token_info(
        &mut self,
        usage: &TokenUsage,
        model_context_window: Option<i64>,
    ) {
        self.token_info = TokenUsageInfo::new_or_append(
            &self.token_info,
            &Some(usage.clone()),
            model_context_window,
        );
    }
}

#[inline]
fn error_or_panic(message: String) {
    if cfg!(debug_assertions) || env!("CARGO_PKG_VERSION").contains("alpha") {
        panic!("{message}");
    } else {
        error!("{message}");
    }
}

/// Anything that is not a system message or "reasoning" message is considered
@@ -57,6 +363,11 @@ fn is_api_message(message: &ResponseItem) -> bool {
mod tests {
    use super::*;
    use codex_protocol::models::ContentItem;
    use codex_protocol::models::FunctionCallOutputPayload;
    use codex_protocol::models::LocalShellAction;
    use codex_protocol::models::LocalShellExecAction;
    use codex_protocol::models::LocalShellStatus;
    use pretty_assertions::assert_eq;

    fn assistant_msg(text: &str) -> ResponseItem {
        ResponseItem::Message {
@@ -68,6 +379,12 @@ mod tests {
        }
    }

    fn create_history_with_items(items: Vec<ResponseItem>) -> ConversationHistory {
        let mut h = ConversationHistory::new();
        h.record_items(items.iter());
        h
    }

    fn user_msg(text: &str) -> ResponseItem {
        ResponseItem::Message {
            id: None,
@@ -117,4 +434,452 @@ mod tests {
            ]
        );
    }

    #[test]
    fn remove_first_item_removes_matching_output_for_function_call() {
        let items = vec![
            ResponseItem::FunctionCall {
                id: None,
                name: "do_it".to_string(),
                arguments: "{}".to_string(),
                call_id: "call-1".to_string(),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "call-1".to_string(),
                output: FunctionCallOutputPayload {
                    content: "ok".to_string(),
                    success: None,
                },
            },
        ];
        let mut h = create_history_with_items(items);
        h.remove_first_item();
        assert_eq!(h.contents(), vec![]);
    }

    #[test]
    fn remove_first_item_removes_matching_call_for_output() {
        let items = vec![
            ResponseItem::FunctionCallOutput {
                call_id: "call-2".to_string(),
                output: FunctionCallOutputPayload {
                    content: "ok".to_string(),
                    success: None,
                },
            },
            ResponseItem::FunctionCall {
                id: None,
                name: "do_it".to_string(),
                arguments: "{}".to_string(),
                call_id: "call-2".to_string(),
            },
        ];
        let mut h = create_history_with_items(items);
        h.remove_first_item();
        assert_eq!(h.contents(), vec![]);
    }

    #[test]
    fn remove_first_item_handles_local_shell_pair() {
        let items = vec![
            ResponseItem::LocalShellCall {
                id: None,
                call_id: Some("call-3".to_string()),
                status: LocalShellStatus::Completed,
                action: LocalShellAction::Exec(LocalShellExecAction {
                    command: vec!["echo".to_string(), "hi".to_string()],
                    timeout_ms: None,
                    working_directory: None,
                    env: None,
                    user: None,
                }),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "call-3".to_string(),
                output: FunctionCallOutputPayload {
                    content: "ok".to_string(),
                    success: None,
                },
            },
        ];
        let mut h = create_history_with_items(items);
        h.remove_first_item();
        assert_eq!(h.contents(), vec![]);
    }

    #[test]
    fn remove_first_item_handles_custom_tool_pair() {
        let items = vec![
            ResponseItem::CustomToolCall {
                id: None,
                status: None,
                call_id: "tool-1".to_string(),
                name: "my_tool".to_string(),
                input: "{}".to_string(),
            },
            ResponseItem::CustomToolCallOutput {
                call_id: "tool-1".to_string(),
                output: "ok".to_string(),
            },
        ];
        let mut h = create_history_with_items(items);
        h.remove_first_item();
        assert_eq!(h.contents(), vec![]);
    }

    //TODO(aibrahim): run CI in release mode.
    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_adds_missing_output_for_function_call() {
        let items = vec![ResponseItem::FunctionCall {
            id: None,
            name: "do_it".to_string(),
            arguments: "{}".to_string(),
            call_id: "call-x".to_string(),
        }];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(
            h.contents(),
            vec![
                ResponseItem::FunctionCall {
                    id: None,
                    name: "do_it".to_string(),
                    arguments: "{}".to_string(),
                    call_id: "call-x".to_string(),
                },
                ResponseItem::FunctionCallOutput {
                    call_id: "call-x".to_string(),
                    output: FunctionCallOutputPayload {
                        content: "aborted".to_string(),
                        success: None,
                    },
                },
            ]
        );
    }

    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_adds_missing_output_for_custom_tool_call() {
        let items = vec![ResponseItem::CustomToolCall {
            id: None,
            status: None,
            call_id: "tool-x".to_string(),
            name: "custom".to_string(),
            input: "{}".to_string(),
        }];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(
            h.contents(),
            vec![
                ResponseItem::CustomToolCall {
                    id: None,
                    status: None,
                    call_id: "tool-x".to_string(),
                    name: "custom".to_string(),
                    input: "{}".to_string(),
                },
                ResponseItem::CustomToolCallOutput {
                    call_id: "tool-x".to_string(),
                    output: "aborted".to_string(),
                },
            ]
        );
    }

    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_adds_missing_output_for_local_shell_call_with_id() {
        let items = vec![ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("shell-1".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string(), "hi".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        }];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(
            h.contents(),
            vec![
                ResponseItem::LocalShellCall {
                    id: None,
                    call_id: Some("shell-1".to_string()),
                    status: LocalShellStatus::Completed,
                    action: LocalShellAction::Exec(LocalShellExecAction {
                        command: vec!["echo".to_string(), "hi".to_string()],
                        timeout_ms: None,
                        working_directory: None,
                        env: None,
                        user: None,
                    }),
                },
                ResponseItem::FunctionCallOutput {
                    call_id: "shell-1".to_string(),
                    output: FunctionCallOutputPayload {
                        content: "aborted".to_string(),
                        success: None,
                    },
                },
            ]
        );
    }

    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_removes_orphan_function_call_output() {
        let items = vec![ResponseItem::FunctionCallOutput {
            call_id: "orphan-1".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                success: None,
            },
        }];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(h.contents(), vec![]);
    }

    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_removes_orphan_custom_tool_call_output() {
        let items = vec![ResponseItem::CustomToolCallOutput {
            call_id: "orphan-2".to_string(),
            output: "ok".to_string(),
        }];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(h.contents(), vec![]);
    }

    #[cfg(not(debug_assertions))]
    #[test]
    fn normalize_mixed_inserts_and_removals() {
        let items = vec![
            // Will get an inserted output
            ResponseItem::FunctionCall {
                id: None,
                name: "f1".to_string(),
                arguments: "{}".to_string(),
                call_id: "c1".to_string(),
            },
            // Orphan output that should be removed
            ResponseItem::FunctionCallOutput {
                call_id: "c2".to_string(),
                output: FunctionCallOutputPayload {
                    content: "ok".to_string(),
                    success: None,
                },
            },
            // Will get an inserted custom tool output
            ResponseItem::CustomToolCall {
                id: None,
                status: None,
                call_id: "t1".to_string(),
                name: "tool".to_string(),
                input: "{}".to_string(),
            },
            // Local shell call also gets an inserted function call output
            ResponseItem::LocalShellCall {
                id: None,
                call_id: Some("s1".to_string()),
                status: LocalShellStatus::Completed,
                action: LocalShellAction::Exec(LocalShellExecAction {
                    command: vec!["echo".to_string()],
                    timeout_ms: None,
                    working_directory: None,
                    env: None,
                    user: None,
                }),
            },
        ];
        let mut h = create_history_with_items(items);

        h.normalize_history();

        assert_eq!(
            h.contents(),
            vec![
                ResponseItem::FunctionCall {
                    id: None,
                    name: "f1".to_string(),
                    arguments: "{}".to_string(),
                    call_id: "c1".to_string(),
                },
                ResponseItem::FunctionCallOutput {
                    call_id: "c1".to_string(),
                    output: FunctionCallOutputPayload {
                        content: "aborted".to_string(),
                        success: None,
                    },
                },
                ResponseItem::CustomToolCall {
                    id: None,
                    status: None,
                    call_id: "t1".to_string(),
                    name: "tool".to_string(),
                    input: "{}".to_string(),
                },
                ResponseItem::CustomToolCallOutput {
                    call_id: "t1".to_string(),
                    output: "aborted".to_string(),
                },
                ResponseItem::LocalShellCall {
                    id: None,
                    call_id: Some("s1".to_string()),
                    status: LocalShellStatus::Completed,
                    action: LocalShellAction::Exec(LocalShellExecAction {
                        command: vec!["echo".to_string()],
                        timeout_ms: None,
                        working_directory: None,
                        env: None,
                        user: None,
                    }),
                },
                ResponseItem::FunctionCallOutput {
                    call_id: "s1".to_string(),
                    output: FunctionCallOutputPayload {
                        content: "aborted".to_string(),
                        success: None,
                    },
                },
            ]
        );
    }

    // In debug builds we panic on normalization errors instead of silently fixing them.
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_adds_missing_output_for_function_call_panics_in_debug() {
        let items = vec![ResponseItem::FunctionCall {
            id: None,
            name: "do_it".to_string(),
            arguments: "{}".to_string(),
            call_id: "call-x".to_string(),
        }];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_adds_missing_output_for_custom_tool_call_panics_in_debug() {
        let items = vec![ResponseItem::CustomToolCall {
            id: None,
            status: None,
            call_id: "tool-x".to_string(),
            name: "custom".to_string(),
            input: "{}".to_string(),
        }];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_adds_missing_output_for_local_shell_call_with_id_panics_in_debug() {
        let items = vec![ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("shell-1".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string(), "hi".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        }];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_removes_orphan_function_call_output_panics_in_debug() {
        let items = vec![ResponseItem::FunctionCallOutput {
            call_id: "orphan-1".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                success: None,
            },
        }];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_removes_orphan_custom_tool_call_output_panics_in_debug() {
        let items = vec![ResponseItem::CustomToolCallOutput {
            call_id: "orphan-2".to_string(),
            output: "ok".to_string(),
        }];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn normalize_mixed_inserts_and_removals_panics_in_debug() {
        let items = vec![
            ResponseItem::FunctionCall {
                id: None,
                name: "f1".to_string(),
                arguments: "{}".to_string(),
                call_id: "c1".to_string(),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "c2".to_string(),
                output: FunctionCallOutputPayload {
                    content: "ok".to_string(),
                    success: None,
                },
            },
            ResponseItem::CustomToolCall {
                id: None,
                status: None,
                call_id: "t1".to_string(),
                name: "tool".to_string(),
                input: "{}".to_string(),
            },
            ResponseItem::LocalShellCall {
                id: None,
                call_id: Some("s1".to_string()),
                status: LocalShellStatus::Completed,
                action: LocalShellAction::Exec(LocalShellExecAction {
                    command: vec!["echo".to_string()],
                    timeout_ms: None,
                    working_directory: None,
                    env: None,
                    user: None,
                }),
            },
        ];
        let mut h = create_history_with_items(items);
        h.normalize_history();
    }
}

@@ -3,8 +3,6 @@ use crate::CodexAuth;
use crate::codex::Codex;
use crate::codex::CodexSpawnOk;
use crate::codex::INITIAL_SUBMIT_ID;
use crate::codex::compact::content_items_to_text;
use crate::codex::compact::is_session_prefix_message;
use crate::codex_conversation::CodexConversation;
use crate::config::Config;
use crate::error::CodexErr;
@@ -14,6 +12,7 @@ use crate::protocol::EventMsg;
use crate::protocol::SessionConfiguredEvent;
use crate::rollout::RolloutRecorder;
use codex_protocol::ConversationId;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::RolloutItem;
@@ -182,9 +181,11 @@ fn truncate_before_nth_user_message(history: InitialHistory, n: usize) -> Initia
    // Find indices of user message inputs in rollout order.
    let mut user_positions: Vec<usize> = Vec::new();
    for (idx, item) in items.iter().enumerate() {
        if let RolloutItem::ResponseItem(ResponseItem::Message { role, content, .. }) = item
            && role == "user"
            && content_items_to_text(content).is_some_and(|text| !is_session_prefix_message(&text))
        if let RolloutItem::ResponseItem(item @ ResponseItem::Message { .. }) = item
            && matches!(
                crate::event_mapping::parse_turn_item(item),
                Some(TurnItem::UserMessage(_))
            )
        {
            user_positions.push(idx);
        }

@@ -1,5 +1,13 @@
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
use http::Error as HttpError;
use reqwest::IntoUrl;
use reqwest::Method;
use reqwest::Response;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use serde::Serialize;
use std::collections::HashMap;
use std::fmt::Display;
use std::sync::LazyLock;
use std::sync::Mutex;
use std::sync::OnceLock;
@@ -22,6 +30,130 @@ use std::sync::OnceLock;
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));
pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";

#[derive(Clone, Debug)]
pub struct CodexHttpClient {
    inner: reqwest::Client,
}

impl CodexHttpClient {
    fn new(inner: reqwest::Client) -> Self {
        Self { inner }
    }

    pub fn get<U>(&self, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        self.request(Method::GET, url)
    }

    pub fn post<U>(&self, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        self.request(Method::POST, url)
    }

    pub fn request<U>(&self, method: Method, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        let url_str = url.as_str().to_string();
        CodexRequestBuilder::new(self.inner.request(method.clone(), url), method, url_str)
    }
}

#[must_use = "requests are not sent unless `send` is awaited"]
#[derive(Debug)]
pub struct CodexRequestBuilder {
    builder: reqwest::RequestBuilder,
    method: Method,
    url: String,
}

impl CodexRequestBuilder {
    fn new(builder: reqwest::RequestBuilder, method: Method, url: String) -> Self {
        Self {
            builder,
            method,
            url,
        }
    }

    fn map(self, f: impl FnOnce(reqwest::RequestBuilder) -> reqwest::RequestBuilder) -> Self {
        Self {
            builder: f(self.builder),
            method: self.method,
            url: self.url,
        }
    }

    pub fn header<K, V>(self, key: K, value: V) -> Self
    where
        HeaderName: TryFrom<K>,
        <HeaderName as TryFrom<K>>::Error: Into<HttpError>,
        HeaderValue: TryFrom<V>,
        <HeaderValue as TryFrom<V>>::Error: Into<HttpError>,
    {
        self.map(|builder| builder.header(key, value))
    }

    pub fn bearer_auth<T>(self, token: T) -> Self
    where
        T: Display,
    {
        self.map(|builder| builder.bearer_auth(token))
    }

    pub fn json<T>(self, value: &T) -> Self
    where
        T: ?Sized + Serialize,
    {
        self.map(|builder| builder.json(value))
    }

    pub async fn send(self) -> Result<Response, reqwest::Error> {
        match self.builder.send().await {
            Ok(response) => {
                let request_ids = Self::extract_request_ids(&response);
                tracing::debug!(
                    method = %self.method,
                    url = %self.url,
                    status = %response.status(),
                    request_ids = ?request_ids,
                    version = ?response.version(),
                    "Request completed"
                );

                Ok(response)
            }
            Err(error) => {
                let status = error.status();
                tracing::debug!(
                    method = %self.method,
                    url = %self.url,
                    status = status.map(|s| s.as_u16()),
                    error = %error,
                    "Request failed"
                );
                Err(error)
            }
        }
    }

    fn extract_request_ids(response: &Response) -> HashMap<String, String> {
        ["cf-ray", "x-request-id", "x-oai-request-id"]
            .iter()
            .filter_map(|&name| {
                let header_name = HeaderName::from_static(name);
                let value = response.headers().get(header_name)?;
                let value = value.to_str().ok()?.to_owned();
                Some((name.to_owned(), value))
            })
            .collect()
    }
}
#[derive(Debug, Clone)]
pub struct Originator {
    pub value: String,
@@ -124,8 +256,8 @@ fn sanitize_user_agent(candidate: String, fallback: &str) -> String {
    }
}

/// Create a reqwest client with default `originator` and `User-Agent` headers set.
pub fn create_client() -> reqwest::Client {
/// Create an HTTP client with default `originator` and `User-Agent` headers set.
pub fn create_client() -> CodexHttpClient {
    use reqwest::header::HeaderMap;

    let mut headers = HeaderMap::new();
@@ -140,7 +272,8 @@ pub fn create_client() -> reqwest::Client {
        builder = builder.no_proxy();
    }

    builder.build().unwrap_or_else(|_| reqwest::Client::new())
    let inner = builder.build().unwrap_or_else(|_| reqwest::Client::new());
    CodexHttpClient::new(inner)
}

fn is_sandboxed() -> bool {

@@ -93,6 +93,25 @@ impl EnvironmentContext {
            && self.network_access == *network_access
            && self.writable_roots == *writable_roots
    }

    pub fn diff(before: &TurnContext, after: &TurnContext) -> Self {
        let cwd = if before.cwd != after.cwd {
            Some(after.cwd.clone())
        } else {
            None
        };
        let approval_policy = if before.approval_policy != after.approval_policy {
            Some(after.approval_policy)
        } else {
            None
        };
        let sandbox_policy = if before.sandbox_policy != after.sandbox_policy {
            Some(after.sandbox_policy.clone())
        } else {
            None
        };
        EnvironmentContext::new(cwd, approval_policy, sandbox_policy, None)
    }
}

impl From<&TurnContext> for EnvironmentContext {

@@ -1,7 +1,11 @@
use crate::codex::ProcessedResponseItem;
use crate::exec::ExecToolCallOutput;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use crate::truncate::truncate_middle;
use chrono::DateTime;
use chrono::Utc;
use codex_async_utils::CancelErr;
use codex_protocol::ConversationId;
use codex_protocol::protocol::RateLimitSnapshot;
use reqwest::StatusCode;
@@ -50,6 +54,12 @@ pub enum SandboxErr {

#[derive(Error, Debug)]
pub enum CodexErr {
    // todo(aibrahim): get rid of this error carrying the dangling artifacts
    #[error("turn aborted. Something went wrong? Hit `/feedback` to report the issue.")]
    TurnAborted {
        dangling_artifacts: Vec<ProcessedResponseItem>,
    },

    /// Returned by ResponsesClient when the SSE stream disconnects or errors out **after** the HTTP
    /// handshake has succeeded but **before** it finished emitting `response.completed`.
    ///
@@ -81,7 +91,7 @@ pub enum CodexErr {

    /// Returned by run_command_stream when the user pressed Ctrl‑C (SIGINT). Session uses this to
    /// surface a polite FunctionCallOutput back to the model instead of crashing the CLI.
    #[error("interrupted (Ctrl-C)")]
    #[error("interrupted (Ctrl-C). Something went wrong? Hit `/feedback` to report the issue.")]
    Interrupted,

    /// Unexpected HTTP status code.
@@ -91,6 +101,12 @@ pub enum CodexErr {
    #[error("{0}")]
    UsageLimitReached(UsageLimitReachedError),

    #[error("{0}")]
    ResponseStreamFailed(ResponseStreamFailed),

    #[error("{0}")]
    ConnectionFailed(ConnectionFailedError),

    #[error(
        "To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing."
    )]
@@ -126,9 +142,6 @@ pub enum CodexErr {
    #[error(transparent)]
    Io(#[from] io::Error),

    #[error(transparent)]
    Reqwest(#[from] reqwest::Error),

    #[error(transparent)]
    Json(#[from] serde_json::Error),

@@ -147,6 +160,45 @@ pub enum CodexErr {
    EnvVar(EnvVarError),
}

impl From<CancelErr> for CodexErr {
    fn from(_: CancelErr) -> Self {
        CodexErr::TurnAborted {
            dangling_artifacts: Vec::new(),
        }
    }
}

#[derive(Debug)]
pub struct ConnectionFailedError {
    pub source: reqwest::Error,
}

impl std::fmt::Display for ConnectionFailedError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Connection failed: {}", self.source)
    }
}

#[derive(Debug)]
pub struct ResponseStreamFailed {
    pub source: reqwest::Error,
    pub request_id: Option<String>,
}

impl std::fmt::Display for ResponseStreamFailed {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Error while reading the server response: {}{}",
            self.source,
            self.request_id
                .as_ref()
                .map(|id| format!(", request id: {id}"))
                .unwrap_or_default()
        )
    }
}

#[derive(Debug)]
pub struct UnexpectedResponseError {
    pub status: StatusCode,
@@ -193,7 +245,7 @@ impl std::fmt::Display for RetryLimitReachedError {
#[derive(Debug)]
pub struct UsageLimitReachedError {
    pub(crate) plan_type: Option<PlanType>,
    pub(crate) resets_in_seconds: Option<u64>,
    pub(crate) resets_at: Option<DateTime<Utc>>,
    pub(crate) rate_limits: Option<RateLimitSnapshot>,
}

@@ -202,12 +254,12 @@ impl std::fmt::Display for UsageLimitReachedError {
        let message = match self.plan_type.as_ref() {
            Some(PlanType::Known(KnownPlan::Plus)) => format!(
                "You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing){}",
                retry_suffix_after_or(self.resets_in_seconds)
                retry_suffix_after_or(self.resets_at.as_ref())
            ),
            Some(PlanType::Known(KnownPlan::Team)) | Some(PlanType::Known(KnownPlan::Business)) => {
                format!(
                    "You've hit your usage limit. To get more access now, send a request to your admin{}",
                    retry_suffix_after_or(self.resets_in_seconds)
                    retry_suffix_after_or(self.resets_at.as_ref())
                )
            }
            Some(PlanType::Known(KnownPlan::Free)) => {
@@ -218,11 +270,11 @@ impl std::fmt::Display for UsageLimitReachedError {
            | Some(PlanType::Known(KnownPlan::Enterprise))
            | Some(PlanType::Known(KnownPlan::Edu)) => format!(
                "You've hit your usage limit.{}",
                retry_suffix(self.resets_in_seconds)
                retry_suffix(self.resets_at.as_ref())
            ),
            Some(PlanType::Unknown(_)) | None => format!(
                "You've hit your usage limit.{}",
                retry_suffix(self.resets_in_seconds)
                retry_suffix(self.resets_at.as_ref())
            ),
        };

@@ -230,8 +282,8 @@ impl std::fmt::Display for UsageLimitReachedError {
    }
}

fn retry_suffix(resets_in_seconds: Option<u64>) -> String {
    if let Some(secs) = resets_in_seconds {
fn retry_suffix(resets_at: Option<&DateTime<Utc>>) -> String {
    if let Some(secs) = remaining_seconds(resets_at) {
        let reset_duration = format_reset_duration(secs);
        format!(" Try again in {reset_duration}.")
    } else {
@@ -239,8 +291,8 @@ fn retry_suffix(resets_in_seconds: Option<u64>) -> String {
    }
}

fn retry_suffix_after_or(resets_in_seconds: Option<u64>) -> String {
    if let Some(secs) = resets_in_seconds {
fn retry_suffix_after_or(resets_at: Option<&DateTime<Utc>>) -> String {
    if let Some(secs) = remaining_seconds(resets_at) {
        let reset_duration = format_reset_duration(secs);
        format!(" or try again in {reset_duration}.")
    } else {
@@ -248,6 +300,29 @@ fn retry_suffix_after_or(resets_in_seconds: Option<u64>) -> String {
    }
}

fn remaining_seconds(resets_at: Option<&DateTime<Utc>>) -> Option<u64> {
    let resets_at = resets_at.cloned()?;
    let now = now_for_retry();
    let secs = resets_at.signed_duration_since(now).num_seconds();
    Some(if secs <= 0 { 0 } else { secs as u64 })
}

#[cfg(test)]
thread_local! {
    static NOW_OVERRIDE: std::cell::RefCell<Option<DateTime<Utc>>> =
        const { std::cell::RefCell::new(None) };
}

fn now_for_retry() -> DateTime<Utc> {
    #[cfg(test)]
    {
        if let Some(now) = NOW_OVERRIDE.with(|cell| *cell.borrow()) {
            return now;
        }
    }
    Utc::now()
}

fn format_reset_duration(total_secs: u64) -> String {
    let days = total_secs / 86_400;
    let hours = (total_secs % 86_400) / 3_600;
@@ -344,29 +419,50 @@ pub fn get_error_message_ui(e: &CodexErr) -> String {
mod tests {
    use super::*;
    use crate::exec::StreamOutput;
    use chrono::DateTime;
    use chrono::Duration as ChronoDuration;
    use chrono::TimeZone;
    use chrono::Utc;
    use codex_protocol::protocol::RateLimitWindow;
    use pretty_assertions::assert_eq;

    fn rate_limit_snapshot() -> RateLimitSnapshot {
        let primary_reset_at = Utc
            .with_ymd_and_hms(2024, 1, 1, 1, 0, 0)
            .unwrap()
            .timestamp();
        let secondary_reset_at = Utc
            .with_ymd_and_hms(2024, 1, 1, 2, 0, 0)
            .unwrap()
            .timestamp();
        RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 50.0,
                window_minutes: Some(60),
                resets_in_seconds: Some(3600),
                resets_at: Some(primary_reset_at),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 30.0,
                window_minutes: Some(120),
                resets_in_seconds: Some(7200),
                resets_at: Some(secondary_reset_at),
            }),
        }
    }

    fn with_now_override<T>(now: DateTime<Utc>, f: impl FnOnce() -> T) -> T {
        NOW_OVERRIDE.with(|cell| {
            *cell.borrow_mut() = Some(now);
            let result = f();
            *cell.borrow_mut() = None;
            result
        })
    }

    #[test]
    fn usage_limit_reached_error_formats_plus_plan() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Plus)),
            resets_in_seconds: None,
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
@@ -446,7 +542,7 @@ mod tests {
    fn usage_limit_reached_error_formats_free_plan() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Free)),
            resets_in_seconds: Some(3600),
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
@@ -459,7 +555,7 @@ mod tests {
    fn usage_limit_reached_error_formats_default_when_none() {
        let err = UsageLimitReachedError {
            plan_type: None,
            resets_in_seconds: None,
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
@@ -470,22 +566,26 @@ mod tests {

    #[test]
    fn usage_limit_reached_error_formats_team_plan() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Team)),
            resets_in_seconds: Some(3600),
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. To get more access now, send a request to your admin or try again in 1 hour."
        );
        let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let resets_at = base + ChronoDuration::hours(1);
        with_now_override(base, move || {
            let err = UsageLimitReachedError {
                plan_type: Some(PlanType::Known(KnownPlan::Team)),
                resets_at: Some(resets_at),
                rate_limits: Some(rate_limit_snapshot()),
            };
            assert_eq!(
                err.to_string(),
                "You've hit your usage limit. To get more access now, send a request to your admin or try again in 1 hour."
            );
        });
    }

    #[test]
    fn usage_limit_reached_error_formats_business_plan_without_reset() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Business)),
            resets_in_seconds: None,
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
@@ -498,7 +598,7 @@ mod tests {
    fn usage_limit_reached_error_formats_default_for_other_plans() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Pro)),
            resets_in_seconds: None,
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
@@ -509,53 +609,70 @@ mod tests {

    #[test]
    fn usage_limit_reached_includes_minutes_when_available() {
        let err = UsageLimitReachedError {
            plan_type: None,
            resets_in_seconds: Some(5 * 60),
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Try again in 5 minutes."
        );
        let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let resets_at = base + ChronoDuration::minutes(5);
        with_now_override(base, move || {
            let err = UsageLimitReachedError {
                plan_type: None,
                resets_at: Some(resets_at),
                rate_limits: Some(rate_limit_snapshot()),
            };
            assert_eq!(
                err.to_string(),
                "You've hit your usage limit. Try again in 5 minutes."
            );
        });
    }

    #[test]
    fn usage_limit_reached_includes_hours_and_minutes() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Plus)),
            resets_in_seconds: Some(3 * 3600 + 32 * 60),
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing) or try again in 3 hours 32 minutes."
        );
        let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let resets_at = base + ChronoDuration::hours(3) + ChronoDuration::minutes(32);
        with_now_override(base, move || {
            let err = UsageLimitReachedError {
                plan_type: Some(PlanType::Known(KnownPlan::Plus)),
                resets_at: Some(resets_at),
                rate_limits: Some(rate_limit_snapshot()),
            };
            assert_eq!(
                err.to_string(),
                "You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing) or try again in 3 hours 32 minutes."
            );
        });
    }

    #[test]
    fn usage_limit_reached_includes_days_hours_minutes() {
        let err = UsageLimitReachedError {
            plan_type: None,
            resets_in_seconds: Some(2 * 86_400 + 3 * 3600 + 5 * 60),
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Try again in 2 days 3 hours 5 minutes."
        );
        let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let resets_at =
            base + ChronoDuration::days(2) + ChronoDuration::hours(3) + ChronoDuration::minutes(5);
        with_now_override(base, move || {
            let err = UsageLimitReachedError {
                plan_type: None,
                resets_at: Some(resets_at),
                rate_limits: Some(rate_limit_snapshot()),
            };
            assert_eq!(
                err.to_string(),
                "You've hit your usage limit. Try again in 2 days 3 hours 5 minutes."
            );
        });
    }

    #[test]
    fn usage_limit_reached_less_than_minute() {
        let err = UsageLimitReachedError {
            plan_type: None,
            resets_in_seconds: Some(30),
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Try again in less than a minute."
        );
        let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let resets_at = base + ChronoDuration::seconds(30);
        with_now_override(base, move || {
            let err = UsageLimitReachedError {
                plan_type: None,
                resets_at: Some(resets_at),
                rate_limits: Some(rate_limit_snapshot()),
            };
            assert_eq!(
                err.to_string(),
                "You've hit your usage limit. Try again in less than a minute."
            );
        });
    }
}

@@ -1,139 +1,131 @@
use crate::protocol::AgentMessageEvent;
use crate::protocol::AgentReasoningEvent;
use crate::protocol::AgentReasoningRawContentEvent;
use crate::protocol::EventMsg;
use crate::protocol::InputMessageKind;
use crate::protocol::UserMessageEvent;
use crate::protocol::WebSearchEndEvent;
use codex_protocol::items::AgentMessageContent;
use codex_protocol::items::AgentMessageItem;
use codex_protocol::items::ReasoningItem;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::items::WebSearchItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ReasoningItemContent;
use codex_protocol::models::ReasoningItemReasoningSummary;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::WebSearchAction;
use codex_protocol::user_input::UserInput;
use tracing::warn;

/// Convert a `ResponseItem` into zero or more `EventMsg` values that the UI can render.
///
/// When `show_raw_agent_reasoning` is false, raw reasoning content events are omitted.
pub(crate) fn map_response_item_to_event_messages(
    item: &ResponseItem,
    show_raw_agent_reasoning: bool,
) -> Vec<EventMsg> {
fn is_session_prefix(text: &str) -> bool {
    let trimmed = text.trim_start();
    let lowered = trimmed.to_ascii_lowercase();
    lowered.starts_with("<environment_context>") || lowered.starts_with("<user_instructions>")
}

fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
    let mut content: Vec<UserInput> = Vec::new();

    for content_item in message.iter() {
        match content_item {
            ContentItem::InputText { text } => {
                if is_session_prefix(text) {
                    return None;
                }
                content.push(UserInput::Text { text: text.clone() });
            }
            ContentItem::InputImage { image_url } => {
                content.push(UserInput::Image {
                    image_url: image_url.clone(),
                });
            }
            ContentItem::OutputText { text } => {
                if is_session_prefix(text) {
                    return None;
                }
                warn!("Output text in user message: {}", text);
            }
        }
    }

    Some(UserMessageItem::new(&content))
}
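// Note: messages that start with the session-bootstrap prefixes
// (<environment_context>, <user_instructions>) are dropped entirely above
// rather than surfaced to the UI as user messages.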

fn parse_agent_message(message: &[ContentItem]) -> AgentMessageItem {
    let mut content: Vec<AgentMessageContent> = Vec::new();
    for content_item in message.iter() {
        match content_item {
            ContentItem::OutputText { text } => {
                content.push(AgentMessageContent::Text { text: text.clone() });
            }
            _ => {
                warn!(
                    "Unexpected content item in agent message: {:?}",
                    content_item
                );
            }
        }
    }
    AgentMessageItem::new(&content)
}

pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
    match item {
        ResponseItem::Message { role, content, .. } => {
            // Do not surface system messages as user events.
            if role == "system" {
                return Vec::new();
            }

            let mut events: Vec<EventMsg> = Vec::new();
            let mut message_parts: Vec<String> = Vec::new();
            let mut images: Vec<String> = Vec::new();
            let mut kind: Option<InputMessageKind> = None;

            for content_item in content.iter() {
                match content_item {
                    ContentItem::InputText { text } => {
                        if kind.is_none() {
                            let trimmed = text.trim_start();
                            kind = if trimmed.starts_with("<environment_context>") {
                                Some(InputMessageKind::EnvironmentContext)
                            } else if trimmed.starts_with("<user_instructions>") {
                                Some(InputMessageKind::UserInstructions)
                            } else {
                                Some(InputMessageKind::Plain)
                            };
                        }
                        message_parts.push(text.clone());
                    }
                    ContentItem::InputImage { image_url } => {
                        images.push(image_url.clone());
                    }
                    ContentItem::OutputText { text } => {
                        events.push(EventMsg::AgentMessage(AgentMessageEvent {
                            message: text.clone(),
                        }));
                    }
                }
            }

            if !message_parts.is_empty() || !images.is_empty() {
                let message = if message_parts.is_empty() {
                    String::new()
                } else {
                    message_parts.join("")
                };
                let images = if images.is_empty() {
                    None
                } else {
                    Some(images)
                };

                events.push(EventMsg::UserMessage(UserMessageEvent {
                    message,
                    kind,
                    images,
                }));
            }

            events
        }

        ResponseItem::Reasoning {
            summary, content, ..
        } => {
            let mut events = Vec::new();
            for ReasoningItemReasoningSummary::SummaryText { text } in summary {
                events.push(EventMsg::AgentReasoning(AgentReasoningEvent {
                    text: text.clone(),
                }));
            }
            if let Some(items) = content.as_ref().filter(|_| show_raw_agent_reasoning) {
                for c in items {
                    let text = match c {
                        ReasoningItemContent::ReasoningText { text }
                        | ReasoningItemContent::Text { text } => text,
                    };
                    events.push(EventMsg::AgentReasoningRawContent(
                        AgentReasoningRawContentEvent { text: text.clone() },
                    ));
                }
            }
            events
        }

        ResponseItem::WebSearchCall { id, action, .. } => match action {
            WebSearchAction::Search { query } => {
                let call_id = id.clone().unwrap_or_else(|| "".to_string());
                vec![EventMsg::WebSearchEnd(WebSearchEndEvent {
                    call_id,
                    query: query.clone(),
                })]
            }
            WebSearchAction::Other => Vec::new(),
        ResponseItem::Message { role, content, .. } => match role.as_str() {
            "user" => parse_user_message(content).map(TurnItem::UserMessage),
            "assistant" => Some(TurnItem::AgentMessage(parse_agent_message(content))),
            "system" => None,
            _ => None,
        },

        // Variants that require side effects are handled by higher layers and do not emit events here.
        ResponseItem::FunctionCall { .. }
        | ResponseItem::FunctionCallOutput { .. }
        | ResponseItem::LocalShellCall { .. }
        | ResponseItem::CustomToolCall { .. }
        | ResponseItem::CustomToolCallOutput { .. }
        | ResponseItem::Other => Vec::new(),
        ResponseItem::Reasoning {
            id,
            summary,
            content,
            ..
        } => {
            let summary_text = summary
                .iter()
                .map(|entry| match entry {
                    ReasoningItemReasoningSummary::SummaryText { text } => text.clone(),
                })
                .collect();
            let raw_content = content
                .clone()
                .unwrap_or_default()
                .into_iter()
                .map(|entry| match entry {
                    ReasoningItemContent::ReasoningText { text }
                    | ReasoningItemContent::Text { text } => text,
                })
                .collect();
            Some(TurnItem::Reasoning(ReasoningItem {
                id: id.clone(),
                summary_text,
                raw_content,
            }))
        }
        ResponseItem::WebSearchCall {
            id,
            action: WebSearchAction::Search { query },
            ..
        } => Some(TurnItem::WebSearch(WebSearchItem {
            id: id.clone().unwrap_or_default(),
            query: query.clone(),
        })),
        _ => None,
    }
}

#[cfg(test)]
mod tests {
    use super::map_response_item_to_event_messages;
    use crate::protocol::EventMsg;
    use crate::protocol::InputMessageKind;
    use assert_matches::assert_matches;
    use super::parse_turn_item;
    use codex_protocol::items::AgentMessageContent;
    use codex_protocol::items::TurnItem;
    use codex_protocol::models::ContentItem;
    use codex_protocol::models::ReasoningItemContent;
    use codex_protocol::models::ReasoningItemReasoningSummary;
    use codex_protocol::models::ResponseItem;
    use codex_protocol::models::WebSearchAction;
    use codex_protocol::user_input::UserInput;
    use pretty_assertions::assert_eq;

    #[test]
    fn maps_user_message_with_text_and_two_images() {
    fn parses_user_message_with_text_and_two_images() {
        let img1 = "https://example.com/one.png".to_string();
        let img2 = "https://example.com/two.jpg".to_string();

@@ -153,16 +145,128 @@ mod tests {
            ],
        };

        let events = map_response_item_to_event_messages(&item, false);
        assert_eq!(events.len(), 1, "expected a single user message event");
        let turn_item = parse_turn_item(&item).expect("expected user message turn item");

        match &events[0] {
            EventMsg::UserMessage(user) => {
                assert_eq!(user.message, "Hello world");
                assert_matches!(user.kind, Some(InputMessageKind::Plain));
                assert_eq!(user.images, Some(vec![img1, img2]));
        match turn_item {
            TurnItem::UserMessage(user) => {
                let expected_content = vec![
                    UserInput::Text {
                        text: "Hello world".to_string(),
                    },
                    UserInput::Image { image_url: img1 },
                    UserInput::Image { image_url: img2 },
                ];
                assert_eq!(user.content, expected_content);
            }
            other => panic!("expected UserMessage, got {other:?}"),
            other => panic!("expected TurnItem::UserMessage, got {other:?}"),
        }
    }

    #[test]
    fn parses_agent_message() {
        let item = ResponseItem::Message {
            id: Some("msg-1".to_string()),
            role: "assistant".to_string(),
            content: vec![ContentItem::OutputText {
                text: "Hello from Codex".to_string(),
            }],
        };

        let turn_item = parse_turn_item(&item).expect("expected agent message turn item");

        match turn_item {
            TurnItem::AgentMessage(message) => {
                let Some(AgentMessageContent::Text { text }) = message.content.first() else {
                    panic!("expected agent message text content");
                };
                assert_eq!(text, "Hello from Codex");
            }
            other => panic!("expected TurnItem::AgentMessage, got {other:?}"),
        }
    }

    #[test]
    fn parses_reasoning_summary_and_raw_content() {
        let item = ResponseItem::Reasoning {
            id: "reasoning_1".to_string(),
            summary: vec![
                ReasoningItemReasoningSummary::SummaryText {
                    text: "Step 1".to_string(),
                },
                ReasoningItemReasoningSummary::SummaryText {
                    text: "Step 2".to_string(),
                },
            ],
            content: Some(vec![ReasoningItemContent::ReasoningText {
                text: "raw details".to_string(),
            }]),
            encrypted_content: None,
        };

        let turn_item = parse_turn_item(&item).expect("expected reasoning turn item");

        match turn_item {
            TurnItem::Reasoning(reasoning) => {
                assert_eq!(
                    reasoning.summary_text,
                    vec!["Step 1".to_string(), "Step 2".to_string()]
                );
                assert_eq!(reasoning.raw_content, vec!["raw details".to_string()]);
            }
            other => panic!("expected TurnItem::Reasoning, got {other:?}"),
        }
    }

    #[test]
    fn parses_reasoning_including_raw_content() {
        let item = ResponseItem::Reasoning {
            id: "reasoning_2".to_string(),
            summary: vec![ReasoningItemReasoningSummary::SummaryText {
                text: "Summarized step".to_string(),
            }],
            content: Some(vec![
                ReasoningItemContent::ReasoningText {
                    text: "raw step".to_string(),
                },
                ReasoningItemContent::Text {
                    text: "final thought".to_string(),
                },
            ]),
            encrypted_content: None,
        };

        let turn_item = parse_turn_item(&item).expect("expected reasoning turn item");

        match turn_item {
            TurnItem::Reasoning(reasoning) => {
                assert_eq!(reasoning.summary_text, vec!["Summarized step".to_string()]);
                assert_eq!(
                    reasoning.raw_content,
                    vec!["raw step".to_string(), "final thought".to_string()]
                );
            }
            other => panic!("expected TurnItem::Reasoning, got {other:?}"),
        }
    }

    #[test]
    fn parses_web_search_call() {
        let item = ResponseItem::WebSearchCall {
            id: Some("ws_1".to_string()),
            status: Some("completed".to_string()),
            action: WebSearchAction::Search {
                query: "weather".to_string(),
            },
        };

        let turn_item = parse_turn_item(&item).expect("expected web search turn item");

        match turn_item {
            TurnItem::WebSearch(search) => {
                assert_eq!(search.id, "ws_1");
                assert_eq!(search.query, "weather");
            }
            other => panic!("expected TurnItem::WebSearch, got {other:?}"),
        }
    }
}

@@ -18,13 +18,14 @@ use tokio::process::Child;
use crate::error::CodexErr;
use crate::error::Result;
use crate::error::SandboxErr;
use crate::landlock::spawn_command_under_linux_sandbox;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use crate::protocol::ExecCommandOutputDeltaEvent;
use crate::protocol::ExecOutputStream;
use crate::protocol::SandboxPolicy;
use crate::seatbelt::spawn_command_under_seatbelt;
use crate::sandboxing::CommandSpec;
use crate::sandboxing::ExecEnv;
use crate::sandboxing::SandboxManager;
use crate::spawn::StdioPolicy;
use crate::spawn::spawn_child_async;

@@ -53,6 +54,7 @@ pub struct ExecParams {
    pub env: HashMap<String, String>,
    pub with_escalated_permissions: Option<bool>,
    pub justification: Option<String>,
    pub arg0: Option<String>,
}

impl ExecParams {
@@ -87,57 +89,85 @@ pub async fn process_exec_tool_call(
    codex_linux_sandbox_exe: &Option<PathBuf>,
    stdout_stream: Option<StdoutStream>,
) -> Result<ExecToolCallOutput> {
    let start = Instant::now();
    let ExecParams {
        command,
        cwd,
        timeout_ms,
        env,
        with_escalated_permissions,
        justification,
        arg0: _,
    } = params;

    let timeout_duration = params.timeout_duration();
    let (program, args) = command.split_first().ok_or_else(|| {
        CodexErr::Io(io::Error::new(
            io::ErrorKind::InvalidInput,
            "command args are empty",
        ))
    })?;

    let raw_output_result: std::result::Result<RawExecToolCallOutput, CodexErr> = match sandbox_type
    {
        SandboxType::None => exec(params, sandbox_policy, stdout_stream.clone()).await,
        SandboxType::MacosSeatbelt => {
            let ExecParams {
                command,
                cwd: command_cwd,
                env,
                ..
            } = params;
            let child = spawn_command_under_seatbelt(
                command,
                command_cwd,
                sandbox_policy,
                sandbox_cwd,
                StdioPolicy::RedirectForShellTool,
                env,
            )
            .await?;
            consume_truncated_output(child, timeout_duration, stdout_stream.clone()).await
        }
        SandboxType::LinuxSeccomp => {
            let ExecParams {
                command,
                cwd: command_cwd,
                env,
                ..
            } = params;

            let codex_linux_sandbox_exe = codex_linux_sandbox_exe
                .as_ref()
                .ok_or(CodexErr::LandlockSandboxExecutableNotProvided)?;
            let child = spawn_command_under_linux_sandbox(
                codex_linux_sandbox_exe,
                command,
                command_cwd,
                sandbox_policy,
                sandbox_cwd,
                StdioPolicy::RedirectForShellTool,
                env,
            )
            .await?;

            consume_truncated_output(child, timeout_duration, stdout_stream).await
        }
    let spec = CommandSpec {
        program: program.clone(),
        args: args.to_vec(),
        cwd,
        env,
        timeout_ms,
        with_escalated_permissions,
        justification,
    };

    let manager = SandboxManager::new();
    let exec_env = manager
        .transform(
            &spec,
            sandbox_policy,
            sandbox_type,
            sandbox_cwd,
            codex_linux_sandbox_exe.as_ref(),
        )
        .map_err(CodexErr::from)?;

    // Route through the sandboxing module for a single, unified execution path.
    crate::sandboxing::execute_env(&exec_env, sandbox_policy, stdout_stream).await
}

pub(crate) async fn execute_exec_env(
    env: ExecEnv,
    sandbox_policy: &SandboxPolicy,
    stdout_stream: Option<StdoutStream>,
) -> Result<ExecToolCallOutput> {
    let ExecEnv {
        command,
        cwd,
        env,
        timeout_ms,
        sandbox,
        with_escalated_permissions,
        justification,
        arg0,
    } = env;

    let params = ExecParams {
        command,
        cwd,
        timeout_ms,
        env,
        with_escalated_permissions,
        justification,
        arg0,
    };

    let start = Instant::now();
    let raw_output_result = exec(params, sandbox_policy, stdout_stream).await;
    let duration = start.elapsed();
    finalize_exec_result(raw_output_result, sandbox, duration)
}

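/// Shared tail of the execution paths above: folds the raw exec result and
/// the measured duration into the final `ExecToolCallOutput`.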
fn finalize_exec_result(
    raw_output_result: std::result::Result<RawExecToolCallOutput, CodexErr>,
    sandbox_type: SandboxType,
    duration: Duration,
) -> Result<ExecToolCallOutput> {
    match raw_output_result {
        Ok(raw_output) => {
            #[allow(unused_mut)]
@@ -192,12 +222,30 @@ pub async fn process_exec_tool_call(
    }
}

pub(crate) mod errors {
    use super::CodexErr;
    use crate::sandboxing::SandboxTransformError;

    impl From<SandboxTransformError> for CodexErr {
        fn from(err: SandboxTransformError) -> Self {
            match err {
                SandboxTransformError::MissingLinuxSandboxExecutable => {
                    CodexErr::LandlockSandboxExecutableNotProvided
                }
            }
        }
    }
}

/// We don't have a fully deterministic way to tell if our command failed
/// because of the sandbox - a command in the user's zshrc file might hit an
/// error, but the command itself might fail or succeed for other reasons.
/// For now, we conservatively check for well known command failure exit codes and
/// also look for common sandbox denial keywords in the command output.
fn is_likely_sandbox_denied(sandbox_type: SandboxType, exec_output: &ExecToolCallOutput) -> bool {
pub(crate) fn is_likely_sandbox_denied(
    sandbox_type: SandboxType,
    exec_output: &ExecToolCallOutput,
) -> bool {
    if sandbox_type == SandboxType::None || exec_output.exit_code == 0 {
        return false;
    }
@@ -206,21 +254,17 @@ fn is_likely_sandbox_denied(sandbox_type: SandboxType, exec_output: &ExecToolCal
    // 2: misuse of shell builtins
    // 126: permission denied
    // 127: command not found
    const QUICK_REJECT_EXIT_CODES: [i32; 3] = [2, 126, 127];
    if QUICK_REJECT_EXIT_CODES.contains(&exec_output.exit_code) {
        return false;
    }

    const SANDBOX_DENIED_KEYWORDS: [&str; 6] = [
    const SANDBOX_DENIED_KEYWORDS: [&str; 7] = [
        "operation not permitted",
        "permission denied",
        "read-only file system",
        "seccomp",
        "sandbox",
        "landlock",
        "failed to write file",
    ];

    if [
    let has_sandbox_keyword = [
        &exec_output.stderr.text,
        &exec_output.stdout.text,
        &exec_output.aggregated_output.text,
@@ -231,10 +275,17 @@ fn is_likely_sandbox_denied(sandbox_type: SandboxType, exec_output: &ExecToolCal
        SANDBOX_DENIED_KEYWORDS
            .iter()
            .any(|needle| lower.contains(needle))
    }) {
    });

    if has_sandbox_keyword {
        return true;
    }

    const QUICK_REJECT_EXIT_CODES: [i32; 3] = [2, 126, 127];
    if QUICK_REJECT_EXIT_CODES.contains(&exec_output.exit_code) {
        return false;
    }

    #[cfg(unix)]
    {
        const SIGSYS_CODE: i32 = libc::SIGSYS;
@@ -248,11 +299,12 @@ fn is_likely_sandbox_denied(sandbox_type: SandboxType, exec_output: &ExecToolCal
    false
}
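// Ordering note: after the reordering above, a denial keyword match wins even
// when the exit code is in the quick-reject list, e.g. exit code 126 plus
// "permission denied" in the output is still reported as a likely sandbox
// denial, while a bare 127 with no keyword is not.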

#[derive(Debug)]
pub struct StreamOutput<T> {
#[derive(Debug, Clone)]
pub struct StreamOutput<T: Clone> {
    pub text: T,
    pub truncated_after_lines: Option<u32>,
}

#[derive(Debug)]
struct RawExecToolCallOutput {
    pub exit_status: ExitStatus,
@@ -285,7 +337,7 @@ fn append_all(dst: &mut Vec<u8>, src: &[u8]) {
    dst.extend_from_slice(src);
}

#[derive(Debug)]
#[derive(Clone, Debug)]
pub struct ExecToolCallOutput {
    pub exit_code: i32,
    pub stdout: StreamOutput<String>,
@@ -302,7 +354,11 @@ async fn exec(
) -> Result<RawExecToolCallOutput> {
    let timeout = params.timeout_duration();
    let ExecParams {
        command, cwd, env, ..
        command,
        cwd,
        env,
        arg0,
        ..
    } = params;

    let (program, args) = command.split_first().ok_or_else(|| {
@@ -311,11 +367,11 @@ async fn exec(
            "command args are empty",
        ))
    })?;
    let arg0 = None;
    let arg0_ref = arg0.as_deref();
    let child = spawn_child_async(
        PathBuf::from(program),
        args.into(),
        arg0,
        arg0_ref,
        cwd,
        sandbox_policy,
        StdioPolicy::RedirectForShellTool,

@@ -1,57 +0,0 @@
use serde::Deserialize;
use serde::Serialize;

use crate::exec_command::session_id::SessionId;

#[derive(Debug, Clone, Deserialize)]
pub struct ExecCommandParams {
    pub(crate) cmd: String,

    #[serde(default = "default_yield_time")]
    pub(crate) yield_time_ms: u64,

    #[serde(default = "max_output_tokens")]
    pub(crate) max_output_tokens: u64,

    #[serde(default = "default_shell")]
    pub(crate) shell: String,

    #[serde(default = "default_login")]
    pub(crate) login: bool,
}

fn default_yield_time() -> u64 {
    10_000
}

fn max_output_tokens() -> u64 {
    10_000
}

fn default_login() -> bool {
    true
}

fn default_shell() -> String {
    "/bin/bash".to_string()
}
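
// Illustrative sketch, not part of the original file (assumes `serde_json` is
// available as a dev-dependency): only `cmd` is required, everything else
// falls back to the `#[serde(default = ...)]` helpers above.
#[cfg(test)]
mod exec_command_params_default_sketch {
    use super::ExecCommandParams;

    #[test]
    fn omitted_fields_use_defaults() {
        let params: ExecCommandParams =
            serde_json::from_str(r#"{"cmd":"echo hi"}"#).expect("params should deserialize");
        assert_eq!(params.yield_time_ms, 10_000);
        assert_eq!(params.max_output_tokens, 10_000);
        assert_eq!(params.shell, "/bin/bash");
        assert!(params.login);
    }
}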

#[derive(Debug, Deserialize, Serialize)]
pub struct WriteStdinParams {
    pub(crate) session_id: SessionId,
    pub(crate) chars: String,

    #[serde(default = "write_stdin_default_yield_time_ms")]
    pub(crate) yield_time_ms: u64,

    #[serde(default = "write_stdin_default_max_output_tokens")]
    pub(crate) max_output_tokens: u64,
}

fn write_stdin_default_yield_time_ms() -> u64 {
    250
}

fn write_stdin_default_max_output_tokens() -> u64 {
    10_000
}
@@ -1,96 +0,0 @@
use std::sync::Mutex as StdMutex;

use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::task::JoinHandle;

#[derive(Debug)]
pub(crate) struct ExecCommandSession {
    /// Queue for writing bytes to the process stdin (PTY master write side).
    writer_tx: mpsc::Sender<Vec<u8>>,
    /// Broadcast stream of output chunks read from the PTY. New subscribers
    /// receive only chunks emitted after they subscribe.
    output_tx: broadcast::Sender<Vec<u8>>,

    /// Child killer handle for termination on drop (can signal independently
    /// of a thread blocked in `.wait()`).
    killer: StdMutex<Option<Box<dyn portable_pty::ChildKiller + Send + Sync>>>,

    /// JoinHandle for the blocking PTY reader task.
    reader_handle: StdMutex<Option<JoinHandle<()>>>,

    /// JoinHandle for the stdin writer task.
    writer_handle: StdMutex<Option<JoinHandle<()>>>,

    /// JoinHandle for the child wait task.
    wait_handle: StdMutex<Option<JoinHandle<()>>>,

    /// Tracks whether the underlying process has exited.
    exit_status: std::sync::Arc<std::sync::atomic::AtomicBool>,
}

impl ExecCommandSession {
    pub(crate) fn new(
        writer_tx: mpsc::Sender<Vec<u8>>,
        output_tx: broadcast::Sender<Vec<u8>>,
        killer: Box<dyn portable_pty::ChildKiller + Send + Sync>,
        reader_handle: JoinHandle<()>,
        writer_handle: JoinHandle<()>,
        wait_handle: JoinHandle<()>,
        exit_status: std::sync::Arc<std::sync::atomic::AtomicBool>,
    ) -> (Self, broadcast::Receiver<Vec<u8>>) {
        let initial_output_rx = output_tx.subscribe();
        (
            Self {
                writer_tx,
                output_tx,
                killer: StdMutex::new(Some(killer)),
                reader_handle: StdMutex::new(Some(reader_handle)),
                writer_handle: StdMutex::new(Some(writer_handle)),
                wait_handle: StdMutex::new(Some(wait_handle)),
                exit_status,
            },
            initial_output_rx,
        )
    }
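    // Note: `new` subscribes to `output_tx` before returning, so the initial
    // receiver handed back to the caller observes every chunk emitted after
    // construction; nothing is lost between spawn and the first read.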

    pub(crate) fn writer_sender(&self) -> mpsc::Sender<Vec<u8>> {
        self.writer_tx.clone()
    }

    pub(crate) fn output_receiver(&self) -> broadcast::Receiver<Vec<u8>> {
        self.output_tx.subscribe()
    }

    pub(crate) fn has_exited(&self) -> bool {
        self.exit_status.load(std::sync::atomic::Ordering::SeqCst)
    }
}

impl Drop for ExecCommandSession {
    fn drop(&mut self) {
        // Best-effort: terminate child first so blocking tasks can complete.
        if let Ok(mut killer_opt) = self.killer.lock()
            && let Some(mut killer) = killer_opt.take()
        {
            let _ = killer.kill();
        }

        // Abort background tasks; they may already have exited after kill.
        if let Ok(mut h) = self.reader_handle.lock()
            && let Some(handle) = h.take()
        {
            handle.abort();
        }
        if let Ok(mut h) = self.writer_handle.lock()
            && let Some(handle) = h.take()
        {
            handle.abort();
        }
        if let Ok(mut h) = self.wait_handle.lock()
            && let Some(handle) = h.take()
        {
            handle.abort();
        }
    }
}
@@ -1,14 +0,0 @@
mod exec_command_params;
mod exec_command_session;
mod responses_api;
mod session_id;
mod session_manager;

pub use exec_command_params::ExecCommandParams;
pub use exec_command_params::WriteStdinParams;
pub(crate) use exec_command_session::ExecCommandSession;
pub use responses_api::EXEC_COMMAND_TOOL_NAME;
pub use responses_api::WRITE_STDIN_TOOL_NAME;
pub use responses_api::create_exec_command_tool_for_responses_api;
pub use responses_api::create_write_stdin_tool_for_responses_api;
pub use session_manager::SessionManager as ExecSessionManager;
@@ -1,98 +0,0 @@
use std::collections::BTreeMap;

use crate::client_common::tools::ResponsesApiTool;
use crate::openai_tools::JsonSchema;

pub const EXEC_COMMAND_TOOL_NAME: &str = "exec_command";
pub const WRITE_STDIN_TOOL_NAME: &str = "write_stdin";

pub fn create_exec_command_tool_for_responses_api() -> ResponsesApiTool {
    let mut properties = BTreeMap::<String, JsonSchema>::new();
    properties.insert(
        "cmd".to_string(),
        JsonSchema::String {
            description: Some("The shell command to execute.".to_string()),
        },
    );
    properties.insert(
        "yield_time_ms".to_string(),
        JsonSchema::Number {
            description: Some("The maximum time in milliseconds to wait for output.".to_string()),
        },
    );
    properties.insert(
        "max_output_tokens".to_string(),
        JsonSchema::Number {
            description: Some("The maximum number of tokens to output.".to_string()),
        },
    );
    properties.insert(
        "shell".to_string(),
        JsonSchema::String {
            description: Some("The shell to use. Defaults to \"/bin/bash\".".to_string()),
        },
    );
    properties.insert(
        "login".to_string(),
        JsonSchema::Boolean {
            description: Some(
                "Whether to run the command as a login shell. Defaults to true.".to_string(),
            ),
        },
    );

    ResponsesApiTool {
        name: EXEC_COMMAND_TOOL_NAME.to_owned(),
        description: r#"Execute shell commands on the local machine with streaming output."#
            .to_string(),
        strict: false,
        parameters: JsonSchema::Object {
            properties,
            required: Some(vec!["cmd".to_string()]),
            additional_properties: Some(false.into()),
        },
    }
}
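
// Only "cmd" is marked required above; the remaining parameters mirror the
// serde defaults declared in exec_command_params.rs.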

pub fn create_write_stdin_tool_for_responses_api() -> ResponsesApiTool {
    let mut properties = BTreeMap::<String, JsonSchema>::new();
    properties.insert(
        "session_id".to_string(),
        JsonSchema::Number {
            description: Some("The ID of the exec_command session.".to_string()),
        },
    );
    properties.insert(
        "chars".to_string(),
        JsonSchema::String {
            description: Some("The characters to write to stdin.".to_string()),
        },
    );
    properties.insert(
        "yield_time_ms".to_string(),
        JsonSchema::Number {
            description: Some(
                "The maximum time in milliseconds to wait for output after writing.".to_string(),
            ),
        },
    );
    properties.insert(
        "max_output_tokens".to_string(),
        JsonSchema::Number {
            description: Some("The maximum number of tokens to output.".to_string()),
        },
    );

    ResponsesApiTool {
        name: WRITE_STDIN_TOOL_NAME.to_owned(),
        description: r#"Write characters to an exec session's stdin. Returns all stdout+stderr received within yield_time_ms.
Can write control characters (\u0003 for Ctrl-C), or an empty string to just poll stdout+stderr."#
            .to_string(),
        strict: false,
        parameters: JsonSchema::Object {
            properties,
            required: Some(vec!["session_id".to_string(), "chars".to_string()]),
            additional_properties: Some(false.into()),
        },
    }
}
@@ -1,5 +0,0 @@
use serde::Deserialize;
use serde::Serialize;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub(crate) struct SessionId(pub u32);
@@ -1,506 +0,0 @@
use std::collections::HashMap;
use std::io::ErrorKind;
use std::io::Read;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicU32;

use portable_pty::CommandBuilder;
use portable_pty::PtySize;
use portable_pty::native_pty_system;
use tokio::sync::Mutex;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tokio::time::Duration;
use tokio::time::Instant;
use tokio::time::timeout;

use crate::exec_command::exec_command_params::ExecCommandParams;
use crate::exec_command::exec_command_params::WriteStdinParams;
use crate::exec_command::exec_command_session::ExecCommandSession;
use crate::exec_command::session_id::SessionId;
use crate::truncate::truncate_middle;

#[derive(Debug, Default)]
pub struct SessionManager {
    next_session_id: AtomicU32,
    sessions: Mutex<HashMap<SessionId, ExecCommandSession>>,
}

#[derive(Debug)]
pub struct ExecCommandOutput {
    wall_time: Duration,
    exit_status: ExitStatus,
    original_token_count: Option<u64>,
    output: String,
}

impl ExecCommandOutput {
    pub(crate) fn to_text_output(&self) -> String {
        let wall_time_secs = self.wall_time.as_secs_f32();
        let termination_status = match self.exit_status {
            ExitStatus::Exited(code) => format!("Process exited with code {code}"),
            ExitStatus::Ongoing(session_id) => {
                format!("Process running with session ID {}", session_id.0)
            }
        };
        let truncation_status = match self.original_token_count {
            Some(tokens) => {
                format!("\nWarning: truncated output (original token count: {tokens})")
            }
            None => "".to_string(),
        };
        format!(
            r#"Wall time: {wall_time_secs:.3} seconds
{termination_status}{truncation_status}
Output:
{output}"#,
            output = self.output
        )
    }
}

#[derive(Debug)]
pub enum ExitStatus {
    Exited(i32),
    Ongoing(SessionId),
}

impl SessionManager {
    /// Processes the request and is required to send a response via `outgoing`.
    pub async fn handle_exec_command_request(
        &self,
        params: ExecCommandParams,
    ) -> Result<ExecCommandOutput, String> {
        // Allocate a session id.
        let session_id = SessionId(
            self.next_session_id
                .fetch_add(1, std::sync::atomic::Ordering::SeqCst),
        );

        let (session, mut output_rx, mut exit_rx) = create_exec_command_session(params.clone())
            .await
            .map_err(|err| {
                format!(
                    "failed to create exec command session for session id {}: {err}",
                    session_id.0
                )
            })?;

        // Insert into session map.
        self.sessions.lock().await.insert(session_id, session);

        // Collect output until either timeout expires or process exits.
        // Do not cap during collection; truncate at the end if needed.
        // Use a modest initial capacity to avoid large preallocation.
        let cap_bytes_u64 = params.max_output_tokens.saturating_mul(4);
        let cap_bytes: usize = cap_bytes_u64.min(usize::MAX as u64) as usize;
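        // One output token is treated as ~4 bytes here; the collected output
        // is only truncated to this byte cap at the end, via
        // truncate_middle(&output, cap_bytes) below.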
        let mut collected: Vec<u8> = Vec::with_capacity(4096);

        let start_time = Instant::now();
        let deadline = start_time + Duration::from_millis(params.yield_time_ms);
        let mut exit_code: Option<i32> = None;

        loop {
            if Instant::now() >= deadline {
                break;
            }
            let remaining = deadline.saturating_duration_since(Instant::now());
            tokio::select! {
                biased;
                exit = &mut exit_rx => {
                    exit_code = exit.ok();
                    // Small grace period to pull remaining buffered output
                    let grace_deadline = Instant::now() + Duration::from_millis(25);
                    while Instant::now() < grace_deadline {
                        match timeout(Duration::from_millis(1), output_rx.recv()).await {
                            Ok(Ok(chunk)) => {
                                collected.extend_from_slice(&chunk);
                            }
                            Ok(Err(tokio::sync::broadcast::error::RecvError::Lagged(_))) => {
                                // Skip missed messages; keep trying within grace period.
                                continue;
                            }
                            Ok(Err(tokio::sync::broadcast::error::RecvError::Closed)) => break,
                            Err(_) => break,
                        }
                    }
                    break;
                }
                chunk = timeout(remaining, output_rx.recv()) => {
                    match chunk {
                        Ok(Ok(chunk)) => {
                            collected.extend_from_slice(&chunk);
                        }
                        Ok(Err(tokio::sync::broadcast::error::RecvError::Lagged(_))) => {
                            // Skip missed messages; continue collecting fresh output.
                        }
                        Ok(Err(tokio::sync::broadcast::error::RecvError::Closed)) => { break; }
                        Err(_) => { break; }
                    }
                }
            }
        }

        let output = String::from_utf8_lossy(&collected).to_string();

        let exit_status = if let Some(code) = exit_code {
            ExitStatus::Exited(code)
        } else {
            ExitStatus::Ongoing(session_id)
        };

        // If output exceeds cap, truncate the middle and record original token estimate.
        let (output, original_token_count) = truncate_middle(&output, cap_bytes);
        Ok(ExecCommandOutput {
            wall_time: Instant::now().duration_since(start_time),
            exit_status,
            original_token_count,
            output,
        })
    }

    /// Write characters to a session's stdin and collect combined output for up to `yield_time_ms`.
    pub async fn handle_write_stdin_request(
        &self,
        params: WriteStdinParams,
    ) -> Result<ExecCommandOutput, String> {
        let WriteStdinParams {
            session_id,
            chars,
            yield_time_ms,
            max_output_tokens,
        } = params;

        // Grab handles without holding the sessions lock across await points.
        let (writer_tx, mut output_rx) = {
            let sessions = self.sessions.lock().await;
            match sessions.get(&session_id) {
                Some(session) => (session.writer_sender(), session.output_receiver()),
                None => {
                    return Err(format!("unknown session id {}", session_id.0));
                }
            }
        };

        // Write stdin if provided.
        if !chars.is_empty() && writer_tx.send(chars.into_bytes()).await.is_err() {
            return Err("failed to write to stdin".to_string());
        }

        // Collect output up to yield_time_ms, truncating to max_output_tokens bytes.
        let mut collected: Vec<u8> = Vec::with_capacity(4096);
        let start_time = Instant::now();
        let deadline = start_time + Duration::from_millis(yield_time_ms);
        loop {
            let now = Instant::now();
            if now >= deadline {
                break;
            }
            let remaining = deadline - now;
            match timeout(remaining, output_rx.recv()).await {
                Ok(Ok(chunk)) => {
                    // Collect all output within the time budget; truncate at the end.
                    collected.extend_from_slice(&chunk);
                }
                Ok(Err(tokio::sync::broadcast::error::RecvError::Lagged(_))) => {
                    // Skip missed messages; continue collecting fresh output.
                }
                Ok(Err(tokio::sync::broadcast::error::RecvError::Closed)) => break,
                Err(_) => break, // timeout
            }
        }

        // Return structured output, truncating middle if over cap.
        let output = String::from_utf8_lossy(&collected).to_string();
        let cap_bytes_u64 = max_output_tokens.saturating_mul(4);
        let cap_bytes: usize = cap_bytes_u64.min(usize::MAX as u64) as usize;
        let (output, original_token_count) = truncate_middle(&output, cap_bytes);
        Ok(ExecCommandOutput {
            wall_time: Instant::now().duration_since(start_time),
            exit_status: ExitStatus::Ongoing(session_id),
            original_token_count,
            output,
        })
    }
}

/// Spawn PTY and child process per spawn_exec_command_session logic.
async fn create_exec_command_session(
    params: ExecCommandParams,
) -> anyhow::Result<(
    ExecCommandSession,
    tokio::sync::broadcast::Receiver<Vec<u8>>,
    oneshot::Receiver<i32>,
)> {
    let ExecCommandParams {
        cmd,
        yield_time_ms: _,
        max_output_tokens: _,
        shell,
        login,
    } = params;

    // Use the native pty implementation for the system
    let pty_system = native_pty_system();

    // Create a new pty
    let pair = pty_system.openpty(PtySize {
        rows: 24,
        cols: 80,
        pixel_width: 0,
        pixel_height: 0,
    })?;

    // Spawn a shell into the pty
    let mut command_builder = CommandBuilder::new(shell);
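    // A login shell ("-lc") sources the user's profile files; a plain "-c"
    // invocation does not.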
    let shell_mode_opt = if login { "-lc" } else { "-c" };
    command_builder.arg(shell_mode_opt);
    command_builder.arg(cmd);

    let mut child = pair.slave.spawn_command(command_builder)?;
    // Obtain a killer that can signal the process independently of `.wait()`.
    let killer = child.clone_killer();

    // Channel to forward write requests to the PTY writer.
    let (writer_tx, mut writer_rx) = mpsc::channel::<Vec<u8>>(128);
    // Broadcast for streaming PTY output to readers: subscribers receive from subscription time.
    let (output_tx, _) = tokio::sync::broadcast::channel::<Vec<u8>>(256);
    // Reader task: drain PTY and forward chunks to output channel.
    let mut reader = pair.master.try_clone_reader()?;
    let output_tx_clone = output_tx.clone();
    let reader_handle = tokio::task::spawn_blocking(move || {
        let mut buf = [0u8; 8192];
        loop {
            match reader.read(&mut buf) {
                Ok(0) => break, // EOF
                Ok(n) => {
                    // Forward to broadcast; best-effort if there are subscribers.
                    let _ = output_tx_clone.send(buf[..n].to_vec());
                }
                Err(ref e) if e.kind() == ErrorKind::Interrupted => {
                    // Retry on EINTR
                    continue;
                }
                Err(ref e) if e.kind() == ErrorKind::WouldBlock => {
                    // We're in a blocking thread; back off briefly and retry.
                    std::thread::sleep(Duration::from_millis(5));
                    continue;
                }
                Err(_) => break,
            }
        }
    });

    // Writer task: apply stdin writes to the PTY writer.
    let writer = pair.master.take_writer()?;
    let writer = Arc::new(StdMutex::new(writer));
    let writer_handle = tokio::spawn({
        let writer = writer.clone();
        async move {
            while let Some(bytes) = writer_rx.recv().await {
                let writer = writer.clone();
                // Perform blocking write on a blocking thread.
                let _ = tokio::task::spawn_blocking(move || {
                    if let Ok(mut guard) = writer.lock() {
                        use std::io::Write;
                        let _ = guard.write_all(&bytes);
                        let _ = guard.flush();
                    }
                })
                .await;
            }
        }
    });

    // Keep the child alive until it exits, then signal exit code.
    let (exit_tx, exit_rx) = oneshot::channel::<i32>();
    let exit_status = Arc::new(AtomicBool::new(false));
    let wait_exit_status = exit_status.clone();
    let wait_handle = tokio::task::spawn_blocking(move || {
        let code = match child.wait() {
            Ok(status) => status.exit_code() as i32,
            Err(_) => -1,
        };
        wait_exit_status.store(true, std::sync::atomic::Ordering::SeqCst);
        let _ = exit_tx.send(code);
    });

    // Create and store the session with channels.
    let (session, initial_output_rx) = ExecCommandSession::new(
        writer_tx,
        output_tx,
        killer,
        reader_handle,
        writer_handle,
        wait_handle,
        exit_status,
    );
    Ok((session, initial_output_rx, exit_rx))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::exec_command::session_id::SessionId;

    /// Test that verifies that [`SessionManager::handle_exec_command_request()`]
    /// and [`SessionManager::handle_write_stdin_request()`] work as expected
    /// in the presence of a process that never terminates (but produces
    /// output continuously).
    #[cfg(unix)]
    #[allow(clippy::print_stderr)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
    async fn session_manager_streams_and_truncates_from_now() {
        use crate::exec_command::exec_command_params::ExecCommandParams;
        use crate::exec_command::exec_command_params::WriteStdinParams;
        use tokio::time::sleep;

        let session_manager = SessionManager::default();
        // Long-running loop that prints an increasing counter every ~100ms.
        // Use Python for a portable, reliable sleep across shells/PTYs.
        let cmd = r#"python3 - <<'PY'
import sys, time
count = 0
while True:
    print(count)
    sys.stdout.flush()
    count += 100
    time.sleep(0.1)
PY"#
        .to_string();

        // Start the session and collect ~3s of output.
        let params = ExecCommandParams {
            cmd,
            yield_time_ms: 3_000,
            max_output_tokens: 1_000, // large enough to avoid truncation here
            shell: "/bin/bash".to_string(),
            login: false,
        };
        let initial_output = match session_manager
            .handle_exec_command_request(params.clone())
            .await
        {
            Ok(v) => v,
            Err(e) => {
                // PTY may be restricted in some sandboxes; skip in that case.
                if e.contains("openpty") || e.contains("Operation not permitted") {
                    eprintln!("skipping test due to restricted PTY: {e}");
                    return;
                }
                panic!("exec request failed unexpectedly: {e}");
            }
        };
        eprintln!("initial output: {initial_output:?}");

        // Should be ongoing (we launched a never-ending loop).
        let session_id = match initial_output.exit_status {
            ExitStatus::Ongoing(id) => id,
            _ => panic!("expected ongoing session"),
        };

        // Parse the numeric lines and get the max observed value in the first window.
        let first_nums = extract_monotonic_numbers(&initial_output.output);
        assert!(
            !first_nums.is_empty(),
            "expected some output from first window"
        );
        let first_max = *first_nums.iter().max().unwrap();

        // Wait ~4s so counters progress while we're not reading.
        sleep(Duration::from_millis(4_000)).await;

        // Now read ~3s of output "from now" only.
        // Use a small token cap so truncation occurs and we test middle truncation.
        let write_params = WriteStdinParams {
            session_id,
            chars: String::new(),
            yield_time_ms: 3_000,
            max_output_tokens: 16, // 16 tokens ~= 64 bytes -> likely truncation
        };
        let second = session_manager
            .handle_write_stdin_request(write_params)
            .await
            .expect("write stdin should succeed");

        // Verify truncation metadata and size bound (cap is tokens*4 bytes).
        assert!(second.original_token_count.is_some());
        let cap_bytes = (16u64 * 4) as usize;
        assert!(second.output.len() <= cap_bytes);
        // New middle marker should be present.
        assert!(
            second.output.contains("tokens truncated") && second.output.contains('…'),
            "expected truncation marker in output, got: {}",
            second.output
        );

        // Minimal freshness check: the earliest number we see in the second window
        // should be significantly larger than the last from the first window.
        let second_nums = extract_monotonic_numbers(&second.output);
        assert!(
            !second_nums.is_empty(),
            "expected some numeric output from second window"
        );
        let second_min = *second_nums.iter().min().unwrap();

        // We slept 4 seconds (~40 ticks at 100ms/tick, each +100), so expect
        // an increase of roughly 4000 or more. Allow a generous margin.
        assert!(
            second_min >= first_max + 2000,
            "second_min={second_min} first_max={first_max}",
        );
    }

    #[cfg(unix)]
    fn extract_monotonic_numbers(s: &str) -> Vec<i64> {
        s.lines()
            .filter_map(|line| {
                if !line.is_empty()
                    && line.chars().all(|c| c.is_ascii_digit())
                    && let Ok(n) = line.parse::<i64>()
                {
                    // Our generator increments by 100; ignore spurious fragments.
                    if n % 100 == 0 {
                        return Some(n);
                    }
                }
                None
            })
            .collect()
    }

    #[test]
    fn to_text_output_exited_no_truncation() {
        let out = ExecCommandOutput {
            wall_time: Duration::from_millis(1234),
            exit_status: ExitStatus::Exited(0),
            original_token_count: None,
            output: "hello".to_string(),
        };
        let text = out.to_text_output();
        let expected = r#"Wall time: 1.234 seconds
Process exited with code 0
Output:
hello"#;
        assert_eq!(expected, text);
    }

    #[test]
    fn to_text_output_ongoing_with_truncation() {
        let out = ExecCommandOutput {
            wall_time: Duration::from_millis(500),
            exit_status: ExitStatus::Ongoing(SessionId(42)),
            original_token_count: Some(1000),
            output: "abc".to_string(),
        };
        let text = out.to_text_output();
        let expected = r#"Wall time: 0.500 seconds
Process running with session ID 42
Warning: truncated output (original token count: 1000)
Output:
abc"#;
        assert_eq!(expected, text);
    }
}
@@ -1,109 +0,0 @@
use std::collections::HashMap;
use std::env;

use async_trait::async_trait;

use crate::CODEX_APPLY_PATCH_ARG1;
use crate::apply_patch::ApplyPatchExec;
use crate::exec::ExecParams;
use crate::executor::ExecutorConfig;
use crate::function_tool::FunctionCallError;

pub(crate) enum ExecutionMode {
    Shell,
    ApplyPatch(ApplyPatchExec),
}

#[async_trait]
/// Backend-specific hooks that prepare and post-process execution requests for a
/// given [`ExecutionMode`].
pub(crate) trait ExecutionBackend: Send + Sync {
    fn prepare(
        &self,
        params: ExecParams,
        // Required for downcasting the apply_patch.
        mode: &ExecutionMode,
        config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError>;

    fn stream_stdout(&self, _mode: &ExecutionMode) -> bool {
        true
    }
}

static SHELL_BACKEND: ShellBackend = ShellBackend;
static APPLY_PATCH_BACKEND: ApplyPatchBackend = ApplyPatchBackend;

pub(crate) fn backend_for_mode(mode: &ExecutionMode) -> &'static dyn ExecutionBackend {
    match mode {
        ExecutionMode::Shell => &SHELL_BACKEND,
        ExecutionMode::ApplyPatch(_) => &APPLY_PATCH_BACKEND,
    }
}
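
// Usage sketch (mirrors how Executor::run drives a backend): look up the
// backend once per request, normalise the params, then spawn the command.
//
//     let backend = backend_for_mode(&request.mode);
//     let params = backend.prepare(request.params, &request.mode, &config)?;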

struct ShellBackend;

#[async_trait]
impl ExecutionBackend for ShellBackend {
    fn prepare(
        &self,
        params: ExecParams,
        mode: &ExecutionMode,
        _config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError> {
        match mode {
            ExecutionMode::Shell => Ok(params),
            _ => Err(FunctionCallError::RespondToModel(
                "shell backend invoked with non-shell mode".to_string(),
            )),
        }
    }
}

struct ApplyPatchBackend;

#[async_trait]
impl ExecutionBackend for ApplyPatchBackend {
    fn prepare(
        &self,
        params: ExecParams,
        mode: &ExecutionMode,
        config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError> {
        match mode {
            ExecutionMode::ApplyPatch(exec) => {
                let path_to_codex = if let Some(exe_path) = &config.codex_exe {
                    exe_path.to_string_lossy().to_string()
                } else {
                    env::current_exe()
                        .ok()
                        .map(|p| p.to_string_lossy().to_string())
                        .ok_or_else(|| {
                            FunctionCallError::RespondToModel(
                                "failed to determine path to codex executable".to_string(),
                            )
                        })?
                };

                let patch = exec.action.patch.clone();
                Ok(ExecParams {
                    command: vec![path_to_codex, CODEX_APPLY_PATCH_ARG1.to_string(), patch],
                    cwd: exec.action.cwd.clone(),
                    timeout_ms: params.timeout_ms,
                    // Run apply_patch with a minimal environment for determinism and to
                    // avoid leaking host environment variables into the patch process.
                    env: HashMap::new(),
                    with_escalated_permissions: params.with_escalated_permissions,
                    justification: params.justification,
                })
            }
            ExecutionMode::Shell => Err(FunctionCallError::RespondToModel(
                "apply_patch backend invoked without patch context".to_string(),
            )),
        }
    }

    fn stream_stdout(&self, _mode: &ExecutionMode) -> bool {
        false
    }
}
@@ -1,51 +0,0 @@
use std::collections::HashSet;
use std::sync::Arc;
use std::sync::Mutex;

#[derive(Clone, Debug, Default)]
/// Thread-safe store of user approvals so repeated commands can reuse
/// previously granted trust.
pub(crate) struct ApprovalCache {
    inner: Arc<Mutex<HashSet<Vec<String>>>>,
}

impl ApprovalCache {
    pub(crate) fn insert(&self, command: Vec<String>) {
        if command.is_empty() {
            return;
        }
        if let Ok(mut guard) = self.inner.lock() {
            guard.insert(command);
        }
    }

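    // Returns a point-in-time copy; a poisoned lock degrades to an empty set
    // rather than propagating the panic.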
    pub(crate) fn snapshot(&self) -> HashSet<Vec<String>> {
        self.inner.lock().map(|g| g.clone()).unwrap_or_default()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn insert_ignores_empty_and_dedupes() {
        let cache = ApprovalCache::default();

        // Empty should be ignored
        cache.insert(vec![]);
        assert!(cache.snapshot().is_empty());

        // Insert a command and verify snapshot contains it
        let cmd = vec!["foo".to_string(), "bar".to_string()];
        cache.insert(cmd.clone());
        let snap1 = cache.snapshot();
        assert!(snap1.contains(&cmd));

        // Reinserting should not create duplicates
        cache.insert(cmd);
        let snap2 = cache.snapshot();
        assert_eq!(snap1, snap2);
    }
}
@@ -1,64 +0,0 @@
mod backends;
mod cache;
mod runner;
mod sandbox;

pub(crate) use backends::ExecutionMode;
pub(crate) use runner::ExecutionRequest;
pub(crate) use runner::Executor;
pub(crate) use runner::ExecutorConfig;
pub(crate) use runner::normalize_exec_result;

pub(crate) mod linkers {
    use crate::exec::ExecParams;
    use crate::exec::StdoutStream;
    use crate::executor::backends::ExecutionMode;
    use crate::executor::runner::ExecutionRequest;
    use crate::tools::context::ExecCommandContext;

    pub struct PreparedExec {
        pub(crate) context: ExecCommandContext,
        pub(crate) request: ExecutionRequest,
    }

    impl PreparedExec {
        pub fn new(
            context: ExecCommandContext,
            params: ExecParams,
            approval_command: Vec<String>,
            mode: ExecutionMode,
            stdout_stream: Option<StdoutStream>,
            use_shell_profile: bool,
        ) -> Self {
            let request = ExecutionRequest {
                params,
                approval_command,
                mode,
                stdout_stream,
                use_shell_profile,
            };

            Self { context, request }
        }
    }
}

pub mod errors {
    use crate::error::CodexErr;
    use crate::function_tool::FunctionCallError;
    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum ExecError {
        #[error(transparent)]
        Function(#[from] FunctionCallError),
        #[error(transparent)]
        Codex(#[from] CodexErr),
    }

    impl ExecError {
        pub(crate) fn rejection(msg: impl Into<String>) -> Self {
            FunctionCallError::RespondToModel(msg.into()).into()
        }
    }
}
@@ -1,426 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::RwLock;
use std::time::Duration;

use super::backends::ExecutionMode;
use super::backends::backend_for_mode;
use super::cache::ApprovalCache;
use crate::codex::Session;
use crate::error::CodexErr;
use crate::error::SandboxErr;
use crate::error::get_error_message_ui;
use crate::exec::ExecParams;
use crate::exec::ExecToolCallOutput;
use crate::exec::SandboxType;
use crate::exec::StdoutStream;
use crate::exec::StreamOutput;
use crate::exec::process_exec_tool_call;
use crate::executor::errors::ExecError;
use crate::executor::sandbox::select_sandbox;
use crate::function_tool::FunctionCallError;
use crate::protocol::AskForApproval;
use crate::protocol::ReviewDecision;
use crate::protocol::SandboxPolicy;
use crate::shell;
use crate::tools::context::ExecCommandContext;
use codex_otel::otel_event_manager::ToolDecisionSource;

#[derive(Clone, Debug)]
pub(crate) struct ExecutorConfig {
    pub(crate) sandbox_policy: SandboxPolicy,
    pub(crate) sandbox_cwd: PathBuf,
    pub(crate) codex_exe: Option<PathBuf>,
}

impl ExecutorConfig {
    pub(crate) fn new(
        sandbox_policy: SandboxPolicy,
        sandbox_cwd: PathBuf,
        codex_exe: Option<PathBuf>,
    ) -> Self {
        Self {
            sandbox_policy,
            sandbox_cwd,
            codex_exe,
        }
    }
}

/// Coordinates sandbox selection, backend-specific preparation, and command
/// execution for tool calls requested by the model.
pub(crate) struct Executor {
    approval_cache: ApprovalCache,
    config: Arc<RwLock<ExecutorConfig>>,
}

impl Executor {
    pub(crate) fn new(config: ExecutorConfig) -> Self {
        Self {
            approval_cache: ApprovalCache::default(),
            config: Arc::new(RwLock::new(config)),
        }
    }

    /// Updates the sandbox policy and working directory used for future
    /// executions without recreating the executor.
    pub(crate) fn update_environment(&self, sandbox_policy: SandboxPolicy, sandbox_cwd: PathBuf) {
        if let Ok(mut cfg) = self.config.write() {
            cfg.sandbox_policy = sandbox_policy;
            cfg.sandbox_cwd = sandbox_cwd;
        }
    }

    /// Runs a prepared execution request end-to-end: prepares parameters, decides on
    /// sandbox placement (prompting the user when necessary), launches the command,
    /// and lets the backend post-process the final output.
    pub(crate) async fn run(
        &self,
        mut request: ExecutionRequest,
        session: &Session,
        approval_policy: AskForApproval,
        context: &ExecCommandContext,
    ) -> Result<ExecToolCallOutput, ExecError> {
        if matches!(request.mode, ExecutionMode::Shell) {
            request.params =
                maybe_translate_shell_command(request.params, session, request.use_shell_profile);
        }

        // Step 1: Snapshot sandbox configuration so it stays stable for this run.
        let config = self
            .config
            .read()
            .map_err(|_| ExecError::rejection("executor config poisoned"))?
            .clone();

        // Step 2: Normalise parameters via the selected backend.
        let backend = backend_for_mode(&request.mode);
        let stdout_stream = if backend.stream_stdout(&request.mode) {
            request.stdout_stream.clone()
        } else {
            None
        };
        request.params = backend
            .prepare(request.params, &request.mode, &config)
            .map_err(ExecError::from)?;

        // Step 3: Decide sandbox placement, prompting for approval when needed.
        let sandbox_decision = select_sandbox(
            &request,
            approval_policy,
            self.approval_cache.snapshot(),
            &config,
            session,
            &context.sub_id,
            &context.call_id,
            &context.otel_event_manager,
        )
        .await?;
        if sandbox_decision.record_session_approval {
            self.approval_cache.insert(request.approval_command.clone());
        }

        // Step 4: Launch the command within the chosen sandbox.
        let first_attempt = self
            .spawn(
                request.params.clone(),
                sandbox_decision.initial_sandbox,
                &config,
                stdout_stream.clone(),
            )
            .await;

        // Step 5: Handle sandbox outcomes, optionally escalating to an unsandboxed retry.
        match first_attempt {
            Ok(output) => Ok(output),
Err(CodexErr::Sandbox(SandboxErr::Timeout { output })) => {
|
||||
Err(CodexErr::Sandbox(SandboxErr::Timeout { output }).into())
|
||||
}
|
||||
Err(CodexErr::Sandbox(error)) => {
|
||||
if sandbox_decision.escalate_on_failure {
|
||||
self.retry_without_sandbox(
|
||||
&request,
|
||||
&config,
|
||||
session,
|
||||
context,
|
||||
stdout_stream,
|
||||
error,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
let message = sandbox_failure_message(error);
|
||||
Err(ExecError::rejection(message))
|
||||
}
|
||||
}
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
}
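
A hedged call-site sketch (editorial addition; building a Session and an ExecCommandContext is elided here — the sandbox tests later in this diff show make_session_and_context() doing that work):

async fn run_once(
    executor: &Executor,
    request: ExecutionRequest,
    session: &Session,
    ctx: &ExecCommandContext,
) -> Result<ExecToolCallOutput, ExecError> {
    // One full pass through steps 1-5 above with an on-request approval policy.
    executor.run(request, session, AskForApproval::OnRequest, ctx).await
}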

    /// Fallback path invoked when a sandboxed run is denied so the user can
    /// approve rerunning without isolation.
    async fn retry_without_sandbox(
        &self,
        request: &ExecutionRequest,
        config: &ExecutorConfig,
        session: &Session,
        context: &ExecCommandContext,
        stdout_stream: Option<StdoutStream>,
        sandbox_error: SandboxErr,
    ) -> Result<ExecToolCallOutput, ExecError> {
        session
            .notify_background_event(
                &context.sub_id,
                format!("Execution failed: {sandbox_error}"),
            )
            .await;
        let decision = session
            .request_command_approval(
                context.sub_id.to_string(),
                context.call_id.to_string(),
                request.approval_command.clone(),
                request.params.cwd.clone(),
                Some("command failed; retry without sandbox?".to_string()),
            )
            .await;

        context.otel_event_manager.tool_decision(
            &context.tool_name,
            &context.call_id,
            decision,
            ToolDecisionSource::User,
        );
        match decision {
            ReviewDecision::Approved | ReviewDecision::ApprovedForSession => {
                if matches!(decision, ReviewDecision::ApprovedForSession) {
                    self.approval_cache.insert(request.approval_command.clone());
                }
                session
                    .notify_background_event(&context.sub_id, "retrying command without sandbox")
                    .await;

                let retry_output = self
                    .spawn(
                        request.params.clone(),
                        SandboxType::None,
                        config,
                        stdout_stream,
                    )
                    .await?;

                Ok(retry_output)
            }
            ReviewDecision::Denied | ReviewDecision::Abort => {
                Err(ExecError::rejection("exec command rejected by user"))
            }
        }
    }

    async fn spawn(
        &self,
        params: ExecParams,
        sandbox: SandboxType,
        config: &ExecutorConfig,
        stdout_stream: Option<StdoutStream>,
    ) -> Result<ExecToolCallOutput, CodexErr> {
        process_exec_tool_call(
            params,
            sandbox,
            &config.sandbox_policy,
            &config.sandbox_cwd,
            &config.codex_exe,
            stdout_stream,
        )
        .await
    }
}

fn maybe_translate_shell_command(
    params: ExecParams,
    session: &Session,
    use_shell_profile: bool,
) -> ExecParams {
    let should_translate =
        matches!(session.user_shell(), shell::Shell::PowerShell(_)) || use_shell_profile;

    if should_translate
        && let Some(command) = session
            .user_shell()
            .format_default_shell_invocation(params.command.clone())
    {
        return ExecParams { command, ..params };
    }

    params
}
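
A standalone restatement of the rewrite rule (editorial sketch; the closure stands in for Shell::format_default_shell_invocation, and the name is illustrative):

fn translate(
    command: Vec<String>,
    format: impl Fn(Vec<String>) -> Option<Vec<String>>,
) -> Vec<String> {
    // Rewrite only when the shell can produce a default invocation;
    // otherwise keep the original argv untouched.
    format(command.clone()).unwrap_or(command)
}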

fn sandbox_failure_message(error: SandboxErr) -> String {
    let codex_error = CodexErr::Sandbox(error);
    let friendly = get_error_message_ui(&codex_error);
    format!("failed in sandbox: {friendly}")
}

pub(crate) struct ExecutionRequest {
    pub params: ExecParams,
    pub approval_command: Vec<String>,
    pub mode: ExecutionMode,
    pub stdout_stream: Option<StdoutStream>,
    pub use_shell_profile: bool,
}

pub(crate) struct NormalizedExecOutput<'a> {
    borrowed: Option<&'a ExecToolCallOutput>,
    synthetic: Option<ExecToolCallOutput>,
}

impl<'a> NormalizedExecOutput<'a> {
    pub(crate) fn event_output(&'a self) -> &'a ExecToolCallOutput {
        match (self.borrowed, self.synthetic.as_ref()) {
            (Some(output), _) => output,
            (None, Some(output)) => output,
            (None, None) => unreachable!("normalized exec output missing data"),
        }
    }
}

/// Converts a raw execution result into a uniform view that always exposes an
/// [`ExecToolCallOutput`], synthesizing error output when the command fails
/// before producing a response.
pub(crate) fn normalize_exec_result(
    result: &Result<ExecToolCallOutput, ExecError>,
) -> NormalizedExecOutput<'_> {
    match result {
        Ok(output) => NormalizedExecOutput {
            borrowed: Some(output),
            synthetic: None,
        },
        Err(ExecError::Codex(CodexErr::Sandbox(SandboxErr::Timeout { output }))) => {
            NormalizedExecOutput {
                borrowed: Some(output.as_ref()),
                synthetic: None,
            }
        }
        Err(err) => {
            let message = match err {
                ExecError::Function(FunctionCallError::RespondToModel(msg)) => msg.clone(),
                ExecError::Codex(e) => get_error_message_ui(e),
                err => err.to_string(),
            };
            let synthetic = ExecToolCallOutput {
                exit_code: -1,
                stdout: StreamOutput::new(String::new()),
                stderr: StreamOutput::new(message.clone()),
                aggregated_output: StreamOutput::new(message),
                duration: Duration::default(),
                timed_out: false,
            };
            NormalizedExecOutput {
                borrowed: None,
                synthetic: Some(synthetic),
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::error::CodexErr;
    use crate::error::EnvVarError;
    use crate::error::SandboxErr;
    use crate::exec::StreamOutput;
    use pretty_assertions::assert_eq;

    fn make_output(text: &str) -> ExecToolCallOutput {
        ExecToolCallOutput {
            exit_code: 1,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new(text.to_string()),
            duration: Duration::from_millis(123),
            timed_out: false,
        }
    }

    #[test]
    fn normalize_success_borrows() {
        let out = make_output("ok");
        let result: Result<ExecToolCallOutput, ExecError> = Ok(out);
        let normalized = normalize_exec_result(&result);
        assert_eq!(normalized.event_output().aggregated_output.text, "ok");
    }

    #[test]
    fn normalize_timeout_borrows_embedded_output() {
        let out = make_output("timed out payload");
        let err = CodexErr::Sandbox(SandboxErr::Timeout {
            output: Box::new(out),
        });
        let result: Result<ExecToolCallOutput, ExecError> = Err(ExecError::Codex(err));
        let normalized = normalize_exec_result(&result);
        assert_eq!(
            normalized.event_output().aggregated_output.text,
            "timed out payload"
        );
    }

    #[test]
    fn sandbox_failure_message_uses_denied_stderr() {
        let output = ExecToolCallOutput {
            exit_code: 101,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new("sandbox stderr".to_string()),
            aggregated_output: StreamOutput::new(String::new()),
            duration: Duration::from_millis(10),
            timed_out: false,
        };
        let err = SandboxErr::Denied {
            output: Box::new(output),
        };
        let message = sandbox_failure_message(err);
        assert_eq!(message, "failed in sandbox: sandbox stderr");
    }

    #[test]
    fn sandbox_failure_message_falls_back_to_aggregated_output() {
        let output = ExecToolCallOutput {
            exit_code: 101,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new("aggregate text".to_string()),
            duration: Duration::from_millis(10),
            timed_out: false,
        };
        let err = SandboxErr::Denied {
            output: Box::new(output),
        };
        let message = sandbox_failure_message(err);
        assert_eq!(message, "failed in sandbox: aggregate text");
    }

    #[test]
    fn normalize_function_error_synthesizes_payload() {
        let err = FunctionCallError::RespondToModel("boom".to_string());
        let result: Result<ExecToolCallOutput, ExecError> = Err(ExecError::Function(err));
        let normalized = normalize_exec_result(&result);
        assert_eq!(normalized.event_output().aggregated_output.text, "boom");
    }

    #[test]
    fn normalize_codex_error_synthesizes_user_message() {
        // Use a simple EnvVar error which formats to a clear message
        let e = CodexErr::EnvVar(EnvVarError {
            var: "FOO".to_string(),
            instructions: Some("set it".to_string()),
        });
        let result: Result<ExecToolCallOutput, ExecError> = Err(ExecError::Codex(e));
        let normalized = normalize_exec_result(&result);
        assert!(
            normalized
                .event_output()
                .aggregated_output
                .text
                .contains("Missing environment variable: `FOO`"),
            "expected synthesized user-friendly message"
        );
    }
}

@@ -1,405 +0,0 @@
use crate::apply_patch::ApplyPatchExec;
use crate::codex::Session;
use crate::exec::SandboxType;
use crate::executor::ExecutionMode;
use crate::executor::ExecutionRequest;
use crate::executor::ExecutorConfig;
use crate::executor::errors::ExecError;
use crate::safety::SafetyCheck;
use crate::safety::assess_command_safety;
use crate::safety::assess_patch_safety;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_otel::otel_event_manager::ToolDecisionSource;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::ReviewDecision;
use std::collections::HashSet;

/// Sandbox placement options selected for an execution run, including whether
/// to escalate after failures and whether approvals should persist.
pub(crate) struct SandboxDecision {
    pub(crate) initial_sandbox: SandboxType,
    pub(crate) escalate_on_failure: bool,
    pub(crate) record_session_approval: bool,
}

impl SandboxDecision {
    fn auto(sandbox: SandboxType, escalate_on_failure: bool) -> Self {
        Self {
            initial_sandbox: sandbox,
            escalate_on_failure,
            record_session_approval: false,
        }
    }

    fn user_override(record_session_approval: bool) -> Self {
        Self {
            initial_sandbox: SandboxType::None,
            escalate_on_failure: false,
            record_session_approval,
        }
    }
}

fn should_escalate_on_failure(approval: AskForApproval, sandbox: SandboxType) -> bool {
    matches!(
        (approval, sandbox),
        (
            AskForApproval::UnlessTrusted | AskForApproval::OnFailure,
            SandboxType::MacosSeatbelt | SandboxType::LinuxSeccomp
        )
    )
}
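
Two spot checks of the escalation matrix (editorial addition written as a hypothetical test; both lines follow directly from the matches! arm above):

#[test]
fn escalation_matrix_spot_checks() {
    // A lenient on-failure policy inside a platform sandbox escalates.
    assert!(should_escalate_on_failure(
        AskForApproval::OnFailure,
        SandboxType::MacosSeatbelt
    ));
    // A command that already runs unsandboxed has nothing to escalate to.
    assert!(!should_escalate_on_failure(
        AskForApproval::OnFailure,
        SandboxType::None
    ));
}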

/// Determines how a command should be sandboxed, prompting the user when
/// policy requires explicit approval.
#[allow(clippy::too_many_arguments)]
pub async fn select_sandbox(
    request: &ExecutionRequest,
    approval_policy: AskForApproval,
    approval_cache: HashSet<Vec<String>>,
    config: &ExecutorConfig,
    session: &Session,
    sub_id: &str,
    call_id: &str,
    otel_event_manager: &OtelEventManager,
) -> Result<SandboxDecision, ExecError> {
    match &request.mode {
        ExecutionMode::Shell => {
            select_shell_sandbox(
                request,
                approval_policy,
                approval_cache,
                config,
                session,
                sub_id,
                call_id,
                otel_event_manager,
            )
            .await
        }
        ExecutionMode::ApplyPatch(exec) => {
            select_apply_patch_sandbox(exec, approval_policy, config)
        }
    }
}

#[allow(clippy::too_many_arguments)]
async fn select_shell_sandbox(
    request: &ExecutionRequest,
    approval_policy: AskForApproval,
    approved_snapshot: HashSet<Vec<String>>,
    config: &ExecutorConfig,
    session: &Session,
    sub_id: &str,
    call_id: &str,
    otel_event_manager: &OtelEventManager,
) -> Result<SandboxDecision, ExecError> {
    let command_for_safety = if request.approval_command.is_empty() {
        request.params.command.clone()
    } else {
        request.approval_command.clone()
    };

    let safety = assess_command_safety(
        &command_for_safety,
        approval_policy,
        &config.sandbox_policy,
        &approved_snapshot,
        request.params.with_escalated_permissions.unwrap_or(false),
    );

    match safety {
        SafetyCheck::AutoApprove {
            sandbox_type,
            user_explicitly_approved,
        } => {
            let mut decision = SandboxDecision::auto(
                sandbox_type,
                should_escalate_on_failure(approval_policy, sandbox_type),
            );
            if user_explicitly_approved {
                decision.record_session_approval = true;
            }
            let (decision_for_event, source) = if user_explicitly_approved {
                (ReviewDecision::ApprovedForSession, ToolDecisionSource::User)
            } else {
                (ReviewDecision::Approved, ToolDecisionSource::Config)
            };
            otel_event_manager.tool_decision("local_shell", call_id, decision_for_event, source);
            Ok(decision)
        }
        SafetyCheck::AskUser => {
            let decision = session
                .request_command_approval(
                    sub_id.to_string(),
                    call_id.to_string(),
                    request.approval_command.clone(),
                    request.params.cwd.clone(),
                    request.params.justification.clone(),
                )
                .await;

            otel_event_manager.tool_decision(
                "local_shell",
                call_id,
                decision,
                ToolDecisionSource::User,
            );
            match decision {
                ReviewDecision::Approved => Ok(SandboxDecision::user_override(false)),
                ReviewDecision::ApprovedForSession => Ok(SandboxDecision::user_override(true)),
                ReviewDecision::Denied | ReviewDecision::Abort => {
                    Err(ExecError::rejection("exec command rejected by user"))
                }
            }
        }
        SafetyCheck::Reject { reason } => Err(ExecError::rejection(format!(
            "exec command rejected: {reason}"
        ))),
    }
}

fn select_apply_patch_sandbox(
    exec: &ApplyPatchExec,
    approval_policy: AskForApproval,
    config: &ExecutorConfig,
) -> Result<SandboxDecision, ExecError> {
    if exec.user_explicitly_approved_this_action {
        return Ok(SandboxDecision::user_override(false));
    }

    match assess_patch_safety(
        &exec.action,
        approval_policy,
        &config.sandbox_policy,
        &config.sandbox_cwd,
    ) {
        SafetyCheck::AutoApprove { sandbox_type, .. } => Ok(SandboxDecision::auto(
            sandbox_type,
            should_escalate_on_failure(approval_policy, sandbox_type),
        )),
        SafetyCheck::AskUser => Err(ExecError::rejection(
            "patch requires approval but none was recorded",
        )),
        SafetyCheck::Reject { reason } => {
            Err(ExecError::rejection(format!("patch rejected: {reason}")))
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::codex::make_session_and_context;
    use crate::exec::ExecParams;
    use crate::function_tool::FunctionCallError;
    use crate::protocol::SandboxPolicy;
    use codex_apply_patch::ApplyPatchAction;
    use pretty_assertions::assert_eq;

    #[tokio::test]
    async fn select_apply_patch_user_override_when_explicit() {
        let (session, ctx) = make_session_and_context();
        let tmp = tempfile::tempdir().expect("tmp");
        let p = tmp.path().join("a.txt");
        let action = ApplyPatchAction::new_add_for_test(&p, "hello".to_string());
        let exec = ApplyPatchExec {
            action,
            user_explicitly_approved_this_action: true,
        };
        let cfg = ExecutorConfig::new(SandboxPolicy::ReadOnly, std::env::temp_dir(), None);
        let request = ExecutionRequest {
            params: ExecParams {
                command: vec!["apply_patch".into()],
                cwd: std::env::temp_dir(),
                timeout_ms: None,
                env: std::collections::HashMap::new(),
                with_escalated_permissions: None,
                justification: None,
            },
            approval_command: vec!["apply_patch".into()],
            mode: ExecutionMode::ApplyPatch(exec),
            stdout_stream: None,
            use_shell_profile: false,
        };
        let otel_event_manager = ctx.client.get_otel_event_manager();
        let decision = select_sandbox(
            &request,
            AskForApproval::OnRequest,
            Default::default(),
            &cfg,
            &session,
            "sub",
            "call",
            &otel_event_manager,
        )
        .await
        .expect("ok");
        // Explicit user override runs without sandbox
        assert_eq!(decision.initial_sandbox, SandboxType::None);
        assert_eq!(decision.escalate_on_failure, false);
    }

    #[tokio::test]
    async fn select_apply_patch_autoapprove_in_danger() {
        let (session, ctx) = make_session_and_context();
        let tmp = tempfile::tempdir().expect("tmp");
        let p = tmp.path().join("a.txt");
        let action = ApplyPatchAction::new_add_for_test(&p, "hello".to_string());
        let exec = ApplyPatchExec {
            action,
            user_explicitly_approved_this_action: false,
        };
        let cfg = ExecutorConfig::new(SandboxPolicy::DangerFullAccess, std::env::temp_dir(), None);
        let request = ExecutionRequest {
            params: ExecParams {
                command: vec!["apply_patch".into()],
                cwd: std::env::temp_dir(),
                timeout_ms: None,
                env: std::collections::HashMap::new(),
                with_escalated_permissions: None,
                justification: None,
            },
            approval_command: vec!["apply_patch".into()],
            mode: ExecutionMode::ApplyPatch(exec),
            stdout_stream: None,
            use_shell_profile: false,
        };
        let otel_event_manager = ctx.client.get_otel_event_manager();
        let decision = select_sandbox(
            &request,
            AskForApproval::OnRequest,
            Default::default(),
            &cfg,
            &session,
            "sub",
            "call",
            &otel_event_manager,
        )
        .await
        .expect("ok");
        // On platforms with a sandbox, DangerFullAccess still prefers it
        let expected = crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None);
        assert_eq!(decision.initial_sandbox, expected);
        assert_eq!(decision.escalate_on_failure, false);
    }

    #[tokio::test]
    async fn select_apply_patch_requires_approval_on_unless_trusted() {
        let (session, ctx) = make_session_and_context();
        let tempdir = tempfile::tempdir().expect("tmpdir");
        let p = tempdir.path().join("a.txt");
        let action = ApplyPatchAction::new_add_for_test(&p, "hello".to_string());
        let exec = ApplyPatchExec {
            action,
            user_explicitly_approved_this_action: false,
        };
        let cfg = ExecutorConfig::new(SandboxPolicy::ReadOnly, std::env::temp_dir(), None);
        let request = ExecutionRequest {
            params: ExecParams {
                command: vec!["apply_patch".into()],
                cwd: std::env::temp_dir(),
                timeout_ms: None,
                env: std::collections::HashMap::new(),
                with_escalated_permissions: None,
                justification: None,
            },
            approval_command: vec!["apply_patch".into()],
            mode: ExecutionMode::ApplyPatch(exec),
            stdout_stream: None,
            use_shell_profile: false,
        };
        let otel_event_manager = ctx.client.get_otel_event_manager();
        let result = select_sandbox(
            &request,
            AskForApproval::UnlessTrusted,
            Default::default(),
            &cfg,
            &session,
            "sub",
            "call",
            &otel_event_manager,
        )
        .await;
        match result {
            Ok(_) => panic!("expected error"),
            Err(ExecError::Function(FunctionCallError::RespondToModel(msg))) => {
                assert!(msg.contains("requires approval"))
            }
            Err(other) => panic!("unexpected error: {other:?}"),
        }
    }

    #[tokio::test]
    async fn select_shell_autoapprove_in_danger_mode() {
        let (session, ctx) = make_session_and_context();
        let cfg = ExecutorConfig::new(SandboxPolicy::DangerFullAccess, std::env::temp_dir(), None);
        let request = ExecutionRequest {
            params: ExecParams {
                command: vec!["some-unknown".into()],
                cwd: std::env::temp_dir(),
                timeout_ms: None,
                env: std::collections::HashMap::new(),
                with_escalated_permissions: None,
                justification: None,
            },
            approval_command: vec!["some-unknown".into()],
            mode: ExecutionMode::Shell,
            stdout_stream: None,
            use_shell_profile: false,
        };
        let otel_event_manager = ctx.client.get_otel_event_manager();
        let decision = select_sandbox(
            &request,
            AskForApproval::OnRequest,
            Default::default(),
            &cfg,
            &session,
            "sub",
            "call",
            &otel_event_manager,
        )
        .await
        .expect("ok");
        assert_eq!(decision.initial_sandbox, SandboxType::None);
        assert_eq!(decision.escalate_on_failure, false);
    }

    #[cfg(any(target_os = "macos", target_os = "linux"))]
    #[tokio::test]
    async fn select_shell_escalates_on_failure_with_platform_sandbox() {
        let (session, ctx) = make_session_and_context();
        let cfg = ExecutorConfig::new(SandboxPolicy::ReadOnly, std::env::temp_dir(), None);
        let request = ExecutionRequest {
            params: ExecParams {
                // Unknown command => untrusted but not flagged dangerous
                command: vec!["some-unknown".into()],
                cwd: std::env::temp_dir(),
                timeout_ms: None,
                env: std::collections::HashMap::new(),
                with_escalated_permissions: None,
                justification: None,
            },
            approval_command: vec!["some-unknown".into()],
            mode: ExecutionMode::Shell,
            stdout_stream: None,
            use_shell_profile: false,
        };
        let otel_event_manager = ctx.client.get_otel_event_manager();
        let decision = select_sandbox(
            &request,
            AskForApproval::OnFailure,
            Default::default(),
            &cfg,
            &session,
            "sub",
            "call",
            &otel_event_manager,
        )
        .await
        .expect("ok");
        // On macOS/Linux we should have a platform sandbox and escalate on failure
        assert_ne!(decision.initial_sandbox, SandboxType::None);
        assert_eq!(decision.escalate_on_failure, true);
    }
}

@@ -31,18 +31,16 @@ pub enum Feature {
    UnifiedExec,
    /// Use the streamable exec-command/write-stdin tool pair.
    StreamableShell,
    /// Use the official Rust MCP client (rmcp).
    /// Enable experimental RMCP features such as OAuth login.
    RmcpClient,
    /// Include the plan tool.
    PlanTool,
    /// Include the freeform apply_patch tool.
    ApplyPatchFreeform,
    /// Include the view_image tool.
    ViewImageTool,
    /// Allow the model to request web searches.
    WebSearchRequest,
    /// Automatically approve all approval requests from the harness.
    ApproveAll,
    /// Enable the model-based risk assessments for sandboxed commands.
    SandboxCommandAssessment,
}
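
A hedged lookup sketch (editorial addition; the enabled() accessor is assumed from context, since only with_defaults() appears in the hunks below):

fn risk_assessment_enabled() -> bool {
    // Query the new SandboxCommandAssessment variant like any other flag.
    let features = Features::with_defaults();
    features.enabled(Feature::SandboxCommandAssessment)
}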

impl Feature {
@@ -74,16 +72,15 @@ pub struct Features {

#[derive(Debug, Clone, Default)]
pub struct FeatureOverrides {
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub web_search_request: Option<bool>,
    pub experimental_sandbox_command_assessment: Option<bool>,
}

impl FeatureOverrides {
    fn apply(self, features: &mut Features) {
        LegacyFeatureToggles {
            include_plan_tool: self.include_plan_tool,
            include_apply_patch_tool: self.include_apply_patch_tool,
            include_view_image_tool: self.include_view_image_tool,
            tools_web_search: self.web_search_request,
@@ -143,6 +140,7 @@ impl Features {
        let mut features = Features::with_defaults();

        let base_legacy = LegacyFeatureToggles {
            experimental_sandbox_command_assessment: cfg.experimental_sandbox_command_assessment,
            experimental_use_freeform_apply_patch: cfg.experimental_use_freeform_apply_patch,
            experimental_use_exec_command_tool: cfg.experimental_use_exec_command_tool,
            experimental_use_unified_exec_tool: cfg.experimental_use_unified_exec_tool,
@@ -158,9 +156,10 @@ impl Features {
        }

        let profile_legacy = LegacyFeatureToggles {
            include_plan_tool: config_profile.include_plan_tool,
            include_apply_patch_tool: config_profile.include_apply_patch_tool,
            include_view_image_tool: config_profile.include_view_image_tool,
            experimental_sandbox_command_assessment: config_profile
                .experimental_sandbox_command_assessment,
            experimental_use_freeform_apply_patch: config_profile
                .experimental_use_freeform_apply_patch,
            experimental_use_exec_command_tool: config_profile.experimental_use_exec_command_tool,
@@ -225,12 +224,6 @@ pub const FEATURES: &[FeatureSpec] = &[
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::PlanTool,
        key: "plan_tool",
        stage: Stage::Stable,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ApplyPatchFreeform,
        key: "apply_patch_freeform",
@@ -250,8 +243,8 @@ pub const FEATURES: &[FeatureSpec] = &[
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ApproveAll,
        key: "approve_all",
        id: Feature::SandboxCommandAssessment,
        key: "experimental_sandbox_command_assessment",
        stage: Stage::Experimental,
        default_enabled: false,
    },

@@ -9,6 +9,10 @@ struct Alias {
}

const ALIASES: &[Alias] = &[
    Alias {
        legacy_key: "experimental_sandbox_command_assessment",
        feature: Feature::SandboxCommandAssessment,
    },
    Alias {
        legacy_key: "experimental_use_unified_exec_tool",
        feature: Feature::UnifiedExec,
@@ -29,10 +33,6 @@ const ALIASES: &[Alias] = &[
        legacy_key: "include_apply_patch_tool",
        feature: Feature::ApplyPatchFreeform,
    },
    Alias {
        legacy_key: "include_plan_tool",
        feature: Feature::PlanTool,
    },
    Alias {
        legacy_key: "include_view_image_tool",
        feature: Feature::ViewImageTool,
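
A quick resolution example (editorial addition shaped as a hypothetical test; it assumes Feature derives PartialEq, which assert_eq! requires):

#[test]
fn legacy_alias_resolves() {
    // The alias table maps legacy config keys onto Feature variants.
    assert_eq!(feature_for_key("include_plan_tool"), Some(Feature::PlanTool));
    assert_eq!(feature_for_key("no_such_key"), None);
}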

@@ -55,9 +55,9 @@ pub(crate) fn feature_for_key(key: &str) -> Option<Feature> {

#[derive(Debug, Default)]
pub struct LegacyFeatureToggles {
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub experimental_sandbox_command_assessment: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
@@ -68,18 +68,18 @@ pub struct LegacyFeatureToggles {

impl LegacyFeatureToggles {
    pub fn apply(self, features: &mut Features) {
        set_if_some(
            features,
            Feature::PlanTool,
            self.include_plan_tool,
            "include_plan_tool",
        );
        set_if_some(
            features,
            Feature::ApplyPatchFreeform,
            self.include_apply_patch_tool,
            "include_apply_patch_tool",
        );
        set_if_some(
            features,
            Feature::SandboxCommandAssessment,
            self.experimental_sandbox_command_assessment,
            "experimental_sandbox_command_assessment",
        );
        set_if_some(
            features,
            Feature::ApplyPatchFreeform,
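
The hunk above is truncated in this view. As a hedged illustration of how the toggles feed into Features (editorial addition; the field defaults come from the Default derive shown above):

fn plan_tool_from_legacy_toggle() -> Features {
    let mut features = Features::with_defaults();
    // A legacy `include_plan_tool = true` config flips Feature::PlanTool on.
    LegacyFeatureToggles {
        include_plan_tool: Some(true),
        ..Default::default()
    }
    .apply(&mut features);
    features
}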

@@ -4,6 +4,9 @@ use thiserror::Error;
pub enum FunctionCallError {
    #[error("{0}")]
    RespondToModel(String),
    #[error("{0}")]
    #[allow(dead_code)] // TODO(jif) fix in a follow-up PR
    Denied(String),
    #[error("LocalShellCall without call_id or id")]
    MissingLocalShellCallId,
    #[error("Fatal error: {0}")]

@@ -40,7 +40,7 @@
where
}

/// Converts the sandbox policy into the CLI invocation for `codex-linux-sandbox`.
fn create_linux_sandbox_command_args(
pub(crate) fn create_linux_sandbox_command_args(
    command: Vec<String>,
    sandbox_policy: &SandboxPolicy,
    sandbox_policy_cwd: &Path,
@@ -56,7 +56,9 @@ fn create_linux_sandbox_command_args(
        serde_json::to_string(sandbox_policy).expect("Failed to serialize SandboxPolicy to JSON");

    let mut linux_cmd: Vec<String> = vec![
        "--sandbox-policy-cwd".to_string(),
        sandbox_policy_cwd,
        "--sandbox-policy".to_string(),
        sandbox_policy_json,
        // Separator so that command arguments starting with `-` are not parsed as
        // options of the helper itself.

@@ -13,7 +13,6 @@ mod client
mod client_common;
pub mod codex;
mod codex_conversation;
pub mod token_data;
pub use codex_conversation::CodexConversation;
mod command_safety;
pub mod config;
@@ -26,9 +25,7 @@ pub mod custom_prompts;
mod environment_context;
pub mod error;
pub mod exec;
mod exec_command;
pub mod exec_env;
pub mod executor;
pub mod features;
mod flags;
pub mod git_info;
@@ -39,6 +36,9 @@ mod mcp_tool_call;
mod message_history;
mod model_provider_info;
pub mod parse_command;
mod response_processing;
pub mod sandboxing;
pub mod token_data;
mod truncate;
mod unified_exec;
mod user_instructions;
@@ -59,7 +59,6 @@ pub use auth::CodexAuth;
pub mod default_client;
pub mod model_family;
mod openai_model_info;
mod openai_tools;
pub mod project_doc;
mod rollout;
pub(crate) mod safety;
@@ -100,12 +99,10 @@ pub use client_common::REVIEW_PROMPT;
pub use client_common::ResponseEvent;
pub use client_common::ResponseStream;
pub use codex::compact::content_items_to_text;
pub use codex::compact::is_session_prefix_message;
pub use codex_protocol::models::ContentItem;
pub use codex_protocol::models::LocalShellAction;
pub use codex_protocol::models::LocalShellExecAction;
pub use codex_protocol::models::LocalShellStatus;
pub use codex_protocol::models::ReasoningItemContent;
pub use codex_protocol::models::ResponseItem;

pub use event_mapping::parse_turn_item;
pub mod otel_init;

@@ -10,10 +10,16 @@ use tracing::warn;
use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;

#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
    pub config: McpServerConfig,
    pub auth_status: McpAuthStatus,
}

pub async fn compute_auth_statuses<'a, I>(
    servers: I,
    store_mode: OAuthCredentialsStoreMode,
) -> HashMap<String, McpAuthStatus>
) -> HashMap<String, McpAuthStatusEntry>
where
    I: IntoIterator<Item = (&'a String, &'a McpServerConfig)>,
{
@@ -21,14 +27,18 @@ where
        let name = name.clone();
        let config = config.clone();
        async move {
            let status = match compute_auth_status(&name, &config, store_mode).await {
            let auth_status = match compute_auth_status(&name, &config, store_mode).await {
                Ok(status) => status,
                Err(error) => {
                    warn!("failed to determine auth status for MCP server `{name}`: {error:?}");
                    McpAuthStatus::Unsupported
                }
            };
            (name, status)
            let entry = McpAuthStatusEntry {
                config,
                auth_status,
            };
            (name, entry)
        }
    });
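
A hedged consumer sketch (editorial addition; the server map and store_mode come from the caller, and Debug output for McpAuthStatus is assumed):

async fn print_statuses(
    servers: &std::collections::HashMap<String, McpServerConfig>,
    store_mode: OAuthCredentialsStoreMode,
) {
    for (name, entry) in compute_auth_statuses(servers.iter(), store_mode).await {
        // Each entry now carries the server config alongside its auth status.
        println!("{name}: {:?}", entry.auth_status);
    }
}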

@@ -45,11 +55,15 @@ async fn compute_auth_status(
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers,
        } => {
            determine_streamable_http_auth_status(
                server_name,
                url,
                bearer_token_env_var.as_deref(),
                http_headers.clone(),
                env_http_headers.clone(),
                store_mode,
            )
            .await

@@ -1,6 +1,6 @@
//! Connection manager for Model Context Protocol (MCP) servers.
//!
//! The [`McpConnectionManager`] owns one [`codex_mcp_client::McpClient`] per
//! The [`McpConnectionManager`] owns one [`codex_rmcp_client::RmcpClient`] per
//! configured server (keyed by the *server name*). It offers convenience
//! helpers to query the available tools across *all* servers and returns them
//! in a single aggregated map using the fully-qualified tool name
@@ -16,11 +16,18 @@ use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_mcp_client::McpClient;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::RmcpClient;
use mcp_types::ClientCapabilities;
use mcp_types::Implementation;
use mcp_types::ListResourceTemplatesRequestParams;
use mcp_types::ListResourceTemplatesResult;
use mcp_types::ListResourcesRequestParams;
use mcp_types::ListResourcesResult;
use mcp_types::ReadResourceRequestParams;
use mcp_types::ReadResourceResult;
use mcp_types::Resource;
use mcp_types::ResourceTemplate;
use mcp_types::Tool;

use serde_json::json;
@@ -42,7 +49,7 @@ const MCP_TOOL_NAME_DELIMITER: &str = "__";
const MAX_TOOL_NAME_LENGTH: usize = 64;

/// Default timeout for initializing MCP server & initially listing tools.
const DEFAULT_STARTUP_TIMEOUT: Duration = Duration::from_secs(10);
pub const DEFAULT_STARTUP_TIMEOUT: Duration = Duration::from_secs(10);

/// Default timeout for individual tool calls.
const DEFAULT_TOOL_TIMEOUT: Duration = Duration::from_secs(60);
@@ -56,8 +63,8 @@ fn qualify_tools(tools: Vec<ToolInfo>) -> HashMap<String, ToolInfo> {
    let mut qualified_tools = HashMap::new();
    for tool in tools {
        let mut qualified_name = format!(
            "{}{}{}",
            tool.server_name, MCP_TOOL_NAME_DELIMITER, tool.tool_name
            "mcp{}{}{}{}",
            MCP_TOOL_NAME_DELIMITER, tool.server_name, MCP_TOOL_NAME_DELIMITER, tool.tool_name
        );
        if qualified_name.len() > MAX_TOOL_NAME_LENGTH {
            let mut hasher = Sha1::new();
@@ -90,78 +97,12 @@ struct ToolInfo {
}

struct ManagedClient {
    client: McpClientAdapter,
    client: Arc<RmcpClient>,
    startup_timeout: Duration,
    tool_timeout: Option<Duration>,
}

#[derive(Clone)]
enum McpClientAdapter {
    Legacy(Arc<McpClient>),
    Rmcp(Arc<RmcpClient>),
}

impl McpClientAdapter {
    async fn new_stdio_client(
        use_rmcp_client: bool,
        program: OsString,
        args: Vec<OsString>,
        env: Option<HashMap<String, String>>,
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
    ) -> Result<Self> {
        if use_rmcp_client {
            let client = Arc::new(RmcpClient::new_stdio_client(program, args, env).await?);
            client.initialize(params, Some(startup_timeout)).await?;
            Ok(McpClientAdapter::Rmcp(client))
        } else {
            let client = Arc::new(McpClient::new_stdio_client(program, args, env).await?);
            client.initialize(params, Some(startup_timeout)).await?;
            Ok(McpClientAdapter::Legacy(client))
        }
    }

    async fn new_streamable_http_client(
        server_name: String,
        url: String,
        bearer_token: Option<String>,
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
        store_mode: OAuthCredentialsStoreMode,
    ) -> Result<Self> {
        let client = Arc::new(
            RmcpClient::new_streamable_http_client(&server_name, &url, bearer_token, store_mode)
                .await?,
        );
        client.initialize(params, Some(startup_timeout)).await?;
        Ok(McpClientAdapter::Rmcp(client))
    }

    async fn list_tools(
        &self,
        params: Option<mcp_types::ListToolsRequestParams>,
        timeout: Option<Duration>,
    ) -> Result<mcp_types::ListToolsResult> {
        match self {
            McpClientAdapter::Legacy(client) => client.list_tools(params, timeout).await,
            McpClientAdapter::Rmcp(client) => client.list_tools(params, timeout).await,
        }
    }

    async fn call_tool(
        &self,
        name: String,
        arguments: Option<serde_json::Value>,
        timeout: Option<Duration>,
    ) -> Result<mcp_types::CallToolResult> {
        match self {
            McpClientAdapter::Legacy(client) => client.call_tool(name, arguments, timeout).await,
            McpClientAdapter::Rmcp(client) => client.call_tool(name, arguments, timeout).await,
        }
    }
}

/// A thin wrapper around a set of running [`McpClient`] instances.
/// A thin wrapper around a set of running [`RmcpClient`] instances.
#[derive(Default)]
pub(crate) struct McpConnectionManager {
    /// Server-name -> client instance.
@@ -172,10 +113,13 @@ pub(crate) struct McpConnectionManager {

    /// Fully qualified tool name -> tool instance.
    tools: HashMap<String, ToolInfo>,

    /// Server-name -> configured tool filters.
    tool_filters: HashMap<String, ToolFilter>,
}

impl McpConnectionManager {
    /// Spawn a [`McpClient`] for each configured server.
    /// Spawn a [`RmcpClient`] for each configured server.
    ///
    /// * `mcp_servers` – Map loaded from the user configuration where *keys*
    ///   are human-readable server identifiers and *values* are the spawn
@@ -185,7 +129,6 @@ impl McpConnectionManager {
    /// user should be informed about these errors.
    pub async fn new(
        mcp_servers: HashMap<String, McpServerConfig>,
        use_rmcp_client: bool,
        store_mode: OAuthCredentialsStoreMode,
    ) -> Result<(Self, ClientStartErrors)> {
        // Early exit if no servers are configured.
@@ -196,6 +139,7 @@ impl McpConnectionManager {
        // Launch all configured servers concurrently.
        let mut join_set = JoinSet::new();
        let mut errors = ClientStartErrors::new();
        let mut tool_filters: HashMap<String, ToolFilter> = HashMap::new();

        for (server_name, cfg) in mcp_servers {
            // Validate server name before spawning
@@ -208,11 +152,13 @@ impl McpConnectionManager {
            }

            if !cfg.enabled {
                tool_filters.insert(server_name, ToolFilter::from_config(&cfg));
                continue;
            }

            let startup_timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
            let tool_timeout = cfg.tool_timeout_sec.unwrap_or(DEFAULT_TOOL_TIMEOUT);
            tool_filters.insert(server_name.clone(), ToolFilter::from_config(&cfg));

            let resolved_bearer_token = match &cfg.transport {
                McpServerTransportConfig::StreamableHttp {
@@ -245,35 +191,62 @@ impl McpConnectionManager {
                protocol_version: mcp_types::MCP_SCHEMA_VERSION.to_owned(),
            };

            let client = match transport {
                McpServerTransportConfig::Stdio { command, args, env } => {
            let resolved_bearer_token = resolved_bearer_token.unwrap_or_default();
            let client_result = match transport {
                McpServerTransportConfig::Stdio {
                    command,
                    args,
                    env,
                    env_vars,
                    cwd,
                } => {
                    let command_os: OsString = command.into();
                    let args_os: Vec<OsString> = args.into_iter().map(Into::into).collect();
                    McpClientAdapter::new_stdio_client(
                        use_rmcp_client,
                        command_os,
                        args_os,
                        env,
                        params,
                        startup_timeout,
                    )
                    .await
                    match RmcpClient::new_stdio_client(command_os, args_os, env, &env_vars, cwd)
                        .await
                    {
                        Ok(client) => {
                            let client = Arc::new(client);
                            client
                                .initialize(params.clone(), Some(startup_timeout))
                                .await
                                .map(|_| client)
                        }
                        Err(err) => Err(err.into()),
                    }
                }
                McpServerTransportConfig::StreamableHttp { url, .. } => {
                    McpClientAdapter::new_streamable_http_client(
                        server_name.clone(),
                        url,
                        resolved_bearer_token.unwrap_or_default(),
                        params,
                        startup_timeout,
                McpServerTransportConfig::StreamableHttp {
                    url,
                    http_headers,
                    env_http_headers,
                    ..
                } => {
                    match RmcpClient::new_streamable_http_client(
                        &server_name,
                        &url,
                        resolved_bearer_token.clone(),
                        http_headers,
                        env_http_headers,
                        store_mode,
                    )
                    .await
                    {
                        Ok(client) => {
                            let client = Arc::new(client);
                            client
                                .initialize(params.clone(), Some(startup_timeout))
                                .await
                                .map(|_| client)
                        }
                        Err(err) => Err(err),
                    }
                }
            }
            .map(|c| (c, startup_timeout));
            };

            ((server_name, tool_timeout), client)
            (
                (server_name, tool_timeout),
                client_result.map(|client| (client, startup_timeout)),
            )
        });
    }

@@ -313,12 +286,20 @@ impl McpConnectionManager {
            }
        };

        let tools = qualify_tools(all_tools);
        let filtered_tools = filter_tools(all_tools, &tool_filters);
        let tools = qualify_tools(filtered_tools);

        Ok((Self { clients, tools }, errors))
        Ok((
            Self {
                clients,
                tools,
                tool_filters,
            },
            errors,
        ))
    }

    /// Returns a single map that contains **all** tools. Each key is the
    /// Returns a single map that contains all tools. Each key is the
    /// fully-qualified name for the tool.
    pub fn list_all_tools(&self) -> HashMap<String, Tool> {
        self.tools
@@ -327,6 +308,133 @@ impl McpConnectionManager {
            .collect()
    }

    /// Returns a single map that contains all resources. Each key is the
    /// server name and the value is a vector of resources.
    pub async fn list_all_resources(&self) -> HashMap<String, Vec<Resource>> {
        let mut join_set = JoinSet::new();

        for (server_name, managed_client) in &self.clients {
            let server_name_cloned = server_name.clone();
            let client_clone = managed_client.client.clone();
            let timeout = managed_client.tool_timeout;

            join_set.spawn(async move {
                let mut collected: Vec<Resource> = Vec::new();
                let mut cursor: Option<String> = None;

                loop {
                    let params = cursor.as_ref().map(|next| ListResourcesRequestParams {
                        cursor: Some(next.clone()),
                    });
                    let response = match client_clone.list_resources(params, timeout).await {
                        Ok(result) => result,
                        Err(err) => return (server_name_cloned, Err(err)),
                    };

                    collected.extend(response.resources);

                    match response.next_cursor {
                        Some(next) => {
                            if cursor.as_ref() == Some(&next) {
                                return (
                                    server_name_cloned,
                                    Err(anyhow!("resources/list returned duplicate cursor")),
                                );
                            }
                            cursor = Some(next);
                        }
                        None => return (server_name_cloned, Ok(collected)),
                    }
                }
            });
        }

        let mut aggregated: HashMap<String, Vec<Resource>> = HashMap::new();

        while let Some(join_res) = join_set.join_next().await {
            match join_res {
                Ok((server_name, Ok(resources))) => {
                    aggregated.insert(server_name, resources);
                }
                Ok((server_name, Err(err))) => {
                    warn!("Failed to list resources for MCP server '{server_name}': {err:#}");
                }
                Err(err) => {
                    warn!("Task panic when listing resources for MCP server: {err:#}");
                }
            }
        }

        aggregated
    }

    /// Returns a single map that contains all resource templates. Each key is the
    /// server name and the value is a vector of resource templates.
    pub async fn list_all_resource_templates(&self) -> HashMap<String, Vec<ResourceTemplate>> {
        let mut join_set = JoinSet::new();

        for (server_name, managed_client) in &self.clients {
            let server_name_cloned = server_name.clone();
            let client_clone = managed_client.client.clone();
            let timeout = managed_client.tool_timeout;

            join_set.spawn(async move {
                let mut collected: Vec<ResourceTemplate> = Vec::new();
                let mut cursor: Option<String> = None;

                loop {
                    let params = cursor
                        .as_ref()
                        .map(|next| ListResourceTemplatesRequestParams {
                            cursor: Some(next.clone()),
                        });
                    let response = match client_clone.list_resource_templates(params, timeout).await
                    {
                        Ok(result) => result,
                        Err(err) => return (server_name_cloned, Err(err)),
                    };

                    collected.extend(response.resource_templates);

                    match response.next_cursor {
                        Some(next) => {
                            if cursor.as_ref() == Some(&next) {
                                return (
                                    server_name_cloned,
                                    Err(anyhow!(
                                        "resources/templates/list returned duplicate cursor"
                                    )),
                                );
                            }
                            cursor = Some(next);
                        }
                        None => return (server_name_cloned, Ok(collected)),
                    }
                }
            });
        }

        let mut aggregated: HashMap<String, Vec<ResourceTemplate>> = HashMap::new();

        while let Some(join_res) = join_set.join_next().await {
            match join_res {
                Ok((server_name, Ok(templates))) => {
                    aggregated.insert(server_name, templates);
                }
                Ok((server_name, Err(err))) => {
                    warn!(
                        "Failed to list resource templates for MCP server '{server_name}': {err:#}"
                    );
                }
                Err(err) => {
                    warn!("Task panic when listing resource templates for MCP server: {err:#}");
                }
            }
        }

        aggregated
    }

    /// Invoke the tool indicated by the (server, tool) pair.
    pub async fn call_tool(
        &self,
@@ -334,11 +442,18 @@ impl McpConnectionManager {
        tool: &str,
        arguments: Option<serde_json::Value>,
    ) -> Result<mcp_types::CallToolResult> {
        if let Some(filter) = self.tool_filters.get(server)
            && !filter.allows(tool)
        {
            return Err(anyhow!(
                "tool '{tool}' is disabled for MCP server '{server}'"
            ));
        }
        let managed = self
            .clients
            .get(server)
            .ok_or_else(|| anyhow!("unknown MCP server '{server}'"))?;
        let client = managed.client.clone();
        let client = &managed.client;
        let timeout = managed.tool_timeout;

        client
@@ -347,6 +462,64 @@ impl McpConnectionManager {
            .with_context(|| format!("tool call failed for `{server}/{tool}`"))
    }
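
A hedged illustration of the new fail-fast path (editorial addition; the manager value would come from McpConnectionManager::new, and the error text matches the anyhow! message above):

async fn demo_filtered_call(manager: &McpConnectionManager) {
    // A tool blocked by its server's filter is rejected before any RPC is made.
    let err = manager
        .call_tool("server1", "tool_b", None)
        .await
        .expect_err("tool_b is disabled by the server1 filter in this scenario");
    assert!(err.to_string().contains("is disabled for MCP server"));
}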

    /// List resources from the specified server.
    pub async fn list_resources(
        &self,
        server: &str,
        params: Option<ListResourcesRequestParams>,
    ) -> Result<ListResourcesResult> {
        let managed = self
            .clients
            .get(server)
            .ok_or_else(|| anyhow!("unknown MCP server '{server}'"))?;
        let client = managed.client.clone();
        let timeout = managed.tool_timeout;

        client
            .list_resources(params, timeout)
            .await
            .with_context(|| format!("resources/list failed for `{server}`"))
    }

    /// List resource templates from the specified server.
    pub async fn list_resource_templates(
        &self,
        server: &str,
        params: Option<ListResourceTemplatesRequestParams>,
    ) -> Result<ListResourceTemplatesResult> {
        let managed = self
            .clients
            .get(server)
            .ok_or_else(|| anyhow!("unknown MCP server '{server}'"))?;
        let client = managed.client.clone();
        let timeout = managed.tool_timeout;

        client
            .list_resource_templates(params, timeout)
            .await
            .with_context(|| format!("resources/templates/list failed for `{server}`"))
    }

    /// Read a resource from the specified server.
    pub async fn read_resource(
        &self,
        server: &str,
        params: ReadResourceRequestParams,
    ) -> Result<ReadResourceResult> {
        let managed = self
            .clients
            .get(server)
            .ok_or_else(|| anyhow!("unknown MCP server '{server}'"))?;
        let client = managed.client.clone();
        let timeout = managed.tool_timeout;
        let uri = params.uri.clone();

        client
            .read_resource(params, timeout)
            .await
            .with_context(|| format!("resources/read failed for `{server}` ({uri})"))
    }

    pub fn parse_tool_name(&self, tool_name: &str) -> Option<(String, String)> {
        self.tools
            .get(tool_name)
@@ -354,6 +527,52 @@ impl McpConnectionManager {
    }
}

/// A tool is allowed to be used if both are true:
/// 1. enabled is None (no allowlist is set) or the tool is explicitly enabled.
/// 2. The tool is not explicitly disabled.
#[derive(Default, Clone)]
struct ToolFilter {
    enabled: Option<HashSet<String>>,
    disabled: HashSet<String>,
}

impl ToolFilter {
    fn from_config(cfg: &McpServerConfig) -> Self {
        let enabled = cfg
            .enabled_tools
            .as_ref()
            .map(|tools| tools.iter().cloned().collect::<HashSet<_>>());
        let disabled = cfg
            .disabled_tools
            .as_ref()
            .map(|tools| tools.iter().cloned().collect::<HashSet<_>>())
            .unwrap_or_default();

        Self { enabled, disabled }
    }

    fn allows(&self, tool_name: &str) -> bool {
        if let Some(enabled) = &self.enabled
            && !enabled.contains(tool_name)
        {
            return false;
        }

        !self.disabled.contains(tool_name)
    }
}

fn filter_tools(tools: Vec<ToolInfo>, filters: &HashMap<String, ToolFilter>) -> Vec<ToolInfo> {
    tools
        .into_iter()
        .filter(|tool| {
            filters
                .get(&tool.server_name)
                .is_none_or(|filter| filter.allows(&tool.tool_name))
        })
        .collect()
}

fn resolve_bearer_token(
    server_name: &str,
    bearer_token_env_var: Option<&str>,
@@ -382,7 +601,7 @@ fn resolve_bearer_token(
}

/// Query every server for its available tools and return a single map that
/// contains **all** tools. Each key is the fully-qualified name for the tool.
/// contains all tools. Each key is the fully-qualified name for the tool.
async fn list_all_tools(clients: &HashMap<String, ManagedClient>) -> Result<Vec<ToolInfo>> {
    let mut join_set = JoinSet::new();

@@ -446,6 +665,7 @@ fn is_valid_mcp_server_name(server_name: &str) -> bool {
mod tests {
    use super::*;
    use mcp_types::ToolInputSchema;
    use std::collections::HashSet;

    fn create_test_tool(server_name: &str, tool_name: &str) -> ToolInfo {
        ToolInfo {
@@ -476,8 +696,8 @@ mod tests {
        let qualified_tools = qualify_tools(tools);

        assert_eq!(qualified_tools.len(), 2);
        assert!(qualified_tools.contains_key("server1__tool1"));
        assert!(qualified_tools.contains_key("server1__tool2"));
        assert!(qualified_tools.contains_key("mcp__server1__tool1"));
        assert!(qualified_tools.contains_key("mcp__server1__tool2"));
    }

    #[test]
@@ -491,7 +711,7 @@ mod tests {

        // Only the first tool should remain, the second is skipped
        assert_eq!(qualified_tools.len(), 1);
        assert!(qualified_tools.contains_key("server1__duplicate_tool"));
        assert!(qualified_tools.contains_key("mcp__server1__duplicate_tool"));
    }

    #[test]
@@ -519,13 +739,84 @@ mod tests {
        assert_eq!(keys[0].len(), 64);
        assert_eq!(
            keys[0],
            "my_server__extremely_lena02e507efc5a9de88637e436690364fd4219e4ef"
            "mcp__my_server__extremel119a2b97664e41363932dc84de21e2ff1b93b3e9"
        );

        assert_eq!(keys[1].len(), 64);
        assert_eq!(
            keys[1],
            "my_server__yet_another_e1c3987bd9c50b826cbe1687966f79f0c602d19ca"
            "mcp__my_server__yet_anot419a82a89325c1b477274a41f8c65ea5f3a7f341"
        );
    }

    #[test]
    fn tool_filter_allows_by_default() {
        let filter = ToolFilter::default();

        assert!(filter.allows("any"));
    }

    #[test]
    fn tool_filter_applies_enabled_list() {
        let filter = ToolFilter {
            enabled: Some(HashSet::from(["allowed".to_string()])),
            disabled: HashSet::new(),
        };

        assert!(filter.allows("allowed"));
        assert!(!filter.allows("denied"));
    }

    #[test]
    fn tool_filter_applies_disabled_list() {
        let filter = ToolFilter {
            enabled: None,
            disabled: HashSet::from(["blocked".to_string()]),
        };

        assert!(!filter.allows("blocked"));
        assert!(filter.allows("open"));
    }

    #[test]
    fn tool_filter_applies_enabled_then_disabled() {
        let filter = ToolFilter {
            enabled: Some(HashSet::from(["keep".to_string(), "remove".to_string()])),
            disabled: HashSet::from(["remove".to_string()]),
        };

        assert!(filter.allows("keep"));
        assert!(!filter.allows("remove"));
        assert!(!filter.allows("unknown"));
    }

    #[test]
    fn filter_tools_applies_per_server_filters() {
        let tools = vec![
            create_test_tool("server1", "tool_a"),
            create_test_tool("server1", "tool_b"),
            create_test_tool("server2", "tool_a"),
        ];
        let mut filters = HashMap::new();
        filters.insert(
            "server1".to_string(),
            ToolFilter {
                enabled: Some(HashSet::from(["tool_a".to_string(), "tool_b".to_string()])),
                disabled: HashSet::from(["tool_b".to_string()]),
            },
        );
        filters.insert(
            "server2".to_string(),
            ToolFilter {
                enabled: None,
                disabled: HashSet::from(["tool_a".to_string()]),
            },
        );

        let filtered = filter_tools(tools, &filters);

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].server_name, "server1");
        assert_eq!(filtered[0].tool_name, "tool_a");
    }
}
Some files were not shown because too many files have changed in this diff.