Mirror of https://github.com/openai/codex.git (synced 2026-02-06 17:03:42 +00:00)

Compare commits: 110 commits, `fix-cmd-ex`...`dev/shell-`
Commits in this comparison (abbreviated SHA1s):

```
5db3e5fba4 f01f2ec9ee 980886498c e743d251a7 788badd221 fbdedd9a06 5916153157 b46012e483
42683dadfb 65cb1a1b77 50a77dc138 557ac63094 131c384361 e2598f5094 78b2aeea55 082d2fa19a
7c7c7567d5 625f2208c4 5f1fab0e7c c07461e6f3 8b80a0a269 a47181e471 5beb6167c8 917f39ec12
a2fdfce02a 91b16b8682 183fc8e01a 9fba811764 db408b9e62 2eecc1a2e4 c76528ca1f bb47f2226f
c6ab92bc50 4c1a6f0ee0 361d43b969 2e81f1900d 2030b28083 e84e39940b e8905f6d20 316352be94
f8b30af6dc 039a4b070e c368c6aeea 0c647bc566 e30f65118d 1bd2d7a659 65d53fd4b1 b5349202e9
1b8cc8b625 8501b0b768 fe7eb18104 8c75ed39d5 fdb9fa301e 871d442b8e dbad5eeec6 4b4252210b
6582554926 649ce520c4 667e841d3e 63e1ef25af 229d18f4d2 4a1a7f9685 86c149ae8e 05f0b4f590
d4eda9d10b d7953aed74 2ab1650d4d 79aa83ee39 c4ebe4b078 1a89f70015 62474a30e8 9a10e80ab7
9b538a8672 95af417923 fff576cf98 1575f0504c edf4c3f627 d40a6b7f73 3a22018edd fe54c216a3
cb6584de46 7e068e1094 d3187dbc17 dc2f26f7b5 553db8def1 ab63a47173 e658c6c73b 1e0e553304
07b7d28937 6ee7fbcfff 5f3a0473f1 2eda75a8ee e1f098b9b7 e5e13479d0 7bc3ca9e40 4d8b71d412
b484672961 a1ee10b438 dccce34d84 f5945d7c03 5fcf923c19 0c7efa0cfd d5853d9c47 d9118c04bf
91e65ac0ce 1ac4fb45d2 07b8bdfbf1 0f22067242 d7f8b97541 611e00c862
```
`.github/pull_request_template.md` (vendored, 2 lines changed)

```diff
@@ -4,3 +4,5 @@ Before opening this Pull Request, please read the dedicated "Contributing" markd
 https://github.com/openai/codex/blob/main/docs/contributing.md

 If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.

 Include a link to a bug report or enhancement request.
```
`.github/workflows/ci.yml` (vendored, 2 lines changed)

```diff
@@ -46,7 +46,7 @@ jobs:
           echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"

       - name: Upload staged npm package artifact
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: codex-npm-staging
           path: ${{ steps.stage_npm_package.outputs.pack_output }}
```
`.github/workflows/cla.yml` (vendored, 23 lines changed)

```diff
@@ -16,10 +16,27 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: contributor-assistant/github-action@v2.6.1
+        # Run on close only if the PR was merged. This will lock the PR to preserve
+        # the CLA agreement. We don't want to lock PRs that have been closed without
+        # merging because the contributor may want to respond with additional comments.
+        # This action has a "lock-pullrequest-aftermerge" option that can be set to false,
+        # but that would unconditionally skip locking even in cases where the PR was merged.
         if: |
-          github.event_name == 'pull_request_target' ||
-          github.event.comment.body == 'recheck' ||
-          github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+          (
+            github.event_name == 'pull_request_target' &&
+            (
+              github.event.action == 'opened' ||
+              github.event.action == 'synchronize' ||
+              (github.event.action == 'closed' && github.event.pull_request.merged == true)
+            )
+          ) ||
+          (
+            github.event_name == 'issue_comment' &&
+            (
+              github.event.comment.body == 'recheck' ||
+              github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+            )
+          )
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
```
`.github/workflows/codespell.yml` (vendored, 2 lines changed)

```diff
@@ -22,6 +22,6 @@ jobs:
       - name: Annotate locations with typos
         uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
       - name: Codespell
-        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
+        uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
         with:
           ignore_words_file: .codespellignore
```
`.github/workflows/issue-labeler.yml` (vendored, 37 lines changed)

```diff
@@ -26,21 +26,36 @@ jobs:
           prompt: |
             You are an assistant that reviews GitHub issues for the repository.

-            Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
+            Your job is to choose the most appropriate labels for the issue described later in this prompt.
             Follow these rules:
             - Only pick labels out of the list below.
             - Prefer a small set of precise labels over many broad ones.

             Labels to apply:
             - Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure.
               1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
               2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
-              3. extension — VS Code (or other IDE) extension-specific issues.
-              4. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
-              5. mcp — Topics involving Model Context Protocol servers/clients.
-              6. codex-web — Issues targeting the Codex web UI/Cloud experience.
-              8. azure — Problems or requests tied to Azure OpenAI deployments.
-              9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
-              10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
+              3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
+
+            - If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to.
+              1. CLI — the Codex command line interface.
+              2. extension — VS Code (or other IDE) extension-specific issues.
+              3. codex-web — Issues targeting the Codex web UI/Cloud experience.
+              4. github-action — Issues with the Codex GitHub action.
+              5. iOS — Issues with the Codex iOS app.
+
+            - Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones.
+              1. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
+              2. mcp — Topics involving Model Context Protocol servers/clients.
+              3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server.
+              4. azure — Problems or requests tied to Azure OpenAI deployments.
+              5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
+              6. code-review — Issues related to the code review feature or functionality.
+              7. auth - Problems related to authentication, login, or access tokens.
+              8. codex-exec - Problems related to the "codex exec" command or functionality.
+              9. context-management - Problems related to compaction, context windows, or available context reporting.
+              10. custom-model - Problems that involve using custom model providers, local models, or OSS models.
+              11. rate-limits - Problems related to token limits, rate limits, or token usage reporting.
+              12. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions.
+              13. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs.
+              14. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues.

             Issue number: ${{ github.event.issue.number }}
```
`.github/workflows/rust-ci.yml` (vendored, 10 lines changed)

```diff
@@ -76,7 +76,7 @@ jobs:
     steps:
       - uses: actions/checkout@v5
       - uses: dtolnay/rust-toolchain@1.90
-      - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+      - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: cargo-shear
          version: 1.5.1
@@ -170,7 +170,7 @@ jobs:

      # Install and restore sccache cache
      - name: Install sccache
-        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: sccache
          version: 0.7.5
@@ -228,7 +228,7 @@ jobs:

      - name: Install cargo-chef
        if: ${{ matrix.profile == 'release' }}
-        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: cargo-chef
          version: 0.1.71
@@ -370,7 +370,7 @@ jobs:
            cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

      - name: Install sccache
-        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: sccache
          version: 0.7.5
@@ -399,7 +399,7 @@ jobs:
            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

-      - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+      - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: nextest
          version: 0.9.103
```
`.github/workflows/rust-release.yml` (vendored, 2 lines changed)

```diff
@@ -350,7 +350,7 @@ jobs:
            fi
          fi

-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.target }}
          # Upload the per-binary .zst files as well as the new .tar.gz
```
```diff
@@ -84,6 +84,7 @@ If you don’t have the tool:
 - Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
 - `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
 - Build SSE payloads with the provided `ev_*` constructors and the `sse(...)` helper.
 - Prefer `wait_for_event` over `wait_for_event_with_timeout`.

 - Typical pattern:
```
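The "typical pattern" itself is not shown in this view. The sketch below illustrates roughly how the helpers named above (`ResponseMock::single_request()`, the `ev_*` constructors, `sse(...)`, `wait_for_event`, and the `ResponsesRequest` accessors) are usually combined; every other identifier, and every exact signature, is an assumption rather than code taken from the repository.

```rust
// Hedged sketch only: ev_assistant_message / ev_completed, the conversation
// harness, and the exact helper signatures are assumptions.
#[tokio::test]
async fn submits_one_request_and_finishes_the_turn() -> anyhow::Result<()> {
    // The test should issue exactly one POST, so use the single-request mock.
    let mock = ResponseMock::single_request();

    // Stream back an SSE body assembled from the ev_* constructors.
    mock.mount(sse(vec![
        ev_assistant_message("all done"), // assumed constructor name
        ev_completed("response-1"),       // assumed constructor name
    ]))
    .await;

    // Drive the code under test against the mock (harness name is a placeholder).
    let codex = start_test_conversation(&mock).await?;
    codex.submit_user_message("hello").await?;

    // Prefer wait_for_event over wait_for_event_with_timeout, as the notes above say.
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    // Assert against the structured request instead of digging through raw JSON.
    let request = mock.single_request();
    assert_eq!(request.path(), "/v1/responses"); // path value is an assumption
    assert!(request.body_json()["input"].is_array());
    Ok(())
}
```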
```diff
@@ -75,11 +75,13 @@ Codex CLI supports a rich set of configuration options, with preferences stored

 - [**Getting started**](./docs/getting-started.md)
   - [CLI usage](./docs/getting-started.md#cli-usage)
   - [Slash Commands](./docs/slash_commands.md)
   - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
   - [Example prompts](./docs/getting-started.md#example-prompts)
   - [Custom prompts](./docs/prompts.md)
   - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
 - [**Configuration**](./docs/config.md)
   - [Example config](./docs/example-config.md)
 - [**Sandbox & approvals**](./docs/sandbox.md)
 - [**Authentication**](./docs/authentication.md)
   - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
```
`codex-rs/Cargo.lock` (generated, 102 lines changed)

```diff
@@ -186,9 +186,11 @@ dependencies = [
  "chrono",
  "codex-app-server-protocol",
  "codex-core",
  "codex-protocol",
  "serde",
  "serde_json",
  "tokio",
  "uuid",
  "wiremock",
 ]
@@ -235,46 +237,44 @@ dependencies = [
 [[package]]
 name = "askama"
-version = "0.12.1"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b79091df18a97caea757e28cd2d5fda49c6cd4bd01ddffd7ff01ace0c0ad2c28"
+checksum = "f75363874b771be265f4ffe307ca705ef6f3baa19011c149da8674a87f1b75c4"
 dependencies = [
  "askama_derive",
- "askama_escape",
- "humansize",
- "num-traits",
+ "itoa",
+ "percent-encoding",
+ "serde",
+ "serde_json",
 ]

 [[package]]
 name = "askama_derive"
-version = "0.12.5"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19fe8d6cb13c4714962c072ea496f3392015f0989b1a2847bb4b2d9effd71d83"
+checksum = "129397200fe83088e8a68407a8e2b1f826cf0086b21ccdb866a722c8bcd3a94f"
 dependencies = [
  "askama_parser",
- "basic-toml",
- "mime",
- "mime_guess",
+ "memchr",
  "proc-macro2",
  "quote",
+ "rustc-hash 2.1.1",
+ "serde",
+ "serde_derive",
  "syn 2.0.104",
 ]

-[[package]]
-name = "askama_escape"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
-
 [[package]]
 name = "askama_parser"
-version = "0.2.1"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acb1161c6b64d1c3d83108213c2a2533a342ac225aabd0bda218278c2ddb00c0"
+checksum = "d6ab5630b3d5eaf232620167977f95eb51f3432fc76852328774afbd242d4358"
 dependencies = [
- "nom",
+ "memchr",
+ "serde",
+ "serde_derive",
+ "winnow",
 ]

 [[package]]
@@ -871,6 +871,7 @@ dependencies = [
  "anyhow",
  "clap",
+ "codex-protocol",
  "mcp-types",
  "paste",
  "pretty_assertions",
  "schemars 0.8.22",
@@ -978,7 +979,6 @@ dependencies = [
  "codex-mcp-server",
  "codex-process-hardening",
  "codex-protocol",
- "codex-protocol-ts",
  "codex-responses-api-proxy",
  "codex-rmcp-client",
  "codex-stdio-to-uds",
@@ -992,6 +992,7 @@ dependencies = [
  "supports-color",
  "tempfile",
  "tokio",
  "toml",
 ]
@@ -1361,16 +1362,6 @@ dependencies = [
  "uuid",
 ]

-[[package]]
-name = "codex-protocol-ts"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "clap",
- "codex-app-server-protocol",
- "ts-rs",
-]
-
 [[package]]
 name = "codex-responses-api-proxy"
 version = "0.0.0"
@@ -1448,8 +1439,10 @@ dependencies = [
  "codex-login",
  "codex-ollama",
  "codex-protocol",
  "codex-windows-sandbox",
  "color-eyre",
  "crossterm",
  "derive_more 2.0.1",
  "diffy",
  "dirs",
  "dunce",
@@ -1469,6 +1462,7 @@ dependencies = [
  "regex-lite",
  "serde",
  "serde_json",
  "serial_test",
  "shlex",
  "strum 0.27.2",
  "strum_macros 0.27.2",
@@ -1559,6 +1553,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "dirs-next",
  "dunce",
  "rand 0.8.5",
  "serde",
  "serde_json",
@@ -1652,6 +1647,15 @@ dependencies = [
  "unicode-segmentation",
 ]

+[[package]]
+name = "convert_case"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
+dependencies = [
+ "unicode-segmentation",
+]
+
 [[package]]
 name = "core-foundation"
 version = "0.9.4"
@@ -1750,8 +1754,7 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
 [[package]]
 name = "crossterm"
 version = "0.28.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
+source = "git+https://github.com/nornagon/crossterm?branch=nornagon%2Fcolor-query#87db8bfa6dc99427fd3b071681b07fc31c6ce995"
 dependencies = [
  "bitflags 2.10.0",
  "crossterm_winapi",
@@ -1998,7 +2001,7 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
 dependencies = [
- "convert_case",
+ "convert_case 0.6.0",
  "proc-macro2",
  "quote",
  "syn 2.0.104",
@@ -2011,6 +2014,7 @@ version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
 dependencies = [
+ "convert_case 0.7.1",
  "proc-macro2",
  "quote",
  "syn 2.0.104",
@@ -2874,15 +2878,6 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"

-[[package]]
-name = "humansize"
-version = "2.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6cb51c9a029ddc91b07a787f1d86b53ccfa49b0e86688c946ebe8d3555685dd7"
-dependencies = [
- "libm",
-]
-
 [[package]]
 name = "hyper"
 version = "1.7.0"
@@ -3526,12 +3521,6 @@ dependencies = [
  "pkg-config",
 ]

-[[package]]
-name = "libm"
-version = "0.2.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
-
 [[package]]
 name = "libredox"
 version = "0.1.6"
@@ -5008,9 +4997,9 @@ dependencies = [
 [[package]]
 name = "rmcp"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fdad1258f7259fdc0f2dfc266939c82c3b5d1fd72bcde274d600cdc27e60243"
+checksum = "e5947688160b56fb6c827e3c20a72c90392a1d7e9dec74749197aa1780ac42ca"
 dependencies = [
  "base64",
  "bytes",
@@ -5042,9 +5031,9 @@ dependencies = [
 [[package]]
 name = "rmcp-macros"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ede0589a208cc7ce81d1be68aa7e74b917fcd03c81528408bab0457e187dcd9b"
+checksum = "01263441d3f8635c628e33856c468b96ebbce1af2d3699ea712ca71432d4ee7a"
 dependencies = [
  "darling 0.21.3",
  "proc-macro2",
@@ -5685,6 +5674,12 @@ dependencies = [
  "digest",
 ]

+[[package]]
+name = "sha1_smol"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
+
 [[package]]
 name = "sha2"
 version = "0.10.9"
@@ -6872,6 +6867,7 @@ dependencies = [
  "getrandom 0.3.3",
  "js-sys",
  "serde",
+ "sha1_smol",
  "wasm-bindgen",
 ]
@@ -7780,9 +7776,9 @@ dependencies = [
 [[package]]
 name = "zeroize"
-version = "1.8.1"
+version = "1.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
 dependencies = [
  "zeroize_derive",
 ]
```
```diff
@@ -25,7 +25,6 @@ members = [
     "ollama",
     "process-hardening",
     "protocol",
-    "protocol-ts",
     "rmcp-client",
     "responses-api-proxy",
     "stdio-to-uds",
@@ -75,7 +74,6 @@ codex-ollama = { path = "ollama" }
 codex-otel = { path = "otel" }
 codex-process-hardening = { path = "process-hardening" }
 codex-protocol = { path = "protocol" }
-codex-protocol-ts = { path = "protocol-ts" }
 codex-responses-api-proxy = { path = "responses-api-proxy" }
 codex-rmcp-client = { path = "rmcp-client" }
 codex-stdio-to-uds = { path = "stdio-to-uds" }
@@ -87,7 +85,7 @@ codex-utils-pty = { path = "utils/pty" }
 codex-utils-readiness = { path = "utils/readiness" }
 codex-utils-string = { path = "utils/string" }
 codex-utils-tokenizer = { path = "utils/tokenizer" }
-codex-windows-sandbox = { path = "windows-sandbox" }
+codex-windows-sandbox = { path = "windows-sandbox-rs" }
 core_test_support = { path = "core/tests/common" }
 mcp-types = { path = "mcp-types" }
 mcp_test_support = { path = "mcp-server/tests/common" }
@@ -97,7 +95,7 @@ allocative = "0.3.3"
 ansi-to-tui = "7.0.0"
 anyhow = "1"
 arboard = "3"
-askama = "0.12"
+askama = "0.14"
 assert_cmd = "2"
 assert_matches = "1.5.0"
 async-channel = "2.3.1"
@@ -162,7 +160,7 @@ ratatui = "0.29.0"
 ratatui-macros = "0.6.0"
 regex-lite = "0.1.7"
 reqwest = "0.12"
-rmcp = { version = "0.8.3", default-features = false }
+rmcp = { version = "0.8.5", default-features = false }
 schemars = "0.8.22"
 seccompiler = "0.5.0"
 sentry = "0.34.0"
@@ -213,7 +211,7 @@ which = "6"
 wildmatch = "2.5.0"

 wiremock = "0.6"
-zeroize = "1.8.1"
+zeroize = "1.8.2"

 [workspace.lints]
 rust = {}
@@ -256,7 +254,12 @@ unwrap_used = "deny"
 # cargo-shear cannot see the platform-specific openssl-sys usage, so we
 # silence the false positive here instead of deleting a real dependency.
 [workspace.metadata.cargo-shear]
-ignored = ["icu_provider", "openssl-sys", "codex-utils-readiness", "codex-utils-tokenizer"]
+ignored = [
+    "icu_provider",
+    "openssl-sys",
+    "codex-utils-readiness",
+    "codex-utils-tokenizer",
+]

 [profile.release]
 lto = "fat"
@@ -276,6 +279,7 @@ opt-level = 0
 # Uncomment to debug local changes.
 # ratatui = { path = "../../ratatui" }
 ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
+crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }

 # Uncomment to debug local changes.
 # rmcp = { path = "../../rust-sdk/crates/rmcp" }
```
```diff
@@ -14,6 +14,7 @@ workspace = true
 anyhow = { workspace = true }
 clap = { workspace = true, features = ["derive"] }
+codex-protocol = { workspace = true }
 mcp-types = { workspace = true }
 paste = { workspace = true }
 schemars = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
```
```diff
@@ -2,20 +2,27 @@ use crate::ClientNotification;
 use crate::ClientRequest;
 use crate::ServerNotification;
 use crate::ServerRequest;
+use crate::export_client_notification_schemas;
+use crate::export_client_param_schemas;
 use crate::export_client_response_schemas;
 use crate::export_client_responses;
+use crate::export_server_notification_schemas;
+use crate::export_server_param_schemas;
 use crate::export_server_response_schemas;
 use crate::export_server_responses;
 use anyhow::Context;
 use anyhow::Result;
+use anyhow::anyhow;
 use codex_protocol::parse_command::ParsedCommand;
 use codex_protocol::protocol::EventMsg;
 use codex_protocol::protocol::FileChange;
 use codex_protocol::protocol::SandboxPolicy;
 use schemars::JsonSchema;
 use schemars::schema::RootSchema;
 use schemars::schema_for;
 use serde::Serialize;
 use serde_json::Map;
 use serde_json::Value;
 use std::collections::BTreeMap;
+use std::collections::HashMap;
 use std::collections::HashSet;
 use std::ffi::OsStr;
 use std::fs;
@@ -28,83 +35,29 @@ use ts_rs::TS;

 const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";

-macro_rules! for_each_schema_type {
-    ($macro:ident) => {
-        $macro!(crate::RequestId);
-        $macro!(crate::JSONRPCMessage);
-        $macro!(crate::JSONRPCRequest);
-        $macro!(crate::JSONRPCNotification);
-        $macro!(crate::JSONRPCResponse);
-        $macro!(crate::JSONRPCError);
-        $macro!(crate::JSONRPCErrorError);
-        $macro!(crate::AddConversationListenerParams);
-        $macro!(crate::AddConversationSubscriptionResponse);
-        $macro!(crate::ApplyPatchApprovalParams);
-        $macro!(crate::ApplyPatchApprovalResponse);
-        $macro!(crate::ArchiveConversationParams);
-        $macro!(crate::ArchiveConversationResponse);
-        $macro!(crate::AuthMode);
-        $macro!(crate::AuthStatusChangeNotification);
-        $macro!(crate::CancelLoginChatGptParams);
-        $macro!(crate::CancelLoginChatGptResponse);
-        $macro!(crate::ClientInfo);
-        $macro!(crate::ClientNotification);
-        $macro!(crate::ClientRequest);
-        $macro!(crate::ConversationSummary);
-        $macro!(crate::ExecCommandApprovalParams);
-        $macro!(crate::ExecCommandApprovalResponse);
-        $macro!(crate::ExecOneOffCommandParams);
-        $macro!(crate::ExecOneOffCommandResponse);
-        $macro!(crate::FuzzyFileSearchParams);
-        $macro!(crate::FuzzyFileSearchResponse);
-        $macro!(crate::FuzzyFileSearchResult);
-        $macro!(crate::GetAuthStatusParams);
-        $macro!(crate::GetAuthStatusResponse);
-        $macro!(crate::GetUserAgentResponse);
-        $macro!(crate::GetUserSavedConfigResponse);
-        $macro!(crate::GitDiffToRemoteParams);
-        $macro!(crate::GitDiffToRemoteResponse);
-        $macro!(crate::GitSha);
-        $macro!(crate::InitializeParams);
-        $macro!(crate::InitializeResponse);
-        $macro!(crate::InputItem);
-        $macro!(crate::InterruptConversationParams);
-        $macro!(crate::InterruptConversationResponse);
-        $macro!(crate::ListConversationsParams);
-        $macro!(crate::ListConversationsResponse);
-        $macro!(crate::LoginApiKeyParams);
-        $macro!(crate::LoginApiKeyResponse);
-        $macro!(crate::LoginChatGptCompleteNotification);
-        $macro!(crate::LoginChatGptResponse);
-        $macro!(crate::LogoutChatGptParams);
-        $macro!(crate::LogoutChatGptResponse);
-        $macro!(crate::NewConversationParams);
-        $macro!(crate::NewConversationResponse);
-        $macro!(crate::Profile);
-        $macro!(crate::RemoveConversationListenerParams);
-        $macro!(crate::RemoveConversationSubscriptionResponse);
-        $macro!(crate::ResumeConversationParams);
-        $macro!(crate::ResumeConversationResponse);
-        $macro!(crate::SandboxSettings);
-        $macro!(crate::SendUserMessageParams);
-        $macro!(crate::SendUserMessageResponse);
-        $macro!(crate::SendUserTurnParams);
-        $macro!(crate::SendUserTurnResponse);
-        $macro!(crate::ServerNotification);
-        $macro!(crate::ServerRequest);
-        $macro!(crate::SessionConfiguredNotification);
-        $macro!(crate::SetDefaultModelParams);
-        $macro!(crate::SetDefaultModelResponse);
-        $macro!(crate::Tools);
-        $macro!(crate::UserInfoResponse);
-        $macro!(crate::UserSavedConfig);
-        $macro!(codex_protocol::protocol::EventMsg);
-        $macro!(codex_protocol::protocol::FileChange);
-        $macro!(codex_protocol::parse_command::ParsedCommand);
-        $macro!(codex_protocol::protocol::SandboxPolicy);
-    };
+#[derive(Clone)]
+pub struct GeneratedSchema {
+    namespace: Option<String>,
+    logical_name: String,
+    value: Value,
+    in_v1_dir: bool,
+}
+
+impl GeneratedSchema {
+    fn namespace(&self) -> Option<&str> {
+        self.namespace.as_deref()
+    }
+
+    fn logical_name(&self) -> &str {
+        &self.logical_name
+    }
+
+    fn value(&self) -> &Value {
+        &self.value
+    }
+}
+
+type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;

 pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
     generate_ts(out_dir, prettier)?;
     generate_json(out_dir)?;
@@ -112,7 +65,9 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
 }

 pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
+    let v2_out_dir = out_dir.join("v2");
     ensure_dir(out_dir)?;
+    ensure_dir(&v2_out_dir)?;

     ClientRequest::export_all_to(out_dir)?;
     export_client_responses(out_dir)?;
@@ -123,12 +78,15 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
     ServerNotification::export_all_to(out_dir)?;

     generate_index_ts(out_dir)?;
+    generate_index_ts(&v2_out_dir)?;

-    let ts_files = ts_files_in(out_dir)?;
+    // Ensure our header is present on all TS files (root + subdirs like v2/).
+    let ts_files = ts_files_in_recursive(out_dir)?;
     for file in &ts_files {
         prepend_header_if_missing(file)?;
     }

     // Optionally run Prettier on all generated TS files.
     if let Some(prettier_bin) = prettier
         && !ts_files.is_empty()
     {
@@ -147,23 +105,47 @@

 pub fn generate_json(out_dir: &Path) -> Result<()> {
     ensure_dir(out_dir)?;
-    let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();
+    let envelope_emitters: &[JsonSchemaEmitter] = &[
+        |d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
+        |d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
+        |d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
+        |d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
+        |d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
+        |d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
+        |d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
+        |d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
+        |d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
+        |d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
+        |d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
+        |d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
+        |d| write_json_schema_with_return::<FileChange>(d, "FileChange"),
+        |d| write_json_schema_with_return::<crate::protocol::v1::InputItem>(d, "InputItem"),
+        |d| write_json_schema_with_return::<ParsedCommand>(d, "ParsedCommand"),
+        |d| write_json_schema_with_return::<SandboxPolicy>(d, "SandboxPolicy"),
+    ];

-    macro_rules! add_schema {
-        ($ty:path) => {{
-            let name = type_basename(stringify!($ty));
-            let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
-            bundle.insert(name, schema);
-        }};
+    let mut schemas: Vec<GeneratedSchema> = Vec::new();
+    for emit in envelope_emitters {
+        schemas.push(emit(out_dir)?);
     }

-    for_each_schema_type!(add_schema);
+    schemas.extend(export_client_param_schemas(out_dir)?);
+    schemas.extend(export_client_response_schemas(out_dir)?);
+    schemas.extend(export_server_param_schemas(out_dir)?);
+    schemas.extend(export_server_response_schemas(out_dir)?);
+    schemas.extend(export_client_notification_schemas(out_dir)?);
+    schemas.extend(export_server_notification_schemas(out_dir)?);

-    export_client_response_schemas(out_dir)?;
-    export_server_response_schemas(out_dir)?;
+    let bundle = build_schema_bundle(schemas)?;
+    write_pretty_json(
+        out_dir.join("codex_app_server_protocol.schemas.json"),
+        &bundle,
+    )?;

-    let mut definitions = Map::new();
     Ok(())
 }
```
```diff
+fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
     const SPECIAL_DEFINITIONS: &[&str] = &[
         "ClientNotification",
         "ClientRequest",
@@ -176,22 +158,62 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
         "ServerRequest",
     ];

-    for (name, schema) in bundle {
-        let mut schema_value = serde_json::to_value(schema)?;
-        annotate_schema(&mut schema_value, Some(name.as_str()));
+    let namespaced_types = collect_namespaced_types(&schemas);
+    let mut definitions = Map::new();

-        if let Value::Object(ref mut obj) = schema_value
+    for schema in schemas {
+        let GeneratedSchema {
+            namespace,
+            logical_name,
+            mut value,
+            in_v1_dir,
+        } = schema;
+
+        if let Some(ref ns) = namespace {
+            rewrite_refs_to_namespace(&mut value, ns);
+        }
+
+        let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
+        if let Value::Object(ref mut obj) = value
             && let Some(defs) = obj.remove("definitions")
             && let Value::Object(defs_obj) = defs
         {
             for (def_name, mut def_schema) in defs_obj {
-                if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
-                    annotate_schema(&mut def_schema, Some(def_name.as_str()));
+                if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
+                    continue;
+                }
+                annotate_schema(&mut def_schema, Some(def_name.as_str()));
+                let target_namespace = match namespace {
+                    Some(ref ns) => Some(ns.clone()),
+                    None => namespace_for_definition(&def_name, &namespaced_types)
+                        .cloned()
+                        .filter(|_| !in_v1_dir),
+                };
+                if let Some(ref ns) = target_namespace {
+                    if namespace.as_deref() == Some(ns.as_str()) {
+                        rewrite_refs_to_namespace(&mut def_schema, ns);
+                        insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
+                    } else if !forced_namespace_refs
+                        .iter()
+                        .any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
+                    {
+                        forced_namespace_refs.push((def_name.clone(), ns.clone()));
+                    }
+                } else {
                     definitions.insert(def_name, def_schema);
                 }
             }
         }
-        definitions.insert(name, schema_value);
+
+        for (name, ns) in forced_namespace_refs {
+            rewrite_named_ref_to_namespace(&mut value, &ns, &name);
+        }
+
+        if let Some(ref ns) = namespace {
+            insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
+        } else {
+            definitions.insert(logical_name, value);
+        }
     }

     let mut root = Map::new();
@@ -206,15 +228,28 @@
     root.insert("type".to_string(), Value::String("object".into()));
     root.insert("definitions".to_string(), Value::Object(definitions));

-    write_pretty_json(
-        out_dir.join("codex_app_server_protocol.schemas.json"),
-        &Value::Object(root),
-    )?;
-
-    Ok(())
+    Ok(Value::Object(root))
 }

-fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
+fn insert_into_namespace(
+    definitions: &mut Map<String, Value>,
+    namespace: &str,
+    name: String,
+    schema: Value,
+) -> Result<()> {
+    let entry = definitions
+        .entry(namespace.to_string())
+        .or_insert_with(|| Value::Object(Map::new()));
+    match entry {
+        Value::Object(map) => {
+            map.insert(name, schema);
+            Ok(())
+        }
+        _ => Err(anyhow!("expected namespace {namespace} to be an object")),
+    }
+}
+
+fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
 where
     T: JsonSchema,
 {
@@ -222,17 +257,37 @@
     let schema = schema_for!(T);
     let mut schema_value = serde_json::to_value(schema)?;
     annotate_schema(&mut schema_value, Some(file_stem));
-    write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema_value)
+    // If the name looks like a namespaced path (e.g., "v2::Type"), mirror
+    // the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
+    // write alongside the legacy files.
+    let (raw_namespace, logical_name) = split_namespace(file_stem);
+    let out_path = if let Some(ns) = raw_namespace {
+        let dir = out_dir.join(ns);
+        ensure_dir(&dir)?;
+        dir.join(format!("{logical_name}.json"))
+    } else {
+        out_dir.join(format!("{file_stem}.json"))
+    };
+
+    write_pretty_json(out_path, &schema_value)
         .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
-    let annotated_schema = serde_json::from_value(schema_value)?;
-    Ok(annotated_schema)
+    let namespace = match raw_namespace {
+        Some("v1") | None => None,
+        Some(ns) => Some(ns.to_string()),
+    };
+    Ok(GeneratedSchema {
+        in_v1_dir: raw_namespace == Some("v1"),
+        namespace,
+        logical_name: logical_name.to_string(),
+        value: schema_value,
+    })
 }

-pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<()>
+pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
 where
     T: JsonSchema,
 {
-    write_json_schema_with_return::<T>(out_dir, name).map(|_| ())
+    write_json_schema_with_return::<T>(out_dir, name)
 }

 fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
@@ -241,13 +296,73 @@ fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
     fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
     Ok(())
 }
-fn type_basename(type_path: &str) -> String {
-    type_path
-        .rsplit_once("::")
-        .map(|(_, name)| name)
-        .unwrap_or(type_path)
-        .trim()
-        .to_string()

+/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
+fn split_namespace(name: &str) -> (Option<&str>, &str) {
+    name.split_once("::")
+        .map_or((None, name), |(ns, rest)| (Some(ns), rest))
 }

+/// Recursively rewrite $ref values that point at "#/definitions/..." so that
+/// they point to a namespaced location under the bundle.
+fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
+    match value {
+        Value::Object(obj) => {
+            if let Some(Value::String(r)) = obj.get_mut("$ref")
+                && let Some(suffix) = r.strip_prefix("#/definitions/")
+            {
+                let prefix = format!("{ns}/");
+                if !suffix.starts_with(&prefix) {
+                    *r = format!("#/definitions/{ns}/{suffix}");
+                }
+            }
+            for v in obj.values_mut() {
+                rewrite_refs_to_namespace(v, ns);
+            }
+        }
+        Value::Array(items) => {
+            for v in items.iter_mut() {
+                rewrite_refs_to_namespace(v, ns);
+            }
+        }
+        _ => {}
+    }
+}
+
+fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
+    let mut types = HashMap::new();
+    for schema in schemas {
+        if let Some(ns) = schema.namespace() {
+            types
+                .entry(schema.logical_name().to_string())
+                .or_insert_with(|| ns.to_string());
+            if let Some(Value::Object(defs)) = schema.value().get("definitions") {
+                for key in defs.keys() {
+                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
+                }
+            }
+            if let Some(Value::Object(defs)) = schema.value().get("$defs") {
+                for key in defs.keys() {
+                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
+                }
+            }
+        }
+    }
+    types
+}
+
+fn namespace_for_definition<'a>(
+    name: &str,
+    types: &'a HashMap<String, String>,
+) -> Option<&'a String> {
+    if let Some(ns) = types.get(name) {
+        return Some(ns);
+    }
+    let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
+    if trimmed != name {
+        return types.get(trimmed);
+    }
+    None
+}
+
 fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
```
```diff
@@ -467,6 +582,33 @@ fn ensure_dir(dir: &Path) -> Result<()> {
         .with_context(|| format!("Failed to create output directory {}", dir.display()))
 }

+fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
+    let direct = format!("#/definitions/{name}");
+    let prefixed = format!("{direct}/");
+    let replacement = format!("#/definitions/{ns}/{name}");
+    let replacement_prefixed = format!("{replacement}/");
+    match value {
+        Value::Object(obj) => {
+            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
+                if reference == &direct {
+                    *reference = replacement;
+                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
+                    *reference = format!("{replacement_prefixed}{rest}");
+                }
+            }
+            for child in obj.values_mut() {
+                rewrite_named_ref_to_namespace(child, ns, name);
+            }
+        }
+        Value::Array(items) => {
+            for child in items {
+                rewrite_named_ref_to_namespace(child, ns, name);
+            }
+        }
+        _ => {}
+    }
+}
+
 fn prepend_header_if_missing(path: &Path) -> Result<()> {
     let mut content = String::new();
     {
@@ -504,6 +646,28 @@ fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
     Ok(files)
 }

+fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
+    let mut files = Vec::new();
+    let mut stack = vec![dir.to_path_buf()];
+    while let Some(d) = stack.pop() {
+        for entry in
+            fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
+        {
+            let entry = entry?;
+            let path = entry.path();
+            if path.is_dir() {
+                stack.push(path);
+            } else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
+                files.push(path);
+            }
+        }
+    }
+    files.sort();
+    Ok(files)
+}
+
 /// Generate an index.ts file that re-exports all generated types.
 /// This allows consumers to import all types from a single file.
 fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
     let mut entries: Vec<String> = Vec::new();
     let mut stems: Vec<String> = ts_files_in(out_dir)?
@@ -520,6 +684,14 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
         entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
     }

+    // If this is the root out_dir and a ./v2 folder exists with TS files,
+    // expose it as a namespace to avoid symbol collisions at the root.
+    let v2_dir = out_dir.join("v2");
+    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
+    if has_v2_ts {
+        entries.push("export * as v2 from \"./v2\";\n".to_string());
+    }
+
     let mut content =
         String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
     content.push_str(HEADER);
@@ -546,6 +718,7 @@ mod tests {

     #[test]
     fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
+        // Assert that there are no types of the form "?: T | null" in the generated TS files.
         let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
         fs::create_dir(&output_dir)?;
```
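For readers following the `$ref` rewriting above, here is a small, hypothetical unit test of the behavior described by the doc comment on `rewrite_refs_to_namespace`; because that helper is private, the sketch assumes it lives in the same module as the hunk shown above.

```rust
// Hypothetical test sketch; it only exercises the logic visible in the diff above.
#[cfg(test)]
mod ref_rewrite_sketch {
    use super::rewrite_refs_to_namespace;
    use serde_json::json;

    #[test]
    fn prefixes_unqualified_refs_only() {
        let mut schema = json!({
            "properties": {
                "cmd":  { "$ref": "#/definitions/ParsedCommand" },
                "item": { "$ref": "#/definitions/v2/ThreadItem" }
            }
        });
        rewrite_refs_to_namespace(&mut schema, "v2");
        // The bare ref gains the namespace; the already-namespaced ref is left alone.
        assert_eq!(
            schema["properties"]["cmd"]["$ref"],
            "#/definitions/v2/ParsedCommand"
        );
        assert_eq!(
            schema["properties"]["item"]["$ref"],
            "#/definitions/v2/ThreadItem"
        );
    }
}
```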
```diff
@@ -1,15 +1,17 @@
 use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;

 use crate::JSONRPCNotification;
 use crate::JSONRPCRequest;
 use crate::RequestId;
+use crate::export::GeneratedSchema;
+use crate::export::write_json_schema;
 use crate::protocol::v1;
 use crate::protocol::v2;
 use codex_protocol::ConversationId;
 use codex_protocol::parse_command::ParsedCommand;
 use codex_protocol::protocol::FileChange;
 use codex_protocol::protocol::RateLimitSnapshot;
 use codex_protocol::protocol::ReviewDecision;
 use codex_protocol::protocol::SandboxCommandAssessment;
 use paste::paste;
@@ -74,33 +76,97 @@ macro_rules! client_request_definitions {
             Ok(())
         }

         #[allow(clippy::vec_init_then_push)]
         pub fn export_client_response_schemas(
             out_dir: &::std::path::Path,
-        ) -> ::anyhow::Result<()> {
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let mut schemas = Vec::new();
             $(
-                crate::export::write_json_schema::<$response>(out_dir, stringify!($response))?;
+                schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
             )*
-            Ok(())
+            Ok(schemas)
         }

+        #[allow(clippy::vec_init_then_push)]
+        pub fn export_client_param_schemas(
+            out_dir: &::std::path::Path,
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let mut schemas = Vec::new();
+            $(
+                schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
+            )*
+            Ok(schemas)
+        }
     };
 }

 client_request_definitions! {
-    /// NEW APIs
-    #[serde(rename = "model/list")]
-    #[ts(rename = "model/list")]
-    ListModels {
-        params: v2::ListModelsParams,
-        response: v2::ListModelsResponse,
+    // Thread lifecycle
+    #[serde(rename = "thread/start")]
+    #[ts(rename = "thread/start")]
+    ThreadStart {
+        params: v2::ThreadStartParams,
+        response: v2::ThreadStartResponse,
     },
+    #[serde(rename = "thread/resume")]
+    #[ts(rename = "thread/resume")]
+    ThreadResume {
+        params: v2::ThreadResumeParams,
+        response: v2::ThreadResumeResponse,
+    },
+    #[serde(rename = "thread/archive")]
+    #[ts(rename = "thread/archive")]
+    ThreadArchive {
+        params: v2::ThreadArchiveParams,
+        response: v2::ThreadArchiveResponse,
+    },
+    #[serde(rename = "thread/list")]
+    #[ts(rename = "thread/list")]
+    ThreadList {
+        params: v2::ThreadListParams,
+        response: v2::ThreadListResponse,
+    },
+    #[serde(rename = "thread/compact")]
+    #[ts(rename = "thread/compact")]
+    ThreadCompact {
+        params: v2::ThreadCompactParams,
+        response: v2::ThreadCompactResponse,
+    },
+    #[serde(rename = "turn/start")]
+    #[ts(rename = "turn/start")]
+    TurnStart {
+        params: v2::TurnStartParams,
+        response: v2::TurnStartResponse,
+    },
+    #[serde(rename = "turn/interrupt")]
+    #[ts(rename = "turn/interrupt")]
+    TurnInterrupt {
+        params: v2::TurnInterruptParams,
+        response: v2::TurnInterruptResponse,
+    },

-    #[serde(rename = "account/login")]
-    #[ts(rename = "account/login")]
+    #[serde(rename = "model/list")]
+    #[ts(rename = "model/list")]
+    ModelList {
+        params: v2::ModelListParams,
+        response: v2::ModelListResponse,
+    },
+
+    #[serde(rename = "account/login/start")]
+    #[ts(rename = "account/login/start")]
     LoginAccount {
         params: v2::LoginAccountParams,
         response: v2::LoginAccountResponse,
     },

     #[serde(rename = "account/login/cancel")]
     #[ts(rename = "account/login/cancel")]
     CancelLoginAccount {
         params: v2::CancelLoginAccountParams,
         response: v2::CancelLoginAccountResponse,
     },

     #[serde(rename = "account/logout")]
     #[ts(rename = "account/logout")]
     LogoutAccount {
@@ -117,15 +183,15 @@ client_request_definitions! {

     #[serde(rename = "feedback/upload")]
     #[ts(rename = "feedback/upload")]
-    UploadFeedback {
-        params: v2::UploadFeedbackParams,
-        response: v2::UploadFeedbackResponse,
+    FeedbackUpload {
+        params: v2::FeedbackUploadParams,
+        response: v2::FeedbackUploadResponse,
     },

     #[serde(rename = "account/read")]
     #[ts(rename = "account/read")]
     GetAccount {
-        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
+        params: v2::GetAccountParams,
         response: v2::GetAccountResponse,
     },
@@ -188,6 +254,7 @@ client_request_definitions! {
         params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
         response: v1::LoginChatGptResponse,
     },
+    // DEPRECATED in favor of CancelLoginAccount
     CancelLoginChatGpt {
         params: v1::CancelLoginChatGptParams,
         response: v1::CancelLoginChatGptResponse,
@@ -196,6 +263,7 @@ client_request_definitions! {
         params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
         response: v1::LogoutChatGptResponse,
     },
+    /// DEPRECATED in favor of GetAccount
     GetAuthStatus {
         params: v1::GetAuthStatusParams,
         response: v1::GetAuthStatusResponse,
@@ -276,13 +344,101 @@ macro_rules! server_request_definitions {
             Ok(())
         }

         #[allow(clippy::vec_init_then_push)]
         pub fn export_server_response_schemas(
-            out_dir: &::std::path::Path,
-        ) -> ::anyhow::Result<()> {
+            out_dir: &Path,
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let mut schemas = Vec::new();
             paste! {
-                $(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?;)*
+                $(schemas.push(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?);)*
             }
-            Ok(())
+            Ok(schemas)
         }

+        #[allow(clippy::vec_init_then_push)]
+        pub fn export_server_param_schemas(
+            out_dir: &Path,
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let mut schemas = Vec::new();
+            paste! {
+                $(schemas.push(crate::export::write_json_schema::<[<$variant Params>]>(out_dir, stringify!([<$variant Params>]))?);)*
+            }
+            Ok(schemas)
+        }
     };
 }

+/// Generates `ServerNotification` enum and helpers, including a JSON Schema
+/// exporter for each notification.
+macro_rules! server_notification_definitions {
+    (
+        $(
+            $(#[$variant_meta:meta])*
+            $variant:ident $(=> $wire:literal)? ( $payload:ty )
+        ),* $(,)?
+    ) => {
+        /// Notification sent from the server to the client.
+        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
+        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
+        #[strum(serialize_all = "camelCase")]
+        pub enum ServerNotification {
+            $(
+                $(#[$variant_meta])*
+                $(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
+                $variant($payload),
+            )*
+        }
+
+        impl ServerNotification {
+            pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
+                match self {
+                    $(Self::$variant(params) => serde_json::to_value(params),)*
+                }
+            }
+        }
+
+        impl TryFrom<JSONRPCNotification> for ServerNotification {
+            type Error = serde_json::Error;
+
+            fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
+                serde_json::from_value(serde_json::to_value(value)?)
+            }
+        }
+
+        #[allow(clippy::vec_init_then_push)]
+        pub fn export_server_notification_schemas(
+            out_dir: &::std::path::Path,
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let mut schemas = Vec::new();
+            $(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
+            Ok(schemas)
+        }
+    };
+}
+
+/// Notifications sent from the client to the server.
+macro_rules! client_notification_definitions {
+    (
+        $(
+            $(#[$variant_meta:meta])*
+            $variant:ident $( ( $payload:ty ) )?
+        ),* $(,)?
+    ) => {
+        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
+        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
+        #[strum(serialize_all = "camelCase")]
+        pub enum ClientNotification {
+            $(
+                $(#[$variant_meta])*
+                $variant $( ( $payload ) )?,
+            )*
+        }
+
+        pub fn export_client_notification_schemas(
+            _out_dir: &::std::path::Path,
+        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
+            let schemas = Vec::new();
+            $( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
+            Ok(schemas)
+        }
+    };
+}
```
```diff
@@ -366,52 +522,33 @@ pub struct FuzzyFileSearchResponse {
     pub files: Vec<FuzzyFileSearchResult>,
 }

-/// Notification sent from the server to the client.
-#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
-#[serde(tag = "method", content = "params", rename_all = "camelCase")]
-#[strum(serialize_all = "camelCase")]
-pub enum ServerNotification {
-    /// NEW NOTIFICATIONS
-    #[serde(rename = "account/rateLimits/updated")]
-    #[ts(rename = "account/rateLimits/updated")]
-    #[strum(serialize = "account/rateLimits/updated")]
-    AccountRateLimitsUpdated(RateLimitSnapshot),
+server_notification_definitions! {
+    ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
+    TurnStarted => "turn/started" (v2::TurnStartedNotification),
+    TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
+    ItemStarted => "item/started" (v2::ItemStartedNotification),
+    ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
+    AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
+    CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
+    McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
+    AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
+    AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),

     #[serde(rename = "account/login/completed")]
     #[ts(rename = "account/login/completed")]
     #[strum(serialize = "account/login/completed")]
     AccountLoginCompleted(v2::AccountLoginCompletedNotification),

     /// DEPRECATED NOTIFICATIONS below
     /// Authentication status changed
     AuthStatusChange(v1::AuthStatusChangeNotification),

     /// ChatGPT login flow completed
     /// Deprecated: use `account/login/completed` instead.
     LoginChatGptComplete(v1::LoginChatGptCompleteNotification),

     /// The special session configured event for a new or resumed conversation.
     SessionConfigured(v1::SessionConfiguredNotification),
 }

-impl ServerNotification {
-    pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
-        match self {
-            ServerNotification::AccountRateLimitsUpdated(params) => serde_json::to_value(params),
-            ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
-            ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
-            ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
-        }
-    }
-}
-
-impl TryFrom<JSONRPCNotification> for ServerNotification {
-    type Error = serde_json::Error;
-
-    fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
-        serde_json::from_value(serde_json::to_value(value)?)
-    }
-}
-
-/// Notification sent from the client to the server.
-#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
-#[serde(tag = "method", content = "params", rename_all = "camelCase")]
-#[strum(serialize_all = "camelCase")]
-pub enum ClientNotification {
+client_notification_definitions! {
     Initialized,
 }
@@ -571,7 +708,7 @@ mod tests {
     };
     assert_eq!(
         json!({
-            "method": "account/login",
+            "method": "account/login/start",
             "id": 2,
             "params": {
                 "type": "apiKey",
@@ -587,11 +724,11 @@
     fn serialize_account_login_chatgpt() -> Result<()> {
         let request = ClientRequest::LoginAccount {
             request_id: RequestId::Integer(3),
-            params: v2::LoginAccountParams::ChatGpt,
+            params: v2::LoginAccountParams::Chatgpt,
         };
         assert_eq!(
             json!({
-                "method": "account/login",
+                "method": "account/login/start",
                 "id": 3,
                 "params": {
                     "type": "chatgpt"
@@ -622,12 +759,17 @@
     fn serialize_get_account() -> Result<()> {
         let request = ClientRequest::GetAccount {
             request_id: RequestId::Integer(5),
-            params: None,
+            params: v2::GetAccountParams {
+                refresh_token: false,
+            },
         };
         assert_eq!(
             json!({
                 "method": "account/read",
                 "id": 5,
+                "params": {
+                    "refreshToken": false
+                }
             }),
             serde_json::to_value(&request)?,
         );
@@ -636,19 +778,16 @@

     #[test]
     fn account_serializes_fields_in_camel_case() -> Result<()> {
-        let api_key = v2::Account::ApiKey {
-            api_key: "secret".to_string(),
-        };
+        let api_key = v2::Account::ApiKey {};
         assert_eq!(
             json!({
                 "type": "apiKey",
-                "apiKey": "secret",
             }),
             serde_json::to_value(&api_key)?,
         );

-        let chatgpt = v2::Account::ChatGpt {
-            email: Some("user@example.com".to_string()),
+        let chatgpt = v2::Account::Chatgpt {
```
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: PlanType::Plus,
|
||||
};
|
||||
assert_eq!(
|
||||
@@ -665,16 +804,16 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn serialize_list_models() -> Result<()> {
|
||||
let request = ClientRequest::ListModels {
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::ListModelsParams::default(),
|
||||
params: v2::ModelListParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "model/list",
|
||||
"id": 6,
|
||||
"params": {
|
||||
"pageSize": null,
|
||||
"limit": null,
|
||||
"cursor": null
|
||||
}
|
||||
}),
|
||||
|
||||
@@ -11,6 +11,7 @@ use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
@@ -113,6 +114,18 @@ pub struct ConversationSummary {
|
||||
pub preview: String,
|
||||
pub timestamp: Option<String>,
|
||||
pub model_provider: String,
|
||||
pub cwd: PathBuf,
|
||||
pub cli_version: String,
|
||||
pub source: SessionSource,
|
||||
pub git_info: Option<ConversationGitInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub struct ConversationGitInfo {
|
||||
pub sha: Option<String>,
|
||||
pub branch: Option<String>,
|
||||
pub origin_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -374,10 +387,9 @@ pub enum InputItem {
|
||||
LocalImage { path: PathBuf },
|
||||
}
|
||||
|
||||
// Deprecated notifications (v1)
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated in favor of AccountLoginCompletedNotification.
|
||||
pub struct LoginChatGptCompleteNotification {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
@@ -400,6 +412,7 @@ pub struct SessionConfiguredNotification {
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated notification. Use AccountUpdatedNotification instead.
|
||||
pub struct AuthStatusChangeNotification {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
}
|
||||
|
||||
@@ -1,35 +1,142 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::protocol::common::AuthMode;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use mcp_types::ContentBlock as McpContentBlock;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value as JsonValue;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
// Macro to declare a camelCased API v2 enum mirroring a core enum which
|
||||
// tends to use kebab-case.
|
||||
macro_rules! v2_enum_from_core {
|
||||
(
|
||||
pub enum $Name:ident from $Src:path { $( $Variant:ident ),+ $(,)? }
|
||||
) => {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum $Name { $( $Variant ),+ }
|
||||
|
||||
impl $Name {
|
||||
pub fn to_core(self) -> $Src {
|
||||
match self { $( $Name::$Variant => <$Src>::$Variant ),+ }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<$Src> for $Name {
|
||||
fn from(value: $Src) -> Self {
|
||||
match value { $( <$Src>::$Variant => $Name::$Variant ),+ }
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
v2_enum_from_core!(
|
||||
pub enum AskForApproval from codex_protocol::protocol::AskForApproval {
|
||||
UnlessTrusted, OnFailure, OnRequest, Never
|
||||
}
|
||||
);
|
||||
|
||||
v2_enum_from_core!(
|
||||
pub enum SandboxMode from codex_protocol::config_types::SandboxMode {
|
||||
ReadOnly, WorkspaceWrite, DangerFullAccess
|
||||
}
|
||||
);
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(tag = "mode", rename_all = "camelCase")]
|
||||
#[ts(tag = "mode")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum SandboxPolicy {
|
||||
DangerFullAccess,
|
||||
ReadOnly,
|
||||
WorkspaceWrite {
|
||||
#[serde(default)]
|
||||
writable_roots: Vec<PathBuf>,
|
||||
#[serde(default)]
|
||||
network_access: bool,
|
||||
#[serde(default)]
|
||||
exclude_tmpdir_env_var: bool,
|
||||
#[serde(default)]
|
||||
exclude_slash_tmp: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl SandboxPolicy {
|
||||
pub fn to_core(&self) -> codex_protocol::protocol::SandboxPolicy {
|
||||
match self {
|
||||
SandboxPolicy::DangerFullAccess => {
|
||||
codex_protocol::protocol::SandboxPolicy::DangerFullAccess
|
||||
}
|
||||
SandboxPolicy::ReadOnly => codex_protocol::protocol::SandboxPolicy::ReadOnly,
|
||||
SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
} => codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: writable_roots.clone(),
|
||||
network_access: *network_access,
|
||||
exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp: *exclude_slash_tmp,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
|
||||
fn from(value: codex_protocol::protocol::SandboxPolicy) -> Self {
|
||||
match value {
|
||||
codex_protocol::protocol::SandboxPolicy::DangerFullAccess => {
|
||||
SandboxPolicy::DangerFullAccess
|
||||
}
|
||||
codex_protocol::protocol::SandboxPolicy::ReadOnly => SandboxPolicy::ReadOnly,
|
||||
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
} => SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum Account {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey { api_key: String },
|
||||
ApiKey {},
|
||||
|
||||
#[serde(rename = "chatgpt", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgpt", rename_all = "camelCase")]
|
||||
ChatGpt {
|
||||
email: Option<String>,
|
||||
plan_type: PlanType,
|
||||
},
|
||||
Chatgpt { email: String, plan_type: PlanType },
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum LoginAccountParams {
|
||||
#[serde(rename = "apiKey")]
|
||||
#[ts(rename = "apiKey")]
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey {
|
||||
#[serde(rename = "apiKey")]
|
||||
#[ts(rename = "apiKey")]
|
||||
@@ -37,48 +144,81 @@ pub enum LoginAccountParams {
|
||||
},
|
||||
#[serde(rename = "chatgpt")]
|
||||
#[ts(rename = "chatgpt")]
|
||||
ChatGpt,
|
||||
Chatgpt,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum LoginAccountResponse {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey {},
|
||||
#[serde(rename = "chatgpt", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgpt", rename_all = "camelCase")]
|
||||
Chatgpt {
|
||||
// Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
|
||||
// Convert to/from UUIDs at the application layer as needed.
|
||||
login_id: String,
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
auth_url: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginAccountResponse {
|
||||
/// Only set if the login method is ChatGPT.
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Option<Uuid>,
|
||||
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
/// Only set if the login method is ChatGPT.
|
||||
pub auth_url: Option<String>,
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CancelLoginAccountParams {
|
||||
pub login_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CancelLoginAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct LogoutAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountRateLimitsResponse {
|
||||
pub rate_limits: RateLimitSnapshot,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAccountResponse {
|
||||
pub account: Account,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListModelsParams {
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub page_size: Option<usize>,
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountParams {
|
||||
#[serde(default)]
|
||||
pub refresh_token: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountResponse {
|
||||
pub account: Option<Account>,
|
||||
pub requires_openai_auth: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ModelListParams {
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub limit: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Model {
|
||||
pub id: String,
|
||||
pub model: String,
|
||||
@@ -92,6 +232,7 @@ pub struct Model {
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ReasoningEffortOption {
|
||||
pub reasoning_effort: ReasoningEffort,
|
||||
pub description: String,
|
||||
@@ -99,16 +240,18 @@ pub struct ReasoningEffortOption {
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListModelsResponse {
|
||||
pub items: Vec<Model>,
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ModelListResponse {
|
||||
pub data: Vec<Model>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// if None, there are no more items to return.
|
||||
/// If None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UploadFeedbackParams {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FeedbackUploadParams {
|
||||
pub classification: String,
|
||||
pub reason: Option<String>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
@@ -117,6 +260,451 @@ pub struct UploadFeedbackParams {
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UploadFeedbackResponse {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FeedbackUploadResponse {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
// === Threads, Turns, and Items ===
|
||||
// Thread APIs
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartParams {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
pub developer_instructions: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartResponse {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadResumeParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadResumeResponse {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadArchiveParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadArchiveResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadListParams {
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub limit: Option<u32>,
|
||||
/// Optional provider filter; when set, only sessions recorded under these
|
||||
/// providers are returned. When present but empty, includes all providers.
|
||||
pub model_providers: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadListResponse {
|
||||
pub data: Vec<Thread>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
    /// If None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadCompactParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadCompactResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Thread {
|
||||
pub id: String,
|
||||
/// Usually the first user message in the thread, if available.
|
||||
pub preview: String,
|
||||
pub model_provider: String,
|
||||
/// Unix timestamp (in seconds) when the thread was created.
|
||||
pub created_at: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountUpdatedNotification {
|
||||
pub auth_mode: Option<AuthMode>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Turn {
|
||||
pub id: String,
|
||||
pub items: Vec<ThreadItem>,
|
||||
pub status: TurnStatus,
|
||||
pub error: Option<TurnError>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnError {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum TurnStatus {
|
||||
Completed,
|
||||
Interrupted,
|
||||
Failed,
|
||||
InProgress,
|
||||
}
|
||||
|
||||
// Turn APIs
|
||||
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartParams {
|
||||
pub thread_id: String,
|
||||
pub input: Vec<UserInput>,
|
||||
/// Override the working directory for this turn and subsequent turns.
|
||||
pub cwd: Option<PathBuf>,
|
||||
/// Override the approval policy for this turn and subsequent turns.
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
/// Override the sandbox policy for this turn and subsequent turns.
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
/// Override the model for this turn and subsequent turns.
|
||||
pub model: Option<String>,
|
||||
/// Override the reasoning effort for this turn and subsequent turns.
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
/// Override the reasoning summary for this turn and subsequent turns.
|
||||
pub summary: Option<ReasoningSummary>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartResponse {
|
||||
pub turn: Turn,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnInterruptParams {
|
||||
pub thread_id: String,
|
||||
pub turn_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnInterruptResponse {}
|
||||
|
||||
// User input types
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum UserInput {
|
||||
Text { text: String },
|
||||
Image { url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
}
|
||||
|
||||
impl UserInput {
|
||||
pub fn into_core(self) -> CoreUserInput {
|
||||
match self {
|
||||
UserInput::Text { text } => CoreUserInput::Text { text },
|
||||
UserInput::Image { url } => CoreUserInput::Image { image_url: url },
|
||||
UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum ThreadItem {
|
||||
UserMessage {
|
||||
id: String,
|
||||
content: Vec<UserInput>,
|
||||
},
|
||||
AgentMessage {
|
||||
id: String,
|
||||
text: String,
|
||||
},
|
||||
Reasoning {
|
||||
id: String,
|
||||
text: String,
|
||||
},
|
||||
CommandExecution {
|
||||
id: String,
|
||||
command: String,
|
||||
aggregated_output: String,
|
||||
exit_code: Option<i32>,
|
||||
status: CommandExecutionStatus,
|
||||
duration_ms: Option<i64>,
|
||||
},
|
||||
FileChange {
|
||||
id: String,
|
||||
changes: Vec<FileUpdateChange>,
|
||||
status: PatchApplyStatus,
|
||||
},
|
||||
McpToolCall {
|
||||
id: String,
|
||||
server: String,
|
||||
tool: String,
|
||||
status: McpToolCallStatus,
|
||||
arguments: JsonValue,
|
||||
result: Option<McpToolCallResult>,
|
||||
error: Option<McpToolCallError>,
|
||||
},
|
||||
WebSearch {
|
||||
id: String,
|
||||
query: String,
|
||||
},
|
||||
TodoList {
|
||||
id: String,
|
||||
items: Vec<TodoItem>,
|
||||
},
|
||||
ImageView {
|
||||
id: String,
|
||||
path: String,
|
||||
},
|
||||
CodeReview {
|
||||
id: String,
|
||||
review: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum CommandExecutionStatus {
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FileUpdateChange {
|
||||
pub path: String,
|
||||
pub kind: PatchChangeKind,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PatchChangeKind {
|
||||
Add,
|
||||
Delete,
|
||||
Update,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PatchApplyStatus {
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum McpToolCallStatus {
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallResult {
|
||||
pub content: Vec<McpContentBlock>,
|
||||
pub structured_content: JsonValue,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallError {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TodoItem {
|
||||
pub id: String,
|
||||
pub text: String,
|
||||
pub completed: bool,
|
||||
}
|
||||
|
||||
// === Server Notifications ===
|
||||
// Thread/Turn lifecycle notifications and item progress events
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartedNotification {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartedNotification {
|
||||
pub turn: Turn,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Usage {
|
||||
pub input_tokens: i32,
|
||||
pub cached_input_tokens: i32,
|
||||
pub output_tokens: i32,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnCompletedNotification {
|
||||
pub turn: Turn,
|
||||
// TODO: should usage be stored on the Turn object, and we return that instead?
|
||||
pub usage: Usage,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ItemStartedNotification {
|
||||
pub item: ThreadItem,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ItemCompletedNotification {
|
||||
pub item: ThreadItem,
|
||||
}
|
||||
|
||||
// Item-specific progress notifications
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AgentMessageDeltaNotification {
|
||||
pub item_id: String,
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CommandExecutionOutputDeltaNotification {
|
||||
pub item_id: String,
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallProgressNotification {
|
||||
pub item_id: String,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountRateLimitsUpdatedNotification {
|
||||
pub rate_limits: RateLimitSnapshot,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RateLimitSnapshot {
|
||||
pub primary: Option<RateLimitWindow>,
|
||||
pub secondary: Option<RateLimitWindow>,
|
||||
}
|
||||
|
||||
impl From<CoreRateLimitSnapshot> for RateLimitSnapshot {
|
||||
fn from(value: CoreRateLimitSnapshot) -> Self {
|
||||
Self {
|
||||
primary: value.primary.map(RateLimitWindow::from),
|
||||
secondary: value.secondary.map(RateLimitWindow::from),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RateLimitWindow {
|
||||
pub used_percent: i32,
|
||||
pub window_duration_mins: Option<i64>,
|
||||
pub resets_at: Option<i64>,
|
||||
}
|
||||
|
||||
impl From<CoreRateLimitWindow> for RateLimitWindow {
|
||||
fn from(value: CoreRateLimitWindow) -> Self {
|
||||
Self {
|
||||
used_percent: value.used_percent.round() as i32,
|
||||
window_duration_mins: value.window_minutes,
|
||||
resets_at: value.resets_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountLoginCompletedNotification {
|
||||
// Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
|
||||
// Convert to/from UUIDs at the application layer as needed.
|
||||
pub login_id: Option<String>,
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# codex-app-server
|
||||
|
||||
`codex app-server` is the harness Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.
|
||||
`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.
|
||||
|
||||
## Protocol
|
||||
|
||||
@@ -8,8 +8,253 @@ Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports
|
||||
|
||||
## Message Schema
|
||||
|
||||
Currently, you can dump a TypeScript version of the schema using `codex generate-ts`. It is specific to the version of Codex you used to run `generate-ts`, so the two are guaranteed to be compatible.
|
||||
Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.
|
||||
|
||||
```
|
||||
codex generate-ts --out DIR
|
||||
codex app-server generate-ts --out DIR
|
||||
codex app-server generate-json-schema --out DIR
|
||||
```
|
||||
|
||||
## Initialization
|
||||
|
||||
Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.
|
||||
|
||||
Example:
|
||||
|
||||
```json
|
||||
{ "method": "initialize", "id": 0, "params": {
|
||||
"clientInfo": { "name": "codex-vscode", "title": "Codex VS Code Extension", "version": "0.1.0" }
|
||||
} }
|
||||
{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 codex-vscode/0.1.0" } }
|
||||
{ "method": "initialized" }
|
||||
```
|
||||
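For a client written in Rust, the same handshake can be driven over the app-server's stdio transport. The snippet below is a hand-written sketch rather than code from this repository: it assumes the `codex` binary is on `PATH`, that messages are exchanged as newline-delimited JSON (as in the transcript above), and that `serde_json` is available; the client name `my-client` is made up.

```rust
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Assumption: `codex app-server` exchanges newline-delimited JSON-RPC on stdio.
    let mut child = Command::new("codex")
        .arg("app-server")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;
    let mut stdin = child.stdin.take().expect("child stdin");
    let mut stdout = BufReader::new(child.stdout.take().expect("child stdout"));

    // 1) Send `initialize` with clientInfo.
    let initialize = serde_json::json!({
        "method": "initialize",
        "id": 0,
        "params": { "clientInfo": { "name": "my-client", "title": "My Client", "version": "0.1.0" } }
    });
    writeln!(stdin, "{initialize}")?;

    // 2) Read the response line, which carries the server's userAgent.
    let mut line = String::new();
    stdout.read_line(&mut line)?;
    println!("initialize response: {line}");

    // 3) Acknowledge with the `initialized` notification (no id) before other calls.
    writeln!(stdin, "{}", serde_json::json!({ "method": "initialized" }))?;
    Ok(())
}
```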
|
||||
## Core primitives
|
||||
|
||||
The API has three top-level primitives (the sketch after this list shows how they nest):
- Thread - a conversation between the Codex agent and a user. Each thread contains multiple turns.
- Turn - one turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- Item - a user input or agent output produced during a turn; items are persisted and used as context for future conversations.
|
||||
|
||||
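A rough sketch of how these primitives nest, using the v2 types introduced in this change. It is illustrative only and assumes the types are re-exported from the `codex-app-server-protocol` crate root, as the params types are in the tests later in this diff.

```rust
use codex_app_server_protocol::{Thread, ThreadItem, Turn, TurnStatus};

fn example() -> (Thread, Turn) {
    // A thread is the long-lived conversation record.
    let thread = Thread {
        id: "thr_123".to_string(),
        preview: "Run tests".to_string(),
        model_provider: "openai".to_string(),
        created_at: 1_730_910_000,
    };
    // A turn groups the items produced while answering one user message.
    let turn = Turn {
        id: "turn_456".to_string(),
        items: vec![ThreadItem::AgentMessage {
            id: "item_1".to_string(),
            text: "All tests passed.".to_string(),
        }],
        status: TurnStatus::Completed,
        error: None,
    };
    (thread, turn)
}
```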
## Thread & turn endpoints
|
||||
|
||||
The JSON-RPC API exposes dedicated methods for managing Codex conversations. Threads store long-lived conversation metadata, and turns store the per-message exchange (input → Codex output, including streamed items). Use the thread APIs to create, list, or archive sessions, then drive the conversation with turn APIs and notifications.
|
||||
|
||||
### Quick reference
|
||||
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
|
||||
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
|
||||
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
|
||||
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
|
||||
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
|
||||
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
|
||||
|
||||
### 1) Start or resume a thread
|
||||
|
||||
Start a fresh thread when you need a new Codex conversation.
|
||||
|
||||
```json
|
||||
{ "method": "thread/start", "id": 10, "params": {
|
||||
// Optionally set config settings. If not specified, will use the user's
|
||||
// current config settings.
|
||||
"model": "gpt-5-codex",
|
||||
"cwd": "/Users/me/project",
|
||||
"approvalPolicy": "never",
|
||||
"sandbox": "workspaceWrite",
|
||||
} }
|
||||
{ "id": 10, "result": {
|
||||
"thread": {
|
||||
"id": "thr_123",
|
||||
"preview": "",
|
||||
"modelProvider": "openai",
|
||||
"createdAt": 1730910000
|
||||
}
|
||||
} }
|
||||
{ "method": "thread/started", "params": { "thread": { … } } }
|
||||
```
|
||||
|
||||
To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:
|
||||
|
||||
```json
|
||||
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
|
||||
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
|
||||
```
|
||||
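The same request can be assembled with the protocol types from this change. This is a hedged sketch: the struct and field names come from the definitions above, the JSON-RPC envelope (`method`/`id`) mirrors the transcript, and `serde_json` is assumed as a dependency.

```rust
use codex_app_server_protocol::ThreadStartParams;

fn thread_start_request() -> serde_json::Value {
    // Only override what you need; unset fields fall back to the user's current config.
    let params = ThreadStartParams {
        model: Some("gpt-5-codex".to_string()),
        cwd: Some("/Users/me/project".to_string()),
        ..Default::default()
    };
    serde_json::json!({ "method": "thread/start", "id": 10, "params": params })
}
```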
|
||||
### 2) List threads (pagination & filters)
|
||||
|
||||
`thread/list` lets you render a history UI. Pass any combination of:
|
||||
- `cursor` — opaque string from a prior response; omit for the first page.
|
||||
- `limit` — server defaults to a reasonable page size if unset.
|
||||
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
|
||||
|
||||
Example:
|
||||
|
||||
```json
|
||||
{ "method": "thread/list", "id": 20, "params": {
|
||||
"cursor": null,
|
||||
"limit": 25,
|
||||
} }
|
||||
{ "id": 20, "result": {
|
||||
"data": [
|
||||
{ "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
|
||||
{ "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
|
||||
],
|
||||
"nextCursor": "opaque-token-or-null"
|
||||
} }
|
||||
```
|
||||
|
||||
When `nextCursor` is `null`, you’ve reached the final page.
|
||||
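To collect every page, feed `nextCursor` back into the next call until it comes back `null`. A minimal sketch using the `ThreadListParams` and `ThreadListResponse` types from this diff; the `call` closure that sends the request and parses the result is assumed, not part of the API.

```rust
use codex_app_server_protocol::{Thread, ThreadListParams, ThreadListResponse};

fn list_all_threads(
    mut call: impl FnMut(ThreadListParams) -> anyhow::Result<ThreadListResponse>,
) -> anyhow::Result<Vec<Thread>> {
    let mut threads = Vec::new();
    let mut cursor: Option<String> = None;
    loop {
        let page = call(ThreadListParams {
            cursor: cursor.clone(),
            limit: Some(25),
            model_providers: None, // unset (or empty) includes all providers
        })?;
        threads.extend(page.data);
        match page.next_cursor {
            Some(next) => cursor = Some(next), // more pages to fetch
            None => break,                     // final page reached
        }
    }
    Ok(threads)
}
```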
|
||||
### 3) Archive a thread
|
||||
|
||||
Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.
|
||||
|
||||
```json
|
||||
{ "method": "thread/archive", "id": 21, "params": { "threadId": "thr_b" } }
|
||||
{ "id": 21, "result": {} }
|
||||
```
|
||||
|
||||
An archived thread will not appear in future calls to `thread/list`.
|
||||
|
||||
### 4) Start a turn (send user input)
|
||||
|
||||
Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:
|
||||
|
||||
- `{"type":"text","text":"Explain this diff"}`
|
||||
- `{"type":"image","url":"https://…png"}`
|
||||
- `{"type":"localImage","path":"/tmp/screenshot.png"}`
|
||||
|
||||
You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.
|
||||
|
||||
```json
|
||||
{ "method": "turn/start", "id": 30, "params": {
|
||||
"threadId": "thr_123",
|
||||
"input": [ { "type": "text", "text": "Run tests" } ],
|
||||
// Below are optional config overrides
|
||||
"cwd": "/Users/me/project",
|
||||
"approvalPolicy": "unlessTrusted",
|
||||
"sandboxPolicy": {
|
||||
"mode": "workspaceWrite",
|
||||
"writableRoots": ["/Users/me/project"],
|
||||
"networkAccess": true
|
||||
},
|
||||
"model": "gpt-5-codex",
|
||||
"effort": "medium",
|
||||
"summary": "concise"
|
||||
} }
|
||||
{ "id": 30, "result": { "turn": {
|
||||
"id": "turn_456",
|
||||
"status": "inProgress",
|
||||
"items": [],
|
||||
"error": null
|
||||
} } }
|
||||
```
|
||||
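Expressed with the protocol types from this diff, the request above looks roughly like the sketch below. Variant and field names are taken from the definitions earlier in this change; the surrounding JSON-RPC envelope and the assumption that these v2 types are re-exported at the crate root are mine.

```rust
use codex_app_server_protocol::{AskForApproval, SandboxPolicy, TurnStartParams, UserInput};

fn turn_start_request() -> serde_json::Value {
    let params = TurnStartParams {
        thread_id: "thr_123".to_string(),
        input: vec![UserInput::Text { text: "Run tests".to_string() }],
        approval_policy: Some(AskForApproval::UnlessTrusted),
        sandbox_policy: Some(SandboxPolicy::WorkspaceWrite {
            writable_roots: vec!["/Users/me/project".into()],
            network_access: true,
            exclude_tmpdir_env_var: false,
            exclude_slash_tmp: false,
        }),
        model: Some("gpt-5-codex".to_string()),
        // cwd, effort, and summary keep their defaults here.
        ..Default::default()
    };
    serde_json::json!({ "method": "turn/start", "id": 30, "params": params })
}
```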
|
||||
### 5) Interrupt an active turn
|
||||
|
||||
You can cancel a running Turn with `turn/interrupt`.
|
||||
|
||||
```json
|
||||
{ "method": "turn/interrupt", "id": 31, "params": {
|
||||
"threadId": "thr_123",
|
||||
"turnId": "turn_456"
|
||||
} }
|
||||
{ "id": 31, "result": {} }
|
||||
```
|
||||
|
||||
The server requests cancellation of any running subprocesses, then emits a `turn/completed` event with `status: "interrupted"`. Rely on the `turn/completed` notification to know when Codex-side cleanup is done.
|
||||
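For example, a client might watch the notification stream for the interrupted turn roughly as follows (a sketch that assumes incoming notifications have already been deserialized into the `ServerNotification` enum defined in this change):

```rust
use codex_app_server_protocol::{ServerNotification, TurnStatus};

fn interrupt_finished(notification: &ServerNotification, turn_id: &str) -> bool {
    match notification {
        // `turn/completed` carries the final Turn; an interrupted turn
        // finishes with status "interrupted".
        ServerNotification::TurnCompleted(n) => {
            n.turn.id == turn_id && n.turn.status == TurnStatus::Interrupted
        }
        _ => false,
    }
}
```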
|
||||
## Auth endpoints
|
||||
|
||||
The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.
|
||||
|
||||
### Quick reference
|
||||
- `account/read` — fetch current account info; optionally refresh tokens.
|
||||
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
|
||||
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
|
||||
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
|
||||
- `account/logout` — sign out; triggers `account/updated`.
|
||||
- `account/updated` (notify) — emitted whenever auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
|
||||
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).
|
||||
|
||||
### 1) Check auth state
|
||||
|
||||
Request:
|
||||
```json
|
||||
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
|
||||
```
|
||||
|
||||
Response examples:
|
||||
```json
|
||||
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
|
||||
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
|
||||
{ "id": 1, "result": { "account": { "type": "apiKey" }, "requiresOpenaiAuth": true } }
|
||||
{ "id": 1, "result": { "account": { "type": "chatgpt", "email": "user@example.com", "planType": "pro" }, "requiresOpenaiAuth": true } }
|
||||
```
|
||||
|
||||
Field notes:
|
||||
- `refreshToken` (bool): set `true` to force a token refresh.
|
||||
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.
|
||||
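A sketch of branching on this response with the `GetAccountParams`, `GetAccountResponse`, and `Account` types from this diff. The `call` closure is assumed; everything else follows the definitions above.

```rust
use codex_app_server_protocol::{Account, GetAccountParams, GetAccountResponse};

fn describe_auth(
    mut call: impl FnMut(GetAccountParams) -> anyhow::Result<GetAccountResponse>,
) -> anyhow::Result<String> {
    let response = call(GetAccountParams { refresh_token: false })?;
    let requires_auth = response.requires_openai_auth;
    Ok(match response.account {
        Some(Account::ApiKey {}) => "logged in with an API key".to_string(),
        Some(Account::Chatgpt { email, .. }) => format!("logged in with ChatGPT as {email}"),
        None if requires_auth => "login required".to_string(),
        None => "no OpenAI auth needed for the active provider".to_string(),
    })
}
```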
|
||||
### 2) Log in with an API key
|
||||
|
||||
1. Send:
|
||||
```json
|
||||
{ "method": "account/login/start", "id": 2, "params": { "type": "apiKey", "apiKey": "sk-…" } }
|
||||
```
|
||||
2. Expect:
|
||||
```json
|
||||
{ "id": 2, "result": { "type": "apiKey" } }
|
||||
```
|
||||
3. Notifications:
|
||||
```json
|
||||
{ "method": "account/login/completed", "params": { "loginId": null, "success": true, "error": null } }
|
||||
{ "method": "account/updated", "params": { "authMode": "apikey" } }
|
||||
```
|
||||
|
||||
### 3) Log in with ChatGPT (browser flow)
|
||||
|
||||
1. Start:
|
||||
```json
|
||||
{ "method": "account/login/start", "id": 3, "params": { "type": "chatgpt" } }
|
||||
{ "id": 3, "result": { "type": "chatgpt", "loginId": "<uuid>", "authUrl": "https://chatgpt.com/…&redirect_uri=http%3A%2F%2Flocalhost%3A<port>%2Fauth%2Fcallback" } }
|
||||
```
|
||||
2. Open `authUrl` in a browser; the app-server hosts the local callback.
|
||||
3. Wait for notifications:
|
||||
```json
|
||||
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": true, "error": null } }
|
||||
{ "method": "account/updated", "params": { "authMode": "chatgpt" } }
|
||||
```
|
||||
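Because `ServerNotification` is tagged by `method` with its payload under `params`, the notification lines above deserialize directly into the enum added in this change. A sketch, assuming each incoming line is a single JSON object:

```rust
use codex_app_server_protocol::ServerNotification;

fn handle_login_notification(line: &str) -> serde_json::Result<()> {
    // Wire shape: {"method": "account/login/completed", "params": { ... }}.
    let notification: ServerNotification = serde_json::from_str(line)?;
    if let ServerNotification::AccountLoginCompleted(done) = notification {
        if done.success {
            println!("login {:?} succeeded", done.login_id);
        } else {
            eprintln!("login failed: {:?}", done.error);
        }
    }
    Ok(())
}
```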
|
||||
### 4) Cancel a ChatGPT login
|
||||
|
||||
```json
|
||||
{ "method": "account/login/cancel", "id": 4, "params": { "loginId": "<uuid>" } }
|
||||
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": false, "error": "…" } }
|
||||
```
|
||||
|
||||
### 5) Logout
|
||||
|
||||
```json
|
||||
{ "method": "account/logout", "id": 5 }
|
||||
{ "id": 5, "result": {} }
|
||||
{ "method": "account/updated", "params": { "authMode": null } }
|
||||
```
|
||||
|
||||
### 6) Rate limits (ChatGPT)
|
||||
|
||||
```json
|
||||
{ "method": "account/rateLimits/read", "id": 6 }
|
||||
{ "id": 6, "result": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 1730947200 }, "secondary": null } } }
|
||||
{ "method": "account/rateLimits/updated", "params": { "rateLimits": { … } } }
|
||||
```
|
||||
|
||||
Field notes:
|
||||
- `usedPercent` is current usage within the OpenAI quota window.
|
||||
- `windowDurationMins` is the quota window length.
|
||||
- `resetsAt` is a Unix timestamp (seconds) for the next reset.
|
||||
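A sketch of reading the `account/rateLimits/read` result with the v2 types from this diff (it assumes `GetAccountRateLimitsResponse` is exported alongside the other protocol types):

```rust
use codex_app_server_protocol::GetAccountRateLimitsResponse;

fn report_rate_limits(result: serde_json::Value) -> serde_json::Result<()> {
    // `result` is the JSON-RPC result object shown above.
    let response: GetAccountRateLimitsResponse = serde_json::from_value(result)?;
    if let Some(primary) = response.rate_limits.primary {
        println!(
            "used {}% of a {:?}-minute window; resets at {:?}",
            primary.used_percent, primary.window_duration_mins, primary.resets_at
        );
    }
    Ok(())
}
```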
|
||||
### Dev notes
|
||||
|
||||
- `codex app-server generate-ts --out <dir>` emits v2 types under `v2/`.
|
||||
- `codex app-server generate-json-schema --out <dir>` outputs `codex_app_server_protocol.schemas.json`.
|
||||
- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.
|
||||
|
||||
File diff suppressed because it is too large
@@ -64,64 +64,79 @@ impl MessageProcessor {
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
let request_json = match serde_json::to_value(&request) {
|
||||
Ok(request_json) => request_json,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
let codex_request = match serde_json::from_value::<ClientRequest>(request_json) {
|
||||
Ok(codex_request) => codex_request,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Invalid request".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
}
|
||||
|
||||
pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_common::model_presets::ModelPreset;
|
||||
use codex_common::model_presets::ReasoningEffortPreset;
|
||||
use codex_common::model_presets::builtin_model_presets;
|
||||
|
||||
pub fn supported_models() -> Vec<Model> {
|
||||
builtin_model_presets(None)
|
||||
pub fn supported_models(auth_mode: Option<AuthMode>) -> Vec<Model> {
|
||||
builtin_model_presets(auth_mode)
|
||||
.into_iter()
|
||||
.map(model_from_preset)
|
||||
.collect()
|
||||
|
||||
@@ -141,9 +141,13 @@ pub(crate) struct OutgoingError {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use codex_app_server_protocol::AccountLoginCompletedNotification;
|
||||
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
|
||||
use codex_app_server_protocol::AccountUpdatedNotification;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
@@ -176,27 +180,77 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification = ServerNotification::AccountRateLimitsUpdated(RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25.0,
|
||||
window_minutes: Some(15),
|
||||
resets_at: Some(123),
|
||||
fn verify_account_login_completed_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification {
|
||||
login_id: Some(Uuid::nil().to_string()),
|
||||
success: true,
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/completed",
|
||||
"params": {
|
||||
"loginId": Uuid::nil().to_string(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
secondary: None,
|
||||
});
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25,
|
||||
window_duration_mins: Some(15),
|
||||
resets_at: Some(123),
|
||||
}),
|
||||
secondary: None,
|
||||
},
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/updated",
|
||||
"params": {
|
||||
"primary": {
|
||||
"used_percent": 25.0,
|
||||
"window_minutes": 15,
|
||||
"resets_at": 123,
|
||||
},
|
||||
"secondary": null,
|
||||
"rateLimits": {
|
||||
"primary": {
|
||||
"usedPercent": 25,
|
||||
"windowDurationMins": 15,
|
||||
"resetsAt": 123
|
||||
},
|
||||
"secondary": null
|
||||
}
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_updated_notification_serialization() {
|
||||
let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/updated",
|
||||
"params": {
|
||||
"authMode": "apikey"
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
|
||||
@@ -13,6 +13,7 @@ base64 = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
@@ -21,4 +22,5 @@ tokio = { workspace = true, features = [
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
] }
|
||||
uuid = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
|
||||
@@ -2,6 +2,7 @@ mod auth_fixtures;
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod responses;
|
||||
mod rollout;
|
||||
|
||||
pub use auth_fixtures::ChatGptAuthFixture;
|
||||
pub use auth_fixtures::ChatGptIdTokenClaims;
|
||||
@@ -10,9 +11,11 @@ pub use auth_fixtures::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use mcp_process::McpProcess;
|
||||
pub use mock_model_server::create_mock_chat_completions_server;
|
||||
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
|
||||
pub use responses::create_apply_patch_sse_response;
|
||||
pub use responses::create_final_assistant_message_sse_response;
|
||||
pub use responses::create_shell_sse_response;
|
||||
pub use rollout::create_fake_rollout;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
|
||||
|
||||
@@ -14,30 +14,37 @@ use anyhow::Context;
use assert_cmd::prelude::*;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::ArchiveConversationParams;
use codex_app_server_protocol::CancelLoginAccountParams;
use codex_app_server_protocol::CancelLoginChatGptParams;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientNotification;
use codex_app_server_protocol::FeedbackUploadParams;
use codex_app_server_protocol::GetAccountParams;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InterruptConversationParams;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserTurnParams;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::UploadFeedbackParams;

use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserTurnParams;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadListParams;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnStartParams;
use std::process::Command as StdCommand;
use tokio::process::Command;

@@ -243,10 +250,19 @@ impl McpProcess {
        self.send_request("account/rateLimits/read", None).await
    }

    /// Send a `feedback/upload` JSON-RPC request.
    pub async fn send_upload_feedback_request(
    /// Send an `account/read` JSON-RPC request.
    pub async fn send_get_account_request(
        &mut self,
        params: UploadFeedbackParams,
        params: GetAccountParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("account/read", params).await
    }

    /// Send a `feedback/upload` JSON-RPC request.
    pub async fn send_feedback_upload_request(
        &mut self,
        params: FeedbackUploadParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("feedback/upload", params).await
@@ -275,10 +291,46 @@ impl McpProcess {
        self.send_request("listConversations", params).await
    }

    /// Send a `thread/start` JSON-RPC request.
    pub async fn send_thread_start_request(
        &mut self,
        params: ThreadStartParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/start", params).await
    }

    /// Send a `thread/resume` JSON-RPC request.
    pub async fn send_thread_resume_request(
        &mut self,
        params: ThreadResumeParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/resume", params).await
    }

    /// Send a `thread/archive` JSON-RPC request.
    pub async fn send_thread_archive_request(
        &mut self,
        params: ThreadArchiveParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/archive", params).await
    }

    /// Send a `thread/list` JSON-RPC request.
    pub async fn send_thread_list_request(
        &mut self,
        params: ThreadListParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/list", params).await
    }

    /// Send a `model/list` JSON-RPC request.
    pub async fn send_list_models_request(
        &mut self,
        params: ListModelsParams,
        params: ModelListParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("model/list", params).await
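The helpers above give tests one call per v2 thread endpoint. A hedged end-to-end sketch of how they compose, using the same types and helpers the v2 suites later in this diff import; the timeout wrapper the real tests apply is omitted for brevity:

    async fn drive_thread_lifecycle(mcp: &mut McpProcess) -> anyhow::Result<()> {
        // Start a thread and read back its typed response.
        let start_id = mcp
            .send_thread_start_request(ThreadStartParams::default())
            .await?;
        let start_resp = mcp
            .read_stream_until_response_message(RequestId::Integer(start_id))
            .await?;
        let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;

        // Archive the same thread by id.
        let archive_id = mcp
            .send_thread_archive_request(ThreadArchiveParams { thread_id: thread.id })
            .await?;
        let _resp = mcp
            .read_stream_until_response_message(RequestId::Integer(archive_id))
            .await?;
        Ok(())
    }
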
@@ -307,6 +359,24 @@ impl McpProcess {
        self.send_request("loginChatGpt", None).await
    }

    /// Send a `turn/start` JSON-RPC request (v2).
    pub async fn send_turn_start_request(
        &mut self,
        params: TurnStartParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("turn/start", params).await
    }

    /// Send a `turn/interrupt` JSON-RPC request (v2).
    pub async fn send_turn_interrupt_request(
        &mut self,
        params: TurnInterruptParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("turn/interrupt", params).await
    }

    /// Send a `cancelLoginChatGpt` JSON-RPC request.
    pub async fn send_cancel_login_chat_gpt_request(
        &mut self,
@@ -321,6 +391,40 @@ impl McpProcess {
        self.send_request("logoutChatGpt", None).await
    }

    /// Send an `account/logout` JSON-RPC request.
    pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("account/logout", None).await
    }

    /// Send an `account/login/start` JSON-RPC request for API key login.
    pub async fn send_login_account_api_key_request(
        &mut self,
        api_key: &str,
    ) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "type": "apiKey",
            "apiKey": api_key,
        });
        self.send_request("account/login/start", Some(params)).await
    }

    /// Send an `account/login/start` JSON-RPC request for ChatGPT login.
    pub async fn send_login_account_chatgpt_request(&mut self) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "type": "chatgpt"
        });
        self.send_request("account/login/start", Some(params)).await
    }

    /// Send an `account/login/cancel` JSON-RPC request.
    pub async fn send_cancel_login_account_request(
        &mut self,
        params: CancelLoginAccountParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("account/login/cancel", params).await
    }

    /// Send a `fuzzyFileSearch` JSON-RPC request.
    pub async fn send_fuzzy_file_search_request(
        &mut self,

@@ -29,6 +29,25 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
    server
}

/// Same as `create_mock_chat_completions_server` but does not enforce an
/// expectation on the number of calls.
pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
    let server = MockServer::start().await;

    let seq_responder = SeqResponder {
        num_calls: AtomicUsize::new(0),
        responses,
    };

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
        .respond_with(seq_responder)
        .mount(&server)
        .await;

    server
}

struct SeqResponder {
    num_calls: AtomicUsize,
    responses: Vec<String>,

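The unchecked variant above is intended for tests that may issue zero (or a variable number of) model requests. A hedged usage sketch, mirroring how the v2 thread tests later in this diff point their mock provider at the server:

    // Start the permissive mock and point the test config's base_url at it,
    // e.g. base_url = "{server.uri()}/v1" in the generated config.toml.
    let server = create_mock_chat_completions_server_unchecked(vec![]).await;
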
82
codex-rs/app-server/tests/common/rollout.rs
Normal file
@@ -0,0 +1,82 @@
use anyhow::Result;
use codex_protocol::ConversationId;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionSource;
use serde_json::json;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use uuid::Uuid;

/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
///
/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.
/// - `meta_rfc3339` is the envelope timestamp used in JSON lines.
/// - `preview` is the user message preview text.
/// - `model_provider` optionally sets the provider in the session meta payload.
///
/// Returns the generated conversation/session UUID as a string.
pub fn create_fake_rollout(
    codex_home: &Path,
    filename_ts: &str,
    meta_rfc3339: &str,
    preview: &str,
    model_provider: Option<&str>,
) -> Result<String> {
    let uuid = Uuid::new_v4();
    let uuid_str = uuid.to_string();
    let conversation_id = ConversationId::from_string(&uuid_str)?;

    // sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
    let year = &filename_ts[0..4];
    let month = &filename_ts[5..7];
    let day = &filename_ts[8..10];
    let dir = codex_home.join("sessions").join(year).join(month).join(day);
    fs::create_dir_all(&dir)?;

    let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));

    // Build JSONL lines
    let payload = serde_json::to_value(SessionMeta {
        id: conversation_id,
        timestamp: meta_rfc3339.to_string(),
        cwd: PathBuf::from("/"),
        originator: "codex".to_string(),
        cli_version: "0.0.0".to_string(),
        instructions: None,
        source: SessionSource::Cli,
        model_provider: model_provider.map(str::to_string),
    })?;

    let lines = [
        json!({
            "timestamp": meta_rfc3339,
            "type": "session_meta",
            "payload": payload
        })
        .to_string(),
        json!({
            "timestamp": meta_rfc3339,
            "type":"response_item",
            "payload": {
                "type":"message",
                "role":"user",
                "content":[{"type":"input_text","text": preview}]
            }
        })
        .to_string(),
        json!({
            "timestamp": meta_rfc3339,
            "type":"event_msg",
            "payload": {
                "type":"user_message",
                "message": preview,
                "kind": "plain"
            }
        })
        .to_string(),
    ];

    fs::write(file_path, lines.join("\n") + "\n")?;
    Ok(uuid_str)
}
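The doc comment above spells out the arguments; for reference, this is how the thread_list tests later in this diff call the helper (the TempDir stands in for CODEX_HOME):

    let codex_home = tempfile::TempDir::new()?;
    let conversation_id = create_fake_rollout(
        codex_home.path(),
        "2025-01-02T12-00-00",  // filename timestamp (YYYY-MM-DDThh-mm-ss)
        "2025-01-02T12:00:00Z", // envelope timestamp (RFC 3339)
        "Hello",                // user message preview
        Some("mock_provider"),  // provider recorded in the session meta
    )?;
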
@@ -12,7 +12,7 @@ use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(20);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn archive_conversation_moves_rollout_into_archived_directory() -> Result<()> {

@@ -146,7 +146,7 @@ fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
sandbox_mode = "read-only"

model_provider = "mock_provider"

@@ -1,5 +1,6 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
@@ -15,12 +16,8 @@ use codex_core::protocol::EventMsg;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::fs;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
use uuid::Uuid;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

@@ -357,70 +354,3 @@ async fn test_list_and_resume_conversations() -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_fake_rollout(
|
||||
codex_home: &Path,
|
||||
filename_ts: &str,
|
||||
meta_rfc3339: &str,
|
||||
preview: &str,
|
||||
model_provider: Option<&str>,
|
||||
) -> Result<()> {
|
||||
let uuid = Uuid::new_v4();
|
||||
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
let month = &filename_ts[5..7];
|
||||
let day = &filename_ts[8..10];
|
||||
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
||||
fs::create_dir_all(&dir)?;
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
let mut lines = Vec::new();
|
||||
// Meta line with timestamp (flattened meta in payload for new schema)
|
||||
let mut payload = json!({
|
||||
"id": uuid,
|
||||
"timestamp": meta_rfc3339,
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null,
|
||||
});
|
||||
if let Some(provider) = model_provider {
|
||||
payload["model_provider"] = json!(provider);
|
||||
}
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": payload
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
// Minimal user message entry as a persisted response item (with envelope timestamp)
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
// Add a matching user message event line to satisfy filters
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"kind": "plain"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -7,9 +7,8 @@ mod fuzzy_file_search;
|
||||
mod interrupt;
|
||||
mod list_resume;
|
||||
mod login;
|
||||
mod model_list;
|
||||
mod rate_limits;
|
||||
mod send_message;
|
||||
mod set_default_model;
|
||||
mod user_agent;
|
||||
mod user_info;
|
||||
mod v2;
|
||||
|
||||
492
codex-rs/app-server/tests/suite/v2/account.rs
Normal file
@@ -0,0 +1,492 @@
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::Account;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountResponse;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAccountResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LogoutAccountResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_login::login_with_api_key;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serial_test::serial;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
// Helper to create a minimal config.toml for the app server
|
||||
#[derive(Default)]
|
||||
struct CreateConfigTomlParams {
|
||||
forced_method: Option<String>,
|
||||
forced_workspace_id: Option<String>,
|
||||
requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, params: CreateConfigTomlParams) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
let forced_line = if let Some(method) = params.forced_method {
|
||||
format!("forced_login_method = \"{method}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let forced_workspace_line = if let Some(ws) = params.forced_workspace_id {
|
||||
format!("forced_chatgpt_workspace_id = \"{ws}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let requires_line = match params.requires_openai_auth {
|
||||
Some(true) => "requires_openai_auth = true\n".to_string(),
|
||||
Some(false) => String::new(),
|
||||
None => String::new(),
|
||||
};
|
||||
let contents = format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
{forced_line}
|
||||
{forced_workspace_line}
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "http://127.0.0.1:0/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
"#
|
||||
);
|
||||
std::fs::write(config_toml, contents)
|
||||
}
|
||||
|
||||
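CreateConfigTomlParams above keeps the per-test config knobs optional; a typical call from the tests that follow:

    // Forced-ChatGPT login, everything else left at the defaults.
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            forced_method: Some("chatgpt".to_string()),
            ..Default::default()
        },
    )?;
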
#[tokio::test]
|
||||
async fn logout_account_removes_auth_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
login_with_api_key(
|
||||
codex_home.path(),
|
||||
"sk-test-key",
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let id = mcp.send_logout_account_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: LogoutAccountResponse = to_response(resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
assert!(
|
||||
payload.auth_mode.is_none(),
|
||||
"auth_method should be None after logout"
|
||||
);
|
||||
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should be deleted"
|
||||
);
|
||||
|
||||
let get_id = mcp
|
||||
.send_get_account_request(GetAccountParams {
|
||||
refresh_token: false,
|
||||
})
|
||||
.await?;
|
||||
let get_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(get_resp)?;
|
||||
assert_eq!(account.account, None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
assert_eq!(login, LoginAccountResponse::ApiKey {});
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, None);
|
||||
pretty_assertions::assert_eq!(payload.success, true);
|
||||
pretty_assertions::assert_eq!(payload.error, None);
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.auth_mode, Some(AuthMode::ApiKey));
|
||||
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_rejected_when_forced_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("chatgpt".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"API key login is disabled. Use ChatGPT login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_chatgpt_rejected_when_forced_api() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("api".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"ChatGPT login is disabled. Use API key login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_start() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { login_id, auth_url } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("redirect_uri=http%3A%2F%2Flocalhost"),
|
||||
"auth_url should contain a redirect_uri to localhost"
|
||||
);
|
||||
|
||||
let cancel_id = mcp
|
||||
.send_cancel_login_account_request(CancelLoginAccountParams {
|
||||
login_id: login_id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let cancel_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: CancelLoginAccountResponse = to_response(cancel_resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, Some(login_id));
|
||||
pretty_assertions::assert_eq!(payload.success, false);
|
||||
assert!(
|
||||
payload.error.is_some(),
|
||||
"expected a non-empty error on cancel"
|
||||
);
|
||||
|
||||
let maybe_updated = timeout(
|
||||
Duration::from_millis(500),
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
maybe_updated.is_err(),
|
||||
"account/updated should not be emitted when login is cancelled"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_workspace_id: Some("ws-forced".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { auth_url, .. } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("allowed_workspace_id=ws-forced"),
|
||||
"auth URL should include forced workspace"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_no_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
assert_eq!(account.account, None, "expected no account");
|
||||
assert_eq!(account.requires_openai_auth, true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_api_key() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let _login_ok = to_response::<LoginAccountResponse>(resp)?;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::ApiKey {}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_when_auth_not_required() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(false),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: None,
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("access-chatgpt")
|
||||
.email("user@example.com")
|
||||
.plan_type("pro"),
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
9
codex-rs/app-server/tests/suite/v2/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
mod account;
|
||||
mod model_list;
|
||||
mod rate_limits;
|
||||
mod thread_archive;
|
||||
mod thread_list;
|
||||
mod thread_resume;
|
||||
mod thread_start;
|
||||
mod turn_interrupt;
|
||||
mod turn_start;
|
||||
@@ -6,9 +6,9 @@ use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListModelsParams;
|
||||
use codex_app_server_protocol::ListModelsResponse;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::ModelListResponse;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
@@ -19,7 +19,7 @@ use tokio::time::timeout;
|
||||
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -27,8 +27,8 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(100),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(100),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
@@ -39,14 +39,17 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse { items, next_cursor } = to_response::<ListModelsResponse>(response)?;
|
||||
let ModelListResponse {
|
||||
data: items,
|
||||
next_cursor,
|
||||
} = to_response::<ModelListResponse>(response)?;
|
||||
|
||||
let expected_models = vec![
|
||||
Model {
|
||||
id: "gpt-5-codex".to_string(),
|
||||
model: "gpt-5-codex".to_string(),
|
||||
display_name: "gpt-5-codex".to_string(),
|
||||
description: "Optimized for coding tasks with many tools.".to_string(),
|
||||
description: "Optimized for codex.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
@@ -103,7 +106,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_pagination_works() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -111,8 +114,8 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let first_request = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(1),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
@@ -123,18 +126,18 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse {
|
||||
items: first_items,
|
||||
let ModelListResponse {
|
||||
data: first_items,
|
||||
next_cursor: first_cursor,
|
||||
} = to_response::<ListModelsResponse>(first_response)?;
|
||||
} = to_response::<ModelListResponse>(first_response)?;
|
||||
|
||||
assert_eq!(first_items.len(), 1);
|
||||
assert_eq!(first_items[0].id, "gpt-5-codex");
|
||||
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;
|
||||
|
||||
let second_request = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(1),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: Some(next_cursor.clone()),
|
||||
})
|
||||
.await?;
|
||||
@@ -145,10 +148,10 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse {
|
||||
items: second_items,
|
||||
let ModelListResponse {
|
||||
data: second_items,
|
||||
next_cursor: second_cursor,
|
||||
} = to_response::<ListModelsResponse>(second_response)?;
|
||||
} = to_response::<ModelListResponse>(second_response)?;
|
||||
|
||||
assert_eq!(second_items.len(), 1);
|
||||
assert_eq!(second_items[0].id, "gpt-5");
|
||||
@@ -156,7 +159,7 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_rejects_invalid_cursor() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -164,8 +167,8 @@ async fn list_models_rejects_invalid_cursor() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: None,
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: None,
|
||||
cursor: Some("invalid".to_string()),
|
||||
})
|
||||
.await?;
|
||||
@@ -7,10 +7,10 @@ use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
@@ -26,7 +26,7 @@ use wiremock::matchers::path;
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
@@ -51,7 +51,7 @@ async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
@@ -78,7 +78,7 @@ async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
write_chatgpt_auth(
|
||||
@@ -143,13 +143,13 @@ async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
    let expected = GetAccountRateLimitsResponse {
        rate_limits: RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 42.0,
                window_minutes: Some(60),
                used_percent: 42,
                window_duration_mins: Some(60),
                resets_at: Some(primary_reset_timestamp),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 5.0,
                window_minutes: Some(1440),
                used_percent: 5,
                window_duration_mins: Some(1440),
                resets_at: Some(secondary_reset_timestamp),
            }),
        },

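The hunk above tracks the protocol-side changes: RateLimitSnapshot and RateLimitWindow now come from codex_protocol::protocol, used_percent becomes an integer, and window_minutes becomes window_duration_mins. A minimal construction sketch with the new names, assuming the struct has only the fields visible in this hunk:

    use codex_protocol::protocol::RateLimitSnapshot;
    use codex_protocol::protocol::RateLimitWindow;

    // Field names as they appear in the updated expectation above.
    let _snapshot = RateLimitSnapshot {
        primary: Some(RateLimitWindow {
            used_percent: 42,
            window_duration_mins: Some(60),
            resets_at: None, // the test pins concrete reset timestamps here
        }),
        secondary: None,
    };
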
93
codex-rs/app-server/tests/suite/v2/thread_archive.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadArchiveResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use codex_core::find_conversation_path_by_id_str;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_archive_moves_rollout_into_archived_directory() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread.
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
assert!(!thread.id.is_empty());
|
||||
|
||||
// Locate the rollout path recorded for this thread id.
|
||||
let rollout_path = find_conversation_path_by_id_str(codex_home.path(), &thread.id)
|
||||
.await?
|
||||
.expect("expected rollout path for thread id to exist");
|
||||
assert!(
|
||||
rollout_path.exists(),
|
||||
"expected {} to exist",
|
||||
rollout_path.display()
|
||||
);
|
||||
|
||||
// Archive the thread.
|
||||
let archive_id = mcp
|
||||
.send_thread_archive_request(ThreadArchiveParams {
|
||||
thread_id: thread.id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let archive_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
|
||||
)
|
||||
.await??;
|
||||
let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;
|
||||
|
||||
// Verify file moved.
|
||||
let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
// The archived file keeps the original filename (rollout-...-<id>.jsonl).
|
||||
let archived_rollout_path =
|
||||
archived_directory.join(rollout_path.file_name().expect("rollout file name"));
|
||||
assert!(
|
||||
!rollout_path.exists(),
|
||||
"expected rollout path {} to be moved",
|
||||
rollout_path.display()
|
||||
);
|
||||
assert!(
|
||||
archived_rollout_path.exists(),
|
||||
"expected archived rollout path {} to exist",
|
||||
archived_rollout_path.display()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(config_toml, config_contents())
|
||||
}
|
||||
|
||||
fn config_contents() -> &'static str {
|
||||
r#"model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
"#
|
||||
}
|
||||
220
codex-rs/app-server/tests/suite/v2/thread_list.rs
Normal file
@@ -0,0 +1,220 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadListResponse;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use uuid::Uuid;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_basic_empty() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// List threads in an empty CODEX_HOME; should return an empty page with nextCursor: null.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: None,
|
||||
})
|
||||
.await?;
|
||||
let list_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(list_resp)?;
|
||||
assert!(data.is_empty());
|
||||
assert_eq!(next_cursor, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Minimal config.toml for listing.
|
||||
fn create_minimal_config(codex_home: &std::path::Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create three rollouts so we can paginate with limit=2.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T12-00-00",
|
||||
"2025-01-02T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _b = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T13-00-00",
|
||||
"2025-01-01T13:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _c = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T12-00-00",
|
||||
"2025-01-01T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Page 1: limit 2 → expect next_cursor Some.
|
||||
let page1_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page1_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page1_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data1,
|
||||
next_cursor: cursor1,
|
||||
} = to_response::<ThreadListResponse>(page1_resp)?;
|
||||
assert_eq!(data1.len(), 2);
|
||||
for thread in &data1 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
let cursor1 = cursor1.expect("expected nextCursor on first page");
|
||||
|
||||
// Page 2: with cursor → expect next_cursor None when no more results.
|
||||
let page2_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: Some(cursor1),
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page2_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page2_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data2,
|
||||
next_cursor: cursor2,
|
||||
} = to_response::<ThreadListResponse>(page2_resp)?;
|
||||
assert!(data2.len() <= 2);
|
||||
for thread in &data2 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
assert_eq!(cursor2, None, "expected nextCursor to be null on last page");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
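The two pages above exercise the cursor contract for thread/list; a hedged client-side sketch of draining every page with the same helpers (the timeout wrapper the tests use is omitted):

    let mut cursor: Option<String> = None;
    let mut all_threads = Vec::new();
    loop {
        let id = mcp
            .send_thread_list_request(ThreadListParams {
                cursor: cursor.clone(),
                limit: Some(2),
                model_providers: None,
            })
            .await?;
        let resp = mcp
            .read_stream_until_response_message(RequestId::Integer(id))
            .await?;
        let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;
        all_threads.extend(data);
        match next_cursor {
            Some(next) => cursor = Some(next), // keep paging
            None => break,                     // last page reached
        }
    }
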
|
||||
#[tokio::test]
|
||||
async fn thread_list_respects_provider_filter() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create rollouts under two providers.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T10-00-00",
|
||||
"2025-01-02T10:00:00Z",
|
||||
"X",
|
||||
Some("mock_provider"),
|
||||
)?; // mock_provider
|
||||
// one with a different provider
|
||||
let uuid = Uuid::new_v4();
|
||||
let dir = codex_home
|
||||
.path()
|
||||
.join("sessions")
|
||||
.join("2025")
|
||||
.join("01")
|
||||
.join("02");
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
let file_path = dir.join(format!("rollout-2025-01-02T11-00-00-{uuid}.jsonl"));
|
||||
let lines = [
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type": "session_meta",
|
||||
"payload": {
|
||||
"id": uuid,
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null,
|
||||
"source": "vscode",
|
||||
"model_provider": "other_provider"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type":"response_item",
|
||||
"payload": {"type":"message","role":"user","content":[{"type":"input_text","text":"X"}]}
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type":"event_msg",
|
||||
"payload": {"type":"user_message","message":"X","kind":"plain"}
|
||||
})
|
||||
.to_string(),
|
||||
];
|
||||
std::fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Filter to only other_provider; expect 1 item, nextCursor None.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: Some(vec!["other_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;
|
||||
assert_eq!(data.len(), 1);
|
||||
assert_eq!(next_cursor, None);
|
||||
let thread = &data[0];
|
||||
assert_eq!(thread.preview, "X");
|
||||
assert_eq!(thread.model_provider, "other_provider");
|
||||
let expected_ts = chrono::DateTime::parse_from_rfc3339("2025-01-02T11:00:00Z")?.timestamp();
|
||||
assert_eq!(thread.created_at, expected_ts);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
79
codex-rs/app-server/tests/suite/v2/thread_resume.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadResumeResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_returns_existing_thread() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread.
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("gpt-5-codex".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// Resume it via v2 API.
|
||||
let resume_id = mcp
|
||||
.send_thread_resume_request(ThreadResumeParams {
|
||||
thread_id: thread.id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadResumeResponse { thread: resumed } =
|
||||
to_response::<ThreadResumeResponse>(resume_resp)?;
|
||||
assert_eq!(resumed, thread);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
90
codex-rs/app-server/tests/suite/v2/thread_start.rs
Normal file
@@ -0,0 +1,90 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::ThreadStartedNotification;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
|
||||
// Provide a mock server and config so model wiring is valid.
|
||||
let server = create_mock_chat_completions_server(vec![]).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
// Start server and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a v2 thread with an explicit model override.
|
||||
let req_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("gpt-5".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Expect a proper JSON-RPC response with a thread id.
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(resp)?;
|
||||
assert!(!thread.id.is_empty(), "thread id should not be empty");
|
||||
assert!(
|
||||
thread.preview.is_empty(),
|
||||
"new threads should start with an empty preview"
|
||||
);
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(
|
||||
thread.created_at > 0,
|
||||
"created_at should be a positive UNIX timestamp"
|
||||
);
|
||||
|
||||
// A corresponding thread/started notification should arrive.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("thread/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: ThreadStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(started.thread, thread);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
128
codex-rs/app-server/tests/suite/v2/turn_interrupt.rs
Normal file
@@ -0,0 +1,128 @@
#![cfg(unix)]

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnInterruptResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn turn_interrupt_aborts_running_turn() -> Result<()> {
    // Use a portable sleep command to keep the turn running.
    #[cfg(target_os = "windows")]
    let shell_command = vec![
        "powershell".to_string(),
        "-Command".to_string(),
        "Start-Sleep -Seconds 10".to_string(),
    ];
    #[cfg(not(target_os = "windows"))]
    let shell_command = vec!["sleep".to_string(), "10".to_string()];

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let working_directory = tmp.path().join("workdir");
    std::fs::create_dir(&working_directory)?;

    // Mock server: long-running shell command then (after abort) nothing else needed.
    let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),
        "call_sleep",
    )?])
    .await;
    create_config_toml(&codex_home, &server.uri())?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a v2 thread and capture its id.
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn that triggers a long-running command.
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run sleep".to_string(),
            }],
            cwd: Some(working_directory.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    // Give the command a brief moment to start.
    tokio::time::sleep(std::time::Duration::from_secs(1)).await;

    // Interrupt the in-progress turn by id (v2 API).
    let interrupt_id = mcp
        .send_turn_interrupt_request(TurnInterruptParams {
            thread_id: thread.id,
            turn_id: turn.id,
        })
        .await?;
    let interrupt_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(interrupt_id)),
    )
    .await??;
    let _resp: TurnInterruptResponse = to_response::<TurnInterruptResponse>(interrupt_resp)?;

    // No fields to assert on; successful deserialization confirms proper response shape.
    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "workspace-write"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
486
codex-rs/app-server/tests/suite/v2/turn_start.rs
Normal file
486
codex-rs/app-server/tests/suite/v2/turn_start.rs
Normal file
@@ -0,0 +1,486 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {
|
||||
// Provide a mock server and config so model wiring is valid.
|
||||
// Three Codex turns hit the mock model (session start + two turn/start calls).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread (v2) and capture its id.
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
// Start a turn with only input and thread_id set (no overrides).
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// Expect a turn/started notification.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: TurnStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(
|
||||
started.turn.status,
|
||||
codex_app_server_protocol::TurnStatus::InProgress
|
||||
);
|
||||
|
||||
// Send a second turn that exercises the overrides path: change the model.
|
||||
let turn_req2 = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Second".to_string(),
|
||||
}],
|
||||
model: Some("mock-model-override".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp2: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req2)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn: turn2 } = to_response::<TurnStartResponse>(turn_resp2)?;
|
||||
assert!(!turn2.id.is_empty());
|
||||
// Ensure the second turn has a different id than the first.
|
||||
assert_ne!(turn.id, turn2.id);
|
||||
|
||||
// Expect a second turn/started notification as well.
|
||||
let _notif2: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// And we should ultimately get a task_complete without having to add a
|
||||
// legacy conversation listener explicitly (auto-attached by thread/start).
|
||||
let _task_complete: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_accepts_local_image_input() -> Result<()> {
|
||||
// Two Codex turns hit the mock model (session start + turn/start).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
// Use the unchecked variant because the request payload includes a LocalImage
|
||||
// which the strict matcher does not currently cover.
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let image_path = codex_home.path().join("image.png");
|
||||
// No need to actually write the file; we just exercise the input path.
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::LocalImage { path: image_path }],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// This test only validates that turn/start responds and returns a turn.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().to_path_buf();
|
||||
|
||||
// Mock server: first turn requests a shell call (elicitation), then completes.
|
||||
// Second turn same, but we'll set approval_policy=never to avoid elicitation.
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call1",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call2",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
// Default approval is untrusted to force elicitation on first turn.
|
||||
create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.as_path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// turn/start — expect ExecCommandApproval request from server
|
||||
let first_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
// Acknowledge RPC
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Receive elicitation
|
||||
let server_req = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ExecCommandApproval { request_id, params } = server_req else {
|
||||
panic!("expected ExecCommandApproval request");
|
||||
};
|
||||
assert_eq!(params.call_id, "call1");
|
||||
assert_eq!(
|
||||
params.parsed_cmd,
|
||||
vec![ParsedCommand::Unknown {
|
||||
cmd: "python3 -c 'print(42)'".to_string()
|
||||
}]
|
||||
);
|
||||
|
||||
// Approve and wait for task completion
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
|
||||
)
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Second turn with approval_policy=never should not elicit approval
|
||||
let second_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python again".to_string(),
|
||||
}],
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Ensure we do NOT receive an ExecCommandApproval request before task completes
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
|
||||
// When returning Result from a test, pass an Ok(()) to the skip macro
|
||||
// so the early return type matches. The no-arg form returns unit.
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace_root = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace_root)?;
|
||||
let first_cwd = workspace_root.join("turn1");
|
||||
let second_cwd = workspace_root.join("turn2");
|
||||
std::fs::create_dir(&first_cwd)?;
|
||||
std::fs::create_dir(&second_cwd)?;
|
||||
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo first turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-first",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done first")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-second",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done second")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// first turn with workspace-write sandbox and first_cwd
|
||||
let first_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "first turn".to_string(),
|
||||
}],
|
||||
cwd: Some(first_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: vec![first_cwd.clone()],
|
||||
network_access: false,
|
||||
exclude_tmpdir_env_var: false,
|
||||
exclude_slash_tmp: false,
|
||||
}),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn)),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// second turn with workspace-write and second_cwd, ensure exec begins in second_cwd
|
||||
let second_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "second turn".to_string(),
|
||||
}],
|
||||
cwd: Some(second_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let exec_begin_notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
|
||||
)
|
||||
.await??;
|
||||
let params = exec_begin_notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("exec_command_begin params");
|
||||
let event: Event = serde_json::from_value(params).expect("deserialize exec begin event");
|
||||
let exec_begin = match event.msg {
|
||||
EventMsg::ExecCommandBegin(exec_begin) => exec_begin,
|
||||
other => panic!("expected ExecCommandBegin event, got {other:?}"),
|
||||
};
|
||||
assert_eq!(exec_begin.cwd, second_cwd);
|
||||
assert_eq!(
|
||||
exec_begin.command,
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string()
|
||||
]
|
||||
);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(
|
||||
codex_home: &Path,
|
||||
server_uri: &str,
|
||||
approval_policy: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "{approval_policy}"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -288,7 +288,7 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
                path,
                ApplyPatchFileChange::Update {
                    unified_diff,
                    move_path: move_path.map(|p| cwd.join(p)),
                    move_path: move_path.map(|p| effective_cwd.join(p)),
                    new_content: contents,
                },
            );
@@ -1603,6 +1603,53 @@ g
    );
}

#[test]
fn test_apply_patch_resolves_move_path_with_effective_cwd() {
    let session_dir = tempdir().unwrap();
    let worktree_rel = "alt";
    let worktree_dir = session_dir.path().join(worktree_rel);
    fs::create_dir_all(&worktree_dir).unwrap();

    let source_name = "old.txt";
    let dest_name = "renamed.txt";
    let source_path = worktree_dir.join(source_name);
    fs::write(&source_path, "before\n").unwrap();

    let patch = wrap_patch(&format!(
        r#"*** Update File: {source_name}
*** Move to: {dest_name}
@@
-before
+after"#
    ));

    let shell_script = format!("cd {worktree_rel} && apply_patch <<'PATCH'\n{patch}\nPATCH");
    let argv = vec!["bash".into(), "-lc".into(), shell_script];

    let result = maybe_parse_apply_patch_verified(&argv, session_dir.path());
    let action = match result {
        MaybeApplyPatchVerified::Body(action) => action,
        other => panic!("expected verified body, got {other:?}"),
    };

    assert_eq!(action.cwd, worktree_dir);

    let change = action
        .changes()
        .get(&worktree_dir.join(source_name))
        .expect("source file change present");

    match change {
        ApplyPatchFileChange::Update { move_path, .. } => {
            assert_eq!(
                move_path.as_deref(),
                Some(worktree_dir.join(dest_name).as_path())
            );
        }
        other => panic!("expected update change, got {other:?}"),
    }
}

#[test]
fn test_apply_patch_fails_on_write_error() {
    let dir = tempdir().unwrap();

@@ -30,7 +30,6 @@ codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }
codex-protocol = { workspace = true }
codex-protocol-ts = { workspace = true }
codex-responses-api-proxy = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-stdio-to-uds = { workspace = true }
@@ -39,6 +38,7 @@ ctor = { workspace = true }
owo-colors = { workspace = true }
serde_json = { workspace = true }
supports-color = { workspace = true }
toml = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",

@@ -5,6 +5,7 @@ use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::exec_env::create_env;
|
||||
use codex_core::landlock::spawn_command_under_linux_sandbox;
|
||||
#[cfg(target_os = "macos")]
|
||||
use codex_core::seatbelt::spawn_command_under_seatbelt;
|
||||
use codex_core::spawn::StdioPolicy;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
@@ -14,6 +15,7 @@ use crate::SeatbeltCommand;
|
||||
use crate::WindowsCommand;
|
||||
use crate::exit_status::handle_exit_status;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
command: SeatbeltCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
@@ -33,6 +35,14 @@ pub async fn run_command_under_seatbelt(
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
_command: SeatbeltCommand,
|
||||
_codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
anyhow::bail!("Seatbelt sandbox is only available on macOS");
|
||||
}
|
||||
|
||||
pub async fn run_command_under_landlock(
|
||||
command: LandlockCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
@@ -72,6 +82,7 @@ pub async fn run_command_under_windows(
|
||||
}
|
||||
|
||||
enum SandboxType {
|
||||
#[cfg(target_os = "macos")]
|
||||
Seatbelt,
|
||||
Landlock,
|
||||
Windows,
|
||||
@@ -124,6 +135,9 @@ async fn run_command_under_sandbox(
|
||||
let cwd_clone = cwd.clone();
|
||||
let env_map = env.clone();
|
||||
let command_vec = command.clone();
|
||||
let base_dir = config.codex_home.clone();
|
||||
|
||||
// Preflight audit is invoked elsewhere at the appropriate times.
|
||||
let res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(
|
||||
policy_str,
|
||||
@@ -132,6 +146,7 @@ async fn run_command_under_sandbox(
|
||||
&cwd_clone,
|
||||
env_map,
|
||||
None,
|
||||
Some(base_dir.as_path()),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
@@ -166,6 +181,7 @@ async fn run_command_under_sandbox(
|
||||
}
|
||||
|
||||
let mut child = match sandbox_type {
|
||||
#[cfg(target_os = "macos")]
|
||||
SandboxType::Seatbelt => {
|
||||
spawn_command_under_seatbelt(
|
||||
command,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use clap::Args;
|
||||
use clap::CommandFactory;
|
||||
use clap::Parser;
|
||||
use clap_complete::Shell;
|
||||
@@ -20,14 +21,17 @@ use codex_exec::Cli as ExecCli;
|
||||
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
|
||||
use codex_tui::AppExitInfo;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use codex_tui::updates::UpdateAction;
|
||||
use codex_tui::update_action::UpdateAction;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::path::PathBuf;
|
||||
use supports_color::Stream;
|
||||
|
||||
mod mcp_cmd;
|
||||
#[cfg(not(windows))]
|
||||
mod wsl_paths;
|
||||
|
||||
use crate::mcp_cmd::McpCli;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::features::is_known_feature_key;
|
||||
@@ -79,8 +83,8 @@ enum Subcommand {
|
||||
/// [experimental] Run the Codex MCP server (stdio transport).
|
||||
McpServer,
|
||||
|
||||
/// [experimental] Run the app server.
|
||||
AppServer,
|
||||
/// [experimental] Run the app server or related tooling.
|
||||
AppServer(AppServerCommand),
|
||||
|
||||
/// Generate shell completion scripts.
|
||||
Completion(CompletionCommand),
|
||||
@@ -96,9 +100,6 @@ enum Subcommand {
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Internal: generate TypeScript protocol bindings.
|
||||
#[clap(hide = true)]
|
||||
GenerateTs(GenerateTsCommand),
|
||||
/// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
|
||||
#[clap(name = "cloud", alias = "cloud-tasks")]
|
||||
Cloud(CloudTasksCli),
|
||||
@@ -205,6 +206,22 @@ struct LogoutCommand {
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct AppServerCommand {
|
||||
/// Omit to run the app server; specify a subcommand for tooling.
|
||||
#[command(subcommand)]
|
||||
subcommand: Option<AppServerSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum AppServerSubcommand {
|
||||
/// [experimental] Generate TypeScript bindings for the app server protocol.
|
||||
GenerateTs(GenerateTsCommand),
|
||||
|
||||
/// [experimental] Generate JSON Schema for the app server protocol.
|
||||
GenerateJsonSchema(GenerateJsonSchemaCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateTsCommand {
|
||||
/// Output directory where .ts files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
@@ -215,6 +232,13 @@ struct GenerateTsCommand {
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateJsonSchemaCommand {
|
||||
/// Output directory where the schema bundle will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct StdioToUdsCommand {
|
||||
/// Path to the Unix domain socket to connect to.
|
||||
@@ -267,10 +291,30 @@ fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> {
|
||||
/// Run the update action and print the result.
|
||||
fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
|
||||
println!();
|
||||
let (cmd, args) = action.command_args();
|
||||
let cmd_str = action.command_str();
|
||||
println!("Updating Codex via `{cmd_str}`...");
|
||||
let status = std::process::Command::new(cmd).args(args).status()?;
|
||||
|
||||
let status = {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// On Windows, run via cmd.exe so .CMD/.BAT are correctly resolved (PATHEXT semantics).
|
||||
std::process::Command::new("cmd")
|
||||
.args(["/C", &cmd_str])
|
||||
.status()?
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
let (cmd, args) = action.command_args();
|
||||
let command_path = crate::wsl_paths::normalize_for_wsl(cmd);
|
||||
let normalized_args: Vec<String> = args
|
||||
.iter()
|
||||
.map(crate::wsl_paths::normalize_for_wsl)
|
||||
.collect();
|
||||
std::process::Command::new(&command_path)
|
||||
.args(&normalized_args)
|
||||
.status()?
|
||||
}
|
||||
};
|
||||
if !status.success() {
|
||||
anyhow::bail!("`{cmd_str}` failed with status {status}");
|
||||
}
|
||||
@@ -387,9 +431,20 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::AppServer(app_server_cli)) => match app_server_cli.subcommand {
|
||||
None => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateTs(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_ts(
|
||||
&gen_cli.out_dir,
|
||||
gen_cli.prettier.as_deref(),
|
||||
)?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateJsonSchema(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_json(&gen_cli.out_dir)?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
@@ -504,21 +559,24 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
tokio::task::spawn_blocking(move || codex_stdio_to_uds::run(socket_path.as_path()))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
|
||||
FeaturesSubcommand::List => {
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
let mut cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
// Honor `--search` via the new feature toggle.
|
||||
if interactive.web_search {
|
||||
cli_kv_overrides.push((
|
||||
"features.web_search_request".to_string(),
|
||||
toml::Value::Boolean(true),
|
||||
));
|
||||
}
|
||||
|
||||
// Thread through relevant top-level flags (at minimum, `--profile`).
|
||||
// Also honor `--search` since it maps to a feature toggle.
|
||||
let overrides = ConfigOverrides {
|
||||
config_profile: interactive.config_profile.clone(),
|
||||
tools_web_search_request: interactive.web_search.then_some(true),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
|
||||
@@ -353,7 +353,9 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
if !config.features.enabled(Feature::RmcpClient) {
|
||||
bail!("OAuth login is only supported when [feature].rmcp_client is true in config.toml.");
|
||||
bail!(
|
||||
"OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
);
|
||||
}
|
||||
|
||||
let LoginArgs { name, scopes } = login_args;
|
||||
|
||||
76
codex-rs/cli/src/wsl_paths.rs
Normal file
76
codex-rs/cli/src/wsl_paths.rs
Normal file
@@ -0,0 +1,76 @@
use std::ffi::OsStr;

/// WSL-specific path helpers used by the updater logic.
///
/// See https://github.com/openai/codex/issues/6086.
pub fn is_wsl() -> bool {
    #[cfg(target_os = "linux")]
    {
        if std::env::var_os("WSL_DISTRO_NAME").is_some() {
            return true;
        }
        match std::fs::read_to_string("/proc/version") {
            Ok(version) => version.to_lowercase().contains("microsoft"),
            Err(_) => false,
        }
    }
    #[cfg(not(target_os = "linux"))]
    {
        false
    }
}

/// Convert a Windows absolute path (`C:\foo\bar` or `C:/foo/bar`) to a WSL mount path (`/mnt/c/foo/bar`).
/// Returns `None` if the input does not look like a Windows drive path.
pub fn win_path_to_wsl(path: &str) -> Option<String> {
    let bytes = path.as_bytes();
    if bytes.len() < 3
        || bytes[1] != b':'
        || !(bytes[2] == b'\\' || bytes[2] == b'/')
        || !bytes[0].is_ascii_alphabetic()
    {
        return None;
    }
    let drive = (bytes[0] as char).to_ascii_lowercase();
    let tail = path[3..].replace('\\', "/");
    if tail.is_empty() {
        return Some(format!("/mnt/{drive}"));
    }
    Some(format!("/mnt/{drive}/{tail}"))
}

/// If under WSL and given a Windows-style path, return the equivalent `/mnt/<drive>/…` path.
/// Otherwise returns the input unchanged.
pub fn normalize_for_wsl<P: AsRef<OsStr>>(path: P) -> String {
    let value = path.as_ref().to_string_lossy().to_string();
    if !is_wsl() {
        return value;
    }
    if let Some(mapped) = win_path_to_wsl(&value) {
        return mapped;
    }
    value
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn win_to_wsl_basic() {
        assert_eq!(
            win_path_to_wsl(r"C:\Temp\codex.zip").as_deref(),
            Some("/mnt/c/Temp/codex.zip")
        );
        assert_eq!(
            win_path_to_wsl("D:/Work/codex.tgz").as_deref(),
            Some("/mnt/d/Work/codex.tgz")
        );
        assert!(win_path_to_wsl("/home/user/codex").is_none());
    }

    #[test]
    fn normalize_is_noop_on_unix_paths() {
        assert_eq!(normalize_for_wsl("/home/u/x"), "/home/u/x");
    }
}
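A minimal sketch of the intended behavior of these helpers, written as one more unit test that would sit inside the `tests` module above; the test itself is an illustration added here, not part of the change, and it only relies on the functions shown.

    #[test]
    fn normalize_maps_windows_paths_only_under_wsl() {
        // Drive paths convert regardless of separator style.
        assert_eq!(
            win_path_to_wsl(r"E:\Downloads\codex-cli.zip").as_deref(),
            Some("/mnt/e/Downloads/codex-cli.zip")
        );
        // A bare drive root maps to the mount point itself.
        assert_eq!(win_path_to_wsl("C:/").as_deref(), Some("/mnt/c"));

        // Outside WSL, normalize_for_wsl is a passthrough even for Windows
        // paths, because is_wsl() returns false on non-WSL hosts.
        if !is_wsl() {
            assert_eq!(
                normalize_for_wsl(r"C:\Temp\codex.zip"),
                r"C:\Temp\codex.zip".to_string()
            );
        }
    }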
@@ -8,6 +8,7 @@ pub mod util;
|
||||
pub use cli::Cli;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use codex_login::AuthManager;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
@@ -56,20 +57,8 @@ async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext>
|
||||
};
|
||||
append_error_log(format!("startup: base_url={base_url} path_style={style}"));
|
||||
|
||||
let auth = match codex_core::config::find_codex_home()
|
||||
.ok()
|
||||
.map(|home| {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
codex_login::AuthManager::new(home, false, store_mode)
|
||||
})
|
||||
.and_then(|am| am.auth())
|
||||
{
|
||||
let auth_manager = util::load_auth_manager().await;
|
||||
let auth = match auth_manager.as_ref().and_then(AuthManager::auth) {
|
||||
Some(auth) => auth,
|
||||
None => {
|
||||
eprintln!(
|
||||
@@ -1044,7 +1033,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Close task modal/pending apply if present before opening env modal
|
||||
app.diff_overlay = None;
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch {
|
||||
app.env_loading = true;
|
||||
@@ -1115,7 +1104,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
let _ = tx.send(evt);
|
||||
});
|
||||
} else {
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
app.status = "No environment selected".to_string();
|
||||
}
|
||||
}
|
||||
needs_redraw = true;
|
||||
@@ -1313,18 +1302,6 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Environment modal key handling
|
||||
match key.code {
|
||||
KeyCode::Esc => { app.env_modal = None; needs_redraw = true; }
|
||||
KeyCode::Char('r') | KeyCode::Char('R') => {
|
||||
// Trigger refresh of environments
|
||||
app.env_loading = true; app.env_error = None; needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
let tx = tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()));
|
||||
let headers = crate::util::build_chatgpt_headers().await;
|
||||
let res = crate::env_detect::list_environments(&base_url, &headers).await;
|
||||
let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res));
|
||||
});
|
||||
}
|
||||
KeyCode::Char(ch) if !key.modifiers.contains(KeyModifiers::CONTROL) && !key.modifiers.contains(KeyModifiers::ALT) => {
|
||||
if let Some(m) = app.env_modal.as_mut() { m.query.push(ch); }
|
||||
needs_redraw = true;
|
||||
@@ -1431,7 +1408,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
}
|
||||
KeyCode::Char('o') | KeyCode::Char('O') => {
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch { app.env_loading = true; app.env_error = None; }
|
||||
needs_redraw = true;
|
||||
|
||||
@@ -945,9 +945,7 @@ pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) {
|
||||
|
||||
// Subheader with usage hints (dim cyan)
|
||||
let subheader = Paragraph::new(Line::from(
|
||||
"Type to search, Enter select, Esc cancel; r refresh"
|
||||
.cyan()
|
||||
.dim(),
|
||||
"Type to search, Enter select, Esc cancel".cyan().dim(),
|
||||
))
|
||||
.wrap(Wrap { trim: true });
|
||||
frame.render_widget(subheader, rows[0]);
|
||||
|
||||
@@ -2,6 +2,10 @@ use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_login::AuthManager;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
guard.replace(suffix.to_string());
|
||||
@@ -54,6 +58,18 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub async fn load_auth_manager() -> Option<AuthManager> {
|
||||
// TODO: pass in cli overrides once cloud tasks properly support them.
|
||||
let config = Config::load_with_cli_overrides(Vec::new(), ConfigOverrides::default())
|
||||
.await
|
||||
.ok()?;
|
||||
Some(AuthManager::new(
|
||||
config.codex_home,
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
))
|
||||
}
|
||||
|
||||
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
|
||||
/// and optional `ChatGPT-Account-Id`.
|
||||
pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
@@ -69,31 +85,22 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
USER_AGENT,
|
||||
HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
|
||||
);
|
||||
if let Ok(home) = codex_core::config::find_codex_home() {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
let am = codex_login::AuthManager::new(home, false, store_mode);
|
||||
if let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
if let Some(am) = load_auth_manager().await
|
||||
&& let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
}
|
||||
headers
|
||||
|
||||
@@ -19,8 +19,8 @@ use toml::Value;
pub struct CliConfigOverrides {
    /// Override a configuration value that would otherwise be loaded from
    /// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
    /// nested values. The `value` portion is parsed as JSON. If it fails to
    /// parse as JSON, the raw string is used as a literal.
    /// nested values. The `value` portion is parsed as TOML. If it fails to
    /// parse as TOML, the raw string is used as a literal.
    ///
    /// Examples:
    /// - `-c model="o3"`
@@ -59,7 +59,7 @@ impl CliConfigOverrides {
                    return Err(format!("Empty key in override: {s}"));
                }

                // Attempt to parse as JSON. If that fails, treat it as a raw
                // Attempt to parse as TOML. If that fails, treat it as a raw
                // string. This allows convenient usage such as
                // `-c model=o3` without the quotes.
                let value: Value = match parse_toml_value(value_str) {

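For context on the JSON-to-TOML wording change above, here is a hypothetical sketch of the documented fallback using the `toml` crate; `parse_override_value` and the document-wrapping trick are illustrative assumptions, not the crate's actual `parse_toml_value` implementation.

use toml::Value;

// Try to read the right-hand side of `-c key=value` as a TOML value by
// wrapping it in a one-line document; fall back to a literal string.
fn parse_override_value(raw: &str) -> Value {
    toml::from_str::<toml::Table>(&format!("value = {raw}"))
        .ok()
        .and_then(|mut table| table.remove("value"))
        .unwrap_or_else(|| Value::String(raw.to_string()))
}

fn main() {
    assert_eq!(parse_override_value("true"), Value::Boolean(true)); // valid TOML
    assert_eq!(parse_override_value("\"o3\""), Value::String("o3".into()));
    // `-c model=o3` without quotes is not valid TOML, so it stays a literal.
    assert_eq!(parse_override_value("o3"), Value::String("o3".into()));
}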
@@ -34,7 +34,7 @@ const PRESETS: &[ModelPreset] = &[
        id: "gpt-5-codex",
        model: "gpt-5-codex",
        display_name: "gpt-5-codex",
        description: "Optimized for coding tasks with many tools.",
        description: "Optimized for codex.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
@@ -52,6 +52,24 @@ const PRESETS: &[ModelPreset] = &[
        ],
        is_default: true,
    },
    ModelPreset {
        id: "gpt-5-codex-mini",
        model: "gpt-5-codex-mini",
        display_name: "gpt-5-codex-mini",
        description: "Optimized for codex. Cheaper, faster, but less capable.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Dynamically adjusts reasoning based on the task",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: false,
    },
    ModelPreset {
        id: "gpt-5",
        model: "gpt-5",
@@ -80,8 +98,13 @@ const PRESETS: &[ModelPreset] = &[
    },
];

pub fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    PRESETS.to_vec()
pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    let allow_codex_mini = matches!(auth_mode, Some(AuthMode::ChatGPT));
    PRESETS
        .iter()
        .filter(|preset| allow_codex_mini || preset.id != "gpt-5-codex-mini")
        .copied()
        .collect()
}

#[cfg(test)]

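A small sketch of how the new gating behaves, written as a test that would live next to `builtin_model_presets`; the test body is an illustration added here and relies only on the items shown in the hunk above.

#[test]
fn codex_mini_is_gated_on_chatgpt_auth() {
    // With ChatGPT auth, the cheaper gpt-5-codex-mini preset is offered.
    let with_chatgpt = builtin_model_presets(Some(AuthMode::ChatGPT));
    assert!(with_chatgpt.iter().any(|p| p.id == "gpt-5-codex-mini"));

    // Without an auth mode, the mini preset is filtered out of the list.
    let without_auth = builtin_model_presets(None);
    assert!(without_auth.iter().all(|p| p.id != "gpt-5-codex-mini"));
}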
@@ -80,7 +80,7 @@ toml_edit = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tree-sitter = { workspace = true }
tree-sitter-bash = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4"] }
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
which = { workspace = true }
wildmatch = { workspace = true }
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }

@@ -2,8 +2,6 @@ You are Codex, based on GPT-5. You are running as a coding agent in the Codex CL

## General

- The arguments to `shell` will be passed to execvp(). Most terminal commands should be prefixed with ["bash", "-lc"].
- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.
- When searching for text or files, prefer using `rg` or `rg --files` respectively because `rg` is much faster than alternatives like `grep`. (If the `rg` command is not found, then use alternatives.)

## Editing constraints
@@ -16,6 +14,7 @@ You are Codex, based on GPT-5. You are running as a coding agent in the Codex CL
  * If asked to make a commit or code edits and there are unrelated changes to your work or changes that you didn't make in those files, don't revert those changes.
  * If the changes are in files you've touched recently, you should read carefully and understand how you can work with the changes rather than reverting them.
  * If the changes are in unrelated files, just ignore them and don't revert them.
- Do not amend a commit unless explicitly requested to do so.
- While you are working, you might notice unexpected changes that you didn't make. If this happens, STOP IMMEDIATELY and ask the user how they would like to proceed.
- **NEVER** use destructive commands like `git reset --hard` or `git checkout --` unless specifically requested or approved by the user.
|
||||
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
mod storage;
|
||||
|
||||
use chrono::Utc;
|
||||
use reqwest::StatusCode;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
#[cfg(test)]
|
||||
use serial_test::serial;
|
||||
use std::env;
|
||||
use std::fmt::Debug;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -22,10 +24,16 @@ use crate::auth::storage::AuthStorageBackend;
|
||||
use crate::auth::storage::create_auth_storage;
|
||||
use crate::config::Config;
|
||||
use crate::default_client::CodexHttpClient;
|
||||
use crate::token_data::PlanType;
|
||||
use crate::error::RefreshTokenFailedError;
|
||||
use crate::error::RefreshTokenFailedReason;
|
||||
use crate::token_data::KnownPlan as InternalKnownPlan;
|
||||
use crate::token_data::PlanType as InternalPlanType;
|
||||
use crate::token_data::TokenData;
|
||||
use crate::token_data::parse_id_token;
|
||||
use crate::util::try_parse_error_message;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
use serde_json::Value;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CodexAuth {
|
||||
@@ -46,18 +54,54 @@ impl PartialEq for CodexAuth {
|
||||
// TODO(pakrym): use token exp field to check for expiration instead
|
||||
const TOKEN_REFRESH_INTERVAL: i64 = 8;
|
||||
|
||||
const REFRESH_TOKEN_EXPIRED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token has expired. Please log out and sign in again.";
|
||||
const REFRESH_TOKEN_REUSED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token was already used. Please log out and sign in again.";
|
||||
const REFRESH_TOKEN_INVALIDATED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token was revoked. Please log out and sign in again.";
|
||||
const REFRESH_TOKEN_UNKNOWN_MESSAGE: &str =
|
||||
"Your access token could not be refreshed. Please log out and sign in again.";
|
||||
const REFRESH_TOKEN_URL: &str = "https://auth.openai.com/oauth/token";
|
||||
pub const REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR: &str = "CODEX_REFRESH_TOKEN_URL_OVERRIDE";
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum RefreshTokenError {
|
||||
#[error("{0}")]
|
||||
Permanent(#[from] RefreshTokenFailedError),
|
||||
#[error(transparent)]
|
||||
Transient(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
impl RefreshTokenError {
|
||||
pub fn failed_reason(&self) -> Option<RefreshTokenFailedReason> {
|
||||
match self {
|
||||
Self::Permanent(error) => Some(error.reason),
|
||||
Self::Transient(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn other_with_message(message: impl Into<String>) -> Self {
|
||||
Self::Transient(std::io::Error::other(message.into()))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<RefreshTokenError> for std::io::Error {
|
||||
fn from(err: RefreshTokenError) -> Self {
|
||||
match err {
|
||||
RefreshTokenError::Permanent(failed) => std::io::Error::other(failed),
|
||||
RefreshTokenError::Transient(inner) => inner,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CodexAuth {
|
||||
pub async fn refresh_token(&self) -> Result<String, std::io::Error> {
|
||||
pub async fn refresh_token(&self) -> Result<String, RefreshTokenError> {
|
||||
tracing::info!("Refreshing token");
|
||||
|
||||
let token_data = self
|
||||
.get_current_token_data()
|
||||
.ok_or(std::io::Error::other("Token data is not available."))?;
|
||||
let token_data = self.get_current_token_data().ok_or_else(|| {
|
||||
RefreshTokenError::Transient(std::io::Error::other("Token data is not available."))
|
||||
})?;
|
||||
let token = token_data.refresh_token;
|
||||
|
||||
let refresh_response = try_refresh_token(token, &self.client)
|
||||
.await
|
||||
.map_err(std::io::Error::other)?;
|
||||
let refresh_response = try_refresh_token(token, &self.client).await?;
|
||||
|
||||
let updated = update_tokens(
|
||||
&self.storage,
|
||||
@@ -65,7 +109,8 @@ impl CodexAuth {
|
||||
refresh_response.access_token,
|
||||
refresh_response.refresh_token,
|
||||
)
|
||||
.await?;
|
||||
.await
|
||||
.map_err(RefreshTokenError::from)?;
|
||||
|
||||
if let Ok(mut auth_lock) = self.auth_dot_json.lock() {
|
||||
*auth_lock = Some(updated.clone());
|
||||
@@ -74,7 +119,7 @@ impl CodexAuth {
|
||||
let access = match updated.tokens {
|
||||
Some(t) => t.access_token,
|
||||
None => {
|
||||
return Err(std::io::Error::other(
|
||||
return Err(RefreshTokenError::other_with_message(
|
||||
"Token data is not available after refresh.",
|
||||
));
|
||||
}
|
||||
@@ -99,15 +144,21 @@ impl CodexAuth {
|
||||
..
|
||||
}) => {
|
||||
if last_refresh < Utc::now() - chrono::Duration::days(TOKEN_REFRESH_INTERVAL) {
|
||||
let refresh_response = tokio::time::timeout(
|
||||
let refresh_result = tokio::time::timeout(
|
||||
Duration::from_secs(60),
|
||||
try_refresh_token(tokens.refresh_token.clone(), &self.client),
|
||||
)
|
||||
.await
|
||||
.map_err(|_| {
|
||||
std::io::Error::other("timed out while refreshing OpenAI API key")
|
||||
})?
|
||||
.map_err(std::io::Error::other)?;
|
||||
.await;
|
||||
let refresh_response = match refresh_result {
|
||||
Ok(Ok(response)) => response,
|
||||
Ok(Err(err)) => return Err(err.into()),
|
||||
Err(_) => {
|
||||
return Err(std::io::Error::new(
|
||||
ErrorKind::TimedOut,
|
||||
"timed out while refreshing OpenAI API key",
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let updated_auth_dot_json = update_tokens(
|
||||
&self.storage,
|
||||
@@ -153,7 +204,34 @@ impl CodexAuth {
|
||||
self.get_current_token_data().and_then(|t| t.id_token.email)
|
||||
}
|
||||
|
||||
pub(crate) fn get_plan_type(&self) -> Option<PlanType> {
|
||||
/// Account-facing plan classification derived from the current token.
|
||||
/// Returns a high-level `AccountPlanType` (e.g., Free/Plus/Pro/Team/…)
|
||||
/// mapped from the ID token's internal plan value. Prefer this when you
|
||||
/// need to make UI or product decisions based on the user's subscription.
|
||||
pub fn account_plan_type(&self) -> Option<AccountPlanType> {
|
||||
let map_known = |kp: &InternalKnownPlan| match kp {
|
||||
InternalKnownPlan::Free => AccountPlanType::Free,
|
||||
InternalKnownPlan::Plus => AccountPlanType::Plus,
|
||||
InternalKnownPlan::Pro => AccountPlanType::Pro,
|
||||
InternalKnownPlan::Team => AccountPlanType::Team,
|
||||
InternalKnownPlan::Business => AccountPlanType::Business,
|
||||
InternalKnownPlan::Enterprise => AccountPlanType::Enterprise,
|
||||
InternalKnownPlan::Edu => AccountPlanType::Edu,
|
||||
};
|
||||
|
||||
self.get_current_token_data()
|
||||
.and_then(|t| t.id_token.chatgpt_plan_type)
|
||||
.map(|pt| match pt {
|
||||
InternalPlanType::Known(k) => map_known(&k),
|
||||
InternalPlanType::Unknown(_) => AccountPlanType::Unknown,
|
||||
})
|
||||
}
|
||||
|
||||
/// Raw internal plan value from the ID token.
|
||||
/// Exposes the underlying `token_data::PlanType` without mapping it to the
|
||||
/// public `AccountPlanType`. Use this when downstream code needs to inspect
|
||||
/// internal/unknown plan strings exactly as issued in the token.
|
||||
pub(crate) fn get_plan_type(&self) -> Option<InternalPlanType> {
|
||||
self.get_current_token_data()
|
||||
.and_then(|t| t.id_token.chatgpt_plan_type)
|
||||
}
|
||||
@@ -425,7 +503,7 @@ async fn update_tokens(
|
||||
async fn try_refresh_token(
|
||||
refresh_token: String,
|
||||
client: &CodexHttpClient,
|
||||
) -> std::io::Result<RefreshResponse> {
|
||||
) -> Result<RefreshResponse, RefreshTokenError> {
|
||||
let refresh_request = RefreshRequest {
|
||||
client_id: CLIENT_ID,
|
||||
grant_type: "refresh_token",
|
||||
@@ -433,30 +511,93 @@ async fn try_refresh_token(
|
||||
scope: "openid profile email",
|
||||
};
|
||||
|
||||
let endpoint = refresh_token_endpoint();
|
||||
|
||||
// Use shared client factory to include standard headers
|
||||
let response = client
|
||||
.post("https://auth.openai.com/oauth/token")
|
||||
.post(endpoint.as_str())
|
||||
.header("Content-Type", "application/json")
|
||||
.json(&refresh_request)
|
||||
.send()
|
||||
.await
|
||||
.map_err(std::io::Error::other)?;
|
||||
.map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;
|
||||
|
||||
if response.status().is_success() {
|
||||
let status = response.status();
|
||||
if status.is_success() {
|
||||
let refresh_response = response
|
||||
.json::<RefreshResponse>()
|
||||
.await
|
||||
.map_err(std::io::Error::other)?;
|
||||
.map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;
|
||||
Ok(refresh_response)
|
||||
} else {
|
||||
Err(std::io::Error::other(format!(
|
||||
"Failed to refresh token: {}: {}",
|
||||
response.status(),
|
||||
try_parse_error_message(&response.text().await.unwrap_or_default()),
|
||||
)))
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
if status == StatusCode::UNAUTHORIZED {
|
||||
let failed = classify_refresh_token_failure(&body);
|
||||
Err(RefreshTokenError::Permanent(failed))
|
||||
} else {
|
||||
let message = try_parse_error_message(&body);
|
||||
Err(RefreshTokenError::Transient(std::io::Error::other(
|
||||
format!("Failed to refresh token: {status}: {message}"),
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn classify_refresh_token_failure(body: &str) -> RefreshTokenFailedError {
    let code = extract_refresh_token_error_code(body);

    let normalized_code = code.as_deref().map(str::to_ascii_lowercase);
    let reason = match normalized_code.as_deref() {
        Some("refresh_token_expired") => RefreshTokenFailedReason::Expired,
        Some("refresh_token_reused") => RefreshTokenFailedReason::Exhausted,
        Some("refresh_token_invalidated") => RefreshTokenFailedReason::Revoked,
        _ => RefreshTokenFailedReason::Other,
    };

    if reason == RefreshTokenFailedReason::Other {
        tracing::warn!(
            backend_code = normalized_code.as_deref(),
            backend_body = body,
            "Encountered unknown 401 response while refreshing token"
        );
    }

    let message = match reason {
        RefreshTokenFailedReason::Expired => REFRESH_TOKEN_EXPIRED_MESSAGE.to_string(),
        RefreshTokenFailedReason::Exhausted => REFRESH_TOKEN_REUSED_MESSAGE.to_string(),
        RefreshTokenFailedReason::Revoked => REFRESH_TOKEN_INVALIDATED_MESSAGE.to_string(),
        RefreshTokenFailedReason::Other => REFRESH_TOKEN_UNKNOWN_MESSAGE.to_string(),
    };

    RefreshTokenFailedError::new(reason, message)
}
|
||||
|
||||
fn extract_refresh_token_error_code(body: &str) -> Option<String> {
    if body.trim().is_empty() {
        return None;
    }

    let Value::Object(map) = serde_json::from_str::<Value>(body).ok()? else {
        return None;
    };

    if let Some(error_value) = map.get("error") {
        match error_value {
            Value::Object(obj) => {
                if let Some(code) = obj.get("code").and_then(Value::as_str) {
                    return Some(code.to_string());
                }
            }
            Value::String(code) => {
                return Some(code.to_string());
            }
            _ => {}
        }
    }

    map.get("code").and_then(Value::as_str).map(str::to_string)
}
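Illustrative test sketch for the error-code extraction helper above (not part of the patch; the JSON bodies are hypothetical examples shaped like the payloads the helper already handles):

    #[cfg(test)]
    mod extract_refresh_token_error_code_sketch {
        use super::*;

        #[test]
        fn extracts_nested_and_top_level_codes() {
            // Nested object form: {"error": {"code": "..."}}.
            let nested = r#"{"error":{"code":"refresh_token_expired"}}"#;
            assert_eq!(
                extract_refresh_token_error_code(nested),
                Some("refresh_token_expired".to_string())
            );

            // Flat form: {"code": "..."}.
            let flat = r#"{"code":"refresh_token_reused"}"#;
            assert_eq!(
                extract_refresh_token_error_code(flat),
                Some("refresh_token_reused".to_string())
            );

            // Anything that is not a JSON object yields None.
            assert_eq!(extract_refresh_token_error_code("not json"), None);
        }
    }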
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct RefreshRequest {
|
||||
client_id: &'static str,
|
||||
@@ -475,6 +616,11 @@ struct RefreshResponse {
|
||||
// Shared constant for token refresh (client id used for oauth token refresh flow)
|
||||
pub const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";
|
||||
|
||||
fn refresh_token_endpoint() -> String {
    std::env::var(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR)
        .unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
}
|
||||
|
||||
use std::sync::RwLock;
|
||||
|
||||
/// Internal cached auth state.
|
||||
@@ -492,8 +638,9 @@ mod tests {
|
||||
use crate::config::ConfigOverrides;
|
||||
use crate::config::ConfigToml;
|
||||
use crate::token_data::IdTokenInfo;
|
||||
use crate::token_data::KnownPlan;
|
||||
use crate::token_data::PlanType;
|
||||
use crate::token_data::KnownPlan as InternalKnownPlan;
|
||||
use crate::token_data::PlanType as InternalPlanType;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
|
||||
use base64::Engine;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
@@ -610,7 +757,7 @@ mod tests {
|
||||
tokens: Some(TokenData {
|
||||
id_token: IdTokenInfo {
|
||||
email: Some("user@example.com".to_string()),
|
||||
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
|
||||
chatgpt_plan_type: Some(InternalPlanType::Known(InternalKnownPlan::Pro)),
|
||||
chatgpt_account_id: None,
|
||||
raw_jwt: fake_jwt,
|
||||
},
|
||||
@@ -864,6 +1011,54 @@ mod tests {
|
||||
.contains("ChatGPT login is required, but an API key is currently being used.")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plan_type_maps_known_plan() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
let _jwt = write_auth_file(
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "pro".to_string(),
|
||||
chatgpt_account_id: None,
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
.expect("failed to write auth file");
|
||||
|
||||
let auth = super::load_auth(codex_home.path(), false, AuthCredentialsStoreMode::File)
|
||||
.expect("load auth")
|
||||
.expect("auth available");
|
||||
|
||||
pretty_assertions::assert_eq!(auth.account_plan_type(), Some(AccountPlanType::Pro));
|
||||
pretty_assertions::assert_eq!(
|
||||
auth.get_plan_type(),
|
||||
Some(InternalPlanType::Known(InternalKnownPlan::Pro))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plan_type_maps_unknown_to_unknown() {
|
||||
let codex_home = tempdir().unwrap();
|
||||
let _jwt = write_auth_file(
|
||||
AuthFileParams {
|
||||
openai_api_key: None,
|
||||
chatgpt_plan_type: "mystery-tier".to_string(),
|
||||
chatgpt_account_id: None,
|
||||
},
|
||||
codex_home.path(),
|
||||
)
|
||||
.expect("failed to write auth file");
|
||||
|
||||
let auth = super::load_auth(codex_home.path(), false, AuthCredentialsStoreMode::File)
|
||||
.expect("load auth")
|
||||
.expect("auth available");
|
||||
|
||||
pretty_assertions::assert_eq!(auth.account_plan_type(), Some(AccountPlanType::Unknown));
|
||||
pretty_assertions::assert_eq!(
|
||||
auth.get_plan_type(),
|
||||
Some(InternalPlanType::Unknown("mystery-tier".to_string()))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Central manager providing a single source of truth for auth.json derived
|
||||
@@ -965,7 +1160,9 @@ impl AuthManager {
|
||||
|
||||
/// Attempt to refresh the current auth token (if any). On success, reload
|
||||
/// the auth state from disk so other components observe refreshed token.
|
||||
pub async fn refresh_token(&self) -> std::io::Result<Option<String>> {
|
||||
/// If the token refresh fails in a permanent (non‑transient) way, logs out
|
||||
/// to clear invalid auth state.
|
||||
pub async fn refresh_token(&self) -> Result<Option<String>, RefreshTokenError> {
|
||||
let auth = match self.auth() {
|
||||
Some(a) => a,
|
||||
None => return Ok(None),
|
||||
|
||||
@@ -31,6 +31,7 @@ use tracing::warn;
|
||||
|
||||
use crate::AuthManager;
|
||||
use crate::auth::CodexAuth;
|
||||
use crate::auth::RefreshTokenError;
|
||||
use crate::chat_completions::AggregateStreamExt;
|
||||
use crate::chat_completions::stream_chat_completions;
|
||||
use crate::client_common::Prompt;
|
||||
@@ -389,12 +390,17 @@ impl ModelClient {
|
||||
&& let Some(manager) = auth_manager.as_ref()
|
||||
&& let Some(auth) = auth.as_ref()
|
||||
&& auth.mode == AuthMode::ChatGPT
|
||||
&& let Err(err) = manager.refresh_token().await
|
||||
{
|
||||
manager.refresh_token().await.map_err(|err| {
|
||||
StreamAttemptError::Fatal(CodexErr::Fatal(format!(
|
||||
"Failed to refresh ChatGPT credentials: {err}"
|
||||
)))
|
||||
})?;
|
||||
let stream_error = match err {
|
||||
RefreshTokenError::Permanent(failed) => {
|
||||
StreamAttemptError::Fatal(CodexErr::RefreshTokenFailed(failed))
|
||||
}
|
||||
RefreshTokenError::Transient(other) => {
|
||||
StreamAttemptError::RetryableTransportError(CodexErr::Io(other))
|
||||
}
|
||||
};
|
||||
return Err(stream_error);
|
||||
}
|
||||
|
||||
// The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx
|
||||
@@ -441,6 +447,8 @@ impl ModelClient {
|
||||
return Err(StreamAttemptError::Fatal(codex_err));
|
||||
} else if error.r#type.as_deref() == Some("usage_not_included") {
|
||||
return Err(StreamAttemptError::Fatal(CodexErr::UsageNotIncluded));
|
||||
} else if is_quota_exceeded_error(&error) {
|
||||
return Err(StreamAttemptError::Fatal(CodexErr::QuotaExceeded));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -838,6 +846,8 @@ async fn process_sse<S>(
|
||||
Ok(error) => {
|
||||
if is_context_window_error(&error) {
|
||||
response_error = Some(CodexErr::ContextWindowExceeded);
|
||||
} else if is_quota_exceeded_error(&error) {
|
||||
response_error = Some(CodexErr::QuotaExceeded);
|
||||
} else {
|
||||
let delay = try_parse_retry_after(&error);
|
||||
let message = error.message.clone().unwrap_or_default();
|
||||
@@ -931,8 +941,10 @@ async fn stream_from_fixture(
|
||||
fn rate_limit_regex() -> &'static Regex {
|
||||
static RE: OnceLock<Regex> = OnceLock::new();
|
||||
|
||||
// Match both OpenAI-style messages like "Please try again in 1.898s"
|
||||
// and Azure OpenAI-style messages like "Try again in 35 seconds".
|
||||
#[expect(clippy::unwrap_used)]
|
||||
RE.get_or_init(|| Regex::new(r"Please try again in (\d+(?:\.\d+)?)(s|ms)").unwrap())
|
||||
RE.get_or_init(|| Regex::new(r"(?i)try again in\s*(\d+(?:\.\d+)?)\s*(s|ms|seconds?)").unwrap())
|
||||
}
|
||||
|
||||
fn try_parse_retry_after(err: &Error) -> Option<Duration> {
|
||||
@@ -940,7 +952,8 @@ fn try_parse_retry_after(err: &Error) -> Option<Duration> {
|
||||
return None;
|
||||
}
|
||||
|
||||
// parse the Please try again in 1.898s format using regex
|
||||
// parse retry hints like "try again in 1.898s" or
|
||||
// "Try again in 35 seconds" using regex
|
||||
let re = rate_limit_regex();
|
||||
if let Some(message) = &err.message
|
||||
&& let Some(captures) = re.captures(message)
|
||||
@@ -950,9 +963,9 @@ fn try_parse_retry_after(err: &Error) -> Option<Duration> {
|
||||
|
||||
if let (Some(value), Some(unit)) = (seconds, unit) {
|
||||
let value = value.as_str().parse::<f64>().ok()?;
|
||||
let unit = unit.as_str();
|
||||
let unit = unit.as_str().to_ascii_lowercase();
|
||||
|
||||
if unit == "s" {
|
||||
if unit == "s" || unit.starts_with("second") {
|
||||
return Some(Duration::from_secs_f64(value));
|
||||
} else if unit == "ms" {
|
||||
return Some(Duration::from_millis(value as u64));
|
||||
@@ -966,6 +979,10 @@ fn is_context_window_error(error: &Error) -> bool {
|
||||
error.code.as_deref() == Some("context_length_exceeded")
|
||||
}
|
||||
|
||||
fn is_quota_exceeded_error(error: &Error) -> bool {
|
||||
error.code.as_deref() == Some("insufficient_quota")
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -1298,6 +1315,41 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn quota_exceeded_error_is_fatal() {
|
||||
let raw_error = r#"{"type":"response.failed","sequence_number":3,"response":{"id":"resp_fatal_quota","object":"response","created_at":1759771626,"status":"failed","background":false,"error":{"code":"insufficient_quota","message":"You exceeded your current quota, please check your plan and billing details. For more information on this error, read the docs: https://platform.openai.com/docs/guides/error-codes/api-errors."},"incomplete_details":null}}"#;
|
||||
|
||||
let sse1 = format!("event: response.failed\ndata: {raw_error}\n\n");
|
||||
let provider = ModelProviderInfo {
|
||||
name: "test".to_string(),
|
||||
base_url: Some("https://test.com".to_string()),
|
||||
env_key: Some("TEST_API_KEY".to_string()),
|
||||
env_key_instructions: None,
|
||||
experimental_bearer_token: None,
|
||||
wire_api: WireApi::Responses,
|
||||
query_params: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
request_max_retries: Some(0),
|
||||
stream_max_retries: Some(0),
|
||||
stream_idle_timeout_ms: Some(1000),
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
|
||||
let otel_event_manager = otel_event_manager();
|
||||
|
||||
let events = collect_events(&[sse1.as_bytes()], provider, otel_event_manager).await;
|
||||
|
||||
assert_eq!(events.len(), 1);
|
||||
|
||||
match &events[0] {
|
||||
Err(err @ CodexErr::QuotaExceeded) => {
|
||||
assert_eq!(err.to_string(), CodexErr::QuotaExceeded.to_string());
|
||||
}
|
||||
other => panic!("unexpected quota exceeded event: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
// ────────────────────────────
|
||||
// Table-driven test from `main`
|
||||
// ────────────────────────────
|
||||
@@ -1427,6 +1479,19 @@ mod tests {
|
||||
assert_eq!(delay, Some(Duration::from_secs_f64(1.898)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_try_parse_retry_after_azure() {
|
||||
let err = Error {
|
||||
r#type: None,
|
||||
message: Some("Rate limit exceeded. Try again in 35 seconds.".to_string()),
|
||||
code: Some("rate_limit_exceeded".to_string()),
|
||||
plan_type: None,
|
||||
resets_at: None,
|
||||
};
|
||||
let delay = try_parse_retry_after(&err);
|
||||
assert_eq!(delay, Some(Duration::from_secs(35)));
|
||||
}
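A companion sketch (not from the patch) exercising the millisecond branch of the retry-after parser; the field layout mirrors the existing tests:

    #[test]
    fn test_try_parse_retry_after_milliseconds_sketch() {
        let err = Error {
            r#type: None,
            message: Some("Please try again in 250ms.".to_string()),
            code: Some("rate_limit_exceeded".to_string()),
            plan_type: None,
            resets_at: None,
        };
        let delay = try_parse_retry_after(&err);
        assert_eq!(delay, Some(Duration::from_millis(250)));
    }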
|
||||
|
||||
#[test]
|
||||
fn error_response_deserializes_schema_known_plan_type_and_serializes_back() {
|
||||
use crate::token_data::KnownPlan;
|
||||
|
||||
@@ -6,6 +6,7 @@ use std::sync::atomic::AtomicU64;
|
||||
|
||||
use crate::AuthManager;
|
||||
use crate::client_common::REVIEW_PROMPT;
|
||||
use crate::compact;
|
||||
use crate::features::Feature;
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::mcp::auth::McpAuthStatusEntry;
|
||||
@@ -58,7 +59,7 @@ use crate::client_common::ResponseEvent;
|
||||
use crate::config::Config;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use crate::config::types::ShellEnvironmentPolicy;
|
||||
use crate::conversation_history::ConversationHistory;
|
||||
use crate::context_manager::ContextManager;
|
||||
use crate::environment_context::EnvironmentContext;
|
||||
use crate::error::CodexErr;
|
||||
use crate::error::Result as CodexResult;
|
||||
@@ -66,6 +67,8 @@ use crate::error::Result as CodexResult;
|
||||
use crate::exec::StreamOutput;
|
||||
// Removed: legacy executor wiring replaced by ToolOrchestrator flows.
// Legacy normalize_exec_result is no longer used after the orchestrator migration.
|
||||
use crate::compact::build_compacted_history;
|
||||
use crate::compact::collect_user_messages;
|
||||
use crate::mcp::auth::compute_auth_statuses;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
use crate::model_family::find_family_for_model;
|
||||
@@ -129,10 +132,6 @@ use codex_protocol::user_input::UserInput;
|
||||
use codex_utils_readiness::Readiness;
|
||||
use codex_utils_readiness::ReadinessFlag;
|
||||
|
||||
pub mod compact;
|
||||
use self::compact::build_compacted_history;
|
||||
use self::compact::collect_user_messages;
|
||||
|
||||
/// The high-level interface to the Codex system.
|
||||
/// It operates as a queue pair where you send submissions and receive events.
|
||||
pub struct Codex {
|
||||
@@ -553,7 +552,7 @@ impl Session {
|
||||
None
|
||||
} else {
|
||||
Some(format!(
|
||||
"You can either enable it using the CLI with `--enable {canonical}` or through the config.toml file with `[features].{canonical}`"
|
||||
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
))
|
||||
};
|
||||
post_session_configured_events.push(Event {
|
||||
@@ -945,7 +944,7 @@ impl Session {
|
||||
turn_context: &TurnContext,
|
||||
rollout_items: &[RolloutItem],
|
||||
) -> Vec<ResponseItem> {
|
||||
let mut history = ConversationHistory::new();
|
||||
let mut history = ContextManager::new();
|
||||
for item in rollout_items {
|
||||
match item {
|
||||
RolloutItem::ResponseItem(response_item) => {
|
||||
@@ -968,7 +967,7 @@ impl Session {
|
||||
}
|
||||
|
||||
/// Append ResponseItems to the in-memory conversation history only.
|
||||
async fn record_into_history(&self, items: &[ResponseItem]) {
|
||||
pub(crate) async fn record_into_history(&self, items: &[ResponseItem]) {
|
||||
let mut state = self.state.lock().await;
|
||||
state.record_items(items.iter());
|
||||
}
|
||||
@@ -1020,7 +1019,7 @@ impl Session {
|
||||
items
|
||||
}
|
||||
|
||||
async fn persist_rollout_items(&self, items: &[RolloutItem]) {
|
||||
pub(crate) async fn persist_rollout_items(&self, items: &[RolloutItem]) {
|
||||
let recorder = {
|
||||
let guard = self.services.rollout.lock().await;
|
||||
guard.clone()
|
||||
@@ -1032,12 +1031,12 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn clone_history(&self) -> ConversationHistory {
|
||||
pub(crate) async fn clone_history(&self) -> ContextManager {
|
||||
let state = self.state.lock().await;
|
||||
state.clone_history()
|
||||
}
|
||||
|
||||
async fn update_token_usage_info(
|
||||
pub(crate) async fn update_token_usage_info(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
token_usage: Option<&TokenUsage>,
|
||||
@@ -1054,7 +1053,7 @@ impl Session {
|
||||
self.send_token_count_event(turn_context).await;
|
||||
}
|
||||
|
||||
async fn update_rate_limits(
|
||||
pub(crate) async fn update_rate_limits(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
new_rate_limits: RateLimitSnapshot,
|
||||
@@ -1075,7 +1074,7 @@ impl Session {
|
||||
self.send_event(turn_context, event).await;
|
||||
}
|
||||
|
||||
async fn set_total_tokens_full(&self, turn_context: &TurnContext) {
|
||||
pub(crate) async fn set_total_tokens_full(&self, turn_context: &TurnContext) {
|
||||
let context_window = turn_context.client.get_model_context_window();
|
||||
if let Some(context_window) = context_window {
|
||||
{
|
||||
@@ -1118,7 +1117,11 @@ impl Session {
|
||||
self.send_event(turn_context, event).await;
|
||||
}
|
||||
|
||||
async fn notify_stream_error(&self, turn_context: &TurnContext, message: impl Into<String>) {
|
||||
pub(crate) async fn notify_stream_error(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
message: impl Into<String>,
|
||||
) {
|
||||
let event = EventMsg::StreamError(StreamErrorEvent {
|
||||
message: message.into(),
|
||||
});
|
||||
@@ -1643,8 +1646,7 @@ async fn spawn_review_thread(
|
||||
let mut review_features = config.features.clone();
|
||||
review_features
|
||||
.disable(crate::features::Feature::WebSearchRequest)
|
||||
.disable(crate::features::Feature::ViewImageTool)
|
||||
.disable(crate::features::Feature::StreamableShell);
|
||||
.disable(crate::features::Feature::ViewImageTool);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_family: &review_model_family,
|
||||
features: &review_features,
|
||||
@@ -1773,19 +1775,14 @@ pub(crate) async fn run_task(
|
||||
sess.clone_history().await.get_history_for_prompt()
|
||||
};
|
||||
|
||||
let turn_input_messages: Vec<String> = turn_input
|
||||
let turn_input_messages = turn_input
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
ResponseItem::Message { content, .. } => Some(content),
|
||||
.filter_map(|item| match parse_turn_item(item) {
|
||||
Some(TurnItem::UserMessage(user_message)) => Some(user_message),
|
||||
_ => None,
|
||||
})
|
||||
.flat_map(|content| {
|
||||
content.iter().filter_map(|item| match item {
|
||||
ContentItem::OutputText { text } => Some(text.clone()),
|
||||
_ => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
.map(|user_message| user_message.message())
|
||||
.collect::<Vec<String>>();
|
||||
match run_turn(
|
||||
Arc::clone(&sess),
|
||||
Arc::clone(&turn_context),
|
||||
@@ -1933,6 +1930,8 @@ async fn run_turn(
|
||||
return Err(CodexErr::UsageLimitReached(e));
|
||||
}
|
||||
Err(CodexErr::UsageNotIncluded) => return Err(CodexErr::UsageNotIncluded),
|
||||
Err(e @ CodexErr::QuotaExceeded) => return Err(e),
|
||||
Err(e @ CodexErr::RefreshTokenFailed(_)) => return Err(e),
|
||||
Err(e) => {
|
||||
// Use the configured provider-specific stream retry budget.
|
||||
let max_retries = turn_context.client.get_provider().stream_max_retries();
|
||||
@@ -1951,7 +1950,7 @@ async fn run_turn(
|
||||
// at a seemingly frozen screen.
|
||||
sess.notify_stream_error(
|
||||
&turn_context,
|
||||
format!("Re-connecting... {retries}/{max_retries}"),
|
||||
format!("Reconnecting... {retries}/{max_retries}"),
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -2839,7 +2838,7 @@ mod tests {
|
||||
turn_context: &TurnContext,
|
||||
) -> (Vec<RolloutItem>, Vec<ResponseItem>) {
|
||||
let mut rollout_items = Vec::new();
|
||||
let mut live_history = ConversationHistory::new();
|
||||
let mut live_history = ContextManager::new();
|
||||
|
||||
let initial_context = session.build_initial_context(turn_context);
|
||||
for item in &initial_context {
|
||||
|
||||
@@ -158,6 +158,11 @@ async fn forward_events(
|
||||
) {
|
||||
while let Ok(event) = codex.next_event().await {
|
||||
match event {
|
||||
// ignore all legacy delta events
|
||||
Event {
|
||||
id: _,
|
||||
msg: EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_),
|
||||
} => continue,
|
||||
Event {
|
||||
id: _,
|
||||
msg: EventMsg::SessionConfigured(_),
|
||||
|
||||
@@ -1,4 +1,38 @@
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
|
||||
use crate::bash::parse_shell_lc_plain_commands;
|
||||
use crate::is_safe_command::is_known_safe_command;
|
||||
|
||||
pub fn requires_initial_appoval(
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
command: &[String],
|
||||
with_escalated_permissions: bool,
|
||||
) -> bool {
|
||||
if is_known_safe_command(command) {
|
||||
return false;
|
||||
}
|
||||
match policy {
|
||||
AskForApproval::Never | AskForApproval::OnFailure => false,
|
||||
AskForApproval::OnRequest => {
|
||||
// In DangerFullAccess, only prompt if the command looks dangerous.
|
||||
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
|
||||
return command_might_be_dangerous(command);
|
||||
}
|
||||
|
||||
// In restricted sandboxes (ReadOnly/WorkspaceWrite), do not prompt for
|
||||
// non‑escalated, non‑dangerous commands — let the sandbox enforce
|
||||
// restrictions (e.g., block network/write) without a user prompt.
|
||||
let wants_escalation: bool = with_escalated_permissions;
|
||||
if wants_escalation {
|
||||
return true;
|
||||
}
|
||||
command_might_be_dangerous(command)
|
||||
}
|
||||
AskForApproval::UnlessTrusted => !is_known_safe_command(command),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn command_might_be_dangerous(command: &[String]) -> bool {
|
||||
if is_dangerous_to_call_with_exec(command) {
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::Session;
|
||||
use super::TurnContext;
|
||||
use super::get_last_assistant_message_from_turn;
|
||||
use crate::Prompt;
|
||||
use crate::client_common::ResponseEvent;
|
||||
use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::codex::get_last_assistant_message_from_turn;
|
||||
use crate::error::CodexErr;
|
||||
use crate::error::Result as CodexResult;
|
||||
use crate::protocol::AgentMessageEvent;
|
||||
@@ -16,7 +16,6 @@ use crate::protocol::TurnContextItem;
|
||||
use crate::protocol::WarningEvent;
|
||||
use crate::truncate::truncate_middle;
|
||||
use crate::util::backoff;
|
||||
use askama::Template;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
@@ -26,16 +25,9 @@ use codex_protocol::user_input::UserInput;
|
||||
use futures::prelude::*;
|
||||
use tracing::error;
|
||||
|
||||
pub const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
|
||||
pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
|
||||
const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "compact/history_bridge.md", escape = "none")]
|
||||
struct HistoryBridgeTemplate<'a> {
|
||||
user_messages_text: &'a str,
|
||||
summary_text: &'a str,
|
||||
}
|
||||
|
||||
pub(crate) async fn run_inline_auto_compact_task(
|
||||
sess: Arc<Session>,
|
||||
turn_context: Arc<TurnContext>,
|
||||
@@ -150,6 +142,7 @@ async fn run_compact_task_inner(
|
||||
let history_snapshot = sess.clone_history().await.get_history();
|
||||
let summary_text = get_last_assistant_message_from_turn(&history_snapshot).unwrap_or_default();
|
||||
let user_messages = collect_user_messages(&history_snapshot);
|
||||
|
||||
let initial_context = sess.build_initial_context(turn_context.as_ref());
|
||||
let mut new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
|
||||
let ghost_snapshots: Vec<ResponseItem> = history_snapshot
|
||||
@@ -171,7 +164,7 @@ async fn run_compact_task_inner(
|
||||
sess.send_event(&turn_context, event).await;
|
||||
|
||||
let warning = EventMsg::Warning(WarningEvent {
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start new a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.".to_string(),
|
||||
});
|
||||
sess.send_event(&turn_context, warning).await;
|
||||
}
|
||||
@@ -224,33 +217,47 @@ fn build_compacted_history_with_limit(
|
||||
summary_text: &str,
|
||||
max_bytes: usize,
|
||||
) -> Vec<ResponseItem> {
|
||||
let mut user_messages_text = if user_messages.is_empty() {
|
||||
"(none)".to_string()
|
||||
} else {
|
||||
user_messages.join("\n\n")
|
||||
};
|
||||
// Truncate the concatenated prior user messages so the bridge message
|
||||
// stays well under the context window (approx. 4 bytes/token).
|
||||
if user_messages_text.len() > max_bytes {
|
||||
user_messages_text = truncate_middle(&user_messages_text, max_bytes).0;
|
||||
let mut selected_messages: Vec<String> = Vec::new();
|
||||
if max_bytes > 0 {
|
||||
let mut remaining = max_bytes;
|
||||
for message in user_messages.iter().rev() {
|
||||
if remaining == 0 {
|
||||
break;
|
||||
}
|
||||
if message.len() <= remaining {
|
||||
selected_messages.push(message.clone());
|
||||
remaining = remaining.saturating_sub(message.len());
|
||||
} else {
|
||||
let (truncated, _) = truncate_middle(message, remaining);
|
||||
selected_messages.push(truncated);
|
||||
break;
|
||||
}
|
||||
}
|
||||
selected_messages.reverse();
|
||||
}
|
||||
|
||||
for message in &selected_messages {
|
||||
history.push(ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: message.clone(),
|
||||
}],
|
||||
});
|
||||
}
|
||||
|
||||
let summary_text = if summary_text.is_empty() {
|
||||
"(no summary available)".to_string()
|
||||
} else {
|
||||
summary_text.to_string()
|
||||
};
|
||||
let Ok(bridge) = HistoryBridgeTemplate {
|
||||
user_messages_text: &user_messages_text,
|
||||
summary_text: &summary_text,
|
||||
}
|
||||
.render() else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
history.push(ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText { text: bridge }],
|
||||
content: vec![ContentItem::InputText { text: summary_text }],
|
||||
});
|
||||
|
||||
history
|
||||
}
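A worked sketch of the byte-budget walk above (numbers are illustrative): with max_bytes = 100 and prior user messages of lengths [60, 30, 30] in oldest-to-newest order, the loop walks newest-first, keeps both 30-byte messages whole (60 bytes used), middle-truncates the oldest message to the remaining 40 bytes via truncate_middle, then reverses the selection so the kept messages land back in their original order ahead of the summary message.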
|
||||
|
||||
@@ -390,30 +397,55 @@ mod tests {
|
||||
"SUMMARY",
|
||||
max_bytes,
|
||||
);
|
||||
assert_eq!(history.len(), 2);
|
||||
|
||||
// Expect exactly one bridge message added to history (plus any initial context we provided, which is none).
|
||||
assert_eq!(history.len(), 1);
|
||||
let truncated_message = &history[0];
|
||||
let summary_message = &history[1];
|
||||
|
||||
// Extract the text content of the bridge message.
|
||||
let bridge_text = match &history[0] {
|
||||
let truncated_text = match truncated_message {
|
||||
ResponseItem::Message { role, content, .. } if role == "user" => {
|
||||
content_items_to_text(content).unwrap_or_default()
|
||||
}
|
||||
other => panic!("unexpected item in history: {other:?}"),
|
||||
};
|
||||
|
||||
// The bridge should contain the truncation marker and not the full original payload.
|
||||
assert!(
|
||||
bridge_text.contains("tokens truncated"),
|
||||
"expected truncation marker in bridge message"
|
||||
truncated_text.contains("tokens truncated"),
|
||||
"expected truncation marker in truncated user message"
|
||||
);
|
||||
assert!(
|
||||
!bridge_text.contains(&big),
|
||||
"bridge should not include the full oversized user text"
|
||||
!truncated_text.contains(&big),
|
||||
"truncated user message should not include the full oversized user text"
|
||||
);
|
||||
|
||||
let summary_text = match summary_message {
|
||||
ResponseItem::Message { role, content, .. } if role == "user" => {
|
||||
content_items_to_text(content).unwrap_or_default()
|
||||
}
|
||||
other => panic!("unexpected item in history: {other:?}"),
|
||||
};
|
||||
assert_eq!(summary_text, "SUMMARY");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_compacted_history_appends_summary_message() {
|
||||
let initial_context: Vec<ResponseItem> = Vec::new();
|
||||
let user_messages = vec!["first user message".to_string()];
|
||||
let summary_text = "summary text";
|
||||
|
||||
let history = build_compacted_history(initial_context, &user_messages, summary_text);
|
||||
assert!(
|
||||
bridge_text.contains("SUMMARY"),
|
||||
"bridge should include the provided summary text"
|
||||
!history.is_empty(),
|
||||
"expected compacted history to include summary"
|
||||
);
|
||||
|
||||
let last = history.last().expect("history should have a summary entry");
|
||||
let summary = match last {
|
||||
ResponseItem::Message { role, content, .. } if role == "user" => {
|
||||
content_items_to_text(content).unwrap_or_default()
|
||||
}
|
||||
other => panic!("expected summary message, found {other:?}"),
|
||||
};
|
||||
assert_eq!(summary, summary_text);
|
||||
}
|
||||
}
|
||||
@@ -23,6 +23,10 @@ pub enum ConfigEdit {
|
||||
},
|
||||
/// Toggle the acknowledgement flag under `[notice]`.
|
||||
SetNoticeHideFullAccessWarning(bool),
|
||||
/// Toggle the Windows world-writable directories warning acknowledgement flag.
|
||||
SetNoticeHideWorldWritableWarning(bool),
|
||||
/// Toggle the rate limit model nudge acknowledgement flag.
|
||||
SetNoticeHideRateLimitModelNudge(bool),
|
||||
/// Toggle the Windows onboarding acknowledgement flag.
|
||||
SetWindowsWslSetupAcknowledged(bool),
|
||||
/// Replace the entire `[mcp_servers]` table.
|
||||
@@ -239,6 +243,16 @@ impl ConfigDocument {
|
||||
&[Notice::TABLE_KEY, "hide_full_access_warning"],
|
||||
value(*acknowledged),
|
||||
)),
|
||||
ConfigEdit::SetNoticeHideWorldWritableWarning(acknowledged) => Ok(self.write_value(
|
||||
Scope::Global,
|
||||
&[Notice::TABLE_KEY, "hide_world_writable_warning"],
|
||||
value(*acknowledged),
|
||||
)),
|
||||
ConfigEdit::SetNoticeHideRateLimitModelNudge(acknowledged) => Ok(self.write_value(
|
||||
Scope::Global,
|
||||
&[Notice::TABLE_KEY, "hide_rate_limit_model_nudge"],
|
||||
value(*acknowledged),
|
||||
)),
|
||||
ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged) => Ok(self.write_value(
|
||||
Scope::Global,
|
||||
&["windows_wsl_setup_acknowledged"],
|
||||
@@ -473,6 +487,18 @@ impl ConfigEditsBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_hide_world_writable_warning(mut self, acknowledged: bool) -> Self {
|
||||
self.edits
|
||||
.push(ConfigEdit::SetNoticeHideWorldWritableWarning(acknowledged));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_hide_rate_limit_model_nudge(mut self, acknowledged: bool) -> Self {
|
||||
self.edits
|
||||
.push(ConfigEdit::SetNoticeHideRateLimitModelNudge(acknowledged));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_windows_wsl_setup_acknowledged(mut self, acknowledged: bool) -> Self {
|
||||
self.edits
|
||||
.push(ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged));
|
||||
@@ -720,6 +746,34 @@ hide_full_access_warning = true
|
||||
assert_eq!(contents, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn blocking_set_hide_rate_limit_model_nudge_preserves_table() {
|
||||
let tmp = tempdir().expect("tmpdir");
|
||||
let codex_home = tmp.path();
|
||||
std::fs::write(
|
||||
codex_home.join(CONFIG_TOML_FILE),
|
||||
r#"[notice]
|
||||
existing = "value"
|
||||
"#,
|
||||
)
|
||||
.expect("seed");
|
||||
|
||||
apply_blocking(
|
||||
codex_home,
|
||||
None,
|
||||
&[ConfigEdit::SetNoticeHideRateLimitModelNudge(true)],
|
||||
)
|
||||
.expect("persist");
|
||||
|
||||
let contents =
|
||||
std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
|
||||
let expected = r#"[notice]
|
||||
existing = "value"
|
||||
hide_rate_limit_model_nudge = true
|
||||
"#;
|
||||
assert_eq!(contents, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn blocking_replace_mcp_servers_round_trips() {
|
||||
let tmp = tempdir().expect("tmpdir");
|
||||
|
||||
@@ -241,8 +241,6 @@ pub struct Config {
|
||||
/// When `true`, run a model-based assessment for commands denied by the sandbox.
|
||||
pub experimental_sandbox_command_assessment: bool,
|
||||
|
||||
pub use_experimental_streamable_shell_tool: bool,
|
||||
|
||||
/// If set to `true`, use only the experimental unified exec tool.
|
||||
pub use_experimental_unified_exec_tool: bool,
|
||||
|
||||
@@ -655,7 +653,6 @@ pub struct ConfigToml {
|
||||
/// Legacy, now use features
|
||||
pub experimental_instructions_file: Option<PathBuf>,
|
||||
pub experimental_compact_prompt_file: Option<PathBuf>,
|
||||
pub experimental_use_exec_command_tool: Option<bool>,
|
||||
pub experimental_use_unified_exec_tool: Option<bool>,
|
||||
pub experimental_use_rmcp_client: Option<bool>,
|
||||
pub experimental_use_freeform_apply_patch: Option<bool>,
|
||||
@@ -999,7 +996,6 @@ impl Config {
|
||||
|
||||
let include_apply_patch_tool_flag = features.enabled(Feature::ApplyPatchFreeform);
|
||||
let tools_web_search_request = features.enabled(Feature::WebSearchRequest);
|
||||
let use_experimental_streamable_shell_tool = features.enabled(Feature::StreamableShell);
|
||||
let use_experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
|
||||
let use_experimental_use_rmcp_client = features.enabled(Feature::RmcpClient);
|
||||
let experimental_sandbox_command_assessment =
|
||||
@@ -1156,7 +1152,6 @@ impl Config {
|
||||
include_apply_patch_tool: include_apply_patch_tool_flag,
|
||||
tools_web_search_request,
|
||||
experimental_sandbox_command_assessment,
|
||||
use_experimental_streamable_shell_tool,
|
||||
use_experimental_unified_exec_tool,
|
||||
use_experimental_use_rmcp_client,
|
||||
features,
|
||||
@@ -1715,7 +1710,6 @@ trust_level = "trusted"
|
||||
fn legacy_toggles_map_to_features() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let cfg = ConfigToml {
|
||||
experimental_use_exec_command_tool: Some(true),
|
||||
experimental_use_unified_exec_tool: Some(true),
|
||||
experimental_use_rmcp_client: Some(true),
|
||||
experimental_use_freeform_apply_patch: Some(true),
|
||||
@@ -1729,12 +1723,11 @@ trust_level = "trusted"
|
||||
)?;
|
||||
|
||||
assert!(config.features.enabled(Feature::ApplyPatchFreeform));
|
||||
assert!(config.features.enabled(Feature::StreamableShell));
|
||||
assert!(config.features.enabled(Feature::UnifiedExec));
|
||||
assert!(config.features.enabled(Feature::RmcpClient));
|
||||
|
||||
assert!(config.include_apply_patch_tool);
|
||||
assert!(config.use_experimental_streamable_shell_tool);
|
||||
|
||||
assert!(config.use_experimental_unified_exec_tool);
|
||||
assert!(config.use_experimental_use_rmcp_client);
|
||||
|
||||
@@ -2902,7 +2895,6 @@ model_verbosity = "high"
|
||||
include_apply_patch_tool: false,
|
||||
tools_web_search_request: false,
|
||||
experimental_sandbox_command_assessment: false,
|
||||
use_experimental_streamable_shell_tool: false,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
use_experimental_use_rmcp_client: false,
|
||||
features: Features::with_defaults(),
|
||||
@@ -2974,7 +2966,6 @@ model_verbosity = "high"
|
||||
include_apply_patch_tool: false,
|
||||
tools_web_search_request: false,
|
||||
experimental_sandbox_command_assessment: false,
|
||||
use_experimental_streamable_shell_tool: false,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
use_experimental_use_rmcp_client: false,
|
||||
features: Features::with_defaults(),
|
||||
@@ -3061,7 +3052,6 @@ model_verbosity = "high"
|
||||
include_apply_patch_tool: false,
|
||||
tools_web_search_request: false,
|
||||
experimental_sandbox_command_assessment: false,
|
||||
use_experimental_streamable_shell_tool: false,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
use_experimental_use_rmcp_client: false,
|
||||
features: Features::with_defaults(),
|
||||
@@ -3134,7 +3124,6 @@ model_verbosity = "high"
|
||||
include_apply_patch_tool: false,
|
||||
tools_web_search_request: false,
|
||||
experimental_sandbox_command_assessment: false,
|
||||
use_experimental_streamable_shell_tool: false,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
use_experimental_use_rmcp_client: false,
|
||||
features: Features::with_defaults(),
|
||||
|
||||
@@ -25,7 +25,6 @@ pub struct ConfigProfile {
|
||||
pub experimental_compact_prompt_file: Option<PathBuf>,
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
pub experimental_use_unified_exec_tool: Option<bool>,
|
||||
pub experimental_use_exec_command_tool: Option<bool>,
|
||||
pub experimental_use_rmcp_client: Option<bool>,
|
||||
pub experimental_use_freeform_apply_patch: Option<bool>,
|
||||
pub experimental_sandbox_command_assessment: Option<bool>,
|
||||
|
||||
@@ -358,6 +358,10 @@ pub struct Tui {
|
||||
pub struct Notice {
|
||||
/// Tracks whether the user has acknowledged the full access warning prompt.
|
||||
pub hide_full_access_warning: Option<bool>,
|
||||
/// Tracks whether the user has acknowledged the Windows world-writable directories warning.
|
||||
pub hide_world_writable_warning: Option<bool>,
|
||||
/// Tracks whether the user opted out of the rate limit model switch reminder.
|
||||
pub hide_rate_limit_model_nudge: Option<bool>,
|
||||
}
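For reference, a hedged example of how these acknowledgement flags appear in config.toml (key names come from this struct and the `[notice]` edits above; the values are illustrative):

    [notice]
    hide_full_access_warning = true
    hide_world_writable_warning = true
    hide_rate_limit_model_nudge = false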
|
||||
|
||||
impl Notice {
|
||||
|
||||
codex-rs/core/src/context_manager/history.rs (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
use codex_protocol::models::FunctionCallOutputPayload;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::TokenUsage;
|
||||
use codex_protocol::protocol::TokenUsageInfo;
|
||||
use std::ops::Deref;
|
||||
|
||||
use crate::context_manager::normalize;
|
||||
use crate::context_manager::truncate::format_output_for_model_body;
|
||||
use crate::context_manager::truncate::globally_truncate_function_output_items;
|
||||
|
||||
/// Transcript of conversation history
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub(crate) struct ContextManager {
|
||||
/// The oldest items are at the beginning of the vector.
|
||||
items: Vec<ResponseItem>,
|
||||
token_info: Option<TokenUsageInfo>,
|
||||
}
|
||||
|
||||
impl ContextManager {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
items: Vec::new(),
|
||||
token_info: TokenUsageInfo::new_or_append(&None, &None, None),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn token_info(&self) -> Option<TokenUsageInfo> {
|
||||
self.token_info.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn set_token_usage_full(&mut self, context_window: i64) {
|
||||
match &mut self.token_info {
|
||||
Some(info) => info.fill_to_context_window(context_window),
|
||||
None => {
|
||||
self.token_info = Some(TokenUsageInfo::full_context_window(context_window));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `items` is ordered from oldest to newest.
|
||||
pub(crate) fn record_items<I>(&mut self, items: I)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: std::ops::Deref<Target = ResponseItem>,
|
||||
{
|
||||
for item in items {
|
||||
let item_ref = item.deref();
|
||||
let is_ghost_snapshot = matches!(item_ref, ResponseItem::GhostSnapshot { .. });
|
||||
if !is_api_message(item_ref) && !is_ghost_snapshot {
|
||||
continue;
|
||||
}
|
||||
|
||||
let processed = Self::process_item(&item);
|
||||
self.items.push(processed);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_history(&mut self) -> Vec<ResponseItem> {
|
||||
self.normalize_history();
|
||||
self.contents()
|
||||
}
|
||||
|
||||
// Returns the history prepared for sending to the model,
// with extra response items filtered out and ghost commit snapshots removed.
|
||||
pub(crate) fn get_history_for_prompt(&mut self) -> Vec<ResponseItem> {
|
||||
let mut history = self.get_history();
|
||||
Self::remove_ghost_snapshots(&mut history);
|
||||
history
|
||||
}
|
||||
|
||||
pub(crate) fn remove_first_item(&mut self) {
|
||||
if !self.items.is_empty() {
|
||||
// Remove the oldest item (front of the list). Items are ordered from
|
||||
// oldest → newest, so index 0 is the first entry recorded.
|
||||
let removed = self.items.remove(0);
|
||||
// If the removed item participates in a call/output pair, also remove
|
||||
// its corresponding counterpart to keep the invariants intact without
|
||||
// running a full normalization pass.
|
||||
normalize::remove_corresponding_for(&mut self.items, &removed);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn replace(&mut self, items: Vec<ResponseItem>) {
|
||||
self.items = items;
|
||||
}
|
||||
|
||||
pub(crate) fn update_token_info(
|
||||
&mut self,
|
||||
usage: &TokenUsage,
|
||||
model_context_window: Option<i64>,
|
||||
) {
|
||||
self.token_info = TokenUsageInfo::new_or_append(
|
||||
&self.token_info,
|
||||
&Some(usage.clone()),
|
||||
model_context_window,
|
||||
);
|
||||
}
|
||||
|
||||
/// This function enforces a couple of invariants on the in-memory history:
|
||||
/// 1. every call (function/custom) has a corresponding output entry
|
||||
/// 2. every output has a corresponding call entry
|
||||
fn normalize_history(&mut self) {
|
||||
// all function/tool calls must have a corresponding output
|
||||
normalize::ensure_call_outputs_present(&mut self.items);
|
||||
|
||||
// all outputs must have a corresponding function/tool call
|
||||
normalize::remove_orphan_outputs(&mut self.items);
|
||||
}
|
||||
|
||||
/// Returns a clone of the contents in the transcript.
|
||||
fn contents(&self) -> Vec<ResponseItem> {
|
||||
self.items.clone()
|
||||
}
|
||||
|
||||
fn remove_ghost_snapshots(items: &mut Vec<ResponseItem>) {
|
||||
items.retain(|item| !matches!(item, ResponseItem::GhostSnapshot { .. }));
|
||||
}
|
||||
|
||||
fn process_item(item: &ResponseItem) -> ResponseItem {
|
||||
match item {
|
||||
ResponseItem::FunctionCallOutput { call_id, output } => {
|
||||
let truncated = format_output_for_model_body(output.content.as_str());
|
||||
let truncated_items = output
|
||||
.content_items
|
||||
.as_ref()
|
||||
.map(|items| globally_truncate_function_output_items(items));
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: truncated,
|
||||
content_items: truncated_items,
|
||||
success: output.success,
|
||||
},
|
||||
}
|
||||
}
|
||||
ResponseItem::CustomToolCallOutput { call_id, output } => {
|
||||
let truncated = format_output_for_model_body(output);
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: truncated,
|
||||
}
|
||||
}
|
||||
ResponseItem::Message { .. }
|
||||
| ResponseItem::Reasoning { .. }
|
||||
| ResponseItem::LocalShellCall { .. }
|
||||
| ResponseItem::FunctionCall { .. }
|
||||
| ResponseItem::WebSearchCall { .. }
|
||||
| ResponseItem::CustomToolCall { .. }
|
||||
| ResponseItem::GhostSnapshot { .. }
|
||||
| ResponseItem::Other => item.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// API messages include every non-system item (user/assistant messages, reasoning,
|
||||
/// tool calls, tool outputs, shell calls, and web-search calls).
|
||||
fn is_api_message(message: &ResponseItem) -> bool {
|
||||
match message {
|
||||
ResponseItem::Message { role, .. } => role.as_str() != "system",
|
||||
ResponseItem::FunctionCallOutput { .. }
|
||||
| ResponseItem::FunctionCall { .. }
|
||||
| ResponseItem::CustomToolCall { .. }
|
||||
| ResponseItem::CustomToolCallOutput { .. }
|
||||
| ResponseItem::LocalShellCall { .. }
|
||||
| ResponseItem::Reasoning { .. }
|
||||
| ResponseItem::WebSearchCall { .. } => true,
|
||||
ResponseItem::GhostSnapshot { .. } => false,
|
||||
ResponseItem::Other => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[path = "history_tests.rs"]
|
||||
mod tests;
|
||||
codex-rs/core/src/context_manager/history_tests.rs (new file, 841 lines)
@@ -0,0 +1,841 @@
|
||||
use super::*;
|
||||
use crate::context_manager::truncate;
|
||||
use codex_git::GhostCommit;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::FunctionCallOutputContentItem;
|
||||
use codex_protocol::models::FunctionCallOutputPayload;
|
||||
use codex_protocol::models::LocalShellAction;
|
||||
use codex_protocol::models::LocalShellExecAction;
|
||||
use codex_protocol::models::LocalShellStatus;
|
||||
use codex_protocol::models::ReasoningItemContent;
|
||||
use codex_protocol::models::ReasoningItemReasoningSummary;
|
||||
use pretty_assertions::assert_eq;
|
||||
use regex_lite::Regex;
|
||||
|
||||
fn assistant_msg(text: &str) -> ResponseItem {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: text.to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
fn create_history_with_items(items: Vec<ResponseItem>) -> ContextManager {
|
||||
let mut h = ContextManager::new();
|
||||
h.record_items(items.iter());
|
||||
h
|
||||
}
|
||||
|
||||
fn user_msg(text: &str) -> ResponseItem {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: text.to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
fn reasoning_msg(text: &str) -> ResponseItem {
|
||||
ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: vec![ReasoningItemReasoningSummary::SummaryText {
|
||||
text: "summary".to_string(),
|
||||
}],
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: text.to_string(),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filters_non_api_messages() {
|
||||
let mut h = ContextManager::default();
|
||||
// System messages are not API messages; Other is ignored.
|
||||
let system = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "system".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "ignored".to_string(),
|
||||
}],
|
||||
};
|
||||
let reasoning = reasoning_msg("thinking...");
|
||||
h.record_items([&system, &reasoning, &ResponseItem::Other]);
|
||||
|
||||
// User and assistant should be retained.
|
||||
let u = user_msg("hi");
|
||||
let a = assistant_msg("hello");
|
||||
h.record_items([&u, &a]);
|
||||
|
||||
let items = h.contents();
|
||||
assert_eq!(
|
||||
items,
|
||||
vec![
|
||||
ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: vec![ReasoningItemReasoningSummary::SummaryText {
|
||||
text: "summary".to_string(),
|
||||
}],
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: "thinking...".to_string(),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
},
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "hi".to_string()
|
||||
}]
|
||||
},
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "hello".to_string()
|
||||
}]
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_history_for_prompt_drops_ghost_commits() {
|
||||
let items = vec![ResponseItem::GhostSnapshot {
|
||||
ghost_commit: GhostCommit::new("ghost-1".to_string(), None, Vec::new(), Vec::new()),
|
||||
}];
|
||||
let mut history = create_history_with_items(items);
|
||||
let filtered = history.get_history_for_prompt();
|
||||
assert_eq!(filtered, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_first_item_removes_matching_output_for_function_call() {
|
||||
let items = vec![
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "do_it".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "call-1".to_string(),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.remove_first_item();
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_first_item_removes_matching_call_for_output() {
|
||||
let items = vec![
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-2".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "do_it".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "call-2".to_string(),
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.remove_first_item();
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_first_item_handles_local_shell_pair() {
|
||||
let items = vec![
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("call-3".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-3".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.remove_first_item();
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_first_item_handles_custom_tool_pair() {
|
||||
let items = vec![
|
||||
ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "tool-1".to_string(),
|
||||
name: "my_tool".to_string(),
|
||||
input: "{}".to_string(),
|
||||
},
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: "tool-1".to_string(),
|
||||
output: "ok".to_string(),
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.remove_first_item();
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalization_retains_local_shell_outputs() {
|
||||
let items = vec![
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("shell-1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "shell-1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
let mut history = create_history_with_items(items.clone());
|
||||
let normalized = history.get_history();
|
||||
assert_eq!(normalized, items);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_items_truncates_function_call_output_content() {
|
||||
let mut history = ContextManager::new();
|
||||
let long_line = "a very long line to trigger truncation\n";
|
||||
let long_output = long_line.repeat(2_500);
|
||||
let item = ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-100".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: long_output.clone(),
|
||||
success: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
};
|
||||
|
||||
history.record_items([&item]);
|
||||
|
||||
assert_eq!(history.items.len(), 1);
|
||||
match &history.items[0] {
|
||||
ResponseItem::FunctionCallOutput { output, .. } => {
|
||||
assert_ne!(output.content, long_output);
|
||||
assert!(
|
||||
output.content.starts_with("Total output lines:"),
|
||||
"expected truncated summary, got {}",
|
||||
output.content
|
||||
);
|
||||
}
|
||||
other => panic!("unexpected history item: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_items_truncates_custom_tool_call_output_content() {
|
||||
let mut history = ContextManager::new();
|
||||
let line = "custom output that is very long\n";
|
||||
let long_output = line.repeat(2_500);
|
||||
let item = ResponseItem::CustomToolCallOutput {
|
||||
call_id: "tool-200".to_string(),
|
||||
output: long_output.clone(),
|
||||
};
|
||||
|
||||
history.record_items([&item]);
|
||||
|
||||
assert_eq!(history.items.len(), 1);
|
||||
match &history.items[0] {
|
||||
ResponseItem::CustomToolCallOutput { output, .. } => {
|
||||
assert_ne!(output, &long_output);
|
||||
assert!(
|
||||
output.starts_with("Total output lines:"),
|
||||
"expected truncated summary, got {output}"
|
||||
);
|
||||
}
|
||||
other => panic!("unexpected history item: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_truncated_message_matches(message: &str, line: &str, total_lines: usize) {
|
||||
let pattern = truncated_message_pattern(line, total_lines);
|
||||
let regex = Regex::new(&pattern).unwrap_or_else(|err| {
|
||||
panic!("failed to compile regex {pattern}: {err}");
|
||||
});
|
||||
let captures = regex
|
||||
.captures(message)
|
||||
.unwrap_or_else(|| panic!("message failed to match pattern {pattern}: {message}"));
|
||||
let body = captures
|
||||
.name("body")
|
||||
.expect("missing body capture")
|
||||
.as_str();
|
||||
assert!(
|
||||
body.len() <= truncate::MODEL_FORMAT_MAX_BYTES,
|
||||
"body exceeds byte limit: {} bytes",
|
||||
body.len()
|
||||
);
|
||||
}
|
||||
|
||||
fn truncated_message_pattern(line: &str, total_lines: usize) -> String {
|
||||
let head_take = truncate::MODEL_FORMAT_HEAD_LINES.min(total_lines);
|
||||
let tail_take = truncate::MODEL_FORMAT_TAIL_LINES.min(total_lines.saturating_sub(head_take));
|
||||
let omitted = total_lines.saturating_sub(head_take + tail_take);
|
||||
let escaped_line = regex_lite::escape(line);
|
||||
if omitted == 0 {
|
||||
return format!(
|
||||
r"(?s)^Total output lines: {total_lines}\n\n(?P<body>{escaped_line}.*\n\[\.{{3}} output truncated to fit {max_bytes} bytes \.{{3}}]\n\n.*)$",
|
||||
max_bytes = truncate::MODEL_FORMAT_MAX_BYTES,
|
||||
);
|
||||
}
|
||||
format!(
|
||||
r"(?s)^Total output lines: {total_lines}\n\n(?P<body>{escaped_line}.*\n\[\.{{3}} omitted {omitted} of {total_lines} lines \.{{3}}]\n\n.*)$",
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_truncates_large_error() {
|
||||
let line = "very long execution error line that should trigger truncation\n";
|
||||
let large_error = line.repeat(2_500); // way beyond both byte and line limits
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&large_error);
|
||||
|
||||
let total_lines = large_error.lines().count();
|
||||
assert_truncated_message_matches(&truncated, line, total_lines);
|
||||
assert_ne!(truncated, large_error);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_marks_byte_truncation_without_omitted_lines() {
|
||||
let long_line = "a".repeat(truncate::MODEL_FORMAT_MAX_BYTES + 50);
|
||||
let truncated = truncate::format_output_for_model_body(&long_line);
|
||||
|
||||
assert_ne!(truncated, long_line);
|
||||
let marker_line = format!(
|
||||
"[... output truncated to fit {} bytes ...]",
|
||||
truncate::MODEL_FORMAT_MAX_BYTES
|
||||
);
|
||||
assert!(
|
||||
truncated.contains(&marker_line),
|
||||
"missing byte truncation marker: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
!truncated.contains("omitted"),
|
||||
"line omission marker should not appear when no lines were dropped: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_returns_original_when_within_limits() {
|
||||
let content = "example output\n".repeat(10);
|
||||
|
||||
assert_eq!(truncate::format_output_for_model_body(&content), content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_reports_omitted_lines_and_keeps_head_and_tail() {
|
||||
let total_lines = truncate::MODEL_FORMAT_MAX_LINES + 100;
|
||||
let content: String = (0..total_lines)
|
||||
.map(|idx| format!("line-{idx}\n"))
|
||||
.collect();
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&content);
|
||||
let omitted = total_lines - truncate::MODEL_FORMAT_MAX_LINES;
|
||||
let expected_marker = format!("[... omitted {omitted} of {total_lines} lines ...]");
|
||||
|
||||
assert!(
|
||||
truncated.contains(&expected_marker),
|
||||
"missing omitted marker: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
truncated.contains("line-0\n"),
|
||||
"expected head line to remain: {truncated}"
|
||||
);
|
||||
|
||||
let last_line = format!("line-{}\n", total_lines - 1);
|
||||
assert!(
|
||||
truncated.contains(&last_line),
|
||||
"expected tail line to remain: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_prefers_line_marker_when_both_limits_exceeded() {
|
||||
let total_lines = truncate::MODEL_FORMAT_MAX_LINES + 42;
|
||||
let long_line = "x".repeat(256);
|
||||
let content: String = (0..total_lines)
|
||||
.map(|idx| format!("line-{idx}-{long_line}\n"))
|
||||
.collect();
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&content);
|
||||
|
||||
assert!(
|
||||
truncated.contains("[... omitted 42 of 298 lines ...]"),
|
||||
"expected omitted marker when line count exceeds limit: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
!truncated.contains("output truncated to fit"),
|
||||
"line omission marker should take precedence over byte marker: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncates_across_multiple_under_limit_texts_and_reports_omitted() {
|
||||
// Arrange: several text items, none exceeding per-item limit, but total exceeds budget.
|
||||
let budget = truncate::MODEL_FORMAT_MAX_BYTES;
|
||||
let t1_len = (budget / 2).saturating_sub(10);
|
||||
let t2_len = (budget / 2).saturating_sub(10);
|
||||
let remaining_after_t1_t2 = budget.saturating_sub(t1_len + t2_len);
|
||||
let t3_len = 50; // gets truncated to remaining_after_t1_t2
|
||||
let t4_len = 5; // omitted
|
||||
let t5_len = 7; // omitted
|
||||
|
||||
let t1 = "a".repeat(t1_len);
|
||||
let t2 = "b".repeat(t2_len);
|
||||
let t3 = "c".repeat(t3_len);
|
||||
let t4 = "d".repeat(t4_len);
|
||||
let t5 = "e".repeat(t5_len);
|
||||
|
||||
let item = ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-omit".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "irrelevant".to_string(),
|
||||
content_items: Some(vec![
|
||||
FunctionCallOutputContentItem::InputText { text: t1 },
|
||||
FunctionCallOutputContentItem::InputText { text: t2 },
|
||||
FunctionCallOutputContentItem::InputImage {
|
||||
image_url: "img:mid".to_string(),
|
||||
},
|
||||
FunctionCallOutputContentItem::InputText { text: t3 },
|
||||
FunctionCallOutputContentItem::InputText { text: t4 },
|
||||
FunctionCallOutputContentItem::InputText { text: t5 },
|
||||
]),
|
||||
success: Some(true),
|
||||
},
|
||||
};
|
||||
|
||||
let mut history = ContextManager::new();
|
||||
history.record_items([&item]);
|
||||
assert_eq!(history.items.len(), 1);
|
||||
let json = serde_json::to_value(&history.items[0]).expect("serialize to json");
|
||||
|
||||
let output = json
|
||||
.get("output")
|
||||
.expect("output field")
|
||||
.as_array()
|
||||
.expect("array output");
|
||||
|
||||
// Expect: t1 (full), t2 (full), image, t3 (truncated), summary mentioning 2 omitted.
|
||||
assert_eq!(output.len(), 5);
|
||||
|
||||
let first = output[0].as_object().expect("first obj");
|
||||
assert_eq!(first.get("type").unwrap(), "input_text");
|
||||
let first_text = first.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(first_text.len(), t1_len);
|
||||
|
||||
let second = output[1].as_object().expect("second obj");
|
||||
assert_eq!(second.get("type").unwrap(), "input_text");
|
||||
let second_text = second.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(second_text.len(), t2_len);
|
||||
|
||||
assert_eq!(
|
||||
output[2],
|
||||
serde_json::json!({"type": "input_image", "image_url": "img:mid"})
|
||||
);
|
||||
|
||||
let fourth = output[3].as_object().expect("fourth obj");
|
||||
assert_eq!(fourth.get("type").unwrap(), "input_text");
|
||||
let fourth_text = fourth.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(fourth_text.len(), remaining_after_t1_t2);
|
||||
|
||||
let summary = output[4].as_object().expect("summary obj");
|
||||
assert_eq!(summary.get("type").unwrap(), "input_text");
|
||||
let summary_text = summary.get("text").unwrap().as_str().unwrap();
|
||||
assert!(summary_text.contains("omitted 2 text items"));
|
||||
}
|
||||
|
||||
// TODO(aibrahim): run CI in release mode.
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_adds_missing_output_for_function_call() {
|
||||
let items = vec![ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "do_it".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "call-x".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(
|
||||
h.contents(),
|
||||
vec![
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "do_it".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "call-x".to_string(),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-x".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_adds_missing_output_for_custom_tool_call() {
|
||||
let items = vec![ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "tool-x".to_string(),
|
||||
name: "custom".to_string(),
|
||||
input: "{}".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(
|
||||
h.contents(),
|
||||
vec![
|
||||
ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "tool-x".to_string(),
|
||||
name: "custom".to_string(),
|
||||
input: "{}".to_string(),
|
||||
},
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: "tool-x".to_string(),
|
||||
output: "aborted".to_string(),
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_adds_missing_output_for_local_shell_call_with_id() {
|
||||
let items = vec![ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("shell-1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(
|
||||
h.contents(),
|
||||
vec![
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("shell-1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "shell-1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_removes_orphan_function_call_output() {
|
||||
let items = vec![ResponseItem::FunctionCallOutput {
|
||||
call_id: "orphan-1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_removes_orphan_custom_tool_call_output() {
|
||||
let items = vec![ResponseItem::CustomToolCallOutput {
|
||||
call_id: "orphan-2".to_string(),
|
||||
output: "ok".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(h.contents(), vec![]);
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[test]
|
||||
fn normalize_mixed_inserts_and_removals() {
|
||||
let items = vec![
|
||||
// Will get an inserted output
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "f1".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "c1".to_string(),
|
||||
},
|
||||
// Orphan output that should be removed
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "c2".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
// Will get an inserted custom tool output
|
||||
ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "t1".to_string(),
|
||||
name: "tool".to_string(),
|
||||
input: "{}".to_string(),
|
||||
},
|
||||
// Local shell call also gets an inserted function call output
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("s1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
|
||||
h.normalize_history();
|
||||
|
||||
assert_eq!(
|
||||
h.contents(),
|
||||
vec![
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "f1".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "c1".to_string(),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "c1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "t1".to_string(),
|
||||
name: "tool".to_string(),
|
||||
input: "{}".to_string(),
|
||||
},
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: "t1".to_string(),
|
||||
output: "aborted".to_string(),
|
||||
},
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("s1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "s1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
// In debug builds we panic on normalization errors instead of silently fixing them.
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_adds_missing_output_for_function_call_panics_in_debug() {
|
||||
let items = vec![ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "do_it".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "call-x".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_adds_missing_output_for_custom_tool_call_panics_in_debug() {
|
||||
let items = vec![ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "tool-x".to_string(),
|
||||
name: "custom".to_string(),
|
||||
input: "{}".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_adds_missing_output_for_local_shell_call_with_id_panics_in_debug() {
|
||||
let items = vec![ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("shell-1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_removes_orphan_function_call_output_panics_in_debug() {
|
||||
let items = vec![ResponseItem::FunctionCallOutput {
|
||||
call_id: "orphan-1".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_removes_orphan_custom_tool_call_output_panics_in_debug() {
|
||||
let items = vec![ResponseItem::CustomToolCallOutput {
|
||||
call_id: "orphan-2".to_string(),
|
||||
output: "ok".to_string(),
|
||||
}];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn normalize_mixed_inserts_and_removals_panics_in_debug() {
|
||||
let items = vec![
|
||||
ResponseItem::FunctionCall {
|
||||
id: None,
|
||||
name: "f1".to_string(),
|
||||
arguments: "{}".to_string(),
|
||||
call_id: "c1".to_string(),
|
||||
},
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: "c2".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "ok".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
ResponseItem::CustomToolCall {
|
||||
id: None,
|
||||
status: None,
|
||||
call_id: "t1".to_string(),
|
||||
name: "tool".to_string(),
|
||||
input: "{}".to_string(),
|
||||
},
|
||||
ResponseItem::LocalShellCall {
|
||||
id: None,
|
||||
call_id: Some("s1".to_string()),
|
||||
status: LocalShellStatus::Completed,
|
||||
action: LocalShellAction::Exec(LocalShellExecAction {
|
||||
command: vec!["echo".to_string()],
|
||||
timeout_ms: None,
|
||||
working_directory: None,
|
||||
env: None,
|
||||
user: None,
|
||||
}),
|
||||
},
|
||||
];
|
||||
let mut h = create_history_with_items(items);
|
||||
h.normalize_history();
|
||||
}
|
||||
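The cfg-gated variants above hinge on how `crate::util::error_or_panic` behaves in debug versus release builds; its implementation is not part of this diff, but a plausible sketch (an assumption, not the repository's actual code) looks like:

// Assumed behavior: abort loudly in debug builds, log and continue in release.
pub(crate) fn error_or_panic(message: String) {
    if cfg!(debug_assertions) {
        panic!("{message}");
    } else {
        tracing::error!("{message}");
    }
}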
codex-rs/core/src/context_manager/mod.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
mod history;
mod normalize;
mod truncate;

pub(crate) use history::ContextManager;
pub(crate) use truncate::format_output_for_model_body;
codex-rs/core/src/context_manager/normalize.rs (new file, 213 lines)
@@ -0,0 +1,213 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use codex_protocol::models::FunctionCallOutputPayload;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
use crate::util::error_or_panic;
|
||||
|
||||
pub(crate) fn ensure_call_outputs_present(items: &mut Vec<ResponseItem>) {
|
||||
// Collect synthetic outputs to insert immediately after their calls.
|
||||
// Store the insertion position (index of call) alongside the item so
|
||||
// we can insert in reverse order and avoid index shifting.
|
||||
let mut missing_outputs_to_insert: Vec<(usize, ResponseItem)> = Vec::new();
|
||||
|
||||
for (idx, item) in items.iter().enumerate() {
|
||||
match item {
|
||||
ResponseItem::FunctionCall { call_id, .. } => {
|
||||
let has_output = items.iter().any(|i| match i {
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: existing, ..
|
||||
} => existing == call_id,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
if !has_output {
|
||||
error_or_panic(format!(
|
||||
"Function call output is missing for call id: {call_id}"
|
||||
));
|
||||
missing_outputs_to_insert.push((
|
||||
idx,
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
ResponseItem::CustomToolCall { call_id, .. } => {
|
||||
let has_output = items.iter().any(|i| match i {
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: existing, ..
|
||||
} => existing == call_id,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
if !has_output {
|
||||
error_or_panic(format!(
|
||||
"Custom tool call output is missing for call id: {call_id}"
|
||||
));
|
||||
missing_outputs_to_insert.push((
|
||||
idx,
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: "aborted".to_string(),
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
// LocalShellCall is represented in upstream streams by a FunctionCallOutput
|
||||
ResponseItem::LocalShellCall { call_id, .. } => {
|
||||
if let Some(call_id) = call_id.as_ref() {
|
||||
let has_output = items.iter().any(|i| match i {
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: existing, ..
|
||||
} => existing == call_id,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
if !has_output {
|
||||
error_or_panic(format!(
|
||||
"Local shell call output is missing for call id: {call_id}"
|
||||
));
|
||||
missing_outputs_to_insert.push((
|
||||
idx,
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: call_id.clone(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Insert synthetic outputs in reverse index order to avoid re-indexing.
|
||||
for (idx, output_item) in missing_outputs_to_insert.into_iter().rev() {
|
||||
items.insert(idx + 1, output_item);
|
||||
}
|
||||
}
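The reverse-order insertion above is what keeps the recorded indices valid; a standalone sketch (not code from this diff) of the same trick:

#[test]
fn sketch_reverse_order_insertion() {
    let mut items = vec!["a", "c", "e"];
    // Synthetic entries are wanted right after positions 0 and 2.
    let inserts = vec![(0, "b"), (2, "f")];
    // Walking the inserts in reverse means later insertions happen first and
    // cannot shift the earlier positions.
    for (idx, value) in inserts.into_iter().rev() {
        items.insert(idx + 1, value);
    }
    assert_eq!(items, vec!["a", "b", "c", "e", "f"]);
}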
|
||||
|
||||
pub(crate) fn remove_orphan_outputs(items: &mut Vec<ResponseItem>) {
|
||||
let function_call_ids: HashSet<String> = items
|
||||
.iter()
|
||||
.filter_map(|i| match i {
|
||||
ResponseItem::FunctionCall { call_id, .. } => Some(call_id.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let local_shell_call_ids: HashSet<String> = items
|
||||
.iter()
|
||||
.filter_map(|i| match i {
|
||||
ResponseItem::LocalShellCall {
|
||||
call_id: Some(call_id),
|
||||
..
|
||||
} => Some(call_id.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let custom_tool_call_ids: HashSet<String> = items
|
||||
.iter()
|
||||
.filter_map(|i| match i {
|
||||
ResponseItem::CustomToolCall { call_id, .. } => Some(call_id.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
items.retain(|item| match item {
|
||||
ResponseItem::FunctionCallOutput { call_id, .. } => {
|
||||
let has_match =
|
||||
function_call_ids.contains(call_id) || local_shell_call_ids.contains(call_id);
|
||||
if !has_match {
|
||||
error_or_panic(format!(
|
||||
"Orphan function call output for call id: {call_id}"
|
||||
));
|
||||
}
|
||||
has_match
|
||||
}
|
||||
ResponseItem::CustomToolCallOutput { call_id, .. } => {
|
||||
let has_match = custom_tool_call_ids.contains(call_id);
|
||||
if !has_match {
|
||||
error_or_panic(format!(
|
||||
"Orphan custom tool call output for call id: {call_id}"
|
||||
));
|
||||
}
|
||||
has_match
|
||||
}
|
||||
_ => true,
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn remove_corresponding_for(items: &mut Vec<ResponseItem>, item: &ResponseItem) {
|
||||
match item {
|
||||
ResponseItem::FunctionCall { call_id, .. } => {
|
||||
remove_first_matching(items, |i| {
|
||||
matches!(
|
||||
i,
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: existing, ..
|
||||
} if existing == call_id
|
||||
)
|
||||
});
|
||||
}
|
||||
ResponseItem::FunctionCallOutput { call_id, .. } => {
|
||||
if let Some(pos) = items.iter().position(|i| {
|
||||
matches!(i, ResponseItem::FunctionCall { call_id: existing, .. } if existing == call_id)
|
||||
}) {
|
||||
items.remove(pos);
|
||||
} else if let Some(pos) = items.iter().position(|i| {
|
||||
matches!(i, ResponseItem::LocalShellCall { call_id: Some(existing), .. } if existing == call_id)
|
||||
}) {
|
||||
items.remove(pos);
|
||||
}
|
||||
}
|
||||
ResponseItem::CustomToolCall { call_id, .. } => {
|
||||
remove_first_matching(items, |i| {
|
||||
matches!(
|
||||
i,
|
||||
ResponseItem::CustomToolCallOutput {
|
||||
call_id: existing, ..
|
||||
} if existing == call_id
|
||||
)
|
||||
});
|
||||
}
|
||||
ResponseItem::CustomToolCallOutput { call_id, .. } => {
|
||||
remove_first_matching(
|
||||
items,
|
||||
|i| matches!(i, ResponseItem::CustomToolCall { call_id: existing, .. } if existing == call_id),
|
||||
);
|
||||
}
|
||||
ResponseItem::LocalShellCall {
|
||||
call_id: Some(call_id),
|
||||
..
|
||||
} => {
|
||||
remove_first_matching(items, |i| {
|
||||
matches!(
|
||||
i,
|
||||
ResponseItem::FunctionCallOutput {
|
||||
call_id: existing, ..
|
||||
} if existing == call_id
|
||||
)
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn remove_first_matching<F>(items: &mut Vec<ResponseItem>, predicate: F)
|
||||
where
|
||||
F: Fn(&ResponseItem) -> bool,
|
||||
{
|
||||
if let Some(pos) = items.iter().position(predicate) {
|
||||
items.remove(pos);
|
||||
}
|
||||
}
|
||||
codex-rs/core/src/context_manager/truncate.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
use codex_protocol::models::FunctionCallOutputContentItem;
|
||||
use codex_utils_string::take_bytes_at_char_boundary;
|
||||
use codex_utils_string::take_last_bytes_at_char_boundary;
|
||||
|
||||
// Model-formatting limits: clients get full streams; only content sent to the model is truncated.
|
||||
pub(crate) const MODEL_FORMAT_MAX_BYTES: usize = 10 * 1024; // 10 KiB
|
||||
pub(crate) const MODEL_FORMAT_MAX_LINES: usize = 256; // lines
|
||||
pub(crate) const MODEL_FORMAT_HEAD_LINES: usize = MODEL_FORMAT_MAX_LINES / 2;
|
||||
pub(crate) const MODEL_FORMAT_TAIL_LINES: usize = MODEL_FORMAT_MAX_LINES - MODEL_FORMAT_HEAD_LINES; // 128
|
||||
pub(crate) const MODEL_FORMAT_HEAD_BYTES: usize = MODEL_FORMAT_MAX_BYTES / 2;
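Spelled out, the derived limits come to a 10 KiB byte budget with a 5 KiB head, and 256 lines split evenly 128/128; a small sanity-check sketch (not part of this diff):

#[cfg(test)]
mod limit_sanity {
    use super::*;

    #[test]
    fn derived_limits_have_expected_values() {
        assert_eq!(MODEL_FORMAT_MAX_BYTES, 10 * 1024);
        assert_eq!(MODEL_FORMAT_HEAD_BYTES, 5 * 1024);
        assert_eq!(MODEL_FORMAT_HEAD_LINES, 128);
        assert_eq!(MODEL_FORMAT_TAIL_LINES, 128);
    }
}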
|
||||
|
||||
pub(crate) fn globally_truncate_function_output_items(
|
||||
items: &[FunctionCallOutputContentItem],
|
||||
) -> Vec<FunctionCallOutputContentItem> {
|
||||
let mut out: Vec<FunctionCallOutputContentItem> = Vec::with_capacity(items.len());
|
||||
let mut remaining = MODEL_FORMAT_MAX_BYTES;
|
||||
let mut omitted_text_items = 0usize;
|
||||
|
||||
for it in items {
|
||||
match it {
|
||||
FunctionCallOutputContentItem::InputText { text } => {
|
||||
if remaining == 0 {
|
||||
omitted_text_items += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let len = text.len();
|
||||
if len <= remaining {
|
||||
out.push(FunctionCallOutputContentItem::InputText { text: text.clone() });
|
||||
remaining -= len;
|
||||
} else {
|
||||
let slice = take_bytes_at_char_boundary(text, remaining);
|
||||
if !slice.is_empty() {
|
||||
out.push(FunctionCallOutputContentItem::InputText {
|
||||
text: slice.to_string(),
|
||||
});
|
||||
}
|
||||
remaining = 0;
|
||||
}
|
||||
}
|
||||
// todo(aibrahim): handle input images; resize
|
||||
FunctionCallOutputContentItem::InputImage { image_url } => {
|
||||
out.push(FunctionCallOutputContentItem::InputImage {
|
||||
image_url: image_url.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if omitted_text_items > 0 {
|
||||
out.push(FunctionCallOutputContentItem::InputText {
|
||||
text: format!("[omitted {omitted_text_items} text items ...]"),
|
||||
});
|
||||
}
|
||||
|
||||
out
|
||||
}
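A hedged usage sketch of the shared budget: the first text consumes most of it, the second is clipped to what remains, and images pass through untouched; the literal contents here are illustrative.

#[test]
fn sketch_global_truncation_across_items() {
    let items = vec![
        FunctionCallOutputContentItem::InputText {
            text: "a".repeat(MODEL_FORMAT_MAX_BYTES - 100),
        },
        FunctionCallOutputContentItem::InputText {
            text: "b".repeat(500),
        },
        FunctionCallOutputContentItem::InputImage {
            image_url: "img:example".to_string(),
        },
    ];
    // First text fits whole, second is clipped to the remaining 100 bytes,
    // the image is untouched, and no items are omitted outright.
    let truncated = globally_truncate_function_output_items(&items);
    assert_eq!(truncated.len(), 3);
}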
|
||||
|
||||
pub(crate) fn format_output_for_model_body(content: &str) -> String {
|
||||
// Head+tail truncation for the model: show the beginning and end with an elision.
|
||||
// Clients still receive full streams; only this formatted summary is capped.
|
||||
let total_lines = content.lines().count();
|
||||
if content.len() <= MODEL_FORMAT_MAX_BYTES && total_lines <= MODEL_FORMAT_MAX_LINES {
|
||||
return content.to_string();
|
||||
}
|
||||
let output = truncate_formatted_exec_output(content, total_lines);
|
||||
format!("Total output lines: {total_lines}\n\n{output}")
|
||||
}
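A short sketch of the contract with illustrative inputs: output under both limits comes back verbatim, while anything over a limit gains the line-count header.

#[test]
fn sketch_format_output_for_model_body() {
    let small = "ok\n".to_string();
    assert_eq!(format_output_for_model_body(&small), small);

    let big = "line\n".repeat(MODEL_FORMAT_MAX_LINES + 1);
    let formatted = format_output_for_model_body(&big);
    assert!(formatted.starts_with(&format!(
        "Total output lines: {}\n\n",
        MODEL_FORMAT_MAX_LINES + 1
    )));
}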
|
||||
|
||||
fn truncate_formatted_exec_output(content: &str, total_lines: usize) -> String {
|
||||
let segments: Vec<&str> = content.split_inclusive('\n').collect();
|
||||
let head_take = MODEL_FORMAT_HEAD_LINES.min(segments.len());
|
||||
let tail_take = MODEL_FORMAT_TAIL_LINES.min(segments.len().saturating_sub(head_take));
|
||||
let omitted = segments.len().saturating_sub(head_take + tail_take);
|
||||
|
||||
let head_slice_end: usize = segments
|
||||
.iter()
|
||||
.take(head_take)
|
||||
.map(|segment| segment.len())
|
||||
.sum();
|
||||
let tail_slice_start: usize = if tail_take == 0 {
|
||||
content.len()
|
||||
} else {
|
||||
content.len()
|
||||
- segments
|
||||
.iter()
|
||||
.rev()
|
||||
.take(tail_take)
|
||||
.map(|segment| segment.len())
|
||||
.sum::<usize>()
|
||||
};
|
||||
let head_slice = &content[..head_slice_end];
|
||||
let tail_slice = &content[tail_slice_start..];
|
||||
let truncated_by_bytes = content.len() > MODEL_FORMAT_MAX_BYTES;
|
||||
// this is a bit wrong. We are counting metadata lines and not just shell output lines.
|
||||
let marker = if omitted > 0 {
|
||||
Some(format!(
|
||||
"\n[... omitted {omitted} of {total_lines} lines ...]\n\n"
|
||||
))
|
||||
} else if truncated_by_bytes {
|
||||
Some(format!(
|
||||
"\n[... output truncated to fit {MODEL_FORMAT_MAX_BYTES} bytes ...]\n\n"
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let marker_len = marker.as_ref().map_or(0, String::len);
|
||||
let base_head_budget = MODEL_FORMAT_HEAD_BYTES.min(MODEL_FORMAT_MAX_BYTES);
|
||||
let head_budget = base_head_budget.min(MODEL_FORMAT_MAX_BYTES.saturating_sub(marker_len));
|
||||
let head_part = take_bytes_at_char_boundary(head_slice, head_budget);
|
||||
let mut result = String::with_capacity(MODEL_FORMAT_MAX_BYTES.min(content.len()));
|
||||
|
||||
result.push_str(head_part);
|
||||
if let Some(marker_text) = marker.as_ref() {
|
||||
result.push_str(marker_text);
|
||||
}
|
||||
|
||||
let remaining = MODEL_FORMAT_MAX_BYTES.saturating_sub(result.len());
|
||||
if remaining == 0 {
|
||||
return result;
|
||||
}
|
||||
|
||||
let tail_part = take_last_bytes_at_char_boundary(tail_slice, remaining);
|
||||
result.push_str(tail_part);
|
||||
|
||||
result
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -32,12 +32,11 @@ pub async fn discover_prompts_in_excluding(
|
||||
|
||||
while let Ok(Some(entry)) = entries.next_entry().await {
|
||||
let path = entry.path();
|
||||
let is_file = entry
|
||||
.file_type()
|
||||
let is_file_like = fs::metadata(&path)
|
||||
.await
|
||||
.map(|ft| ft.is_file())
|
||||
.map(|m| m.is_file())
|
||||
.unwrap_or(false);
|
||||
if !is_file {
|
||||
if !is_file_like {
|
||||
continue;
|
||||
}
|
||||
// Only include Markdown files with a .md extension.
|
||||
@@ -197,6 +196,25 @@ mod tests {
|
||||
assert_eq!(names, vec!["good"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[cfg(unix)]
|
||||
async fn discovers_symlinked_md_files() {
|
||||
let tmp = tempdir().expect("create TempDir");
|
||||
let dir = tmp.path();
|
||||
|
||||
// Create a real file
|
||||
fs::write(dir.join("real.md"), b"real content").unwrap();
|
||||
|
||||
// Create a symlink to the real file
|
||||
std::os::unix::fs::symlink(dir.join("real.md"), dir.join("link.md")).unwrap();
|
||||
|
||||
let found = discover_prompts_in(dir).await;
|
||||
let names: Vec<String> = found.into_iter().map(|e| e.name).collect();
|
||||
|
||||
// Both real and link should be discovered, sorted alphabetically
|
||||
assert_eq!(names, vec!["link", "real"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn parses_frontmatter_and_strips_from_body() {
|
||||
let tmp = tempdir().expect("create TempDir");
|
||||
|
||||
@@ -109,6 +109,9 @@ pub enum CodexErr {
|
||||
#[error("{0}")]
|
||||
ConnectionFailed(ConnectionFailedError),
|
||||
|
||||
#[error("Quota exceeded. Check your plan and billing details.")]
|
||||
QuotaExceeded,
|
||||
|
||||
#[error(
|
||||
"To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing."
|
||||
)]
|
||||
@@ -135,6 +138,9 @@ pub enum CodexErr {
|
||||
#[error("unsupported operation: {0}")]
|
||||
UnsupportedOperation(String),
|
||||
|
||||
#[error("{0}")]
|
||||
RefreshTokenFailed(RefreshTokenFailedError),
|
||||
|
||||
#[error("Fatal error: {0}")]
|
||||
Fatal(String),
|
||||
|
||||
@@ -201,6 +207,30 @@ impl std::fmt::Display for ResponseStreamFailed {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Error)]
|
||||
#[error("{message}")]
|
||||
pub struct RefreshTokenFailedError {
|
||||
pub reason: RefreshTokenFailedReason,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl RefreshTokenFailedError {
|
||||
pub fn new(reason: RefreshTokenFailedReason, message: impl Into<String>) -> Self {
|
||||
Self {
|
||||
reason,
|
||||
message: message.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum RefreshTokenFailedReason {
|
||||
Expired,
|
||||
Exhausted,
|
||||
Revoked,
|
||||
Other,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnexpectedResponseError {
|
||||
pub status: StatusCode,
|
||||
@@ -208,18 +238,44 @@ pub struct UnexpectedResponseError {
|
||||
pub request_id: Option<String>,
|
||||
}
|
||||
|
||||
const CLOUDFLARE_BLOCKED_MESSAGE: &str =
|
||||
"Access blocked by Cloudflare. This usually happens when connecting from a restricted region";
|
||||
|
||||
impl UnexpectedResponseError {
|
||||
fn friendly_message(&self) -> Option<String> {
|
||||
if self.status != StatusCode::FORBIDDEN {
|
||||
return None;
|
||||
}
|
||||
|
||||
if !self.body.contains("Cloudflare") || !self.body.contains("blocked") {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut message = format!("{CLOUDFLARE_BLOCKED_MESSAGE} (status {})", self.status);
|
||||
if let Some(id) = &self.request_id {
|
||||
message.push_str(&format!(", request id: {id}"));
|
||||
}
|
||||
|
||||
Some(message)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for UnexpectedResponseError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"unexpected status {}: {}{}",
|
||||
self.status,
|
||||
self.body,
|
||||
self.request_id
|
||||
.as_ref()
|
||||
.map(|id| format!(", request id: {id}"))
|
||||
.unwrap_or_default()
|
||||
)
|
||||
if let Some(friendly) = self.friendly_message() {
|
||||
write!(f, "{friendly}")
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"unexpected status {}: {}{}",
|
||||
self.status,
|
||||
self.body,
|
||||
self.request_id
|
||||
.as_ref()
|
||||
.map(|id| format!(", request id: {id}"))
|
||||
.unwrap_or_default()
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -255,7 +311,7 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let message = match self.plan_type.as_ref() {
|
||||
Some(PlanType::Known(KnownPlan::Plus)) => format!(
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
),
|
||||
Some(PlanType::Known(KnownPlan::Team)) | Some(PlanType::Known(KnownPlan::Business)) => {
|
||||
@@ -269,7 +325,7 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
.to_string()
|
||||
}
|
||||
Some(PlanType::Known(KnownPlan::Pro)) => format!(
|
||||
"You've hit your usage limit. Visit chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
"You've hit your usage limit. Visit https://chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
),
|
||||
Some(PlanType::Known(KnownPlan::Enterprise))
|
||||
@@ -460,7 +516,7 @@ mod tests {
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit chatgpt.com/codex/settings/usage to purchase more credits or try again later."
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again later."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -613,7 +669,7 @@ mod tests {
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Visit chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
"You've hit your usage limit. Visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
@@ -635,6 +691,35 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unexpected_status_cloudflare_html_is_simplified() {
|
||||
let err = UnexpectedResponseError {
|
||||
status: StatusCode::FORBIDDEN,
|
||||
body: "<html><body>Cloudflare error: Sorry, you have been blocked</body></html>"
|
||||
.to_string(),
|
||||
request_id: Some("ray-id".to_string()),
|
||||
};
|
||||
let status = StatusCode::FORBIDDEN.to_string();
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("{CLOUDFLARE_BLOCKED_MESSAGE} (status {status}), request id: ray-id")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unexpected_status_non_html_is_unchanged() {
|
||||
let err = UnexpectedResponseError {
|
||||
status: StatusCode::FORBIDDEN,
|
||||
body: "plain text error".to_string(),
|
||||
request_id: None,
|
||||
};
|
||||
let status = StatusCode::FORBIDDEN.to_string();
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("unexpected status {status}: plain text error")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn usage_limit_reached_includes_hours_and_minutes() {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
@@ -647,7 +732,7 @@ mod tests {
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
|
||||
@@ -14,6 +14,7 @@ use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::user_instructions::UserInstructions;
|
||||
use crate::user_shell_command::is_user_shell_command_text;
|
||||
|
||||
fn is_session_prefix(text: &str) -> bool {
|
||||
let trimmed = text.trim_start();
|
||||
@@ -31,7 +32,7 @@ fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
for content_item in message.iter() {
|
||||
match content_item {
|
||||
ContentItem::InputText { text } => {
|
||||
if is_session_prefix(text) {
|
||||
if is_session_prefix(text) || is_user_shell_command_text(text) {
|
||||
return None;
|
||||
}
|
||||
content.push(UserInput::Text { text: text.clone() });
|
||||
@@ -197,7 +198,14 @@ mod tests {
|
||||
text: "# AGENTS.md instructions for test_directory\n\n<INSTRUCTIONS>\ntest_text\n</INSTRUCTIONS>".to_string(),
|
||||
}],
|
||||
},
|
||||
];
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<user_shell_command>echo 42</user_shell_command>".to_string(),
|
||||
}],
|
||||
},
|
||||
];
|
||||
|
||||
for item in items {
|
||||
let turn_item = parse_turn_item(&item);
|
||||
|
||||
@@ -171,6 +171,7 @@ async fn exec_windows_sandbox(
|
||||
params: ExecParams,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> Result<RawExecToolCallOutput> {
|
||||
use crate::config::find_codex_home;
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
|
||||
let ExecParams {
|
||||
@@ -188,8 +189,17 @@ async fn exec_windows_sandbox(
|
||||
};
|
||||
|
||||
let sandbox_cwd = cwd.clone();
|
||||
let logs_base_dir = find_codex_home().ok();
|
||||
let spawn_res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(policy_str, &sandbox_cwd, command, &cwd, env, timeout_ms)
|
||||
run_windows_sandbox_capture(
|
||||
policy_str,
|
||||
&sandbox_cwd,
|
||||
command,
|
||||
&cwd,
|
||||
env,
|
||||
timeout_ms,
|
||||
logs_base_dir.as_deref(),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -303,6 +313,10 @@ pub(crate) mod errors {
|
||||
SandboxTransformError::MissingLinuxSandboxExecutable => {
|
||||
CodexErr::LandlockSandboxExecutableNotProvided
|
||||
}
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
SandboxTransformError::SeatbeltUnavailable => CodexErr::UnsupportedOperation(
|
||||
"seatbelt sandbox is only available on macOS".to_string(),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -504,6 +518,7 @@ async fn consume_truncated_output(
|
||||
}
|
||||
Err(_) => {
|
||||
// timeout
|
||||
kill_child_process_group(&mut child)?;
|
||||
child.start_kill()?;
|
||||
// Debatable whether `child.wait().await` should be called here.
|
||||
(synthetic_exit_status(EXIT_CODE_SIGNAL_BASE + TIMEOUT_CODE), true)
|
||||
@@ -511,6 +526,7 @@ async fn consume_truncated_output(
|
||||
}
|
||||
}
|
||||
_ = tokio::signal::ctrl_c() => {
|
||||
kill_child_process_group(&mut child)?;
|
||||
child.start_kill()?;
|
||||
(synthetic_exit_status(EXIT_CODE_SIGNAL_BASE + SIGKILL_CODE), false)
|
||||
}
|
||||
@@ -607,6 +623,38 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
|
||||
std::process::ExitStatus::from_raw(code as u32)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn kill_child_process_group(child: &mut Child) -> io::Result<()> {
|
||||
use std::io::ErrorKind;
|
||||
|
||||
if let Some(pid) = child.id() {
|
||||
let pid = pid as libc::pid_t;
|
||||
let pgid = unsafe { libc::getpgid(pid) };
|
||||
if pgid == -1 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.kind() != ErrorKind::NotFound {
|
||||
return Err(err);
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let result = unsafe { libc::killpg(pgid, libc::SIGKILL) };
|
||||
if result == -1 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.kind() != ErrorKind::NotFound {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
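For killpg to reach grandchildren such as the backgrounded sleep in the timeout test below, the child must have been spawned into its own process group; a minimal sketch of that spawn-side arrangement (an assumption about the spawn layer, not the tokio-based path in this diff):

use std::io;
use std::os::unix::process::CommandExt;
use std::process::{Child, Command};

fn spawn_in_own_process_group() -> io::Result<Child> {
    // pgid 0 makes the child the leader of a fresh process group, so
    // killpg(pgid, SIGKILL) later takes the whole tree down.
    Command::new("/bin/bash")
        .arg("-c")
        .arg("sleep 60 & sleep 60")
        .process_group(0)
        .spawn()
}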
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn kill_child_process_group(_: &mut Child) -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -679,4 +727,51 @@ mod tests {
|
||||
let output = make_exec_output(exit_code, "", "", "");
|
||||
assert!(is_likely_sandbox_denied(SandboxType::LinuxSeccomp, &output));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[tokio::test]
|
||||
async fn kill_child_process_group_kills_grandchildren_on_timeout() -> Result<()> {
|
||||
let command = vec![
|
||||
"/bin/bash".to_string(),
|
||||
"-c".to_string(),
|
||||
"sleep 60 & echo $!; sleep 60".to_string(),
|
||||
];
|
||||
let env: HashMap<String, String> = std::env::vars().collect();
|
||||
let params = ExecParams {
|
||||
command,
|
||||
cwd: std::env::current_dir()?,
|
||||
timeout_ms: Some(500),
|
||||
env,
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
|
||||
let output = exec(params, SandboxType::None, &SandboxPolicy::ReadOnly, None).await?;
|
||||
assert!(output.timed_out);
|
||||
|
||||
let stdout = output.stdout.from_utf8_lossy().text;
|
||||
let pid_line = stdout.lines().next().unwrap_or("").trim();
|
||||
let pid: i32 = pid_line.parse().map_err(|error| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("Failed to parse pid from stdout '{pid_line}': {error}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
let mut killed = false;
|
||||
for _ in 0..20 {
|
||||
// Use kill(pid, 0) to check if the process is alive.
|
||||
if unsafe { libc::kill(pid, 0) } == -1
|
||||
&& let Some(libc::ESRCH) = std::io::Error::last_os_error().raw_os_error()
|
||||
{
|
||||
killed = true;
|
||||
break;
|
||||
}
|
||||
tokio::time::sleep(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
assert!(killed, "grandchild process with pid {pid} is still alive");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,8 +29,6 @@ pub enum Stage {
|
||||
pub enum Feature {
|
||||
/// Use the single unified PTY-backed exec tool.
|
||||
UnifiedExec,
|
||||
/// Use the streamable exec-command/write-stdin tool pair.
|
||||
StreamableShell,
|
||||
/// Enable experimental RMCP features such as OAuth login.
|
||||
RmcpClient,
|
||||
/// Include the freeform apply_patch tool.
|
||||
@@ -118,8 +116,9 @@ impl Features {
|
||||
self.enabled.contains(&f)
|
||||
}
|
||||
|
||||
pub fn enable(&mut self, f: Feature) {
|
||||
pub fn enable(&mut self, f: Feature) -> &mut Self {
|
||||
self.enabled.insert(f);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn disable(&mut self, f: Feature) -> &mut Self {
|
||||
@@ -178,7 +177,6 @@ impl Features {
|
||||
let base_legacy = LegacyFeatureToggles {
|
||||
experimental_sandbox_command_assessment: cfg.experimental_sandbox_command_assessment,
|
||||
experimental_use_freeform_apply_patch: cfg.experimental_use_freeform_apply_patch,
|
||||
experimental_use_exec_command_tool: cfg.experimental_use_exec_command_tool,
|
||||
experimental_use_unified_exec_tool: cfg.experimental_use_unified_exec_tool,
|
||||
experimental_use_rmcp_client: cfg.experimental_use_rmcp_client,
|
||||
tools_web_search: cfg.tools.as_ref().and_then(|t| t.web_search),
|
||||
@@ -197,7 +195,7 @@ impl Features {
|
||||
.experimental_sandbox_command_assessment,
|
||||
experimental_use_freeform_apply_patch: config_profile
|
||||
.experimental_use_freeform_apply_patch,
|
||||
experimental_use_exec_command_tool: config_profile.experimental_use_exec_command_tool,
|
||||
|
||||
experimental_use_unified_exec_tool: config_profile.experimental_use_unified_exec_tool,
|
||||
experimental_use_rmcp_client: config_profile.experimental_use_rmcp_client,
|
||||
tools_web_search: config_profile.tools_web_search,
|
||||
@@ -252,12 +250,6 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::StreamableShell,
|
||||
key: "streamable_shell",
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::RmcpClient,
|
||||
key: "rmcp_client",
|
||||
|
||||
@@ -17,10 +17,6 @@ const ALIASES: &[Alias] = &[
|
||||
legacy_key: "experimental_use_unified_exec_tool",
|
||||
feature: Feature::UnifiedExec,
|
||||
},
|
||||
Alias {
|
||||
legacy_key: "experimental_use_exec_command_tool",
|
||||
feature: Feature::StreamableShell,
|
||||
},
|
||||
Alias {
|
||||
legacy_key: "experimental_use_rmcp_client",
|
||||
feature: Feature::RmcpClient,
|
||||
@@ -54,7 +50,6 @@ pub struct LegacyFeatureToggles {
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
pub experimental_sandbox_command_assessment: Option<bool>,
|
||||
pub experimental_use_freeform_apply_patch: Option<bool>,
|
||||
pub experimental_use_exec_command_tool: Option<bool>,
|
||||
pub experimental_use_unified_exec_tool: Option<bool>,
|
||||
pub experimental_use_rmcp_client: Option<bool>,
|
||||
pub tools_web_search: Option<bool>,
|
||||
@@ -81,12 +76,6 @@ impl LegacyFeatureToggles {
|
||||
self.experimental_use_freeform_apply_patch,
|
||||
"experimental_use_freeform_apply_patch",
|
||||
);
|
||||
set_if_some(
|
||||
features,
|
||||
Feature::StreamableShell,
|
||||
self.experimental_use_exec_command_tool,
|
||||
"experimental_use_exec_command_tool",
|
||||
);
|
||||
set_if_some(
|
||||
features,
|
||||
Feature::UnifiedExec,
|
||||
@@ -123,7 +112,7 @@ fn set_if_some(
|
||||
if let Some(enabled) = maybe_value {
|
||||
set_feature(features, feature, enabled);
|
||||
log_alias(alias_key, feature);
|
||||
features.record_legacy_usage_force(alias_key, feature);
|
||||
features.record_legacy_usage(alias_key, feature);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ mod codex_delegate;
|
||||
mod command_safety;
|
||||
pub mod config;
|
||||
pub mod config_loader;
|
||||
mod conversation_history;
|
||||
mod context_manager;
|
||||
pub mod custom_prompts;
|
||||
mod environment_context;
|
||||
pub mod error;
|
||||
@@ -75,16 +75,19 @@ pub use rollout::find_conversation_path_by_id_str;
|
||||
pub use rollout::list::ConversationItem;
|
||||
pub use rollout::list::ConversationsPage;
|
||||
pub use rollout::list::Cursor;
|
||||
pub use rollout::list::parse_cursor;
|
||||
pub use rollout::list::read_head_for_summary;
|
||||
mod function_tool;
|
||||
mod state;
|
||||
mod tasks;
|
||||
mod user_notification;
|
||||
mod user_shell_command;
|
||||
pub mod util;
|
||||
|
||||
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
|
||||
pub use command_safety::is_safe_command;
|
||||
pub use safety::get_platform_sandbox;
|
||||
pub use safety::set_windows_sandbox_enabled;
|
||||
// Re-export the protocol types from the standalone `codex-protocol` crate so existing
|
||||
// `codex_core::protocol::...` references continue to work across the workspace.
|
||||
pub use codex_protocol::protocol;
|
||||
@@ -97,11 +100,12 @@ pub use client_common::Prompt;
|
||||
pub use client_common::REVIEW_PROMPT;
|
||||
pub use client_common::ResponseEvent;
|
||||
pub use client_common::ResponseStream;
|
||||
pub use codex::compact::content_items_to_text;
|
||||
pub use codex_protocol::models::ContentItem;
|
||||
pub use codex_protocol::models::LocalShellAction;
|
||||
pub use codex_protocol::models::LocalShellExecAction;
|
||||
pub use codex_protocol::models::LocalShellStatus;
|
||||
pub use codex_protocol::models::ResponseItem;
|
||||
pub use compact::content_items_to_text;
|
||||
pub use event_mapping::parse_turn_item;
|
||||
pub mod compact;
|
||||
pub mod otel_init;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::config::types::ReasoningSummaryFormat;
|
||||
use crate::tools::handlers::apply_patch::ApplyPatchToolType;
|
||||
use crate::tools::spec::ConfigShellToolType;
|
||||
|
||||
/// The `instructions` field in the payload sent to a model should always start
|
||||
/// with this content.
|
||||
@@ -29,12 +30,6 @@ pub struct ModelFamily {
|
||||
// Define if we need a special handling of reasoning summary
|
||||
pub reasoning_summary_format: ReasoningSummaryFormat,
|
||||
|
||||
// This should be set to true when the model expects a tool named
|
||||
// "local_shell" to be provided. Its contract must be understood natively by
|
||||
// the model such that its description can be omitted.
|
||||
// See https://platform.openai.com/docs/guides/tools-local-shell
|
||||
pub uses_local_shell_tool: bool,
|
||||
|
||||
/// Whether this model supports parallel tool calls when using the
|
||||
/// Responses API.
|
||||
pub supports_parallel_tool_calls: bool,
|
||||
@@ -57,6 +52,9 @@ pub struct ModelFamily {
|
||||
|
||||
/// If the model family supports setting the verbosity level when using Responses API.
|
||||
pub support_verbosity: bool,
|
||||
|
||||
/// Preferred shell tool type for this model family when features do not override it.
|
||||
pub shell_type: ConfigShellToolType,
|
||||
}
|
||||
|
||||
macro_rules! model_family {
|
||||
@@ -64,19 +62,20 @@ macro_rules! model_family {
|
||||
$slug:expr, $family:expr $(, $key:ident : $value:expr )* $(,)?
|
||||
) => {{
|
||||
// defaults
|
||||
#[allow(unused_mut)]
|
||||
let mut mf = ModelFamily {
|
||||
slug: $slug.to_string(),
|
||||
family: $family.to_string(),
|
||||
needs_special_apply_patch_instructions: false,
|
||||
supports_reasoning_summaries: false,
|
||||
reasoning_summary_format: ReasoningSummaryFormat::None,
|
||||
uses_local_shell_tool: false,
|
||||
supports_parallel_tool_calls: false,
|
||||
apply_patch_tool_type: None,
|
||||
base_instructions: BASE_INSTRUCTIONS.to_string(),
|
||||
experimental_supported_tools: Vec::new(),
|
||||
effective_context_window_percent: 95,
|
||||
support_verbosity: false,
|
||||
shell_type: ConfigShellToolType::Default,
|
||||
};
|
||||
// apply overrides
|
||||
$(
|
||||
@@ -105,8 +104,8 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
|
||||
model_family!(
|
||||
slug, "codex-mini-latest",
|
||||
supports_reasoning_summaries: true,
|
||||
uses_local_shell_tool: true,
|
||||
needs_special_apply_patch_instructions: true,
|
||||
shell_type: ConfigShellToolType::Local,
|
||||
)
|
||||
} else if slug.starts_with("gpt-4.1") {
|
||||
model_family!(
|
||||
@@ -119,6 +118,8 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
|
||||
model_family!(slug, "gpt-4o", needs_special_apply_patch_instructions: true)
|
||||
} else if slug.starts_with("gpt-3.5") {
|
||||
model_family!(slug, "gpt-3.5", needs_special_apply_patch_instructions: true)
|
||||
} else if slug.starts_with("porcupine") {
|
||||
model_family!(slug, "porcupine", shell_type: ConfigShellToolType::UnifiedExec)
|
||||
} else if slug.starts_with("test-gpt-5-codex") {
|
||||
model_family!(
|
||||
slug, slug,
|
||||
@@ -181,12 +182,12 @@ pub fn derive_default_model_family(model: &str) -> ModelFamily {
|
||||
needs_special_apply_patch_instructions: false,
|
||||
supports_reasoning_summaries: false,
|
||||
reasoning_summary_format: ReasoningSummaryFormat::None,
|
||||
uses_local_shell_tool: false,
|
||||
supports_parallel_tool_calls: false,
|
||||
apply_patch_tool_type: None,
|
||||
base_instructions: BASE_INSTRUCTIONS.to_string(),
|
||||
experimental_supported_tools: Vec::new(),
|
||||
effective_context_window_percent: 95,
|
||||
support_verbosity: false,
|
||||
shell_type: ConfigShellToolType::Default,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -273,7 +273,7 @@ async fn traverse_directories_for_paths(
|
||||
/// Pagination cursor token format: "<file_ts>|<uuid>" where `file_ts` matches the
|
||||
/// filename timestamp portion (YYYY-MM-DDThh-mm-ss) used in rollout filenames.
|
||||
/// The cursor orders files by timestamp desc, then UUID desc.
|
||||
fn parse_cursor(token: &str) -> Option<Cursor> {
|
||||
pub fn parse_cursor(token: &str) -> Option<Cursor> {
|
||||
let (file_ts, uuid_str) = token.split_once('|')?;
|
||||
|
||||
let Ok(uuid) = Uuid::parse_str(uuid_str) else {
|
||||
|
||||
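For orientation, a cursor token in the documented "<file_ts>|<uuid>" shape would look like the sketch below; the timestamp and UUID are made up, and acceptance also depends on timestamp validation not shown in this hunk.

fn example_cursor_token() -> Option<Cursor> {
    parse_cursor("2025-01-02T03-04-05|550e8400-e29b-41d4-a716-446655440000")
}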
@@ -25,16 +25,6 @@ use tracing::warn;
|
||||
|
||||
const SANDBOX_ASSESSMENT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
|
||||
const SANDBOX_RISK_CATEGORY_VALUES: &[&str] = &[
|
||||
"data_deletion",
|
||||
"data_exfiltration",
|
||||
"privilege_escalation",
|
||||
"system_modification",
|
||||
"network_access",
|
||||
"resource_exhaustion",
|
||||
"compliance",
|
||||
];
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "sandboxing/assessment_prompt.md", escape = "none")]
|
||||
struct SandboxAssessmentPromptTemplate<'a> {
|
||||
@@ -176,27 +166,26 @@ pub(crate) async fn assess_command(
|
||||
call_id,
|
||||
"success",
|
||||
Some(assessment.risk_level),
|
||||
&assessment.risk_categories,
|
||||
duration,
|
||||
);
|
||||
return Some(assessment);
|
||||
}
|
||||
Err(err) => {
|
||||
warn!("failed to parse sandbox assessment JSON: {err}");
|
||||
parent_otel.sandbox_assessment(call_id, "parse_error", None, &[], duration);
|
||||
parent_otel.sandbox_assessment(call_id, "parse_error", None, duration);
|
||||
}
|
||||
},
|
||||
Ok(Ok(None)) => {
|
||||
warn!("sandbox assessment response did not include any message");
|
||||
parent_otel.sandbox_assessment(call_id, "no_output", None, &[], duration);
|
||||
parent_otel.sandbox_assessment(call_id, "no_output", None, duration);
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
warn!("sandbox assessment failed: {err}");
|
||||
parent_otel.sandbox_assessment(call_id, "model_error", None, &[], duration);
|
||||
parent_otel.sandbox_assessment(call_id, "model_error", None, duration);
|
||||
}
|
||||
Err(_) => {
|
||||
warn!("sandbox assessment timed out");
|
||||
parent_otel.sandbox_assessment(call_id, "timeout", None, &[], duration);
|
||||
parent_otel.sandbox_assessment(call_id, "timeout", None, duration);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -229,7 +218,7 @@ fn sandbox_roots_for_prompt(policy: &SandboxPolicy, cwd: &Path) -> Vec<PathBuf>
|
||||
fn sandbox_assessment_schema() -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"required": ["description", "risk_level", "risk_categories"],
|
||||
"required": ["description", "risk_level"],
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string",
|
||||
@@ -240,13 +229,6 @@ fn sandbox_assessment_schema() -> serde_json::Value {
|
||||
"type": "string",
|
||||
"enum": ["low", "medium", "high"]
|
||||
},
|
||||
"risk_categories": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"enum": SANDBOX_RISK_CATEGORY_VALUES
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
})
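With risk_categories removed, a payload that satisfies the reduced schema shrinks to description plus risk_level; an illustrative sketch with invented values:

fn example_assessment_payload() -> serde_json::Value {
    serde_json::json!({
        "description": "Lists files under the workspace root",
        "risk_level": "low",
    })
}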
|
||||
|
||||
@@ -14,8 +14,11 @@ use crate::exec::StdoutStream;
|
||||
use crate::exec::execute_exec_env;
|
||||
use crate::landlock::create_linux_sandbox_command_args;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
#[cfg(target_os = "macos")]
|
||||
use crate::seatbelt::MACOS_PATH_TO_SEATBELT_EXECUTABLE;
|
||||
#[cfg(target_os = "macos")]
|
||||
use crate::seatbelt::create_seatbelt_command_args;
|
||||
#[cfg(target_os = "macos")]
|
||||
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
|
||||
use crate::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use crate::tools::sandboxing::SandboxablePreference;
|
||||
@@ -56,6 +59,9 @@ pub enum SandboxPreference {
|
||||
pub(crate) enum SandboxTransformError {
|
||||
#[error("missing codex-linux-sandbox executable path")]
|
||||
MissingLinuxSandboxExecutable,
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
#[error("seatbelt sandbox is only available on macOS")]
|
||||
SeatbeltUnavailable,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -107,6 +113,7 @@ impl SandboxManager {
|
||||
|
||||
let (command, sandbox_env, arg0_override) = match sandbox {
|
||||
SandboxType::None => (command, HashMap::new(), None),
|
||||
#[cfg(target_os = "macos")]
|
||||
SandboxType::MacosSeatbelt => {
|
||||
let mut seatbelt_env = HashMap::new();
|
||||
seatbelt_env.insert(CODEX_SANDBOX_ENV_VAR.to_string(), "seatbelt".to_string());
|
||||
@@ -117,6 +124,8 @@ impl SandboxManager {
|
||||
full_command.append(&mut args);
|
||||
(full_command, seatbelt_env, None)
|
||||
}
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
SandboxType::MacosSeatbelt => return Err(SandboxTransformError::SeatbeltUnavailable),
|
||||
SandboxType::LinuxSeccomp => {
|
||||
let exe = codex_linux_sandbox_exe
|
||||
.ok_or(SandboxTransformError::MissingLinuxSandboxExecutable)?;
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
#![cfg(target_os = "macos")]
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::CStr;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tokio::process::Child;
|
||||
@@ -9,6 +12,7 @@ use crate::spawn::StdioPolicy;
|
||||
use crate::spawn::spawn_child_async;
|
||||
|
||||
const MACOS_SEATBELT_BASE_POLICY: &str = include_str!("seatbelt_base_policy.sbpl");
|
||||
const MACOS_SEATBELT_NETWORK_POLICY: &str = include_str!("seatbelt_network_policy.sbpl");
|
||||
|
||||
/// When working with `sandbox-exec`, only consider `sandbox-exec` in `/usr/bin`
|
||||
/// to defend against an attacker trying to inject a malicious version on the
|
||||
@@ -44,27 +48,24 @@ pub(crate) fn create_seatbelt_command_args(
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
sandbox_policy_cwd: &Path,
|
||||
) -> Vec<String> {
|
||||
let (file_write_policy, extra_cli_args) = {
|
||||
let (file_write_policy, file_write_dir_params) = {
|
||||
if sandbox_policy.has_full_disk_write_access() {
|
||||
// Allegedly, this is more permissive than `(allow file-write*)`.
|
||||
(
|
||||
r#"(allow file-write* (regex #"^/"))"#.to_string(),
|
||||
Vec::<String>::new(),
|
||||
Vec::new(),
|
||||
)
|
||||
} else {
|
||||
let writable_roots = sandbox_policy.get_writable_roots_with_cwd(sandbox_policy_cwd);
|
||||
|
||||
let mut writable_folder_policies: Vec<String> = Vec::new();
|
||||
let mut cli_args: Vec<String> = Vec::new();
|
||||
let mut file_write_params = Vec::new();
|
||||
|
||||
for (index, wr) in writable_roots.iter().enumerate() {
|
||||
// Canonicalize to avoid mismatches like /var vs /private/var on macOS.
|
||||
let canonical_root = wr.root.canonicalize().unwrap_or_else(|_| wr.root.clone());
|
||||
let root_param = format!("WRITABLE_ROOT_{index}");
|
||||
cli_args.push(format!(
|
||||
"-D{root_param}={}",
|
||||
canonical_root.to_string_lossy()
|
||||
));
|
||||
file_write_params.push((root_param.clone(), canonical_root));
|
||||
|
||||
if wr.read_only_subpaths.is_empty() {
|
||||
writable_folder_policies.push(format!("(subpath (param \"{root_param}\"))"));
|
||||
@@ -76,9 +77,9 @@ pub(crate) fn create_seatbelt_command_args(
|
||||
for (subpath_index, ro) in wr.read_only_subpaths.iter().enumerate() {
|
||||
let canonical_ro = ro.canonicalize().unwrap_or_else(|_| ro.clone());
|
||||
let ro_param = format!("WRITABLE_ROOT_{index}_RO_{subpath_index}");
|
||||
cli_args.push(format!("-D{ro_param}={}", canonical_ro.to_string_lossy()));
|
||||
require_parts
|
||||
.push(format!("(require-not (subpath (param \"{ro_param}\")))"));
|
||||
file_write_params.push((ro_param, canonical_ro));
|
||||
}
|
||||
let policy_component = format!("(require-all {} )", require_parts.join(" "));
|
||||
writable_folder_policies.push(policy_component);
|
||||
@@ -86,13 +87,13 @@ pub(crate) fn create_seatbelt_command_args(
|
||||
}
|
||||
|
||||
if writable_folder_policies.is_empty() {
|
||||
("".to_string(), Vec::<String>::new())
|
||||
("".to_string(), Vec::new())
|
||||
} else {
|
||||
let file_write_policy = format!(
|
||||
"(allow file-write*\n{}\n)",
|
||||
writable_folder_policies.join(" ")
|
||||
);
|
||||
(file_write_policy, cli_args)
|
||||
(file_write_policy, file_write_params)
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -105,7 +106,7 @@ pub(crate) fn create_seatbelt_command_args(
|
||||
|
||||
// TODO(mbolin): apply_patch calls must also honor the SandboxPolicy.
|
||||
let network_policy = if sandbox_policy.has_full_network_access() {
|
||||
"(allow network-outbound)\n(allow network-inbound)\n(allow system-socket)"
|
||||
MACOS_SEATBELT_NETWORK_POLICY
|
||||
} else {
|
||||
""
|
||||
};
|
||||
@@ -114,17 +115,49 @@ pub(crate) fn create_seatbelt_command_args(
|
||||
"{MACOS_SEATBELT_BASE_POLICY}\n{file_read_policy}\n{file_write_policy}\n{network_policy}"
|
||||
);
|
||||
|
||||
let dir_params = [file_write_dir_params, macos_dir_params()].concat();
|
||||
|
||||
let mut seatbelt_args: Vec<String> = vec!["-p".to_string(), full_policy];
|
||||
seatbelt_args.extend(extra_cli_args);
|
||||
let definition_args = dir_params
|
||||
.into_iter()
|
||||
.map(|(key, value)| format!("-D{key}={value}", value = value.to_string_lossy()));
|
||||
seatbelt_args.extend(definition_args);
|
||||
seatbelt_args.push("--".to_string());
|
||||
seatbelt_args.extend(command);
|
||||
seatbelt_args
|
||||
}
|
||||
|
||||
/// Wraps libc::confstr to return a String.
|
||||
fn confstr(name: libc::c_int) -> Option<String> {
|
||||
let mut buf = vec![0_i8; (libc::PATH_MAX as usize) + 1];
|
||||
let len = unsafe { libc::confstr(name, buf.as_mut_ptr(), buf.len()) };
|
||||
if len == 0 {
|
||||
return None;
|
||||
}
|
||||
// confstr guarantees NUL-termination when len > 0.
|
||||
let cstr = unsafe { CStr::from_ptr(buf.as_ptr()) };
|
||||
cstr.to_str().ok().map(ToString::to_string)
|
||||
}
|
||||
|
||||
/// Wraps confstr to return a canonicalized PathBuf.
|
||||
fn confstr_path(name: libc::c_int) -> Option<PathBuf> {
|
||||
let s = confstr(name)?;
|
||||
let path = PathBuf::from(s);
|
||||
path.canonicalize().ok().or(Some(path))
|
||||
}
|
||||
|
||||
fn macos_dir_params() -> Vec<(String, PathBuf)> {
|
||||
if let Some(p) = confstr_path(libc::_CS_DARWIN_USER_CACHE_DIR) {
|
||||
return vec![("DARWIN_USER_CACHE_DIR".to_string(), p)];
|
||||
}
|
||||
vec![]
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::MACOS_SEATBELT_BASE_POLICY;
|
||||
use super::create_seatbelt_command_args;
|
||||
use super::macos_dir_params;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs;
|
||||
@@ -134,11 +167,6 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn create_seatbelt_args_with_read_only_git_subpath() {
|
||||
if cfg!(target_os = "windows") {
|
||||
// /tmp does not exist on Windows, so skip this test.
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a temporary workspace with two writable roots: one containing
|
||||
// a top-level .git directory and one without it.
|
||||
let tmp = TempDir::new().expect("tempdir");
|
||||
@@ -199,6 +227,12 @@ mod tests {
|
||||
format!("-DWRITABLE_ROOT_2={}", cwd.to_string_lossy()),
|
||||
];
|
||||
|
||||
expected_args.extend(
|
||||
macos_dir_params()
|
||||
.into_iter()
|
||||
.map(|(key, value)| format!("-D{key}={value}", value = value.to_string_lossy())),
|
||||
);
|
||||
|
||||
expected_args.extend(vec![
|
||||
"--".to_string(),
|
||||
"/bin/echo".to_string(),
|
||||
@@ -210,11 +244,6 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn create_seatbelt_args_for_cwd_as_git_repo() {
|
||||
if cfg!(target_os = "windows") {
|
||||
// /tmp does not exist on Windows, so skip this test.
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a temporary workspace with two writable roots: one containing
|
||||
// a top-level .git directory and one without it.
|
||||
let tmp = TempDir::new().expect("tempdir");
|
||||
@@ -292,6 +321,12 @@ mod tests {
|
||||
expected_args.push(format!("-DWRITABLE_ROOT_2={p}"));
|
||||
}
|
||||
|
||||
expected_args.extend(
|
||||
macos_dir_params()
|
||||
.into_iter()
|
||||
.map(|(key, value)| format!("-D{key}={value}", value = value.to_string_lossy())),
|
||||
);
|
||||
|
||||
expected_args.extend(vec![
|
||||
"--".to_string(),
|
||||
"/bin/echo".to_string(),
|
||||
|
||||
@@ -49,6 +49,7 @@
|
||||
(sysctl-name "hw.packages")
|
||||
(sysctl-name "hw.pagesize_compat")
|
||||
(sysctl-name "hw.pagesize")
|
||||
(sysctl-name "hw.physicalcpu")
|
||||
(sysctl-name "hw.physicalcpu_max")
|
||||
(sysctl-name "hw.tbfrequency_compat")
|
||||
(sysctl-name "hw.vectorunit")
|
||||
|
||||
codex-rs/core/src/seatbelt_network_policy.sbpl (new file, 30 lines)
@@ -0,0 +1,30 @@
; when network access is enabled, these policies are added after those in seatbelt_base_policy.sbpl
; Ref https://source.chromium.org/chromium/chromium/src/+/main:sandbox/policy/mac/network.sb;drc=f8f264d5e4e7509c913f4c60c2639d15905a07e4

(allow network-outbound)
(allow network-inbound)
(allow system-socket)

(allow mach-lookup
; Used to look up the _CS_DARWIN_USER_CACHE_DIR in the sandbox.
(global-name "com.apple.bsd.dirhelper")
(global-name "com.apple.system.opendirectoryd.membership")

; Communicate with the security server for TLS certificate information.
(global-name "com.apple.SecurityServer")
(global-name "com.apple.networkd")
(global-name "com.apple.ocspd")
(global-name "com.apple.trustd.agent")

; Read network configuration.
(global-name "com.apple.SystemConfiguration.DNSConfiguration")
(global-name "com.apple.SystemConfiguration.configd")
)

(allow sysctl-read
(sysctl-name-regex #"^net.routetable")
)

(allow file-write*
(subpath (param "DARWIN_USER_CACHE_DIR"))
)
|
||||
@@ -64,22 +64,32 @@ pub(crate) async fn spawn_child_async(
|
||||
// any child processes that were spawned as part of a `"shell"` tool call
|
||||
// to also be terminated.
|
||||
|
||||
// This relies on prctl(2), so it only works on Linux.
|
||||
#[cfg(target_os = "linux")]
|
||||
#[cfg(unix)]
|
||||
unsafe {
|
||||
cmd.pre_exec(|| {
|
||||
// This prctl call effectively requests, "deliver SIGTERM when my
|
||||
// current parent dies."
|
||||
if libc::prctl(libc::PR_SET_PDEATHSIG, libc::SIGTERM) == -1 {
|
||||
#[cfg(target_os = "linux")]
|
||||
let parent_pid = libc::getpid();
|
||||
cmd.pre_exec(move || {
|
||||
if libc::setpgid(0, 0) == -1 {
|
||||
return Err(std::io::Error::last_os_error());
|
||||
}
|
||||
|
||||
// Though if there was a race condition and this pre_exec() block is
|
||||
// run _after_ the parent (i.e., the Codex process) has already
|
||||
// exited, then the parent is the _init_ process (which will never
|
||||
// die), so we should just terminate the child process now.
|
||||
if libc::getppid() == 1 {
|
||||
libc::raise(libc::SIGTERM);
|
||||
// This relies on prctl(2), so it only works on Linux.
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// This prctl call effectively requests, "deliver SIGTERM when my
|
||||
// current parent dies."
|
||||
if libc::prctl(libc::PR_SET_PDEATHSIG, libc::SIGTERM) == -1 {
|
||||
return Err(std::io::Error::last_os_error());
|
||||
}
|
||||
|
||||
// Though if there was a race condition and this pre_exec() block is
|
||||
// run _after_ the parent (i.e., the Codex process) has already
|
||||
// exited, then the parent will be the closest configured "subreaper"
|
||||
// ancestor process, or PID 1 (init). If the Codex process has exited
|
||||
// already, so should the child process.
|
||||
if libc::getppid() != parent_pid {
|
||||
libc::raise(libc::SIGTERM);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
use crate::codex::SessionConfiguration;
|
||||
use crate::conversation_history::ConversationHistory;
|
||||
use crate::context_manager::ContextManager;
|
||||
use crate::protocol::RateLimitSnapshot;
|
||||
use crate::protocol::TokenUsage;
|
||||
use crate::protocol::TokenUsageInfo;
|
||||
@@ -11,7 +11,7 @@ use crate::protocol::TokenUsageInfo;
|
||||
/// Persistent, session-scoped state previously stored directly on `Session`.
|
||||
pub(crate) struct SessionState {
|
||||
pub(crate) session_configuration: SessionConfiguration,
|
||||
pub(crate) history: ConversationHistory,
|
||||
pub(crate) history: ContextManager,
|
||||
pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ impl SessionState {
|
||||
pub(crate) fn new(session_configuration: SessionConfiguration) -> Self {
|
||||
Self {
|
||||
session_configuration,
|
||||
history: ConversationHistory::new(),
|
||||
history: ContextManager::new(),
|
||||
latest_rate_limits: None,
|
||||
}
|
||||
}
|
||||
@@ -34,7 +34,7 @@ impl SessionState {
|
||||
self.history.record_items(items)
|
||||
}
|
||||
|
||||
pub(crate) fn clone_history(&self) -> ConversationHistory {
|
||||
pub(crate) fn clone_history(&self) -> ContextManager {
|
||||
self.history.clone()
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use async_trait::async_trait;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::codex::TurnContext;
|
||||
use crate::codex::compact;
|
||||
use crate::compact;
|
||||
use crate::state::TaskKind;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
|
||||
|
||||
@@ -75,12 +75,12 @@ async fn start_review_conversation(
|
||||
// Avoid loading project docs; reviewer only needs findings
|
||||
sub_agent_config.project_doc_max_bytes = 0;
|
||||
// Carry over review-only feature restrictions so the delegate cannot
|
||||
// re-enable blocked tools (web search, view image, streamable shell).
|
||||
// re-enable blocked tools (web search, view image).
|
||||
sub_agent_config
|
||||
.features
|
||||
.disable(crate::features::Feature::WebSearchRequest)
|
||||
.disable(crate::features::Feature::ViewImageTool)
|
||||
.disable(crate::features::Feature::StreamableShell);
|
||||
.disable(crate::features::Feature::ViewImageTool);
|
||||
|
||||
// Set explicit review rubric for the sub-agent
|
||||
sub_agent_config.base_instructions = Some(crate::REVIEW_PROMPT.to_string());
|
||||
(run_codex_conversation_one_shot(
|
||||
|
||||
@@ -1,28 +1,35 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use codex_protocol::models::ShellToolCallParams;
|
||||
use codex_async_utils::CancelErr;
|
||||
use codex_async_utils::OrCancelExt;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::error;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::codex::TurnContext;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::exec::SandboxType;
|
||||
use crate::exec::StdoutStream;
|
||||
use crate::exec::StreamOutput;
|
||||
use crate::exec::execute_exec_env;
|
||||
use crate::exec_env::create_env;
|
||||
use crate::parse_command::parse_command;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::ExecCommandBeginEvent;
|
||||
use crate::protocol::ExecCommandEndEvent;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::sandboxing::ExecEnv;
|
||||
use crate::state::TaskKind;
|
||||
use crate::tools::context::ToolPayload;
|
||||
use crate::tools::parallel::ToolCallRuntime;
|
||||
use crate::tools::router::ToolCall;
|
||||
use crate::tools::router::ToolRouter;
|
||||
use crate::turn_diff_tracker::TurnDiffTracker;
|
||||
use crate::tools::format_exec_output_str;
|
||||
use crate::user_shell_command::user_shell_command_record_item;
|
||||
|
||||
use super::SessionTask;
|
||||
use super::SessionTaskContext;
|
||||
|
||||
const USER_SHELL_TOOL_NAME: &str = "local_shell";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct UserShellCommandTask {
|
||||
command: String,
|
||||
@@ -78,34 +85,126 @@ impl SessionTask for UserShellCommandTask {
|
||||
}
|
||||
};
|
||||
|
||||
let params = ShellToolCallParams {
|
||||
let call_id = Uuid::new_v4().to_string();
|
||||
let raw_command = self.command.clone();
|
||||
|
||||
let parsed_cmd = parse_command(&shell_invocation);
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
|
||||
call_id: call_id.clone(),
|
||||
command: shell_invocation.clone(),
|
||||
cwd: turn_context.cwd.clone(),
|
||||
parsed_cmd,
|
||||
is_user_shell_command: true,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let exec_env = ExecEnv {
|
||||
command: shell_invocation,
|
||||
workdir: None,
|
||||
cwd: turn_context.cwd.clone(),
|
||||
env: create_env(&turn_context.shell_environment_policy),
|
||||
timeout_ms: None,
|
||||
sandbox: SandboxType::None,
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
|
||||
let tool_call = ToolCall {
|
||||
tool_name: USER_SHELL_TOOL_NAME.to_string(),
|
||||
call_id: Uuid::new_v4().to_string(),
|
||||
payload: ToolPayload::LocalShell { params },
|
||||
};
|
||||
let stdout_stream = Some(StdoutStream {
|
||||
sub_id: turn_context.sub_id.clone(),
|
||||
call_id: call_id.clone(),
|
||||
tx_event: session.get_tx_event(),
|
||||
});
|
||||
|
||||
let router = Arc::new(ToolRouter::from_config(&turn_context.tools_config, None));
|
||||
let tracker = Arc::new(Mutex::new(TurnDiffTracker::new()));
|
||||
let runtime = ToolCallRuntime::new(
|
||||
Arc::clone(&router),
|
||||
Arc::clone(&session),
|
||||
Arc::clone(&turn_context),
|
||||
Arc::clone(&tracker),
|
||||
);
|
||||
let sandbox_policy = SandboxPolicy::DangerFullAccess;
|
||||
let exec_result = execute_exec_env(exec_env, &sandbox_policy, stdout_stream)
|
||||
.or_cancel(&cancellation_token)
|
||||
.await;
|
||||
|
||||
if let Err(err) = runtime
|
||||
.handle_tool_call(tool_call, cancellation_token)
|
||||
.await
|
||||
{
|
||||
error!("user shell command failed: {err:?}");
|
||||
match exec_result {
|
||||
Err(CancelErr::Cancelled) => {
|
||||
let aborted_message = "command aborted by user".to_string();
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: -1,
|
||||
stdout: StreamOutput::new(String::new()),
|
||||
stderr: StreamOutput::new(aborted_message.clone()),
|
||||
aggregated_output: StreamOutput::new(aborted_message.clone()),
|
||||
duration: Duration::ZERO,
|
||||
timed_out: false,
|
||||
};
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &exec_output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id,
|
||||
stdout: String::new(),
|
||||
stderr: aborted_message.clone(),
|
||||
aggregated_output: aborted_message.clone(),
|
||||
exit_code: -1,
|
||||
duration: Duration::ZERO,
|
||||
formatted_output: aborted_message,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
Ok(Ok(output)) => {
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id: call_id.clone(),
|
||||
stdout: output.stdout.text.clone(),
|
||||
stderr: output.stderr.text.clone(),
|
||||
aggregated_output: output.aggregated_output.text.clone(),
|
||||
exit_code: output.exit_code,
|
||||
duration: output.duration,
|
||||
formatted_output: format_exec_output_str(&output),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
error!("user shell command failed: {err:?}");
|
||||
let message = format!("execution error: {err:?}");
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: -1,
|
||||
stdout: StreamOutput::new(String::new()),
|
||||
stderr: StreamOutput::new(message.clone()),
|
||||
aggregated_output: StreamOutput::new(message.clone()),
|
||||
duration: Duration::ZERO,
|
||||
timed_out: false,
|
||||
};
|
||||
session
|
||||
.send_event(
|
||||
turn_context.as_ref(),
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id,
|
||||
stdout: exec_output.stdout.text.clone(),
|
||||
stderr: exec_output.stderr.text.clone(),
|
||||
aggregated_output: exec_output.aggregated_output.text.clone(),
|
||||
exit_code: exec_output.exit_code,
|
||||
duration: exec_output.duration,
|
||||
formatted_output: format_exec_output_str(&exec_output),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let output_items = [user_shell_command_record_item(&raw_command, &exec_output)];
|
||||
session
|
||||
.record_conversation_items(turn_context.as_ref(), &output_items)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use std::time::Duration;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::protocol::EventMsg;
|
||||
@@ -27,6 +26,8 @@ pub struct UnifiedExecHandler;
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ExecCommandArgs {
|
||||
cmd: String,
|
||||
#[serde(default)]
|
||||
workdir: Option<String>,
|
||||
#[serde(default = "default_shell")]
|
||||
shell: String,
|
||||
#[serde(default = "default_login")]
|
||||
@@ -99,6 +100,12 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
"failed to parse exec_command arguments: {err:?}"
|
||||
))
|
||||
})?;
|
||||
let workdir = args
|
||||
.workdir
|
||||
.as_deref()
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(PathBuf::from);
|
||||
let cwd = workdir.clone().unwrap_or_else(|| context.turn.cwd.clone());
|
||||
|
||||
let event_ctx = ToolEventCtx::new(
|
||||
context.session.as_ref(),
|
||||
@@ -106,8 +113,7 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
&context.call_id,
|
||||
None,
|
||||
);
|
||||
let emitter =
|
||||
ToolEmitter::unified_exec(args.cmd.clone(), context.turn.cwd.clone(), true);
|
||||
let emitter = ToolEmitter::unified_exec(args.cmd.clone(), cwd.clone(), true);
|
||||
emitter.emit(event_ctx, ToolEventStage::Begin).await;
|
||||
|
||||
manager
|
||||
@@ -118,6 +124,7 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
login: args.login,
|
||||
yield_time_ms: args.yield_time_ms,
|
||||
max_output_tokens: args.max_output_tokens,
|
||||
workdir,
|
||||
},
|
||||
&context,
|
||||
)
|
||||
@@ -163,11 +170,7 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
.await;
|
||||
}
|
||||
|
||||
let content = serialize_response(&response).map_err(|err| {
|
||||
FunctionCallError::RespondToModel(format!(
|
||||
"failed to serialize unified exec output: {err:?}"
|
||||
))
|
||||
})?;
|
||||
let content = format_response(&response);
|
||||
|
||||
Ok(ToolOutput::Function {
|
||||
content,
|
||||
@@ -177,32 +180,30 @@ impl ToolHandler for UnifiedExecHandler {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct SerializedUnifiedExecResponse<'a> {
|
||||
chunk_id: &'a str,
|
||||
wall_time_seconds: f64,
|
||||
output: &'a str,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
session_id: Option<i32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
exit_code: Option<i32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
original_token_count: Option<usize>,
|
||||
}
|
||||
fn format_response(response: &UnifiedExecResponse) -> String {
|
||||
let mut sections = Vec::new();
|
||||
|
||||
fn serialize_response(response: &UnifiedExecResponse) -> Result<String, serde_json::Error> {
|
||||
let payload = SerializedUnifiedExecResponse {
|
||||
chunk_id: &response.chunk_id,
|
||||
wall_time_seconds: duration_to_seconds(response.wall_time),
|
||||
output: &response.output,
|
||||
session_id: response.session_id,
|
||||
exit_code: response.exit_code,
|
||||
original_token_count: response.original_token_count,
|
||||
};
|
||||
if !response.chunk_id.is_empty() {
|
||||
sections.push(format!("Chunk ID: {}", response.chunk_id));
|
||||
}
|
||||
|
||||
serde_json::to_string(&payload)
|
||||
}
|
||||
let wall_time_seconds = response.wall_time.as_secs_f64();
|
||||
sections.push(format!("Wall time: {wall_time_seconds:.4} seconds"));
|
||||
|
||||
fn duration_to_seconds(duration: Duration) -> f64 {
|
||||
duration.as_secs_f64()
|
||||
if let Some(exit_code) = response.exit_code {
|
||||
sections.push(format!("Process exited with code {exit_code}"));
|
||||
}
|
||||
|
||||
if let Some(session_id) = response.session_id {
|
||||
sections.push(format!("Process running with session ID {session_id}"));
|
||||
}
|
||||
|
||||
if let Some(original_token_count) = response.original_token_count {
|
||||
sections.push(format!("Original token count: {original_token_count}"));
|
||||
}
|
||||
|
||||
sections.push("Output:".to_string());
|
||||
sections.push(response.output.clone());
|
||||
|
||||
sections.join("\n")
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ pub mod runtimes;
|
||||
pub mod sandboxing;
|
||||
pub mod spec;
|
||||
|
||||
use crate::conversation_history::format_output_for_model_body;
|
||||
use crate::context_manager::format_output_for_model_body;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
pub use router::ToolRouter;
|
||||
use serde::Serialize;
|
||||
|
||||
@@ -54,12 +54,21 @@ impl ToolOrchestrator {
|
||||
let mut already_approved = false;
|
||||
|
||||
if needs_initial_approval {
|
||||
let mut risk = None;
|
||||
|
||||
if let Some(metadata) = req.sandbox_retry_data() {
|
||||
risk = tool_ctx
|
||||
.session
|
||||
.assess_sandbox_command(turn_ctx, &tool_ctx.call_id, &metadata.command, None)
|
||||
.await;
|
||||
}
|
||||
|
||||
let approval_ctx = ApprovalCtx {
|
||||
session: tool_ctx.session,
|
||||
turn: turn_ctx,
|
||||
call_id: &tool_ctx.call_id,
|
||||
retry_reason: None,
|
||||
risk: None,
|
||||
risk,
|
||||
};
|
||||
let decision = tool.start_approval_async(req, approval_ctx).await;
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
use tokio::sync::RwLock;
|
||||
use tokio_util::either::Either;
|
||||
@@ -53,13 +54,16 @@ impl ToolCallRuntime {
|
||||
let turn = Arc::clone(&self.turn_context);
|
||||
let tracker = Arc::clone(&self.tracker);
|
||||
let lock = Arc::clone(&self.parallel_execution);
|
||||
let aborted_response = Self::aborted_response(&call);
|
||||
let started = Instant::now();
|
||||
let readiness = self.turn_context.tool_call_gate.clone();
|
||||
|
||||
let handle: AbortOnDropHandle<Result<ResponseInputItem, FunctionCallError>> =
|
||||
AbortOnDropHandle::new(tokio::spawn(async move {
|
||||
tokio::select! {
|
||||
_ = cancellation_token.cancelled() => Ok(aborted_response),
|
||||
_ = cancellation_token.cancelled() => {
|
||||
let secs = started.elapsed().as_secs_f32().max(0.1);
|
||||
Ok(Self::aborted_response(&call, secs))
|
||||
},
|
||||
res = async {
|
||||
tracing::info!("waiting for tool gate");
|
||||
readiness.wait_ready().await;
|
||||
@@ -71,7 +75,7 @@ impl ToolCallRuntime {
|
||||
};
|
||||
|
||||
router
|
||||
.dispatch_tool_call(session, turn, tracker, call)
|
||||
.dispatch_tool_call(session, turn, tracker, call.clone())
|
||||
.await
|
||||
} => res,
|
||||
}
|
||||
@@ -91,23 +95,32 @@ impl ToolCallRuntime {
|
||||
}
|
||||
|
||||
impl ToolCallRuntime {
|
||||
fn aborted_response(call: &ToolCall) -> ResponseInputItem {
|
||||
fn aborted_response(call: &ToolCall, secs: f32) -> ResponseInputItem {
|
||||
match &call.payload {
|
||||
ToolPayload::Custom { .. } => ResponseInputItem::CustomToolCallOutput {
|
||||
call_id: call.call_id.clone(),
|
||||
output: "aborted".to_string(),
|
||||
output: Self::abort_message(call, secs),
|
||||
},
|
||||
ToolPayload::Mcp { .. } => ResponseInputItem::McpToolCallOutput {
|
||||
call_id: call.call_id.clone(),
|
||||
result: Err("aborted".to_string()),
|
||||
result: Err(Self::abort_message(call, secs)),
|
||||
},
|
||||
_ => ResponseInputItem::FunctionCallOutput {
|
||||
call_id: call.call_id.clone(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "aborted".to_string(),
|
||||
content: Self::abort_message(call, secs),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn abort_message(call: &ToolCall, secs: f32) -> String {
|
||||
match call.tool_name.as_str() {
|
||||
"shell" | "container.exec" | "local_shell" | "unified_exec" => {
|
||||
format!("Wall time: {secs:.1} seconds\naborted by user")
|
||||
}
|
||||
_ => format!("aborted by user after {secs:.1}s"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,8 +4,7 @@ Runtime: shell
|
||||
Executes shell requests under the orchestrator: asks for approval when needed,
|
||||
builds a CommandSpec, and runs it under the current SandboxAttempt.
|
||||
*/
|
||||
use crate::command_safety::is_dangerous_command::command_might_be_dangerous;
|
||||
use crate::command_safety::is_safe_command::is_known_safe_command;
|
||||
use crate::command_safety::is_dangerous_command::requires_initial_appoval;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::sandboxing::execute_env;
|
||||
@@ -121,28 +120,12 @@ impl Approvable<ShellRequest> for ShellRuntime {
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> bool {
|
||||
if is_known_safe_command(&req.command) {
|
||||
return false;
|
||||
}
|
||||
match policy {
|
||||
AskForApproval::Never | AskForApproval::OnFailure => false,
|
||||
AskForApproval::OnRequest => {
|
||||
// In DangerFullAccess, only prompt if the command looks dangerous.
|
||||
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
|
||||
return command_might_be_dangerous(&req.command);
|
||||
}
|
||||
|
||||
// In restricted sandboxes (ReadOnly/WorkspaceWrite), do not prompt for
|
||||
// non‑escalated, non‑dangerous commands — let the sandbox enforce
|
||||
// restrictions (e.g., block network/write) without a user prompt.
|
||||
let wants_escalation = req.with_escalated_permissions.unwrap_or(false);
|
||||
if wants_escalation {
|
||||
return true;
|
||||
}
|
||||
command_might_be_dangerous(&req.command)
|
||||
}
|
||||
AskForApproval::UnlessTrusted => !is_known_safe_command(&req.command),
|
||||
}
|
||||
requires_initial_appoval(
|
||||
policy,
|
||||
sandbox_policy,
|
||||
&req.command,
|
||||
req.with_escalated_permissions.unwrap_or(false),
|
||||
)
|
||||
}
|
||||
|
||||
fn wants_escalated_first_attempt(&self, req: &ShellRequest) -> bool {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::command_safety::is_dangerous_command::requires_initial_appoval;
|
||||
/*
|
||||
Runtime: unified exec
|
||||
|
||||
@@ -21,7 +22,9 @@ use crate::tools::sandboxing::with_cached_approval;
|
||||
use crate::unified_exec::UnifiedExecError;
|
||||
use crate::unified_exec::UnifiedExecSession;
|
||||
use crate::unified_exec::UnifiedExecSessionManager;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use futures::future::BoxFuture;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
@@ -106,6 +109,15 @@ impl Approvable<UnifiedExecRequest> for UnifiedExecRuntime<'_> {
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
fn wants_initial_approval(
|
||||
&self,
|
||||
req: &UnifiedExecRequest,
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> bool {
|
||||
requires_initial_appoval(policy, sandbox_policy, &req.command, false)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ToolRuntime<UnifiedExecRequest, UnifiedExecSession> for UnifiedExecRuntime<'a> {
|
||||
|
||||
@@ -15,11 +15,11 @@ use serde_json::json;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum ConfigShellToolType {
|
||||
Default,
|
||||
Local,
|
||||
Streamable,
|
||||
UnifiedExec,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -28,7 +28,6 @@ pub(crate) struct ToolsConfig {
|
||||
pub apply_patch_tool_type: Option<ApplyPatchToolType>,
|
||||
pub web_search_request: bool,
|
||||
pub include_view_image_tool: bool,
|
||||
pub experimental_unified_exec_tool: bool,
|
||||
pub experimental_supported_tools: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -43,18 +42,14 @@ impl ToolsConfig {
|
||||
model_family,
|
||||
features,
|
||||
} = params;
|
||||
let use_streamable_shell_tool = features.enabled(Feature::StreamableShell);
|
||||
let experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
|
||||
let include_apply_patch_tool = features.enabled(Feature::ApplyPatchFreeform);
|
||||
let include_web_search_request = features.enabled(Feature::WebSearchRequest);
|
||||
let include_view_image_tool = features.enabled(Feature::ViewImageTool);
|
||||
|
||||
let shell_type = if use_streamable_shell_tool {
|
||||
ConfigShellToolType::Streamable
|
||||
} else if model_family.uses_local_shell_tool {
|
||||
ConfigShellToolType::Local
|
||||
let shell_type = if features.enabled(Feature::UnifiedExec) {
|
||||
ConfigShellToolType::UnifiedExec
|
||||
} else {
|
||||
ConfigShellToolType::Default
|
||||
model_family.shell_type.clone()
|
||||
};
|
||||
|
||||
let apply_patch_tool_type = match model_family.apply_patch_tool_type {
|
||||
@@ -74,7 +69,6 @@ impl ToolsConfig {
|
||||
apply_patch_tool_type,
|
||||
web_search_request: include_web_search_request,
|
||||
include_view_image_tool,
|
||||
experimental_unified_exec_tool,
|
||||
experimental_supported_tools: model_family.experimental_supported_tools.clone(),
|
||||
}
|
||||
}
|
||||
@@ -144,6 +138,15 @@ fn create_exec_command_tool() -> ToolSpec {
|
||||
description: Some("Shell command to execute.".to_string()),
|
||||
},
|
||||
);
|
||||
properties.insert(
|
||||
"workdir".to_string(),
|
||||
JsonSchema::String {
|
||||
description: Some(
|
||||
"Optional working directory to run the command in; defaults to the turn cwd."
|
||||
.to_string(),
|
||||
),
|
||||
},
|
||||
);
|
||||
properties.insert(
|
||||
"shell".to_string(),
|
||||
JsonSchema::String {
|
||||
@@ -178,8 +181,11 @@ fn create_exec_command_tool() -> ToolSpec {
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "exec_command".to_string(),
|
||||
description:
|
||||
"Runs a command in a PTY, returning output or a session ID for ongoing interaction."
|
||||
.to_string(),
|
||||
concat!(
|
||||
"Runs a command in a PTY, returning output or a session ID for ongoing interaction.\n",
|
||||
"- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary."
|
||||
)
|
||||
.to_string(),
|
||||
strict: false,
|
||||
parameters: JsonSchema::Object {
|
||||
properties,
|
||||
@@ -271,7 +277,12 @@ fn create_shell_tool() -> ToolSpec {
|
||||
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "shell".to_string(),
|
||||
description: "Runs a shell command and returns its output.".to_string(),
|
||||
description: concat!(
|
||||
"Runs a shell command and returns its output.\n",
|
||||
"- The value of `command` will be passed to execvp(). Most terminal commands should be prefixed with [`bash`, `-lc`].\n",
|
||||
"- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.",
|
||||
)
|
||||
.to_string(),
|
||||
strict: false,
|
||||
parameters: JsonSchema::Object {
|
||||
properties,
|
||||
@@ -886,15 +897,6 @@ pub(crate) fn build_specs(
|
||||
let mcp_handler = Arc::new(McpHandler);
|
||||
let mcp_resource_handler = Arc::new(McpResourceHandler);
|
||||
|
||||
let use_unified_exec = config.experimental_unified_exec_tool
|
||||
|| matches!(config.shell_type, ConfigShellToolType::Streamable);
|
||||
|
||||
if use_unified_exec {
|
||||
builder.push_spec(create_exec_command_tool());
|
||||
builder.push_spec(create_write_stdin_tool());
|
||||
builder.register_handler("exec_command", unified_exec_handler.clone());
|
||||
builder.register_handler("write_stdin", unified_exec_handler);
|
||||
}
|
||||
match &config.shell_type {
|
||||
ConfigShellToolType::Default => {
|
||||
builder.push_spec(create_shell_tool());
|
||||
@@ -902,8 +904,11 @@ pub(crate) fn build_specs(
|
||||
ConfigShellToolType::Local => {
|
||||
builder.push_spec(ToolSpec::LocalShell {});
|
||||
}
|
||||
ConfigShellToolType::Streamable => {
|
||||
// Already handled by use_unified_exec.
|
||||
ConfigShellToolType::UnifiedExec => {
|
||||
builder.push_spec(create_exec_command_tool());
|
||||
builder.push_spec(create_write_stdin_tool());
|
||||
builder.register_handler("exec_command", unified_exec_handler.clone());
|
||||
builder.register_handler("write_stdin", unified_exec_handler);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1045,7 +1050,7 @@ mod tests {
|
||||
match config.shell_type {
|
||||
ConfigShellToolType::Default => Some("shell"),
|
||||
ConfigShellToolType::Local => Some("local_shell"),
|
||||
ConfigShellToolType::Streamable => None,
|
||||
ConfigShellToolType::UnifiedExec => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1095,7 +1100,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_full_toolset_specs_for_gpt5_codex() {
|
||||
fn test_full_toolset_specs_for_gpt5_codex_unified_exec_web_search() {
|
||||
let model_family = find_family_for_model("gpt-5-codex")
|
||||
.expect("gpt-5-codex should be a valid model family");
|
||||
let mut features = Features::with_defaults();
|
||||
@@ -1129,7 +1134,6 @@ mod tests {
|
||||
for spec in [
|
||||
create_exec_command_tool(),
|
||||
create_write_stdin_tool(),
|
||||
create_shell_tool(),
|
||||
create_list_mcp_resources_tool(),
|
||||
create_list_mcp_resource_templates_tool(),
|
||||
create_read_mcp_resource_tool(),
|
||||
@@ -1156,32 +1160,106 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_specs_contains_expected_basics() {
|
||||
let model_family = find_family_for_model("codex-mini-latest")
|
||||
.expect("codex-mini-latest should be a valid model family");
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::WebSearchRequest);
|
||||
features.enable(Feature::UnifiedExec);
|
||||
fn assert_model_tools(model_family: &str, features: &Features, expected_tools: &[&str]) {
|
||||
let model_family = find_family_for_model(model_family)
|
||||
.unwrap_or_else(|| panic!("{model_family} should be a valid model family"));
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_family: &model_family,
|
||||
features: &features,
|
||||
features,
|
||||
});
|
||||
let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
|
||||
let tool_names = tools.iter().map(|t| t.spec.name()).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
&tool_names,
|
||||
assert_eq!(&tool_names, &expected_tools,);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_specs_gpt5_codex_default() {
|
||||
assert_model_tools(
|
||||
"gpt-5-codex",
|
||||
&Features::with_defaults(),
|
||||
&[
|
||||
"shell",
|
||||
"list_mcp_resources",
|
||||
"list_mcp_resource_templates",
|
||||
"read_mcp_resource",
|
||||
"update_plan",
|
||||
"apply_patch",
|
||||
"view_image",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_specs_gpt5_codex_unified_exec_web_search() {
|
||||
assert_model_tools(
|
||||
"gpt-5-codex",
|
||||
Features::with_defaults()
|
||||
.enable(Feature::UnifiedExec)
|
||||
.enable(Feature::WebSearchRequest),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
"list_mcp_resources",
|
||||
"list_mcp_resource_templates",
|
||||
"read_mcp_resource",
|
||||
"update_plan",
|
||||
"apply_patch",
|
||||
"web_search",
|
||||
"view_image",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_codex_mini_defaults() {
|
||||
assert_model_tools(
|
||||
"codex-mini-latest",
|
||||
&Features::with_defaults(),
|
||||
&[
|
||||
"local_shell",
|
||||
"list_mcp_resources",
|
||||
"list_mcp_resource_templates",
|
||||
"read_mcp_resource",
|
||||
"update_plan",
|
||||
"view_image",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_porcupine_defaults() {
|
||||
assert_model_tools(
|
||||
"porcupine",
|
||||
&Features::with_defaults(),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
"list_mcp_resources",
|
||||
"list_mcp_resource_templates",
|
||||
"read_mcp_resource",
|
||||
"update_plan",
|
||||
"view_image",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_codex_mini_unified_exec_web_search() {
|
||||
assert_model_tools(
|
||||
"codex-mini-latest",
|
||||
Features::with_defaults()
|
||||
.enable(Feature::UnifiedExec)
|
||||
.enable(Feature::WebSearchRequest),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
"list_mcp_resources",
|
||||
"list_mcp_resource_templates",
|
||||
"read_mcp_resource",
|
||||
"update_plan",
|
||||
"web_search",
|
||||
"view_image",
|
||||
]
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1656,7 +1734,7 @@ mod tests {
|
||||
};
|
||||
assert_eq!(name, "shell");
|
||||
|
||||
let expected = "Runs a shell command and returns its output.";
|
||||
let expected = "Runs a shell command and returns its output.\n- The value of `command` will be passed to execvp(). Most terminal commands should be prefixed with [`bash`, `-lc`].\n- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.";
|
||||
assert_eq!(description, expected);
|
||||
}
|
||||
|
||||
|
||||
@@ -70,6 +70,7 @@ pub(crate) struct ExecCommandRequest<'a> {
|
||||
pub login: bool,
|
||||
pub yield_time_ms: Option<u64>,
|
||||
pub max_output_tokens: Option<usize>,
|
||||
pub workdir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -199,6 +200,7 @@ mod tests {
|
||||
login: true,
|
||||
yield_time_ms,
|
||||
max_output_tokens: None,
|
||||
workdir: None,
|
||||
},
|
||||
&context,
|
||||
)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::Notify;
|
||||
@@ -38,6 +39,10 @@ impl UnifiedExecSessionManager {
|
||||
request: ExecCommandRequest<'_>,
|
||||
context: &UnifiedExecContext,
|
||||
) -> Result<UnifiedExecResponse, UnifiedExecError> {
|
||||
let cwd = request
|
||||
.workdir
|
||||
.clone()
|
||||
.unwrap_or_else(|| context.turn.cwd.clone());
|
||||
let shell_flag = if request.login { "-lc" } else { "-c" };
|
||||
let command = vec![
|
||||
request.shell.to_string(),
|
||||
@@ -45,7 +50,9 @@ impl UnifiedExecSessionManager {
|
||||
request.command.to_string(),
|
||||
];
|
||||
|
||||
let session = self.open_session_with_sandbox(command, context).await?;
|
||||
let session = self
|
||||
.open_session_with_sandbox(command, cwd.clone(), context)
|
||||
.await?;
|
||||
|
||||
let max_tokens = resolve_max_tokens(request.max_output_tokens);
|
||||
let yield_time_ms =
|
||||
@@ -66,7 +73,7 @@ impl UnifiedExecSessionManager {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
self.store_session(session, context, request.command, start)
|
||||
self.store_session(session, context, request.command, cwd.clone(), start)
|
||||
.await,
|
||||
)
|
||||
};
|
||||
@@ -87,6 +94,7 @@ impl UnifiedExecSessionManager {
|
||||
Self::emit_exec_end_from_context(
|
||||
context,
|
||||
request.command.to_string(),
|
||||
cwd,
|
||||
response.output.clone(),
|
||||
exit,
|
||||
response.wall_time,
|
||||
@@ -211,6 +219,7 @@ impl UnifiedExecSessionManager {
|
||||
session: UnifiedExecSession,
|
||||
context: &UnifiedExecContext,
|
||||
command: &str,
|
||||
cwd: PathBuf,
|
||||
started_at: Instant,
|
||||
) -> i32 {
|
||||
let session_id = self
|
||||
@@ -222,7 +231,7 @@ impl UnifiedExecSessionManager {
|
||||
turn_ref: Arc::clone(&context.turn),
|
||||
call_id: context.call_id.clone(),
|
||||
command: command.to_string(),
|
||||
cwd: context.turn.cwd.clone(),
|
||||
cwd,
|
||||
started_at,
|
||||
};
|
||||
self.sessions.lock().await.insert(session_id, entry);
|
||||
@@ -258,6 +267,7 @@ impl UnifiedExecSessionManager {
|
||||
async fn emit_exec_end_from_context(
|
||||
context: &UnifiedExecContext,
|
||||
command: String,
|
||||
cwd: PathBuf,
|
||||
aggregated_output: String,
|
||||
exit_code: i32,
|
||||
duration: Duration,
|
||||
@@ -276,7 +286,7 @@ impl UnifiedExecSessionManager {
|
||||
&context.call_id,
|
||||
None,
|
||||
);
|
||||
let emitter = ToolEmitter::unified_exec(command, context.turn.cwd.clone(), true);
|
||||
let emitter = ToolEmitter::unified_exec(command, cwd, true);
|
||||
emitter
|
||||
.emit(event_ctx, ToolEventStage::Success(output))
|
||||
.await;
|
||||
@@ -300,13 +310,14 @@ impl UnifiedExecSessionManager {
|
||||
pub(super) async fn open_session_with_sandbox(
|
||||
&self,
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
context: &UnifiedExecContext,
|
||||
) -> Result<UnifiedExecSession, UnifiedExecError> {
|
||||
let mut orchestrator = ToolOrchestrator::new();
|
||||
let mut runtime = UnifiedExecRuntime::new(self);
|
||||
let req = UnifiedExecToolRequest::new(
|
||||
command,
|
||||
context.turn.cwd.clone(),
|
||||
cwd,
|
||||
create_env(&context.turn.shell_environment_policy),
|
||||
);
|
||||
let tool_ctx = ToolCtx {
|
||||
|
||||
codex-rs/core/src/user_shell_command.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::tools::format_exec_output_str;
|
||||
|
||||
pub const USER_SHELL_COMMAND_OPEN: &str = "<user_shell_command>";
|
||||
pub const USER_SHELL_COMMAND_CLOSE: &str = "</user_shell_command>";
|
||||
|
||||
pub fn is_user_shell_command_text(text: &str) -> bool {
|
||||
let trimmed = text.trim_start();
|
||||
let lowered = trimmed.to_ascii_lowercase();
|
||||
lowered.starts_with(USER_SHELL_COMMAND_OPEN)
|
||||
}
|
||||
|
||||
fn format_duration_line(duration: Duration) -> String {
|
||||
let duration_seconds = duration.as_secs_f64();
|
||||
format!("Duration: {duration_seconds:.4} seconds")
|
||||
}
|
||||
|
||||
fn format_user_shell_command_body(command: &str, exec_output: &ExecToolCallOutput) -> String {
|
||||
let mut sections = Vec::new();
|
||||
sections.push("<command>".to_string());
|
||||
sections.push(command.to_string());
|
||||
sections.push("</command>".to_string());
|
||||
sections.push("<result>".to_string());
|
||||
sections.push(format!("Exit code: {}", exec_output.exit_code));
|
||||
sections.push(format_duration_line(exec_output.duration));
|
||||
sections.push("Output:".to_string());
|
||||
sections.push(format_exec_output_str(exec_output));
|
||||
sections.push("</result>".to_string());
|
||||
sections.join("\n")
|
||||
}
|
||||
|
||||
pub fn format_user_shell_command_record(command: &str, exec_output: &ExecToolCallOutput) -> String {
|
||||
let body = format_user_shell_command_body(command, exec_output);
|
||||
format!("{USER_SHELL_COMMAND_OPEN}\n{body}\n{USER_SHELL_COMMAND_CLOSE}")
|
||||
}
|
||||
|
||||
pub fn user_shell_command_record_item(
|
||||
command: &str,
|
||||
exec_output: &ExecToolCallOutput,
|
||||
) -> ResponseItem {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: format_user_shell_command_record(command, exec_output),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::exec::StreamOutput;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn detects_user_shell_command_text_variants() {
|
||||
assert!(is_user_shell_command_text(
|
||||
"<user_shell_command>\necho hi\n</user_shell_command>"
|
||||
));
|
||||
assert!(!is_user_shell_command_text("echo hi"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn formats_basic_record() {
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: 0,
|
||||
stdout: StreamOutput::new("hi".to_string()),
|
||||
stderr: StreamOutput::new(String::new()),
|
||||
aggregated_output: StreamOutput::new("hi".to_string()),
|
||||
duration: Duration::from_secs(1),
|
||||
timed_out: false,
|
||||
};
|
||||
let item = user_shell_command_record_item("echo hi", &exec_output);
|
||||
let ResponseItem::Message { content, .. } = item else {
|
||||
panic!("expected message");
|
||||
};
|
||||
let [ContentItem::InputText { text }] = content.as_slice() else {
|
||||
panic!("expected input text");
|
||||
};
|
||||
assert_eq!(
|
||||
text,
|
||||
"<user_shell_command>\n<command>\necho hi\n</command>\n<result>\nExit code: 0\nDuration: 1.0000 seconds\nOutput:\nhi\n</result>\n</user_shell_command>"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uses_aggregated_output_over_streams() {
|
||||
let exec_output = ExecToolCallOutput {
|
||||
exit_code: 42,
|
||||
stdout: StreamOutput::new("stdout-only".to_string()),
|
||||
stderr: StreamOutput::new("stderr-only".to_string()),
|
||||
aggregated_output: StreamOutput::new("combined output wins".to_string()),
|
||||
duration: Duration::from_millis(120),
|
||||
timed_out: false,
|
||||
};
|
||||
let record = format_user_shell_command_record("false", &exec_output);
|
||||
assert_eq!(
|
||||
record,
|
||||
"<user_shell_command>\n<command>\nfalse\n</command>\n<result>\nExit code: 42\nDuration: 0.1200 seconds\nOutput:\ncombined output wins\n</result>\n</user_shell_command>"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
You were originally given instructions from a user over one or more turns. Here were the user messages:

{{ user_messages_text }}

Another language model started to solve this problem and produced a summary of its thinking process. You also have access to the state of the tools that were used by that language model. Use this to build on the work that has already been done and avoid duplicating work. Here is the summary produced by the other language model, use the information in this summary to assist with your own analysis:

{{ summary_text }}

@@ -1,5 +1,9 @@
You have exceeded the maximum number of tokens, please stop coding and instead write a short memento message for the next agent. Your note should:
- Summarize what you finished and what still needs work. If there was a recent update_plan call, repeat its steps verbatim.
- List outstanding TODOs with file paths / line numbers so they're easy to find.
- Flag code that needs more tests (edge cases, performance, integration, etc.).
- Record any open bugs, quirks, or setup steps that will make it easier for the next agent to pick up where you left off.
You are performing a CONTEXT CHECKPOINT COMPACTION. Create a handoff summary for another LLM that will resume the task.

Include:
- Current progress and key decisions made
- Important context, constraints, or user preferences
- What remains to be done (clear next steps)
- Any critical data, examples, or references needed to continue

Be concise, structured, and focused on helping the next LLM seamlessly continue the work.