Mirror of https://github.com/openai/codex.git (synced 2026-02-02 06:57:03 +00:00)

Compare commits: docs/updat... → pr6549 (291 commits)
.github/pull_request_template.md (vendored, 2 changes)
@@ -4,3 +4,5 @@ Before opening this Pull Request, please read the dedicated "Contributing" markd
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.

Include a link to a bug report or enhancement request.
.github/workflows/ci.yml (vendored, 2 changes)
@@ -46,7 +46,7 @@ jobs:
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"

- name: Upload staged npm package artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: codex-npm-staging
path: ${{ steps.stage_npm_package.outputs.pack_output }}
.github/workflows/cla.yml (vendored, 23 changes)
@@ -16,10 +16,27 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: contributor-assistant/github-action@v2.6.1
# Run on close only if the PR was merged. This will lock the PR to preserve
# the CLA agreement. We don't want to lock PRs that have been closed without
# merging because the contributor may want to respond with additional comments.
# This action has a "lock-pullrequest-aftermerge" option that can be set to false,
# but that would unconditionally skip locking even in cases where the PR was merged.
if: |
github.event_name == 'pull_request_target' ||
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
(
github.event_name == 'pull_request_target' &&
(
github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
(github.event.action == 'closed' && github.event.pull_request.merged == true)
)
) ||
(
github.event_name == 'issue_comment' &&
(
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
)
)
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
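The regrouped `if:` expression matters because the original three-way `||` ran the job for any `pull_request_target` event, including PRs closed without merging, which is exactly what the comment says should be avoided. A minimal Rust sketch of the same boolean structure, using the event names and comment strings from the workflow above (the function itself is only illustrative, not part of the repository):

```rust
/// Illustrative only: mirrors the grouping of the workflow's new `if:` expression.
fn should_run_cla_check(event_name: &str, action: &str, merged: bool, comment_body: &str) -> bool {
    let is_signing_comment = comment_body == "recheck"
        || comment_body == "I have read the CLA Document and I hereby sign the CLA";

    (event_name == "pull_request_target"
        && (action == "opened"
            || action == "synchronize"
            || (action == "closed" && merged)))
        || (event_name == "issue_comment" && is_signing_comment)
}

fn main() {
    // A PR closed without merging no longer triggers the action (and the PR lock).
    assert!(!should_run_cla_check("pull_request_target", "closed", false, ""));
    // A merged PR still does, so the CLA agreement gets locked in place.
    assert!(should_run_cla_check("pull_request_target", "closed", true, ""));
}
```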
.github/workflows/codespell.yml (vendored, 2 changes)
@@ -22,6 +22,6 @@ jobs:
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
with:
ignore_words_file: .codespellignore
.github/workflows/issue-deduplicator.yml (vendored, 4 changes)
@@ -16,7 +16,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- name: Prepare Codex inputs
env:
@@ -87,7 +87,7 @@ jobs:
issues: write
steps:
- name: Comment on issue
uses: actions/github-script@v7
uses: actions/github-script@v8
env:
CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
with:
.github/workflows/issue-labeler.yml (vendored, 39 changes)
@@ -16,7 +16,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- id: codex
uses: openai/codex-action@main
@@ -26,21 +26,36 @@ jobs:
prompt: |
You are an assistant that reviews GitHub issues for the repository.

Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
Your job is to choose the most appropriate labels for the issue described later in this prompt.
Follow these rules:
- Only pick labels out of the list below.
- Prefer a small set of precise labels over many broad ones.

Labels to apply:
- Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure.
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
3. extension — VS Code (or other IDE) extension-specific issues.
4. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
5. mcp — Topics involving Model Context Protocol servers/clients.
6. codex-web — Issues targeting the Codex web UI/Cloud experience.
8. azure — Problems or requests tied to Azure OpenAI deployments.
9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).

- If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to.
1. CLI — the Codex command line interface.
2. extension — VS Code (or other IDE) extension-specific issues.
3. codex-web — Issues targeting the Codex web UI/Cloud experience.
4. github-action — Issues with the Codex GitHub action.
5. iOS — Issues with the Codex iOS app.

- Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones.
1. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
2. mcp — Topics involving Model Context Protocol servers/clients.
3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server.
4. azure — Problems or requests tied to Azure OpenAI deployments.
5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
6. code-review — Issues related to the code review feature or functionality.
7. auth - Problems related to authentication, login, or access tokens.
8. codex-exec - Problems related to the "codex exec" command or functionality.
9. context-management - Problems related to compaction, context windows, or available context reporting.
10. custom-model - Problems that involve using custom model providers, local models, or OSS models.
11. rate-limits - Problems related to token limits, rate limits, or token usage reporting.
12. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions.
13. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs.
14. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues.

Issue number: ${{ github.event.issue.number }}
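The new prompt encodes three rules: exactly one type label, at most one product-surface label, and any number of topic labels. A small Rust sketch of a checker for those rules (label names come from the prompt above; the helper itself is hypothetical and not part of the workflow):

```rust
/// Hypothetical validator for the labeling rules described in the prompt above.
fn validate_labels(labels: &[&str]) -> Result<(), String> {
    let type_labels = ["bug", "enhancement", "documentation"];
    let surface_labels = ["CLI", "extension", "codex-web", "github-action", "iOS"];

    let type_count = labels.iter().filter(|l| type_labels.contains(l)).count();
    let surface_count = labels.iter().filter(|l| surface_labels.contains(l)).count();

    if type_count != 1 {
        return Err(format!("expected exactly one type label, found {type_count}"));
    }
    if surface_count > 1 {
        return Err(format!("expected at most one surface label, found {surface_count}"));
    }
    Ok(())
}

fn main() {
    assert!(validate_labels(&["bug", "CLI", "sandbox"]).is_ok());
    assert!(validate_labels(&["bug", "enhancement"]).is_err());
}
```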
.github/workflows/rust-ci.yml (vendored, 299 changes)
@@ -9,7 +9,7 @@ on:
# CI builds in debug (dev) for faster signal.

jobs:
# --- Detect what changed (always runs) -------------------------------------
# --- Detect what changed to detect which tests to run (always runs) -------------------------------------
changed:
name: Detect changed areas
runs-on: ubuntu-24.04
@@ -76,7 +76,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-shear
|
||||
version: 1.5.1
|
||||
@@ -84,8 +84,8 @@ jobs:
|
||||
run: cargo shear
|
||||
|
||||
# --- CI to validate on different os/targets --------------------------------
|
||||
lint_build_test:
|
||||
name: ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
|
||||
lint_build:
|
||||
name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
needs: changed
|
||||
@@ -94,6 +94,11 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
# Speed up repeated builds across CI runs by caching compiled objects.
|
||||
RUSTC_WRAPPER: sccache
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
@@ -159,20 +164,83 @@ jobs:
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- name: Restore target cache (except gnu-dev)
|
||||
id: cache_target_restore
|
||||
if: ${{ !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release') }}
|
||||
# Install and restore sccache cache
|
||||
- name: Install sccache
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
|
||||
echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
|
||||
echo "Using sccache GitHub backend"
|
||||
else
|
||||
echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
|
||||
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/codex-rs/target/
|
||||
key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
restore-keys: |
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Prepare APT cache directories (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
sudo mkdir -p /var/cache/apt/archives /var/lib/apt/lists
|
||||
sudo chown -R "$USER:$USER" /var/cache/apt /var/lib/apt/lists
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Restore APT cache (musl)
|
||||
id: cache_apt_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
/var/cache/apt
|
||||
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt install -y musl-tools pkg-config && sudo rm -rf /var/lib/apt/lists/*
|
||||
set -euo pipefail
|
||||
sudo apt-get -y update -o Acquire::Retries=3
|
||||
sudo apt-get -y install --no-install-recommends musl-tools pkg-config
|
||||
|
||||
- name: Install cargo-chef
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-chef
|
||||
version: 0.1.71
|
||||
|
||||
- name: Pre-warm dependency cache (cargo-chef)
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
RECIPE="${RUNNER_TEMP}/chef-recipe.json"
|
||||
cargo chef prepare --recipe-path "$RECIPE"
|
||||
cargo chef cook --recipe-path "$RECIPE" --target ${{ matrix.target }} --release --all-features
|
||||
|
||||
- name: cargo clippy
|
||||
id: clippy
|
||||
@@ -191,20 +259,6 @@ jobs:
|
||||
find . -name Cargo.toml -mindepth 2 -maxdepth 2 -print0 \
|
||||
| xargs -0 -n1 -I{} bash -c 'cd "$(dirname "{}")" && cargo check --profile ${{ matrix.profile }}'
|
||||
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
with:
|
||||
tool: nextest
|
||||
version: 0.9.103
|
||||
|
||||
- name: tests
|
||||
id: test
|
||||
# Tests take too long for release builds to run them on every PR.
|
||||
if: ${{ matrix.profile != 'release' }}
|
||||
continue-on-error: true
|
||||
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }}
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
# Save caches explicitly; make non-fatal so cache packaging
|
||||
# never fails the overall job. Only save when key wasn't hit.
|
||||
- name: Save cargo home cache
|
||||
@@ -217,33 +271,193 @@ jobs:
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
|
||||
- name: Save target cache (except gnu-dev)
|
||||
if: >-
|
||||
always() && !cancelled() &&
|
||||
(steps.cache_target_restore.outputs.cache-hit != 'true') &&
|
||||
!(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release')
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/codex-rs/target/
|
||||
key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
echo "### sccache stats — ${{ matrix.target }} (${{ matrix.profile }})";
|
||||
echo;
|
||||
echo '```';
|
||||
sccache --show-stats || true;
|
||||
echo '```';
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Save APT cache (musl)
|
||||
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
/var/cache/apt
|
||||
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
|
||||
|
||||
# Fail the job if any of the previous steps failed.
|
||||
- name: verify all steps passed
|
||||
if: |
|
||||
steps.clippy.outcome == 'failure' ||
|
||||
steps.cargo_check_all_crates.outcome == 'failure' ||
|
||||
steps.test.outcome == 'failure'
|
||||
steps.cargo_check_all_crates.outcome == 'failure'
|
||||
run: |
|
||||
echo "One or more checks failed (clippy, cargo_check_all_crates, or test). See logs for details."
|
||||
echo "One or more checks failed (clippy or cargo_check_all_crates). See logs for details."
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
needs: changed
|
||||
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
RUSTC_WRAPPER: sccache
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-14
|
||||
target: aarch64-apple-darwin
|
||||
profile: dev
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
profile: dev
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
profile: dev
|
||||
- runner: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: dev
|
||||
- runner: windows-11-arm
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: dev
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Restore cargo home cache
|
||||
id: cache_cargo_home_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- name: Install sccache
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
|
||||
echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
|
||||
echo "Using sccache GitHub backend"
|
||||
else
|
||||
echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
|
||||
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
restore-keys: |
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: nextest
|
||||
version: 0.9.103
|
||||
|
||||
- name: tests
|
||||
id: test
|
||||
continue-on-error: true
|
||||
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
- name: Save cargo home cache
|
||||
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
echo "### sccache stats — ${{ matrix.target }} (tests)";
|
||||
echo;
|
||||
echo '```';
|
||||
sccache --show-stats || true;
|
||||
echo '```';
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: verify tests passed
|
||||
if: steps.test.outcome == 'failure'
|
||||
run: |
|
||||
echo "Tests failed. See logs for details."
|
||||
exit 1
|
||||
|
||||
# --- Gatherer job that you mark as the ONLY required status -----------------
|
||||
results:
|
||||
name: CI results (required)
|
||||
needs: [changed, general, cargo_shear, lint_build_test]
|
||||
needs: [changed, general, cargo_shear, lint_build, tests]
|
||||
if: always()
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
@@ -252,7 +466,8 @@ jobs:
|
||||
run: |
|
||||
echo "general: ${{ needs.general.result }}"
|
||||
echo "shear : ${{ needs.cargo_shear.result }}"
|
||||
echo "matrix : ${{ needs.lint_build_test.result }}"
|
||||
echo "lint : ${{ needs.lint_build.result }}"
|
||||
echo "tests : ${{ needs.tests.result }}"
|
||||
|
||||
# If nothing relevant changed (PR touching only root README, etc.),
|
||||
# declare success regardless of other jobs.
|
||||
@@ -264,4 +479,10 @@ jobs:
|
||||
# Otherwise require the jobs to have succeeded
|
||||
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
|
||||
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
|
||||
[[ '${{ needs.lint_build_test.result }}' == 'success' ]] || { echo 'matrix failed'; exit 1; }
|
||||
[[ '${{ needs.lint_build.result }}' == 'success' ]] || { echo 'lint_build failed'; exit 1; }
|
||||
[[ '${{ needs.tests.result }}' == 'success' ]] || { echo 'tests failed'; exit 1; }
|
||||
|
||||
- name: sccache summary note
|
||||
if: always()
|
||||
run: |
|
||||
echo "Per-job sccache stats are attached to each matrix job's Step Summary."
|
||||
|
||||
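The "Configure sccache backend" step above prefers the GitHub Actions cache service when the runner exposes it, and otherwise falls back to a local sccache directory that `actions/cache` saves and restores. A rough Rust rendering of that decision (the environment variable names are the ones the bash step checks; the function is only a sketch, the real logic stays in the workflow):

```rust
use std::env;

/// Sketch of the workflow's backend selection: prefer the GHA cache service,
/// otherwise fall back to a local sccache directory saved via actions/cache.
fn choose_sccache_backend(workspace: &str) -> Vec<(String, String)> {
    let has_gha_cache = env::var("ACTIONS_CACHE_URL").is_ok_and(|v| !v.is_empty())
        && env::var("ACTIONS_RUNTIME_TOKEN").is_ok_and(|v| !v.is_empty());

    if has_gha_cache {
        vec![("SCCACHE_GHA_ENABLED".into(), "true".into())]
    } else {
        vec![
            ("SCCACHE_GHA_ENABLED".into(), "false".into()),
            ("SCCACHE_DIR".into(), format!("{workspace}/.sccache")),
        ]
    }
}

fn main() {
    for (key, value) in choose_sccache_backend("/home/runner/work/codex") {
        println!("{key}={value}");
    }
}
```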
.github/workflows/rust-release.yml (vendored, 29 changes)
@@ -58,9 +58,9 @@ jobs:
fail-fast: false
matrix:
include:
- runner: macos-14
- runner: macos-15-xlarge
target: aarch64-apple-darwin
- runner: macos-14
- runner: macos-15-xlarge
target: x86_64-apple-darwin
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
@@ -100,7 +100,7 @@ jobs:
- name: Cargo build
run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy

- if: ${{ matrix.runner == 'macos-14' }}
- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Configure Apple code signing
shell: bash
env:
@@ -185,7 +185,7 @@ jobs:
echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

- if: ${{ matrix.runner == 'macos-14' }}
- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Sign macOS binaries
shell: bash
run: |
@@ -206,7 +206,7 @@ jobs:
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done

- if: ${{ matrix.runner == 'macos-14' }}
- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Notarize macOS binaries
shell: bash
env:
@@ -295,6 +295,15 @@ jobs:
# ${{ matrix.target }}
dest="dist/${{ matrix.target }}"

# We want to ship the raw Windows executables in the GitHub Release
# in addition to the compressed archives. Keep the originals for
# Windows targets; remove them elsewhere to limit the number of
# artifacts that end up in the GitHub Release.
keep_originals=false
if [[ "${{ matrix.runner }}" == windows* ]]; then
keep_originals=true
fi

# For compatibility with environments that lack the `zstd` tool we
# additionally create a `.tar.gz` for all platforms and `.zip` for
# Windows alongside every single binary that we publish. The end result is:
@@ -324,11 +333,15 @@ jobs:

# Also create .zst (existing behaviour) *and* remove the original
# uncompressed binary to keep the directory small.
zstd -T0 -19 --rm "$dest/$base"
zstd_args=(-T0 -19)
if [[ "${keep_originals}" == false ]]; then
zstd_args+=(--rm)
fi
zstd "${zstd_args[@]}" "$dest/$base"
done

- name: Remove signing keychain
if: ${{ always() && matrix.runner == 'macos-14' }}
if: ${{ always() && matrix.runner == 'macos-15-xlarge' }}
shell: bash
env:
APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
@@ -350,7 +363,7 @@ jobs:
fi
fi

- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v5
with:
name: ${{ matrix.target }}
# Upload the per-binary .zst files as well as the new .tar.gz
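After this change, each published binary ends up with a `.zst` and a `.tar.gz`, Windows targets additionally get a `.zip`, and (new here) only Windows targets keep the raw uncompressed executable in the release. A hedged Rust sketch of that per-target decision, based on the comments in the hunk above (the function is illustrative; the release workflow itself does this in bash):

```rust
/// Illustrative sketch of which artifacts the release step keeps per target.
fn artifacts_for(target: &str, binary: &str) -> Vec<String> {
    let is_windows = target.contains("windows");
    let mut out = vec![format!("{binary}.zst"), format!("{binary}.tar.gz")];
    if is_windows {
        // Windows additionally gets a .zip and keeps the uncompressed .exe,
        // mirroring keep_originals=true in the workflow.
        out.push(format!("{binary}.zip"));
        out.push(binary.to_string());
    }
    out
}

fn main() {
    println!("{:?}", artifacts_for("x86_64-pc-windows-msvc", "codex-x86_64-pc-windows-msvc.exe"));
    println!("{:?}", artifacts_for("aarch64-apple-darwin", "codex-aarch64-apple-darwin"));
}
```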
@@ -84,6 +84,7 @@ If you don’t have the tool:
- Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)` helper.
- Prefer `wait_for_event` over `wait_for_event_with_timeout`.

- Typical pattern:
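To make the assertion style concrete, here is a self-contained Rust sketch with toy stand-ins for the helpers named above. Only the names `ResponseMock`, `ResponsesRequest`, `requests`, `single_request`, `body_json`, and `query_param` come from the guidance; the struct layouts and signatures below are assumptions, not the repository's real test-support crate.

```rust
// Toy stand-ins only: the real helpers live in the test support crate. This
// sketch just shows assertions against structured payloads instead of raw
// JSON string digging.
use serde_json::{json, Value};

struct ResponsesRequest { body: Value }

impl ResponsesRequest {
    fn body_json(&self) -> &Value { &self.body }
    fn query_param(&self, _name: &str) -> Option<&str> { None }
}

struct ResponseMock { captured: Vec<ResponsesRequest> }

impl ResponseMock {
    fn requests(&self) -> &[ResponsesRequest] { &self.captured }
    fn single_request(&self) -> &ResponsesRequest {
        assert_eq!(self.captured.len(), 1, "expected exactly one POST");
        &self.captured[0]
    }
}

fn main() {
    let mock = ResponseMock {
        captured: vec![ResponsesRequest { body: json!({"input": [{"type": "message"}]}) }],
    };

    assert_eq!(mock.requests().len(), 1);
    // Assert against structured payloads rather than string-matching the body.
    let request = mock.single_request();
    assert!(request.body_json()["input"].is_array());
    assert!(request.query_param("stream").is_none());
}
```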
@@ -1 +1 @@
The changelog can be found on the [releases page](https://github.com/openai/codex/releases)
The changelog can be found on the [releases page](https://github.com/openai/codex/releases).
@@ -33,6 +33,8 @@ Then simply run `codex` to get started:
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>

@@ -73,11 +75,13 @@ Codex CLI supports a rich set of configuration options, with preferences stored

- [**Getting started**](./docs/getting-started.md)
- [CLI usage](./docs/getting-started.md#cli-usage)
- [Slash Commands](./docs/slash_commands.md)
- [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
- [Example prompts](./docs/getting-started.md#example-prompts)
- [Custom prompts](./docs/prompts.md)
- [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [Configuration](./docs/config.md)
- [**Configuration**](./docs/config.md)
- [Example config](./docs/example-config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Authentication**](./docs/authentication.md)
- [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
codex-rs/.cargo/config.toml (new file, 5 lines)
@@ -0,0 +1,5 @@
[target.'cfg(all(windows, target_env = "msvc"))']
rustflags = ["-C", "link-arg=/STACK:8388608"]

[target.'cfg(all(windows, target_env = "gnu"))']
rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
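Both linker flags reserve the same amount, 8388608 bytes, that is an 8 MiB main-thread stack on Windows instead of the default 1 MiB reserve. For threads spawned at runtime the analogous knob is the standard library's stack-size builder; a small sketch using the same value (the recursion depth and workload here are arbitrary, only the byte count matches the config above):

```rust
use std::thread;

fn deep_recursion(depth: u32) -> u32 {
    if depth == 10_000 { depth } else { deep_recursion(depth + 1) }
}

fn main() {
    // 8 MiB, the same value the linker flags above reserve for the main thread.
    const STACK_SIZE: usize = 8 * 1024 * 1024; // 8388608 bytes

    let handle = thread::Builder::new()
        .stack_size(STACK_SIZE)
        .spawn(|| {
            // Deeply recursive work gets the larger stack here, independently
            // of the linker setting, which only affects the main thread.
            deep_recursion(0)
        })
        .expect("failed to spawn thread");

    println!("depth reached: {}", handle.join().expect("thread panicked"));
}
```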
codex-rs/Cargo.lock (generated, 613 changes): file diff suppressed because it is too large.
@@ -16,27 +16,28 @@ members = [
|
||||
"core",
|
||||
"exec",
|
||||
"execpolicy",
|
||||
"keyring-store",
|
||||
"file-search",
|
||||
"git-tooling",
|
||||
"linux-sandbox",
|
||||
"login",
|
||||
"mcp-client",
|
||||
"mcp-server",
|
||||
"mcp-types",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
"protocol",
|
||||
"protocol-ts",
|
||||
"rmcp-client",
|
||||
"responses-api-proxy",
|
||||
"stdio-to-uds",
|
||||
"otel",
|
||||
"tui",
|
||||
"git-apply",
|
||||
"utils/git",
|
||||
"utils/cache",
|
||||
"utils/image",
|
||||
"utils/json-to-toml",
|
||||
"utils/readiness",
|
||||
"utils/pty",
|
||||
"utils/readiness",
|
||||
"utils/string",
|
||||
"utils/tokenizer",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -57,30 +58,34 @@ codex-app-server-protocol = { path = "app-server-protocol" }
|
||||
codex-apply-patch = { path = "apply-patch" }
|
||||
codex-arg0 = { path = "arg0" }
|
||||
codex-async-utils = { path = "async-utils" }
|
||||
codex-backend-client = { path = "backend-client" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
codex-exec = { path = "exec" }
|
||||
codex-feedback = { path = "feedback" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-git-tooling = { path = "git-tooling" }
|
||||
codex-git = { path = "utils/git" }
|
||||
codex-keyring-store = { path = "keyring-store" }
|
||||
codex-linux-sandbox = { path = "linux-sandbox" }
|
||||
codex-login = { path = "login" }
|
||||
codex-mcp-client = { path = "mcp-client" }
|
||||
codex-mcp-server = { path = "mcp-server" }
|
||||
codex-ollama = { path = "ollama" }
|
||||
codex-otel = { path = "otel" }
|
||||
codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-protocol-ts = { path = "protocol-ts" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-utils-cache = { path = "utils/cache" }
|
||||
codex-utils-image = { path = "utils/image" }
|
||||
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-pty = { path = "utils/pty" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-string = { path = "utils/string" }
|
||||
codex-utils-tokenizer = { path = "utils/tokenizer" }
|
||||
codex-windows-sandbox = { path = "windows-sandbox-rs" }
|
||||
core_test_support = { path = "core/tests/common" }
|
||||
mcp-types = { path = "mcp-types" }
|
||||
mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
@@ -89,8 +94,8 @@ mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
allocative = "0.3.3"
|
||||
ansi-to-tui = "7.0.0"
|
||||
anyhow = "1"
|
||||
arboard = "3"
|
||||
askama = "0.12"
|
||||
arboard = { version = "3", features = ["wayland-data-control"] }
|
||||
askama = "0.14"
|
||||
assert_cmd = "2"
|
||||
assert_matches = "1.5.0"
|
||||
async-channel = "2.3.1"
|
||||
@@ -115,11 +120,13 @@ env_logger = "0.11.5"
|
||||
escargot = "0.5"
|
||||
eventsource-stream = "0.2.3"
|
||||
futures = { version = "0.3", default-features = false }
|
||||
icu_decimal = "2.0.0"
|
||||
icu_locale_core = "2.0.0"
|
||||
http = "1.3.1"
|
||||
icu_decimal = "2.1"
|
||||
icu_provider = { version = "2.1", features = ["sync"] }
|
||||
icu_locale_core = "2.1"
|
||||
ignore = "0.4.23"
|
||||
image = { version = "^0.25.8", default-features = false }
|
||||
indexmap = "2.6.0"
|
||||
indexmap = "2.12.0"
|
||||
insta = "1.43.2"
|
||||
itertools = "0.14.0"
|
||||
keyring = "3.6"
|
||||
@@ -127,6 +134,7 @@ landlock = "0.4.1"
|
||||
lazy_static = "1"
|
||||
libc = "0.2.175"
|
||||
log = "0.4"
|
||||
lru = "0.12.5"
|
||||
maplit = "1.0.2"
|
||||
mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
@@ -142,7 +150,6 @@ os_info = "3.12.0"
|
||||
owo-colors = "4.2.0"
|
||||
paste = "1.0.15"
|
||||
path-absolutize = "3.1.1"
|
||||
path-clean = "1.0.1"
|
||||
pathdiff = "0.2"
|
||||
portable-pty = "0.9.0"
|
||||
predicates = "3"
|
||||
@@ -150,9 +157,10 @@ pretty_assertions = "1.4.1"
|
||||
pulldown-cmark = "0.10"
|
||||
rand = "0.9"
|
||||
ratatui = "0.29.0"
|
||||
ratatui-macros = "0.6.0"
|
||||
regex-lite = "0.1.7"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.8.0", default-features = false }
|
||||
rmcp = { version = "0.8.5", default-features = false }
|
||||
schemars = "0.8.22"
|
||||
seccompiler = "0.5.0"
|
||||
sentry = "0.34.0"
|
||||
@@ -172,7 +180,7 @@ sys-locale = "0.3.2"
|
||||
tempfile = "3.23.0"
|
||||
test-log = "0.2.18"
|
||||
textwrap = "0.16.2"
|
||||
thiserror = "2.0.16"
|
||||
thiserror = "2.0.17"
|
||||
time = "0.3"
|
||||
tiny_http = "0.12"
|
||||
tokio = "1"
|
||||
@@ -201,8 +209,9 @@ walkdir = "2.5.0"
|
||||
webbrowser = "1.0"
|
||||
which = "6"
|
||||
wildmatch = "2.5.0"
|
||||
|
||||
wiremock = "0.6"
|
||||
zeroize = "1.8.1"
|
||||
zeroize = "1.8.2"
|
||||
|
||||
[workspace.lints]
|
||||
rust = {}
|
||||
@@ -245,7 +254,12 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["openssl-sys", "codex-utils-readiness"]
ignored = [
    "icu_provider",
    "openssl-sys",
    "codex-utils-readiness",
    "codex-utils-tokenizer",
]

[profile.release]
lto = "fat"
@@ -256,10 +270,16 @@ strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1

[profile.ci-test]
debug = 1 # Reduce debug symbol size
inherits = "test"
opt-level = 0

[patch.crates-io]
# Uncomment to debug local changes.
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }

# Uncomment to debug local changes.
# rmcp = { path = "../../rust-sdk/crates/rmcp" }
@@ -58,13 +58,16 @@ To test to see what happens when a command is run under the sandbox provided by

```
# macOS
codex sandbox macos [--full-auto] [COMMAND]...
codex sandbox macos [--full-auto] [--log-denials] [COMMAND]...

# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Windows
codex sandbox windows [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [COMMAND]...
codex debug seatbelt [--full-auto] [--log-denials] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...
```
@@ -14,6 +14,7 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
paste = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
@@ -2,20 +2,28 @@ use crate::ClientNotification;
|
||||
use crate::ClientRequest;
|
||||
use crate::ServerNotification;
|
||||
use crate::ServerRequest;
|
||||
use crate::export_client_notification_schemas;
|
||||
use crate::export_client_param_schemas;
|
||||
use crate::export_client_response_schemas;
|
||||
use crate::export_client_responses;
|
||||
use crate::export_server_notification_schemas;
|
||||
use crate::export_server_param_schemas;
|
||||
use crate::export_server_response_schemas;
|
||||
use crate::export_server_responses;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use schemars::JsonSchema;
|
||||
use schemars::schema::RootSchema;
|
||||
use schemars::schema_for;
|
||||
use serde::Serialize;
|
||||
use serde_json::Map;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
@@ -27,83 +35,29 @@ use ts_rs::TS;
|
||||
|
||||
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
|
||||
|
||||
macro_rules! for_each_schema_type {
|
||||
($macro:ident) => {
|
||||
$macro!(crate::RequestId);
|
||||
$macro!(crate::JSONRPCMessage);
|
||||
$macro!(crate::JSONRPCRequest);
|
||||
$macro!(crate::JSONRPCNotification);
|
||||
$macro!(crate::JSONRPCResponse);
|
||||
$macro!(crate::JSONRPCError);
|
||||
$macro!(crate::JSONRPCErrorError);
|
||||
$macro!(crate::AddConversationListenerParams);
|
||||
$macro!(crate::AddConversationSubscriptionResponse);
|
||||
$macro!(crate::ApplyPatchApprovalParams);
|
||||
$macro!(crate::ApplyPatchApprovalResponse);
|
||||
$macro!(crate::ArchiveConversationParams);
|
||||
$macro!(crate::ArchiveConversationResponse);
|
||||
$macro!(crate::AuthMode);
|
||||
$macro!(crate::AuthStatusChangeNotification);
|
||||
$macro!(crate::CancelLoginChatGptParams);
|
||||
$macro!(crate::CancelLoginChatGptResponse);
|
||||
$macro!(crate::ClientInfo);
|
||||
$macro!(crate::ClientNotification);
|
||||
$macro!(crate::ClientRequest);
|
||||
$macro!(crate::ConversationSummary);
|
||||
$macro!(crate::ExecCommandApprovalParams);
|
||||
$macro!(crate::ExecCommandApprovalResponse);
|
||||
$macro!(crate::ExecOneOffCommandParams);
|
||||
$macro!(crate::ExecOneOffCommandResponse);
|
||||
$macro!(crate::FuzzyFileSearchParams);
|
||||
$macro!(crate::FuzzyFileSearchResponse);
|
||||
$macro!(crate::FuzzyFileSearchResult);
|
||||
$macro!(crate::GetAuthStatusParams);
|
||||
$macro!(crate::GetAuthStatusResponse);
|
||||
$macro!(crate::GetUserAgentResponse);
|
||||
$macro!(crate::GetUserSavedConfigResponse);
|
||||
$macro!(crate::GitDiffToRemoteParams);
|
||||
$macro!(crate::GitDiffToRemoteResponse);
|
||||
$macro!(crate::GitSha);
|
||||
$macro!(crate::InitializeParams);
|
||||
$macro!(crate::InitializeResponse);
|
||||
$macro!(crate::InputItem);
|
||||
$macro!(crate::InterruptConversationParams);
|
||||
$macro!(crate::InterruptConversationResponse);
|
||||
$macro!(crate::ListConversationsParams);
|
||||
$macro!(crate::ListConversationsResponse);
|
||||
$macro!(crate::LoginApiKeyParams);
|
||||
$macro!(crate::LoginApiKeyResponse);
|
||||
$macro!(crate::LoginChatGptCompleteNotification);
|
||||
$macro!(crate::LoginChatGptResponse);
|
||||
$macro!(crate::LogoutChatGptParams);
|
||||
$macro!(crate::LogoutChatGptResponse);
|
||||
$macro!(crate::NewConversationParams);
|
||||
$macro!(crate::NewConversationResponse);
|
||||
$macro!(crate::Profile);
|
||||
$macro!(crate::RemoveConversationListenerParams);
|
||||
$macro!(crate::RemoveConversationSubscriptionResponse);
|
||||
$macro!(crate::ResumeConversationParams);
|
||||
$macro!(crate::ResumeConversationResponse);
|
||||
$macro!(crate::SandboxSettings);
|
||||
$macro!(crate::SendUserMessageParams);
|
||||
$macro!(crate::SendUserMessageResponse);
|
||||
$macro!(crate::SendUserTurnParams);
|
||||
$macro!(crate::SendUserTurnResponse);
|
||||
$macro!(crate::ServerNotification);
|
||||
$macro!(crate::ServerRequest);
|
||||
$macro!(crate::SessionConfiguredNotification);
|
||||
$macro!(crate::SetDefaultModelParams);
|
||||
$macro!(crate::SetDefaultModelResponse);
|
||||
$macro!(crate::Tools);
|
||||
$macro!(crate::UserInfoResponse);
|
||||
$macro!(crate::UserSavedConfig);
|
||||
$macro!(codex_protocol::protocol::EventMsg);
|
||||
$macro!(codex_protocol::protocol::FileChange);
|
||||
$macro!(codex_protocol::parse_command::ParsedCommand);
|
||||
$macro!(codex_protocol::protocol::SandboxPolicy);
|
||||
};
|
||||
#[derive(Clone)]
|
||||
pub struct GeneratedSchema {
|
||||
namespace: Option<String>,
|
||||
logical_name: String,
|
||||
value: Value,
|
||||
in_v1_dir: bool,
|
||||
}
|
||||
|
||||
impl GeneratedSchema {
|
||||
fn namespace(&self) -> Option<&str> {
|
||||
self.namespace.as_deref()
|
||||
}
|
||||
|
||||
fn logical_name(&self) -> &str {
|
||||
&self.logical_name
|
||||
}
|
||||
|
||||
fn value(&self) -> &Value {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
|
||||
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts(out_dir, prettier)?;
|
||||
generate_json(out_dir)?;
|
||||
@@ -111,7 +65,9 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
let v2_out_dir = out_dir.join("v2");
|
||||
ensure_dir(out_dir)?;
|
||||
ensure_dir(&v2_out_dir)?;
|
||||
|
||||
ClientRequest::export_all_to(out_dir)?;
|
||||
export_client_responses(out_dir)?;
|
||||
@@ -122,17 +78,22 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
ServerNotification::export_all_to(out_dir)?;
|
||||
|
||||
generate_index_ts(out_dir)?;
|
||||
generate_index_ts(&v2_out_dir)?;
|
||||
|
||||
let ts_files = ts_files_in(out_dir)?;
|
||||
// Ensure our header is present on all TS files (root + subdirs like v2/).
|
||||
let ts_files = ts_files_in_recursive(out_dir)?;
|
||||
for file in &ts_files {
|
||||
prepend_header_if_missing(file)?;
|
||||
}
|
||||
|
||||
// Optionally run Prettier on all generated TS files.
|
||||
if let Some(prettier_bin) = prettier
|
||||
&& !ts_files.is_empty()
|
||||
{
|
||||
let status = Command::new(prettier_bin)
|
||||
.arg("--write")
|
||||
.arg("--log-level")
|
||||
.arg("warn")
|
||||
.args(ts_files.iter().map(|p| p.as_os_str()))
|
||||
.status()
|
||||
.with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
|
||||
@@ -146,23 +107,47 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
|
||||
pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
ensure_dir(out_dir)?;
|
||||
let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();
|
||||
let envelope_emitters: &[JsonSchemaEmitter] = &[
|
||||
|d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
|
||||
|d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
|
||||
|d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
|
||||
|d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
|
||||
|d| write_json_schema_with_return::<FileChange>(d, "FileChange"),
|
||||
|d| write_json_schema_with_return::<crate::protocol::v1::InputItem>(d, "InputItem"),
|
||||
|d| write_json_schema_with_return::<ParsedCommand>(d, "ParsedCommand"),
|
||||
|d| write_json_schema_with_return::<SandboxPolicy>(d, "SandboxPolicy"),
|
||||
];
|
||||
|
||||
macro_rules! add_schema {
|
||||
($ty:path) => {{
|
||||
let name = type_basename(stringify!($ty));
|
||||
let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
|
||||
bundle.insert(name, schema);
|
||||
}};
|
||||
let mut schemas: Vec<GeneratedSchema> = Vec::new();
|
||||
for emit in envelope_emitters {
|
||||
schemas.push(emit(out_dir)?);
|
||||
}
|
||||
|
||||
for_each_schema_type!(add_schema);
|
||||
schemas.extend(export_client_param_schemas(out_dir)?);
|
||||
schemas.extend(export_client_response_schemas(out_dir)?);
|
||||
schemas.extend(export_server_param_schemas(out_dir)?);
|
||||
schemas.extend(export_server_response_schemas(out_dir)?);
|
||||
schemas.extend(export_client_notification_schemas(out_dir)?);
|
||||
schemas.extend(export_server_notification_schemas(out_dir)?);
|
||||
|
||||
export_client_response_schemas(out_dir)?;
|
||||
export_server_response_schemas(out_dir)?;
|
||||
let bundle = build_schema_bundle(schemas)?;
|
||||
write_pretty_json(
|
||||
out_dir.join("codex_app_server_protocol.schemas.json"),
|
||||
&bundle,
|
||||
)?;
|
||||
|
||||
let mut definitions = Map::new();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
|
||||
const SPECIAL_DEFINITIONS: &[&str] = &[
|
||||
"ClientNotification",
|
||||
"ClientRequest",
|
||||
@@ -175,30 +160,62 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
"ServerRequest",
|
||||
];
|
||||
|
||||
for (name, schema) in bundle {
|
||||
let mut schema_value = serde_json::to_value(schema)?;
|
||||
if let Value::Object(ref mut obj) = schema_value {
|
||||
if let Some(defs) = obj.remove("definitions")
|
||||
&& let Value::Object(defs_obj) = defs
|
||||
{
|
||||
for (def_name, def_schema) in defs_obj {
|
||||
if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
definitions.insert(def_name, def_schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
let namespaced_types = collect_namespaced_types(&schemas);
|
||||
let mut definitions = Map::new();
|
||||
|
||||
if let Some(Value::Array(one_of)) = obj.get_mut("oneOf") {
|
||||
for variant in one_of.iter_mut() {
|
||||
if let Some(variant_name) = variant_definition_name(&name, variant)
|
||||
&& let Value::Object(variant_obj) = variant
|
||||
for schema in schemas {
|
||||
let GeneratedSchema {
|
||||
namespace,
|
||||
logical_name,
|
||||
mut value,
|
||||
in_v1_dir,
|
||||
} = schema;
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
rewrite_refs_to_namespace(&mut value, ns);
|
||||
}
|
||||
|
||||
let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
|
||||
if let Value::Object(ref mut obj) = value
|
||||
&& let Some(defs) = obj.remove("definitions")
|
||||
&& let Value::Object(defs_obj) = defs
|
||||
{
|
||||
for (def_name, mut def_schema) in defs_obj {
|
||||
if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
annotate_schema(&mut def_schema, Some(def_name.as_str()));
|
||||
let target_namespace = match namespace {
|
||||
Some(ref ns) => Some(ns.clone()),
|
||||
None => namespace_for_definition(&def_name, &namespaced_types)
|
||||
.cloned()
|
||||
.filter(|_| !in_v1_dir),
|
||||
};
|
||||
if let Some(ref ns) = target_namespace {
|
||||
if namespace.as_deref() == Some(ns.as_str()) {
|
||||
rewrite_refs_to_namespace(&mut def_schema, ns);
|
||||
insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
|
||||
} else if !forced_namespace_refs
|
||||
.iter()
|
||||
.any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
|
||||
{
|
||||
variant_obj.insert("title".into(), Value::String(variant_name));
|
||||
forced_namespace_refs.push((def_name.clone(), ns.clone()));
|
||||
}
|
||||
} else {
|
||||
definitions.insert(def_name, def_schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
definitions.insert(name, schema_value);
|
||||
|
||||
for (name, ns) in forced_namespace_refs {
|
||||
rewrite_named_ref_to_namespace(&mut value, &ns, &name);
|
||||
}
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
|
||||
} else {
|
||||
definitions.insert(logical_name, value);
|
||||
}
|
||||
}
|
||||
|
||||
let mut root = Map::new();
|
||||
@@ -213,30 +230,66 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
root.insert("type".to_string(), Value::String("object".into()));
|
||||
root.insert("definitions".to_string(), Value::Object(definitions));
|
||||
|
||||
write_pretty_json(
|
||||
out_dir.join("codex_app_server_protocol.schemas.json"),
|
||||
&Value::Object(root),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
Ok(Value::Object(root))
|
||||
}
|
||||
|
||||
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
|
||||
fn insert_into_namespace(
|
||||
definitions: &mut Map<String, Value>,
|
||||
namespace: &str,
|
||||
name: String,
|
||||
schema: Value,
|
||||
) -> Result<()> {
|
||||
let entry = definitions
|
||||
.entry(namespace.to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()));
|
||||
match entry {
|
||||
Value::Object(map) => {
|
||||
map.insert(name, schema);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(anyhow!("expected namespace {namespace} to be an object")),
|
||||
}
|
||||
}

fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    let file_stem = name.trim();
    let schema = schema_for!(T);
    write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema)
    let mut schema_value = serde_json::to_value(schema)?;
    annotate_schema(&mut schema_value, Some(file_stem));
    // If the name looks like a namespaced path (e.g., "v2::Type"), mirror
    // the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
    // write alongside the legacy files.
    let (raw_namespace, logical_name) = split_namespace(file_stem);
    let out_path = if let Some(ns) = raw_namespace {
        let dir = out_dir.join(ns);
        ensure_dir(&dir)?;
        dir.join(format!("{logical_name}.json"))
    } else {
        out_dir.join(format!("{file_stem}.json"))
    };

    write_pretty_json(out_path, &schema_value)
        .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    Ok(schema)
    let namespace = match raw_namespace {
        Some("v1") | None => None,
        Some(ns) => Some(ns.to_string()),
    };
    Ok(GeneratedSchema {
        in_v1_dir: raw_namespace == Some("v1"),
        namespace,
        logical_name: logical_name.to_string(),
        value: schema_value,
    })
}

pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<()>
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    write_json_schema_with_return::<T>(out_dir, name).map(|_| ())
    write_json_schema_with_return::<T>(out_dir, name)
}

fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
@@ -245,13 +298,73 @@ fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
    fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
    Ok(())
}

fn type_basename(type_path: &str) -> String {
    type_path
        .rsplit_once("::")
        .map(|(_, name)| name)
        .unwrap_or(type_path)
        .trim()
        .to_string()

/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
fn split_namespace(name: &str) -> (Option<&str>, &str) {
    name.split_once("::")
        .map_or((None, name), |(ns, rest)| (Some(ns), rest))
}
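A doctest-style sketch of the expected splits (not part of the diff):

assert_eq!(
    split_namespace("v2::ThreadStartParams"),
    (Some("v2"), "ThreadStartParams")
);
assert_eq!(split_namespace("InitializeParams"), (None, "InitializeParams"));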

/// Recursively rewrite $ref values that point at "#/definitions/..." so that
/// they point to a namespaced location under the bundle.
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(r)) = obj.get_mut("$ref")
                && let Some(suffix) = r.strip_prefix("#/definitions/")
            {
                let prefix = format!("{ns}/");
                if !suffix.starts_with(&prefix) {
                    *r = format!("#/definitions/{ns}/{suffix}");
                }
            }
            for v in obj.values_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        Value::Array(items) => {
            for v in items.iter_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        _ => {}
    }
}
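A minimal sketch of the rewrite on a hand-built value: bare refs gain the namespace segment, refs already under it are left alone.

let mut schema = serde_json::json!({
    "properties": {
        "item": { "$ref": "#/definitions/InputItem" },
        "thread": { "$ref": "#/definitions/v2/ThreadStartParams" }
    }
});
rewrite_refs_to_namespace(&mut schema, "v2");
assert_eq!(schema["properties"]["item"]["$ref"], "#/definitions/v2/InputItem");
assert_eq!(
    schema["properties"]["thread"]["$ref"],
    "#/definitions/v2/ThreadStartParams"
);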

fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
    let mut types = HashMap::new();
    for schema in schemas {
        if let Some(ns) = schema.namespace() {
            types
                .entry(schema.logical_name().to_string())
                .or_insert_with(|| ns.to_string());
            if let Some(Value::Object(defs)) = schema.value().get("definitions") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
            if let Some(Value::Object(defs)) = schema.value().get("$defs") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
        }
    }
    types
}

fn namespace_for_definition<'a>(
    name: &str,
    types: &'a HashMap<String, String>,
) -> Option<&'a String> {
    if let Some(ns) = types.get(name) {
        return Some(ns);
    }
    let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
    if trimmed != name {
        return types.get(trimmed);
    }
    None
}
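Sketch of the digit-trimming fallback, assuming the map came from collect_namespaced_types and using `ThreadItem` as a placeholder name: a suffixed duplicate such as `ThreadItem2` resolves to the namespace of its base name.

let mut types: HashMap<String, String> = HashMap::new();
types.insert("ThreadItem".to_string(), "v2".to_string());
// "ThreadItem2" has no direct entry, so the trailing digits are trimmed first.
assert_eq!(
    namespace_for_definition("ThreadItem2", &types),
    Some(&"v2".to_string())
);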

fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
@@ -301,11 +414,147 @@ fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
}

fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
    props
        .get(key)
        .and_then(|value| value.get("enum"))
        .and_then(Value::as_array)
        .and_then(|arr| arr.first())
    props.get(key).and_then(string_literal)
}

fn string_literal(value: &Value) -> Option<&str> {
    value.get("const").and_then(Value::as_str).or_else(|| {
        value
            .get("enum")
            .and_then(Value::as_array)
            .and_then(|arr| arr.first())
            .and_then(Value::as_str)
    })
}

fn annotate_schema(value: &mut Value, base: Option<&str>) {
    match value {
        Value::Object(map) => annotate_object(map, base),
        Value::Array(items) => {
            for item in items {
                annotate_schema(item, base);
            }
        }
        _ => {}
    }
}

fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
    let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
    if let Some(owner) = owner.as_deref()
        && let Some(Value::Object(props)) = map.get_mut("properties")
    {
        set_discriminator_titles(props, owner);
    }

    if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
        annotate_variant_list(variants, base);
    }
    if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
        annotate_variant_list(variants, base);
    }

    if let Some(Value::Object(defs)) = map.get_mut("definitions") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(defs)) = map.get_mut("$defs") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(props)) = map.get_mut("properties") {
        for value in props.values_mut() {
            annotate_schema(value, base);
        }
    }

    if let Some(items) = map.get_mut("items") {
        annotate_schema(items, base);
    }

    if let Some(additional) = map.get_mut("additionalProperties") {
        annotate_schema(additional, base);
    }

    for (key, child) in map.iter_mut() {
        match key.as_str() {
            "oneOf"
            | "anyOf"
            | "definitions"
            | "$defs"
            | "properties"
            | "items"
            | "additionalProperties" => {}
            _ => annotate_schema(child, base),
        }
    }
}

fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
    let mut seen = HashSet::new();

    for variant in variants.iter() {
        if let Some(name) = variant_title(variant) {
            seen.insert(name.to_owned());
        }
    }

    for variant in variants.iter_mut() {
        let mut variant_name = variant_title(variant).map(str::to_owned);

        if variant_name.is_none()
            && let Some(base_name) = base
            && let Some(name) = variant_definition_name(base_name, variant)
        {
            let mut candidate = name.clone();
            let mut index = 2;
            while seen.contains(&candidate) {
                candidate = format!("{name}{index}");
                index += 1;
            }
            if let Some(obj) = variant.as_object_mut() {
                obj.insert("title".into(), Value::String(candidate.clone()));
            }
            seen.insert(candidate.clone());
            variant_name = Some(candidate);
        }

        if let Some(name) = variant_name.as_deref()
            && let Some(obj) = variant.as_object_mut()
            && let Some(Value::Object(props)) = obj.get_mut("properties")
        {
            set_discriminator_titles(props, name);
        }

        annotate_schema(variant, base);
    }
}

const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];

fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
    for key in DISCRIMINATOR_KEYS {
        if let Some(prop_schema) = props.get_mut(*key)
            && string_literal(prop_schema).is_some()
            && let Value::Object(prop_obj) = prop_schema
        {
            if prop_obj.contains_key("title") {
                continue;
            }
            let suffix = to_pascal_case(key);
            prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
        }
    }
}
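Sketch of the effect with an illustrative owner name: a string-literal "type" discriminator inside a variant titled `TextItem` picks up the derived title `TextItemType`.

let mut props = Map::new();
props.insert("type".to_string(), serde_json::json!({ "const": "text" }));
set_discriminator_titles(&mut props, "TextItem");
assert_eq!(props["type"]["title"], "TextItemType");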

fn variant_title(value: &Value) -> Option<&str> {
    value
        .as_object()
        .and_then(|obj| obj.get("title"))
        .and_then(Value::as_str)
}

@@ -335,6 +584,33 @@ fn ensure_dir(dir: &Path) -> Result<()> {
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}

fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
    let direct = format!("#/definitions/{name}");
    let prefixed = format!("{direct}/");
    let replacement = format!("#/definitions/{ns}/{name}");
    let replacement_prefixed = format!("{replacement}/");
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
                if reference == &direct {
                    *reference = replacement;
                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
                    *reference = format!("{replacement_prefixed}{rest}");
                }
            }
            for child in obj.values_mut() {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        Value::Array(items) => {
            for child in items {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        _ => {}
    }
}
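And a sketch of the name-targeted variant above, assuming only `InputItem` was forced into the `v2` namespace: other refs stay untouched.

let mut schema = serde_json::json!({
    "item": { "$ref": "#/definitions/InputItem" },
    "client": { "$ref": "#/definitions/ClientInfo" }
});
rewrite_named_ref_to_namespace(&mut schema, "v2", "InputItem");
assert_eq!(schema["item"]["$ref"], "#/definitions/v2/InputItem");
assert_eq!(schema["client"]["$ref"], "#/definitions/ClientInfo");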

fn prepend_header_if_missing(path: &Path) -> Result<()> {
    let mut content = String::new();
    {
@@ -372,6 +648,28 @@ fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    Ok(files)
}

fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    let mut stack = vec![dir.to_path_buf()];
    while let Some(d) = stack.pop() {
        for entry in
            fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
        {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() {
                stack.push(path);
            } else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
                files.push(path);
            }
        }
    }
    files.sort();
    Ok(files)
}

/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    let mut stems: Vec<String> = ts_files_in(out_dir)?
@@ -388,6 +686,14 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }

    // If this is the root out_dir and a ./v2 folder exists with TS files,
    // expose it as a namespace to avoid symbol collisions at the root.
    let v2_dir = out_dir.join("v2");
    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
    if has_v2_ts {
        entries.push("export * as v2 from \"./v2\";\n".to_string());
    }

    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
@@ -402,3 +708,205 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use std::collections::BTreeSet;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[test]
|
||||
fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
|
||||
// Assert that there are no types of the form "?: T | null" in the generated TS files.
|
||||
let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
|
||||
fs::create_dir(&output_dir)?;
|
||||
|
||||
struct TempDirGuard(PathBuf);
|
||||
|
||||
impl Drop for TempDirGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = fs::remove_dir_all(&self.0);
|
||||
}
|
||||
}
|
||||
|
||||
let _guard = TempDirGuard(output_dir.clone());
|
||||
|
||||
generate_ts(&output_dir, None)?;
|
||||
|
||||
let mut undefined_offenders = Vec::new();
|
||||
let mut optional_nullable_offenders = BTreeSet::new();
|
||||
let mut stack = vec![output_dir];
|
||||
while let Some(dir) = stack.pop() {
|
||||
for entry in fs::read_dir(&dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
stack.push(path);
|
||||
continue;
|
||||
}
|
||||
|
||||
if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
|
||||
let contents = fs::read_to_string(&path)?;
|
||||
if contents.contains("| undefined") {
|
||||
undefined_offenders.push(path.clone());
|
||||
}
|
||||
|
||||
const SKIP_PREFIXES: &[&str] = &[
|
||||
"const ",
|
||||
"let ",
|
||||
"var ",
|
||||
"export const ",
|
||||
"export let ",
|
||||
"export var ",
|
||||
];
|
||||
|
||||
let mut search_start = 0;
|
||||
while let Some(idx) = contents[search_start..].find("| null") {
|
||||
let abs_idx = search_start + idx;
|
||||
// Find the property-colon for this field by scanning forward
|
||||
// from the start of the segment and ignoring nested braces,
|
||||
// brackets, and parens. This avoids colons inside nested
|
||||
// type literals like `{ [k in string]?: string }`.
|
||||
|
||||
let line_start_idx =
|
||||
contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);
|
||||
|
||||
let mut segment_start_idx = line_start_idx;
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
|
||||
// Scan forward for the colon that separates the field name from its type.
|
||||
let mut level_brace = 0_i32;
|
||||
let mut level_brack = 0_i32;
|
||||
let mut level_paren = 0_i32;
|
||||
let mut in_single = false;
|
||||
let mut in_double = false;
|
||||
let mut escape = false;
|
||||
let mut prop_colon_idx = None;
|
||||
for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
|
||||
let idx_abs = segment_start_idx + i;
|
||||
if escape {
|
||||
escape = false;
|
||||
continue;
|
||||
}
|
||||
match ch {
|
||||
'\\' => {
|
||||
// Only treat as escape when inside a string.
|
||||
if in_single || in_double {
|
||||
escape = true;
|
||||
}
|
||||
}
|
||||
'\'' => {
|
||||
if !in_double {
|
||||
in_single = !in_single;
|
||||
}
|
||||
}
|
||||
'"' => {
|
||||
if !in_single {
|
||||
in_double = !in_double;
|
||||
}
|
||||
}
|
||||
'{' if !in_single && !in_double => level_brace += 1,
|
||||
'}' if !in_single && !in_double => level_brace -= 1,
|
||||
'[' if !in_single && !in_double => level_brack += 1,
|
||||
']' if !in_single && !in_double => level_brack -= 1,
|
||||
'(' if !in_single && !in_double => level_paren += 1,
|
||||
')' if !in_single && !in_double => level_paren -= 1,
|
||||
':' if !in_single
|
||||
&& !in_double
|
||||
&& level_brace == 0
|
||||
&& level_brack == 0
|
||||
&& level_paren == 0 =>
|
||||
{
|
||||
prop_colon_idx = Some(idx_abs);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let Some(colon_idx) = prop_colon_idx else {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
};
|
||||
|
||||
let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
|
||||
if field_prefix.is_empty() {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(comment_idx) = field_prefix.rfind("*/") {
|
||||
field_prefix = field_prefix[comment_idx + 2..].trim_start();
|
||||
}
|
||||
|
||||
if field_prefix.is_empty() {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if SKIP_PREFIXES
|
||||
.iter()
|
||||
.any(|prefix| field_prefix.starts_with(prefix))
|
||||
{
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if field_prefix.contains('(') {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the last non-whitespace before ':' is '?', then this is an
|
||||
// optional field with a nullable type (i.e., "?: T | null"),
|
||||
// which we explicitly disallow.
|
||||
if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
|
||||
let line_number =
|
||||
contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
|
||||
let offending_line_end = contents[line_start_idx..]
|
||||
.find('\n')
|
||||
.map(|i| line_start_idx + i)
|
||||
.unwrap_or(contents.len());
|
||||
let offending_snippet =
|
||||
contents[line_start_idx..offending_line_end].trim();
|
||||
|
||||
optional_nullable_offenders.insert(format!(
|
||||
"{}:{}: {offending_snippet}",
|
||||
path.display(),
|
||||
line_number
|
||||
));
|
||||
}
|
||||
|
||||
search_start = abs_idx + 5;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert!(
|
||||
undefined_offenders.is_empty(),
|
||||
"Generated TypeScript still includes unions with `undefined` in {undefined_offenders:?}"
|
||||
);
|
||||
|
||||
// If this assertion fails, it means a field was generated as
|
||||
// "?: T | null" — i.e., both optional (undefined) and nullable (null).
|
||||
// We only want either "?: T" or ": T | null".
|
||||
assert!(
|
||||
optional_nullable_offenders.is_empty(),
|
||||
"Generated TypeScript has optional fields with nullable types (disallowed '?: T | null'), add #[ts(optional)] to fix:\n{optional_nullable_offenders:?}"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

@@ -42,6 +43,7 @@ pub struct JSONRPCRequest {
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

@@ -63,6 +65,7 @@ pub struct JSONRPCError {
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub data: Option<serde_json::Value>,
    pub message: String,
}
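A minimal sketch (hypothetical struct, not part of this diff) of the rule these `#[ts(optional)]` annotations enforce: a field may be optional or nullable in the emitted TypeScript, but not both.

use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

#[derive(Serialize, Deserialize, TS)]
struct Example {
    // With #[ts(optional)] ts-rs emits `note?: string`; without it the field
    // would come out as `note: string | null`. Combining the two shapes
    // ("?: T | null") is exactly what the generator test above rejects.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    note: Option<String>,
}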

@@ -6,4 +6,6 @@ pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::*;
pub use protocol::common::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
|
||||
|
||||
@@ -1,973 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
use crate::RequestId;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use paste::paste;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
|
||||
#[ts(type = "string")]
|
||||
pub struct GitSha(pub String);
|
||||
|
||||
impl GitSha {
|
||||
pub fn new(sha: &str) -> Self {
|
||||
Self(sha.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
}
|
||||
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
/// client can send to the server. Each variant has associated `params` and
|
||||
/// `response` types. Also generates a `export_client_responses()` function to
|
||||
/// export all response types to TypeScript.
|
||||
macro_rules! client_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident {
|
||||
params: $(#[$params_meta:meta])* $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
$(#[$params_meta])*
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<()> {
|
||||
$(
|
||||
crate::export::write_json_schema::<$response>(out_dir, stringify!($response))?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
client_request_definitions! {
|
||||
Initialize {
|
||||
params: InitializeParams,
|
||||
response: InitializeResponse,
|
||||
},
|
||||
NewConversation {
|
||||
params: NewConversationParams,
|
||||
response: NewConversationResponse,
|
||||
},
|
||||
/// List recorded Codex conversations (rollouts) with optional pagination and search.
|
||||
ListConversations {
|
||||
params: ListConversationsParams,
|
||||
response: ListConversationsResponse,
|
||||
},
|
||||
/// Resume a recorded Codex conversation from a rollout file.
|
||||
ResumeConversation {
|
||||
params: ResumeConversationParams,
|
||||
response: ResumeConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: ArchiveConversationParams,
|
||||
response: ArchiveConversationResponse,
|
||||
},
|
||||
SendUserMessage {
|
||||
params: SendUserMessageParams,
|
||||
response: SendUserMessageResponse,
|
||||
},
|
||||
SendUserTurn {
|
||||
params: SendUserTurnParams,
|
||||
response: SendUserTurnResponse,
|
||||
},
|
||||
InterruptConversation {
|
||||
params: InterruptConversationParams,
|
||||
response: InterruptConversationResponse,
|
||||
},
|
||||
AddConversationListener {
|
||||
params: AddConversationListenerParams,
|
||||
response: AddConversationSubscriptionResponse,
|
||||
},
|
||||
RemoveConversationListener {
|
||||
params: RemoveConversationListenerParams,
|
||||
response: RemoveConversationSubscriptionResponse,
|
||||
},
|
||||
GitDiffToRemote {
|
||||
params: GitDiffToRemoteParams,
|
||||
response: GitDiffToRemoteResponse,
|
||||
},
|
||||
LoginApiKey {
|
||||
params: LoginApiKeyParams,
|
||||
response: LoginApiKeyResponse,
|
||||
},
|
||||
LoginChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: LoginChatGptResponse,
|
||||
},
|
||||
CancelLoginChatGpt {
|
||||
params: CancelLoginChatGptParams,
|
||||
response: CancelLoginChatGptResponse,
|
||||
},
|
||||
LogoutChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: LogoutChatGptResponse,
|
||||
},
|
||||
GetAuthStatus {
|
||||
params: GetAuthStatusParams,
|
||||
response: GetAuthStatusResponse,
|
||||
},
|
||||
GetUserSavedConfig {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: GetUserSavedConfigResponse,
|
||||
},
|
||||
SetDefaultModel {
|
||||
params: SetDefaultModelParams,
|
||||
response: SetDefaultModelResponse,
|
||||
},
|
||||
GetUserAgent {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: GetUserAgentResponse,
|
||||
},
|
||||
UserInfo {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: UserInfoResponse,
|
||||
},
|
||||
FuzzyFileSearch {
|
||||
params: FuzzyFileSearchParams,
|
||||
response: FuzzyFileSearchResponse,
|
||||
},
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
ExecOneOffCommand {
|
||||
params: ExecOneOffCommandParams,
|
||||
response: ExecOneOffCommandResponse,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeParams {
|
||||
pub client_info: ClientInfo,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ClientInfo {
|
||||
pub name: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub title: Option<String>,
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationParams {
|
||||
/// Optional override for the model name (e.g. "o3", "o4-mini").
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model: Option<String>,
|
||||
|
||||
/// Configuration profile from config.toml to specify default options.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub profile: Option<String>,
|
||||
|
||||
/// Working directory for the session. If relative, it is resolved against
|
||||
/// the server process's current working directory.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cwd: Option<String>,
|
||||
|
||||
/// Approval policy for shell commands generated by the model:
|
||||
/// `untrusted`, `on-failure`, `on-request`, `never`.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
|
||||
/// Sandbox mode: `read-only`, `workspace-write`, or `danger-full-access`.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
|
||||
/// Individual config settings that will override what is in
|
||||
/// CODEX_HOME/config.toml.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
|
||||
/// The set of instructions to use instead of the default ones.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub base_instructions: Option<String>,
|
||||
|
||||
/// Whether to include the plan tool in the conversation.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub include_plan_tool: Option<bool>,
|
||||
|
||||
/// Whether to include the apply patch tool in the conversation.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
/// Note this could be ignored by the model.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsParams {
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub page_size: Option<usize>,
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ConversationSummary {
|
||||
pub conversation_id: ConversationId,
|
||||
pub path: PathBuf,
|
||||
pub preview: String,
|
||||
/// RFC3339 timestamp string for the session start, if available.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub timestamp: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsResponse {
|
||||
pub items: Vec<ConversationSummary>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// If None, there are no more items to return.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationParams {
|
||||
/// Absolute path to the rollout JSONL file.
|
||||
pub path: PathBuf,
|
||||
/// Optional overrides to apply when spawning the resumed session.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationSubscriptionResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
/// The [`ConversationId`] must match the `rollout_path`.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationSubscriptionResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyParams {
|
||||
pub api_key: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
pub auth_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteResponse {
|
||||
pub sha: GitSha,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptParams {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteParams {
|
||||
pub cwd: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusParams {
|
||||
/// If true, include the current auth token (if available) in the response.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub include_token: Option<bool>,
|
||||
/// If true, attempt to refresh the token before returning status.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub refresh_token: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandParams {
|
||||
/// Command argv to execute.
|
||||
pub command: Vec<String>,
|
||||
/// Timeout of the command in milliseconds.
|
||||
/// If not specified, a sensible default is used server-side.
|
||||
pub timeout_ms: Option<u64>,
|
||||
/// Optional working directory for the process. Defaults to server config cwd.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cwd: Option<PathBuf>,
|
||||
/// Optional explicit sandbox policy overriding the server default.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandResponse {
|
||||
pub exit_code: i32,
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusResponse {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub auth_method: Option<AuthMode>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub auth_token: Option<String>,
|
||||
|
||||
// Indicates that auth method must be valid to use the server.
|
||||
// This can be false if using a custom provider that is configured
|
||||
// with requires_openai_auth == false.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserAgentResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserInfoResponse {
|
||||
/// Note: `alleged_user_email` is not currently verified. We read it from
|
||||
/// the local auth.json, which the user could theoretically modify. In the
|
||||
/// future, we may add logic to verify the email against the server before
|
||||
/// returning it.
|
||||
pub alleged_user_email: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserSavedConfigResponse {
|
||||
pub config: UserSavedConfig,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelParams {
|
||||
/// If set to None, this means `model` should be cleared in config.toml.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model: Option<String>,
|
||||
/// If set to None, this means `model_reasoning_effort` should be cleared
|
||||
/// in config.toml.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelResponse {}
|
||||
|
||||
/// UserSavedConfig contains a subset of the config. It is meant to expose MCP
|
||||
/// client-configurable settings that can be specified in the NewConversation
|
||||
/// and SendUserTurn requests.
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserSavedConfig {
|
||||
/// Approvals
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sandbox_mode: Option<SandboxMode>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sandbox_settings: Option<SandboxSettings>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub forced_chatgpt_workspace_id: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub forced_login_method: Option<ForcedLoginMethod>,
|
||||
|
||||
/// Model-specific configuration
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
|
||||
/// Tools
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub tools: Option<Tools>,
|
||||
|
||||
/// Profiles
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub profile: Option<String>,
|
||||
#[serde(default)]
|
||||
pub profiles: HashMap<String, Profile>,
|
||||
}
|
||||
|
||||
/// MCP representation of a [`codex_core::config_profile::ConfigProfile`].
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Profile {
|
||||
pub model: Option<String>,
|
||||
/// The key in the `model_providers` map identifying the
|
||||
/// [`ModelProviderInfo`] to use.
|
||||
pub model_provider: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
/// MCP representation of a [`codex_core::config::ToolsToml`].
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Tools {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub web_search: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub view_image: Option<bool>,
|
||||
}
|
||||
|
||||
/// MCP representation of a [`codex_core::config_types::SandboxWorkspaceWrite`].
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SandboxSettings {
|
||||
#[serde(default)]
|
||||
pub writable_roots: Vec<PathBuf>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub network_access: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub exclude_tmpdir_env_var: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub exclude_slash_tmp: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
pub sandbox_policy: SandboxPolicy,
|
||||
pub model: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
pub summary: ReasoningSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationResponse {
|
||||
pub abort_reason: TurnAbortReason,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationListenerParams {
|
||||
pub conversation_id: ConversationId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationListenerParams {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
pub enum InputItem {
|
||||
Text {
|
||||
text: String,
|
||||
},
|
||||
/// Pre‑encoded data: URI image.
|
||||
Image {
|
||||
image_url: String,
|
||||
},
|
||||
|
||||
/// Local image path provided by the user. This will be converted to an
|
||||
/// `Image` variant (base64 data URL) during request serialization.
|
||||
LocalImage {
|
||||
path: PathBuf,
|
||||
},
|
||||
}
|
||||
|
||||
/// Generates an `enum ServerRequest` where each variant is a request that the
|
||||
/// server can send to the client along with the corresponding params and
|
||||
/// response types. It also generates helper types used by the app/server
|
||||
/// infrastructure (payload enum, request constructor, and export helpers).
|
||||
macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident
|
||||
),* $(,)?
|
||||
) => {
|
||||
paste! {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: [<$variant Params>],
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant([<$variant Params>]), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
paste! {
|
||||
$(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn export_server_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<()> {
|
||||
paste! {
|
||||
$(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCRequest> for ServerRequest {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
server_request_definitions! {
|
||||
/// Request to approve a patch.
|
||||
ApplyPatchApproval,
|
||||
/// Request to exec a command.
|
||||
ExecCommandApproval,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
|
||||
/// and [codex_core::protocol::PatchApplyEndEvent].
|
||||
pub call_id: String,
|
||||
pub file_changes: HashMap<PathBuf, FileChange>,
|
||||
/// Optional explanatory reason (e.g. request for extra write access).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reason: Option<String>,
|
||||
/// When set, the agent is asking the user to allow writes under this root
|
||||
/// for the remainder of the session (unclear if this is honored today).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub grant_root: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecCommandApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
|
||||
/// and [codex_core::protocol::ExecCommandEndEvent].
|
||||
pub call_id: String,
|
||||
pub command: Vec<String>,
|
||||
pub cwd: PathBuf,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reason: Option<String>,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ExecCommandApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ApplyPatchApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
pub struct FuzzyFileSearchParams {
|
||||
pub query: String,
|
||||
pub roots: Vec<String>,
|
||||
// If provided, cancels any previous request that used the same value.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cancellation_token: Option<String>,
|
||||
}
|
||||
|
||||
/// Superset of [`codex_file_search::FileMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResult {
|
||||
pub root: String,
|
||||
pub path: String,
|
||||
pub file_name: String,
|
||||
pub score: u32,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub indices: Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResponse {
|
||||
pub files: Vec<FuzzyFileSearchResult>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptCompleteNotification {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub success: bool,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SessionConfiguredNotification {
|
||||
/// Name left as session_id instead of conversation_id for backwards compatibility.
|
||||
pub session_id: ConversationId,
|
||||
|
||||
/// Tell the client what model is being queried.
|
||||
pub model: String,
|
||||
|
||||
/// The effort the model is putting into reasoning about the user's request.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
|
||||
/// Identifier of the history log file (inode on Unix, 0 otherwise).
|
||||
pub history_log_id: u64,
|
||||
|
||||
/// Current number of entries in the history log.
|
||||
#[ts(type = "number")]
|
||||
pub history_entry_count: usize,
|
||||
|
||||
/// Optional initial messages (as events) for resumed sessions.
|
||||
/// When present, UIs can use these to seed the history.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AuthStatusChangeNotification {
|
||||
/// Current authentication method; omitted if signed out.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub auth_method: Option<AuthMode>,
|
||||
}
|
||||
|
||||
/// Notification sent from the server to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ServerNotification {
|
||||
/// Authentication status changed
|
||||
AuthStatusChange(AuthStatusChangeNotification),
|
||||
|
||||
/// ChatGPT login flow completed
|
||||
LoginChatGptComplete(LoginChatGptCompleteNotification),
|
||||
|
||||
/// The special session configured event for a new or resumed conversation.
|
||||
SessionConfigured(SessionConfiguredNotification),
|
||||
}
|
||||
|
||||
impl ServerNotification {
|
||||
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
|
||||
match self {
|
||||
ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
|
||||
ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
|
||||
ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Notification sent from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ClientNotification {
|
||||
Initialized,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn serialize_new_conversation() -> Result<()> {
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: RequestId::Integer(42),
|
||||
params: NewConversationParams {
|
||||
model: Some("gpt-5-codex".to_string()),
|
||||
profile: None,
|
||||
cwd: None,
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
include_plan_tool: None,
|
||||
include_apply_patch_tool: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "newConversation",
|
||||
"id": 42,
|
||||
"params": {
|
||||
"model": "gpt-5-codex",
|
||||
"approvalPolicy": "on-request"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
serde_json::to_value(id)?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ConversationId =
|
||||
serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_client_notification() -> Result<()> {
|
||||
let notification = ClientNotification::Initialized;
|
||||
// Note there is no "params" field for this notification.
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "initialized",
|
||||
}),
|
||||
serde_json::to_value(¬ification)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: params.clone(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "execCommandApproval",
|
||||
"id": 7,
|
||||
"params": {
|
||||
"conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
|
||||
"callId": "call-42",
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::ExecCommandApproval(params);
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
798
codex-rs/app-server-protocol/src/protocol/common.rs
Normal file
@@ -0,0 +1,798 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
use crate::RequestId;
|
||||
use crate::export::GeneratedSchema;
|
||||
use crate::export::write_json_schema;
|
||||
use crate::protocol::v1;
|
||||
use crate::protocol::v2;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxCommandAssessment;
|
||||
use paste::paste;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
|
||||
#[ts(type = "string")]
|
||||
pub struct GitSha(pub String);
|
||||
|
||||
impl GitSha {
|
||||
pub fn new(sha: &str) -> Self {
|
||||
Self(sha.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
}
|
||||
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
/// client can send to the server. Each variant has associated `params` and
|
||||
/// `response` types. Also generates a `export_client_responses()` function to
|
||||
/// export all response types to TypeScript.
|
||||
macro_rules! client_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $(#[$params_meta:meta])* $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
$(#[$params_meta])*
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_param_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
client_request_definitions! {
|
||||
Initialize {
|
||||
params: v1::InitializeParams,
|
||||
response: v1::InitializeResponse,
|
||||
},
|
||||
|
||||
/// NEW APIs
|
||||
// Thread lifecycle
|
||||
ThreadStart => "thread/start" {
|
||||
params: v2::ThreadStartParams,
|
||||
response: v2::ThreadStartResponse,
|
||||
},
|
||||
ThreadResume => "thread/resume" {
|
||||
params: v2::ThreadResumeParams,
|
||||
response: v2::ThreadResumeResponse,
|
||||
},
|
||||
ThreadArchive => "thread/archive" {
|
||||
params: v2::ThreadArchiveParams,
|
||||
response: v2::ThreadArchiveResponse,
|
||||
},
|
||||
ThreadList => "thread/list" {
|
||||
params: v2::ThreadListParams,
|
||||
response: v2::ThreadListResponse,
|
||||
},
|
||||
ThreadCompact => "thread/compact" {
|
||||
params: v2::ThreadCompactParams,
|
||||
response: v2::ThreadCompactResponse,
|
||||
},
|
||||
TurnStart => "turn/start" {
|
||||
params: v2::TurnStartParams,
|
||||
response: v2::TurnStartResponse,
|
||||
},
|
||||
TurnInterrupt => "turn/interrupt" {
|
||||
params: v2::TurnInterruptParams,
|
||||
response: v2::TurnInterruptResponse,
|
||||
},
|
||||
|
||||
ModelList => "model/list" {
|
||||
params: v2::ModelListParams,
|
||||
response: v2::ModelListResponse,
|
||||
},
|
||||
|
||||
LoginAccount => "account/login/start" {
|
||||
params: v2::LoginAccountParams,
|
||||
response: v2::LoginAccountResponse,
|
||||
},
|
||||
|
||||
CancelLoginAccount => "account/login/cancel" {
|
||||
params: v2::CancelLoginAccountParams,
|
||||
response: v2::CancelLoginAccountResponse,
|
||||
},
|
||||
|
||||
LogoutAccount => "account/logout" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::LogoutAccountResponse,
|
||||
},
|
||||
|
||||
GetAccountRateLimits => "account/rateLimits/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::GetAccountRateLimitsResponse,
|
||||
},
|
||||
|
||||
FeedbackUpload => "feedback/upload" {
|
||||
params: v2::FeedbackUploadParams,
|
||||
response: v2::FeedbackUploadResponse,
|
||||
},
|
||||
|
||||
GetAccount => "account/read" {
|
||||
params: v2::GetAccountParams,
|
||||
response: v2::GetAccountResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
NewConversation {
|
||||
params: v1::NewConversationParams,
|
||||
response: v1::NewConversationResponse,
|
||||
},
|
||||
GetConversationSummary {
|
||||
params: v1::GetConversationSummaryParams,
|
||||
response: v1::GetConversationSummaryResponse,
|
||||
},
|
||||
/// List recorded Codex conversations (rollouts) with optional pagination and search.
|
||||
ListConversations {
|
||||
params: v1::ListConversationsParams,
|
||||
response: v1::ListConversationsResponse,
|
||||
},
|
||||
/// Resume a recorded Codex conversation from a rollout file.
|
||||
ResumeConversation {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
},
|
||||
SendUserMessage {
|
||||
params: v1::SendUserMessageParams,
|
||||
response: v1::SendUserMessageResponse,
|
||||
},
|
||||
SendUserTurn {
|
||||
params: v1::SendUserTurnParams,
|
||||
response: v1::SendUserTurnResponse,
|
||||
},
|
||||
InterruptConversation {
|
||||
params: v1::InterruptConversationParams,
|
||||
response: v1::InterruptConversationResponse,
|
||||
},
|
||||
AddConversationListener {
|
||||
params: v1::AddConversationListenerParams,
|
||||
response: v1::AddConversationSubscriptionResponse,
|
||||
},
|
||||
RemoveConversationListener {
|
||||
params: v1::RemoveConversationListenerParams,
|
||||
response: v1::RemoveConversationSubscriptionResponse,
|
||||
},
|
||||
GitDiffToRemote {
|
||||
params: v1::GitDiffToRemoteParams,
|
||||
response: v1::GitDiffToRemoteResponse,
|
||||
},
|
||||
LoginApiKey {
|
||||
params: v1::LoginApiKeyParams,
|
||||
response: v1::LoginApiKeyResponse,
|
||||
},
|
||||
LoginChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LoginChatGptResponse,
|
||||
},
|
||||
// DEPRECATED in favor of CancelLoginAccount
|
||||
CancelLoginChatGpt {
|
||||
params: v1::CancelLoginChatGptParams,
|
||||
response: v1::CancelLoginChatGptResponse,
|
||||
},
|
||||
LogoutChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LogoutChatGptResponse,
|
||||
},
|
||||
/// DEPRECATED in favor of GetAccount
|
||||
GetAuthStatus {
|
||||
params: v1::GetAuthStatusParams,
|
||||
response: v1::GetAuthStatusResponse,
|
||||
},
|
||||
GetUserSavedConfig {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserSavedConfigResponse,
|
||||
},
|
||||
SetDefaultModel {
|
||||
params: v1::SetDefaultModelParams,
|
||||
response: v1::SetDefaultModelResponse,
|
||||
},
|
||||
GetUserAgent {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserAgentResponse,
|
||||
},
|
||||
UserInfo {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::UserInfoResponse,
|
||||
},
|
||||
FuzzyFileSearch {
|
||||
params: FuzzyFileSearchParams,
|
||||
response: FuzzyFileSearchResponse,
|
||||
},
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
ExecOneOffCommand {
|
||||
params: v1::ExecOneOffCommandParams,
|
||||
response: v1::ExecOneOffCommandResponse,
|
||||
},
|
||||
}
|
||||
|
||||
/// Generates an `enum ServerRequest` where each variant is a request that the
|
||||
/// server can send to the client along with the corresponding params and
|
||||
/// response types. It also generates helper types used by the app/server
|
||||
/// infrastructure (payload enum, request constructor, and export helpers).
|
||||
macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident
|
||||
),* $(,)?
|
||||
) => {
|
||||
paste! {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: [<$variant Params>],
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant([<$variant Params>]), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
paste! {
|
||||
$(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_response_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
paste! {
|
||||
$(schemas.push(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?);)*
|
||||
}
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_param_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
paste! {
|
||||
$(schemas.push(crate::export::write_json_schema::<[<$variant Params>]>(out_dir, stringify!([<$variant Params>]))?);)*
|
||||
}
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Generates `ServerNotification` enum and helpers, including a JSON Schema
|
||||
/// exporter for each notification.
|
||||
macro_rules! server_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? ( $payload:ty )
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Notification sent from the server to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ServerNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
|
||||
$variant($payload),
|
||||
)*
|
||||
}
|
||||
|
||||
impl ServerNotification {
|
||||
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
|
||||
match self {
|
||||
$(Self::$variant(params) => serde_json::to_value(params),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_notification_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
/// Notifications sent from the client to the server.
|
||||
macro_rules! client_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $( ( $payload:ty ) )?
|
||||
),* $(,)?
|
||||
) => {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ClientNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant $( ( $payload ) )?,
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_notification_schemas(
|
||||
_out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let schemas = Vec::new();
|
||||
$( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCRequest> for ServerRequest {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
server_request_definitions! {
|
||||
/// Request to approve a patch.
|
||||
ApplyPatchApproval,
|
||||
/// Request to exec a command.
|
||||
ExecCommandApproval,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
|
||||
/// and [codex_core::protocol::PatchApplyEndEvent].
|
||||
pub call_id: String,
|
||||
pub file_changes: HashMap<PathBuf, FileChange>,
|
||||
/// Optional explanatory reason (e.g. request for extra write access).
|
||||
pub reason: Option<String>,
|
||||
/// When set, the agent is asking the user to allow writes under this root
|
||||
/// for the remainder of the session (unclear if this is honored today).
|
||||
pub grant_root: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecCommandApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
|
||||
/// and [codex_core::protocol::ExecCommandEndEvent].
|
||||
pub call_id: String,
|
||||
pub command: Vec<String>,
|
||||
pub cwd: PathBuf,
|
||||
pub reason: Option<String>,
|
||||
pub risk: Option<SandboxCommandAssessment>,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ExecCommandApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ApplyPatchApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
pub struct FuzzyFileSearchParams {
|
||||
pub query: String,
|
||||
pub roots: Vec<String>,
|
||||
// if provided, will cancel any previous request that used the same value
|
||||
pub cancellation_token: Option<String>,
|
||||
}
|
||||
|
||||
/// Superset of [`codex_file_search::FileMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResult {
|
||||
pub root: String,
|
||||
pub path: String,
|
||||
pub file_name: String,
|
||||
pub score: u32,
|
||||
pub indices: Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResponse {
|
||||
pub files: Vec<FuzzyFileSearchResult>,
|
||||
}
|
||||
|
||||
server_notification_definitions! {
|
||||
/// NEW NOTIFICATIONS
|
||||
ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
|
||||
TurnStarted => "turn/started" (v2::TurnStartedNotification),
|
||||
TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
|
||||
ItemStarted => "item/started" (v2::ItemStartedNotification),
|
||||
ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
|
||||
AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
|
||||
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
|
||||
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
|
||||
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
|
||||
AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
|
||||
|
||||
#[serde(rename = "account/login/completed")]
|
||||
#[ts(rename = "account/login/completed")]
|
||||
#[strum(serialize = "account/login/completed")]
|
||||
AccountLoginCompleted(v2::AccountLoginCompletedNotification),
|
||||
|
||||
/// DEPRECATED NOTIFICATIONS below
|
||||
AuthStatusChange(v1::AuthStatusChangeNotification),
|
||||
|
||||
/// Deprecated: use `account/login/completed` instead.
|
||||
LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
|
||||
SessionConfigured(v1::SessionConfiguredNotification),
|
||||
}
|
||||
|
||||
client_notification_definitions! {
|
||||
Initialized,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn serialize_new_conversation() -> Result<()> {
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: RequestId::Integer(42),
|
||||
params: v1::NewConversationParams {
|
||||
model: Some("gpt-5-codex".to_string()),
|
||||
model_provider: None,
|
||||
profile: None,
|
||||
cwd: None,
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
compact_prompt: None,
|
||||
include_apply_patch_tool: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "newConversation",
|
||||
"id": 42,
|
||||
"params": {
|
||||
"model": "gpt-5-codex",
|
||||
"modelProvider": null,
|
||||
"profile": null,
|
||||
"cwd": null,
|
||||
"approvalPolicy": "on-request",
|
||||
"sandbox": null,
|
||||
"config": null,
|
||||
"baseInstructions": null,
|
||||
"includeApplyPatchTool": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
serde_json::to_value(id)?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ConversationId =
|
||||
serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_client_notification() -> Result<()> {
|
||||
let notification = ClientNotification::Initialized;
|
||||
// Note there is no "params" field for this notification.
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "initialized",
|
||||
}),
|
||||
serde_json::to_value(¬ification)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
risk: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: params.clone(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "execCommandApproval",
|
||||
"id": 7,
|
||||
"params": {
|
||||
"conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
|
||||
"callId": "call-42",
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"risk": null,
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::ExecCommandApproval(params);
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account_rate_limits() -> Result<()> {
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_api_key() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(2),
|
||||
params: v2::LoginAccountParams::ApiKey {
|
||||
api_key: "secret".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 2,
|
||||
"params": {
|
||||
"type": "apiKey",
|
||||
"apiKey": "secret"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_chatgpt() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(3),
|
||||
params: v2::LoginAccountParams::Chatgpt,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 3,
|
||||
"params": {
|
||||
"type": "chatgpt"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_logout() -> Result<()> {
|
||||
let request = ClientRequest::LogoutAccount {
|
||||
request_id: RequestId::Integer(4),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/logout",
|
||||
"id": 4,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account() -> Result<()> {
|
||||
let request = ClientRequest::GetAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
params: v2::GetAccountParams {
|
||||
refresh_token: false,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/read",
|
||||
"id": 5,
|
||||
"params": {
|
||||
"refreshToken": false
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn account_serializes_fields_in_camel_case() -> Result<()> {
|
||||
let api_key = v2::Account::ApiKey {};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "apiKey",
|
||||
}),
|
||||
serde_json::to_value(&api_key)?,
|
||||
);
|
||||
|
||||
let chatgpt = v2::Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: PlanType::Plus,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "chatgpt",
|
||||
"email": "user@example.com",
|
||||
"planType": "plus",
|
||||
}),
|
||||
serde_json::to_value(&chatgpt)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_list_models() -> Result<()> {
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::ModelListParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "model/list",
|
||||
"id": 6,
|
||||
"params": {
|
||||
"limit": null,
|
||||
"cursor": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
6
codex-rs/app-server-protocol/src/protocol/mod.rs
Normal file
6
codex-rs/app-server-protocol/src/protocol/mod.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
// Module declarations for the app-server protocol namespace.
|
||||
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
|
||||
|
||||
pub mod common;
|
||||
pub mod v1;
|
||||
pub mod v2;
|
||||
418
codex-rs/app-server-protocol/src/protocol/v1.rs
Normal file
418
codex-rs/app-server-protocol/src/protocol/v1.rs
Normal file
@@ -0,0 +1,418 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
// Reuse shared types defined in `common.rs`.
|
||||
use crate::protocol::common::AuthMode;
|
||||
use crate::protocol::common::GitSha;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeParams {
|
||||
pub client_info: ClientInfo,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ClientInfo {
|
||||
pub name: String,
|
||||
pub title: Option<String>,
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationParams {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub developer_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub compact_prompt: Option<String>,
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(untagged)]
|
||||
pub enum GetConversationSummaryParams {
|
||||
RolloutPath {
|
||||
#[serde(rename = "rolloutPath")]
|
||||
rollout_path: PathBuf,
|
||||
},
|
||||
ConversationId {
|
||||
#[serde(rename = "conversationId")]
|
||||
conversation_id: ConversationId,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetConversationSummaryResponse {
|
||||
pub summary: ConversationSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsParams {
|
||||
pub page_size: Option<usize>,
|
||||
pub cursor: Option<String>,
|
||||
pub model_providers: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ConversationSummary {
|
||||
pub conversation_id: ConversationId,
|
||||
pub path: PathBuf,
|
||||
pub preview: String,
|
||||
pub timestamp: Option<String>,
|
||||
pub model_provider: String,
|
||||
pub cwd: PathBuf,
|
||||
pub cli_version: String,
|
||||
pub source: SessionSource,
|
||||
pub git_info: Option<ConversationGitInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub struct ConversationGitInfo {
|
||||
pub sha: Option<String>,
|
||||
pub branch: Option<String>,
|
||||
pub origin_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsResponse {
|
||||
pub items: Vec<ConversationSummary>,
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
pub history: Option<Vec<ResponseItem>>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationSubscriptionResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationSubscriptionResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyParams {
|
||||
pub api_key: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub auth_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteResponse {
|
||||
pub sha: GitSha,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptParams {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteParams {
|
||||
pub cwd: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusParams {
|
||||
pub include_token: Option<bool>,
|
||||
pub refresh_token: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandParams {
|
||||
pub command: Vec<String>,
|
||||
pub timeout_ms: Option<u64>,
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandResponse {
|
||||
pub exit_code: i32,
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusResponse {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
pub auth_token: Option<String>,
|
||||
pub requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserAgentResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserInfoResponse {
|
||||
pub alleged_user_email: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserSavedConfigResponse {
|
||||
pub config: UserSavedConfig,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelParams {
|
||||
pub model: Option<String>,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelResponse {}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserSavedConfig {
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox_mode: Option<SandboxMode>,
|
||||
pub sandbox_settings: Option<SandboxSettings>,
|
||||
pub forced_chatgpt_workspace_id: Option<String>,
|
||||
pub forced_login_method: Option<ForcedLoginMethod>,
|
||||
pub model: Option<String>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub tools: Option<Tools>,
|
||||
pub profile: Option<String>,
|
||||
pub profiles: HashMap<String, Profile>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Profile {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Tools {
|
||||
pub web_search: Option<bool>,
|
||||
pub view_image: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SandboxSettings {
|
||||
#[serde(default)]
|
||||
pub writable_roots: Vec<PathBuf>,
|
||||
pub network_access: Option<bool>,
|
||||
pub exclude_tmpdir_env_var: Option<bool>,
|
||||
pub exclude_slash_tmp: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub items: Vec<InputItem>,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
pub sandbox_policy: SandboxPolicy,
|
||||
pub model: String,
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
pub summary: ReasoningSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationResponse {
|
||||
pub abort_reason: TurnAbortReason,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationListenerParams {
|
||||
pub conversation_id: ConversationId,
|
||||
#[serde(default)]
|
||||
pub experimental_raw_events: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationListenerParams {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
pub enum InputItem {
|
||||
Text { text: String },
|
||||
Image { image_url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated in favor of AccountLoginCompletedNotification.
|
||||
pub struct LoginChatGptCompleteNotification {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SessionConfiguredNotification {
|
||||
pub session_id: ConversationId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub history_log_id: u64,
|
||||
#[ts(type = "number")]
|
||||
pub history_entry_count: usize,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated notification. Use AccountUpdatedNotification instead.
|
||||
pub struct AuthStatusChangeNotification {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
}
|
||||
856
codex-rs/app-server-protocol/src/protocol/v2.rs
Normal file
856
codex-rs/app-server-protocol/src/protocol/v2.rs
Normal file
@@ -0,0 +1,856 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::protocol::common::AuthMode;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
|
||||
use codex_protocol::items::TurnItem as CoreTurnItem;
|
||||
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use mcp_types::ContentBlock as McpContentBlock;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value as JsonValue;
|
||||
use ts_rs::TS;
|
||||
|
||||
// Macro to declare a camelCased API v2 enum mirroring a core enum which
|
||||
// tends to use kebab-case.
|
||||
macro_rules! v2_enum_from_core {
|
||||
(
|
||||
pub enum $Name:ident from $Src:path { $( $Variant:ident ),+ $(,)? }
|
||||
) => {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum $Name { $( $Variant ),+ }
|
||||
|
||||
impl $Name {
|
||||
pub fn to_core(self) -> $Src {
|
||||
match self { $( $Name::$Variant => <$Src>::$Variant ),+ }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<$Src> for $Name {
|
||||
fn from(value: $Src) -> Self {
|
||||
match value { $( <$Src>::$Variant => $Name::$Variant ),+ }
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
v2_enum_from_core!(
|
||||
pub enum AskForApproval from codex_protocol::protocol::AskForApproval {
|
||||
UnlessTrusted, OnFailure, OnRequest, Never
|
||||
}
|
||||
);
|
||||
|
||||
v2_enum_from_core!(
|
||||
pub enum SandboxMode from codex_protocol::config_types::SandboxMode {
|
||||
ReadOnly, WorkspaceWrite, DangerFullAccess
|
||||
}
|
||||
);
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(tag = "mode", rename_all = "camelCase")]
|
||||
#[ts(tag = "mode")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum SandboxPolicy {
|
||||
DangerFullAccess,
|
||||
ReadOnly,
|
||||
WorkspaceWrite {
|
||||
#[serde(default)]
|
||||
writable_roots: Vec<PathBuf>,
|
||||
#[serde(default)]
|
||||
network_access: bool,
|
||||
#[serde(default)]
|
||||
exclude_tmpdir_env_var: bool,
|
||||
#[serde(default)]
|
||||
exclude_slash_tmp: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl SandboxPolicy {
|
||||
pub fn to_core(&self) -> codex_protocol::protocol::SandboxPolicy {
|
||||
match self {
|
||||
SandboxPolicy::DangerFullAccess => {
|
||||
codex_protocol::protocol::SandboxPolicy::DangerFullAccess
|
||||
}
|
||||
SandboxPolicy::ReadOnly => codex_protocol::protocol::SandboxPolicy::ReadOnly,
|
||||
SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
} => codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: writable_roots.clone(),
|
||||
network_access: *network_access,
|
||||
exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp: *exclude_slash_tmp,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
|
||||
fn from(value: codex_protocol::protocol::SandboxPolicy) -> Self {
|
||||
match value {
|
||||
codex_protocol::protocol::SandboxPolicy::DangerFullAccess => {
|
||||
SandboxPolicy::DangerFullAccess
|
||||
}
|
||||
codex_protocol::protocol::SandboxPolicy::ReadOnly => SandboxPolicy::ReadOnly,
|
||||
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
} => SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots,
|
||||
network_access,
|
||||
exclude_tmpdir_env_var,
|
||||
exclude_slash_tmp,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum Account {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey {},
|
||||
|
||||
#[serde(rename = "chatgpt", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgpt", rename_all = "camelCase")]
|
||||
Chatgpt { email: String, plan_type: PlanType },
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum LoginAccountParams {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey {
|
||||
#[serde(rename = "apiKey")]
|
||||
#[ts(rename = "apiKey")]
|
||||
api_key: String,
|
||||
},
|
||||
#[serde(rename = "chatgpt")]
|
||||
#[ts(rename = "chatgpt")]
|
||||
Chatgpt,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum LoginAccountResponse {
|
||||
#[serde(rename = "apiKey", rename_all = "camelCase")]
|
||||
#[ts(rename = "apiKey", rename_all = "camelCase")]
|
||||
ApiKey {},
|
||||
#[serde(rename = "chatgpt", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgpt", rename_all = "camelCase")]
|
||||
Chatgpt {
|
||||
// Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
|
||||
// Convert to/from UUIDs at the application layer as needed.
|
||||
login_id: String,
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
auth_url: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CancelLoginAccountParams {
|
||||
pub login_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CancelLoginAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct LogoutAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountRateLimitsResponse {
|
||||
pub rate_limits: RateLimitSnapshot,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountParams {
|
||||
#[serde(default)]
|
||||
pub refresh_token: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountResponse {
|
||||
pub account: Option<Account>,
|
||||
pub requires_openai_auth: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ModelListParams {
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub limit: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Model {
|
||||
pub id: String,
|
||||
pub model: String,
|
||||
pub display_name: String,
|
||||
pub description: String,
|
||||
pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
|
||||
pub default_reasoning_effort: ReasoningEffort,
|
||||
// Only one model should be marked as default.
|
||||
pub is_default: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ReasoningEffortOption {
|
||||
pub reasoning_effort: ReasoningEffort,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ModelListResponse {
|
||||
pub data: Vec<Model>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// If None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FeedbackUploadParams {
|
||||
pub classification: String,
|
||||
pub reason: Option<String>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
pub include_logs: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FeedbackUploadResponse {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
// === Threads, Turns, and Items ===
|
||||
// Thread APIs
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartParams {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
pub developer_instructions: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartResponse {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadResumeParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadResumeResponse {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadArchiveParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadArchiveResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadListParams {
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
/// Optional page size; defaults to a reasonable server-side value.
|
||||
pub limit: Option<u32>,
|
||||
/// Optional provider filter; when set, only sessions recorded under these
|
||||
/// providers are returned. When present but empty, includes all providers.
|
||||
pub model_providers: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadListResponse {
|
||||
pub data: Vec<Thread>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// if None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadCompactParams {
|
||||
pub thread_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadCompactResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Thread {
|
||||
pub id: String,
|
||||
/// Usually the first user message in the thread, if available.
|
||||
pub preview: String,
|
||||
pub model_provider: String,
|
||||
/// Unix timestamp (in seconds) when the thread was created.
|
||||
pub created_at: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountUpdatedNotification {
|
||||
pub auth_mode: Option<AuthMode>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Turn {
|
||||
pub id: String,
|
||||
pub items: Vec<ThreadItem>,
|
||||
pub status: TurnStatus,
|
||||
pub error: Option<TurnError>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnError {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum TurnStatus {
|
||||
Completed,
|
||||
Interrupted,
|
||||
Failed,
|
||||
InProgress,
|
||||
}
|
||||
|
||||
// Turn APIs
|
||||
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartParams {
|
||||
pub thread_id: String,
|
||||
pub input: Vec<UserInput>,
|
||||
/// Override the working directory for this turn and subsequent turns.
|
||||
pub cwd: Option<PathBuf>,
|
||||
/// Override the approval policy for this turn and subsequent turns.
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
/// Override the sandbox policy for this turn and subsequent turns.
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
/// Override the model for this turn and subsequent turns.
|
||||
pub model: Option<String>,
|
||||
/// Override the reasoning effort for this turn and subsequent turns.
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
/// Override the reasoning summary for this turn and subsequent turns.
|
||||
pub summary: Option<ReasoningSummary>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartResponse {
|
||||
pub turn: Turn,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnInterruptParams {
|
||||
pub thread_id: String,
|
||||
pub turn_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnInterruptResponse {}
|
||||
|
||||
// User input types
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum UserInput {
|
||||
Text { text: String },
|
||||
Image { url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
}
|
||||
|
||||
impl UserInput {
|
||||
pub fn into_core(self) -> CoreUserInput {
|
||||
match self {
|
||||
UserInput::Text { text } => CoreUserInput::Text { text },
|
||||
UserInput::Image { url } => CoreUserInput::Image { image_url: url },
|
||||
UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreUserInput> for UserInput {
|
||||
fn from(value: CoreUserInput) -> Self {
|
||||
match value {
|
||||
CoreUserInput::Text { text } => UserInput::Text { text },
|
||||
CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
|
||||
CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
|
||||
_ => unreachable!("unsupported user input variant"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum ThreadItem {
|
||||
UserMessage {
|
||||
id: String,
|
||||
content: Vec<UserInput>,
|
||||
},
|
||||
AgentMessage {
|
||||
id: String,
|
||||
text: String,
|
||||
},
|
||||
Reasoning {
|
||||
id: String,
|
||||
text: String,
|
||||
},
|
||||
CommandExecution {
|
||||
id: String,
|
||||
command: String,
|
||||
aggregated_output: String,
|
||||
exit_code: Option<i32>,
|
||||
status: CommandExecutionStatus,
|
||||
duration_ms: Option<i64>,
|
||||
},
|
||||
FileChange {
|
||||
id: String,
|
||||
changes: Vec<FileUpdateChange>,
|
||||
status: PatchApplyStatus,
|
||||
},
|
||||
McpToolCall {
|
||||
id: String,
|
||||
server: String,
|
||||
tool: String,
|
||||
status: McpToolCallStatus,
|
||||
arguments: JsonValue,
|
||||
result: Option<McpToolCallResult>,
|
||||
error: Option<McpToolCallError>,
|
||||
},
|
||||
WebSearch {
|
||||
id: String,
|
||||
query: String,
|
||||
},
|
||||
TodoList {
|
||||
id: String,
|
||||
items: Vec<TodoItem>,
|
||||
},
|
||||
ImageView {
|
||||
id: String,
|
||||
path: String,
|
||||
},
|
||||
CodeReview {
|
||||
id: String,
|
||||
review: String,
|
||||
},
|
||||
}
|
||||
|
||||
impl From<CoreTurnItem> for ThreadItem {
|
||||
fn from(value: CoreTurnItem) -> Self {
|
||||
match value {
|
||||
CoreTurnItem::UserMessage(user) => ThreadItem::UserMessage {
|
||||
id: user.id,
|
||||
content: user.content.into_iter().map(UserInput::from).collect(),
|
||||
},
|
||||
CoreTurnItem::AgentMessage(agent) => {
|
||||
let text = agent
|
||||
.content
|
||||
.into_iter()
|
||||
.map(|entry| match entry {
|
||||
CoreAgentMessageContent::Text { text } => text,
|
||||
})
|
||||
.collect::<String>();
|
||||
ThreadItem::AgentMessage { id: agent.id, text }
|
||||
}
|
||||
CoreTurnItem::Reasoning(reasoning) => {
|
||||
let text = if !reasoning.summary_text.is_empty() {
|
||||
reasoning.summary_text.join("\n")
|
||||
} else {
|
||||
reasoning.raw_content.join("\n")
|
||||
};
|
||||
ThreadItem::Reasoning {
|
||||
id: reasoning.id,
|
||||
text,
|
||||
}
|
||||
}
|
||||
CoreTurnItem::WebSearch(search) => ThreadItem::WebSearch {
|
||||
id: search.id,
|
||||
query: search.query,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum CommandExecutionStatus {
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FileUpdateChange {
|
||||
pub path: String,
|
||||
pub kind: PatchChangeKind,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PatchChangeKind {
|
||||
Add,
|
||||
Delete,
|
||||
Update,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PatchApplyStatus {
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum McpToolCallStatus {
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallResult {
|
||||
pub content: Vec<McpContentBlock>,
|
||||
pub structured_content: JsonValue,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallError {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TodoItem {
|
||||
pub id: String,
|
||||
pub text: String,
|
||||
pub completed: bool,
|
||||
}
|
||||
|
||||
// === Server Notifications ===
|
||||
// Thread/Turn lifecycle notifications and item progress events
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadStartedNotification {
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnStartedNotification {
|
||||
pub turn: Turn,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Usage {
|
||||
pub input_tokens: i32,
|
||||
pub cached_input_tokens: i32,
|
||||
pub output_tokens: i32,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TurnCompletedNotification {
|
||||
pub turn: Turn,
|
||||
// TODO: should usage be stored on the Turn object, and we return that instead?
|
||||
pub usage: Usage,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ItemStartedNotification {
|
||||
pub item: ThreadItem,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ItemCompletedNotification {
|
||||
pub item: ThreadItem,
|
||||
}
|
||||
|
||||
// Item-specific progress notifications
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AgentMessageDeltaNotification {
|
||||
pub item_id: String,
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CommandExecutionOutputDeltaNotification {
|
||||
pub item_id: String,
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpToolCallProgressNotification {
|
||||
pub item_id: String,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountRateLimitsUpdatedNotification {
|
||||
pub rate_limits: RateLimitSnapshot,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RateLimitSnapshot {
|
||||
pub primary: Option<RateLimitWindow>,
|
||||
pub secondary: Option<RateLimitWindow>,
|
||||
}
|
||||
|
||||
impl From<CoreRateLimitSnapshot> for RateLimitSnapshot {
|
||||
fn from(value: CoreRateLimitSnapshot) -> Self {
|
||||
Self {
|
||||
primary: value.primary.map(RateLimitWindow::from),
|
||||
secondary: value.secondary.map(RateLimitWindow::from),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RateLimitWindow {
|
||||
pub used_percent: i32,
|
||||
pub window_duration_mins: Option<i64>,
|
||||
pub resets_at: Option<i64>,
|
||||
}
|
||||
|
||||
impl From<CoreRateLimitWindow> for RateLimitWindow {
|
||||
fn from(value: CoreRateLimitWindow) -> Self {
|
||||
Self {
|
||||
used_percent: value.used_percent.round() as i32,
|
||||
window_duration_mins: value.window_minutes,
|
||||
resets_at: value.resets_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AccountLoginCompletedNotification {
|
||||
// Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
|
||||
// Convert to/from UUIDs at the application layer as needed.
|
||||
pub login_id: Option<String>,
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_protocol::items::AgentMessageContent;
|
||||
use codex_protocol::items::AgentMessageItem;
|
||||
use codex_protocol::items::ReasoningItem;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::items::UserMessageItem;
|
||||
use codex_protocol::items::WebSearchItem;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn core_turn_item_into_thread_item_converts_supported_variants() {
|
||||
let user_item = TurnItem::UserMessage(UserMessageItem {
|
||||
id: "user-1".to_string(),
|
||||
content: vec![
|
||||
CoreUserInput::Text {
|
||||
text: "hello".to_string(),
|
||||
},
|
||||
CoreUserInput::Image {
|
||||
image_url: "https://example.com/image.png".to_string(),
|
||||
},
|
||||
CoreUserInput::LocalImage {
|
||||
path: PathBuf::from("local/image.png"),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
ThreadItem::from(user_item),
|
||||
ThreadItem::UserMessage {
|
||||
id: "user-1".to_string(),
|
||||
content: vec![
|
||||
UserInput::Text {
|
||||
text: "hello".to_string(),
|
||||
},
|
||||
UserInput::Image {
|
||||
url: "https://example.com/image.png".to_string(),
|
||||
},
|
||||
UserInput::LocalImage {
|
||||
path: PathBuf::from("local/image.png"),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
|
||||
let agent_item = TurnItem::AgentMessage(AgentMessageItem {
|
||||
id: "agent-1".to_string(),
|
||||
content: vec![
|
||||
AgentMessageContent::Text {
|
||||
text: "Hello ".to_string(),
|
||||
},
|
||||
AgentMessageContent::Text {
|
||||
text: "world".to_string(),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
ThreadItem::from(agent_item),
|
||||
ThreadItem::AgentMessage {
|
||||
id: "agent-1".to_string(),
|
||||
text: "Hello world".to_string(),
|
||||
}
|
||||
);
|
||||
|
||||
let reasoning_item = TurnItem::Reasoning(ReasoningItem {
|
||||
id: "reasoning-1".to_string(),
|
||||
summary_text: vec!["line one".to_string(), "line two".to_string()],
|
||||
raw_content: vec![],
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
ThreadItem::from(reasoning_item),
|
||||
ThreadItem::Reasoning {
|
||||
id: "reasoning-1".to_string(),
|
||||
text: "line one\nline two".to_string(),
|
||||
}
|
||||
);
|
||||
|
||||
let search_item = TurnItem::WebSearch(WebSearchItem {
|
||||
id: "search-1".to_string(),
|
||||
query: "docs".to_string(),
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
ThreadItem::from(search_item),
|
||||
ThreadItem::WebSearch {
|
||||
id: "search-1".to_string(),
|
||||
query: "docs".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -19,11 +19,14 @@ anyhow = { workspace = true }
|
||||
codex-arg0 = { workspace = true }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
codex-backend-client = { workspace = true }
|
||||
codex-file-search = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-feedback = { workspace = true }
|
||||
codex-utils-json-to-toml = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
@@ -35,6 +38,7 @@ tokio = { workspace = true, features = [
|
||||
] }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||
opentelemetry-appender-tracing = { workspace = true }
|
||||
uuid = { workspace = true, features = ["serde", "v7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
@@ -44,6 +48,7 @@ base64 = { workspace = true }
|
||||
core_test_support = { workspace = true }
|
||||
os_info = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
serial_test = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
# codex-app-server

`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.

## Protocol

@@ -8,8 +8,253 @@ Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports

## Message Schema

Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.

```
codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```

## Initialization

Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.

Example:

```json
{ "method": "initialize", "id": 0, "params": {
  "clientInfo": { "name": "codex-vscode", "title": "Codex VS Code Extension", "version": "0.1.0" }
} }
{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 codex-vscode/0.1.0" } }
{ "method": "initialized" }
```

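If you are hand-rolling a client, the handshake is only a few lines of code. The sketch below is illustrative, not normative: it assumes the server is started as `codex app-server` and that messages are exchanged as newline-delimited JSON-RPC over stdin/stdout (the framing the examples in this document use); adapt the transport to however your client actually hosts the process.

```ts
// Minimal handshake sketch (assumptions: a `codex` binary on PATH,
// newline-delimited JSON-RPC over stdio). Not an official client.
import { spawn } from "node:child_process";
import { createInterface } from "node:readline";

const server = spawn("codex", ["app-server"], { stdio: ["pipe", "pipe", "inherit"] });
const lines = createInterface({ input: server.stdout! });

const pending = new Map<number, (result: any) => void>();
let nextId = 0;

function send(msg: unknown): void {
  server.stdin!.write(JSON.stringify(msg) + "\n");
}

function request(method: string, params?: unknown): Promise<any> {
  const id = nextId++;
  send({ method, id, params });
  return new Promise((resolve) => pending.set(id, resolve));
}

lines.on("line", (line) => {
  const msg = JSON.parse(line);
  if (typeof msg.id === "number" && "result" in msg) {
    pending.get(msg.id)?.(msg.result); // resolve the matching request
    pending.delete(msg.id);
  }
  // Messages with a `method` and no `id` are server notifications.
});

async function main(): Promise<void> {
  const { userAgent } = await request("initialize", {
    clientInfo: { name: "my-client", title: "My Client", version: "0.0.1" },
  });
  send({ method: "initialized" }); // acknowledge before calling other methods
  console.log(`connected to ${userAgent}`);
}

main();
```

Later sketches in this document assume a `request` helper with this shape.
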
## Core primitives

We have three top-level primitives; a simplified TypeScript sketch of how they fit together follows the list:

- Thread: a conversation between the Codex agent and a user. Each thread contains multiple turns.
- Turn: one turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- Item: represents user inputs and agent outputs within a turn; items are persisted and used as context for future conversations.

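The generated v2 schema is the source of truth for these types; the hand-written TypeScript below is only an orientation aid (a subset of the fields and item variants, mirroring the Rust definitions in `codex-app-server-protocol`).

```ts
// Simplified, hand-written view of the three primitives. Run
// `codex app-server generate-ts` for the authoritative definitions.
type Thread = {
  id: string;
  preview: string;        // usually the first user message, if available
  modelProvider: string;
  createdAt: number;      // Unix timestamp, in seconds
};

type Turn = {
  id: string;
  items: ThreadItem[];
  status: "completed" | "interrupted" | "failed" | "inProgress";
  error: { message: string } | null;
};

// A few of the item variants; the full union also covers command executions,
// file changes, MCP tool calls, web searches, todo lists, and more.
type ThreadItem =
  | { type: "userMessage"; id: string; content: UserInput[] }
  | { type: "agentMessage"; id: string; text: string }
  | { type: "reasoning"; id: string; text: string };

type UserInput =
  | { type: "text"; text: string }
  | { type: "image"; url: string }
  | { type: "localImage"; path: string };
```
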
## Thread & turn endpoints

The JSON-RPC API exposes dedicated methods for managing Codex conversations. Threads store long-lived conversation metadata, and turns store the per-message exchange (input → Codex output, including streamed items). Use the thread APIs to create, list, or archive sessions, then drive the conversation with turn APIs and notifications.

### Quick reference
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.

### 1) Start or resume a thread

Start a fresh thread when you need a new Codex conversation.

```json
{ "method": "thread/start", "id": 10, "params": {
  // Optionally set config settings. If not specified, the user's current
  // config settings will be used.
  "model": "gpt-5-codex",
  "cwd": "/Users/me/project",
  "approvalPolicy": "never",
  "sandbox": "workspaceWrite"
} }
{ "id": 10, "result": {
  "thread": {
    "id": "thr_123",
    "preview": "",
    "modelProvider": "openai",
    "createdAt": 1730910000
  }
} }
{ "method": "thread/started", "params": { "thread": { … } } }
```

To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:

```json
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
```

### 2) List threads (pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:
- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — server defaults to a reasonable page size if unset.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.

Example:

```json
{ "method": "thread/list", "id": 20, "params": {
  "cursor": null,
  "limit": 25
} }
{ "id": 20, "result": {
  "data": [
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
  ],
  "nextCursor": "opaque-token-or-null"
} }
```

When `nextCursor` is `null`, you’ve reached the final page.

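A client that wants the full history can simply keep following `nextCursor` until it comes back `null`. A small sketch, assuming the `request` helper from the Initialization section:

```ts
// Page through every stored thread via thread/list.
declare function request(method: string, params?: unknown): Promise<any>;

async function listAllThreads(modelProviders?: string[]): Promise<any[]> {
  const threads: any[] = [];
  let cursor: string | null = null;
  do {
    const page = await request("thread/list", { cursor, limit: 25, modelProviders });
    threads.push(...page.data);
    cursor = page.nextCursor; // null means this was the final page
  } while (cursor !== null);
  return threads;
}
```
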
### 3) Archive a thread

Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.

```json
{ "method": "thread/archive", "id": 21, "params": { "threadId": "thr_b" } }
{ "id": 21, "result": {} }
```

An archived thread will not appear in future calls to `thread/list`.

### 4) Start a turn (send user input)

Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:

- `{"type":"text","text":"Explain this diff"}`
- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`

You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.

```json
{ "method": "turn/start", "id": 30, "params": {
  "threadId": "thr_123",
  "input": [ { "type": "text", "text": "Run tests" } ],
  // Below are optional config overrides
  "cwd": "/Users/me/project",
  "approvalPolicy": "unlessTrusted",
  "sandboxPolicy": {
    "mode": "workspaceWrite",
    "writableRoots": ["/Users/me/project"],
    "networkAccess": true
  },
  "model": "gpt-5-codex",
  "effort": "medium",
  "summary": "concise"
} }
{ "id": 30, "result": { "turn": {
  "id": "turn_456",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

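The `turn/start` response only tells you the turn began; the interesting data arrives afterwards as notifications. The dispatcher below is a sketch: `turn/started` and `turn/completed` are named in this document, while the item-level method strings shown are assumptions that should be confirmed against the generated schema.

```ts
// Dispatch server notifications while a turn is running. Method names other
// than `turn/started` / `turn/completed` are assumptions.
type Notification = { method: string; params: any };

function handleNotification(msg: Notification): void {
  switch (msg.method) {
    case "turn/started":
      console.log("turn started", msg.params.turn.id);
      break;
    case "item/started":   // assumed method name
      console.log("item started", msg.params.item.type);
      break;
    case "item/completed": // assumed method name
      console.log("item completed", msg.params.item);
      break;
    case "turn/completed":
      // status is "completed", "failed", or "interrupted"; usage carries token counts
      console.log("turn finished", msg.params.turn.status, msg.params.usage);
      break;
    default:
      break; // streaming deltas, approvals, etc. omitted in this sketch
  }
}
```
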
### 5) Interrupt an active turn

You can cancel a running turn with `turn/interrupt`.

```json
{ "method": "turn/interrupt", "id": 31, "params": {
  "threadId": "thr_123",
  "turnId": "turn_456"
} }
{ "id": 31, "result": {} }
```

The server requests cancellation of any running subprocesses, then emits a `turn/completed` notification with `status: "interrupted"`. Rely on `turn/completed` to know when Codex-side cleanup is done.

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

### Quick reference
- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
- `account/logout` — sign out; triggers `account/updated`.
- `account/updated` (notify) — emitted whenever auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).

### 1) Check auth state

Request:
```json
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
```

Response examples:
```json
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
{ "id": 1, "result": { "account": { "type": "apiKey" }, "requiresOpenaiAuth": true } }
{ "id": 1, "result": { "account": { "type": "chatgpt", "email": "user@example.com", "planType": "pro" }, "requiresOpenaiAuth": true } }
```

Field notes:
- `refreshToken` (bool): set `true` to force a token refresh.
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.

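In a UI the practical question is usually whether to show a login prompt at all. A small sketch, assuming the same `request` helper as earlier:

```ts
// Decide whether the UI should prompt for login, based on `account/read`.
declare function request(method: string, params?: unknown): Promise<any>;

async function needsLogin(): Promise<boolean> {
  const { account, requiresOpenaiAuth } = await request("account/read", {
    refreshToken: false,
  });
  if (!requiresOpenaiAuth) return false; // e.g. OSS/local models: no OpenAI auth needed
  return account === null;               // auth required but nobody is signed in
}
```
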
### 2) Log in with an API key

1. Send:
```json
{ "method": "account/login/start", "id": 2, "params": { "type": "apiKey", "apiKey": "sk-…" } }
```
2. Expect:
```json
{ "id": 2, "result": { "type": "apiKey" } }
```
3. Notifications:
```json
{ "method": "account/login/completed", "params": { "loginId": null, "success": true, "error": null } }
{ "method": "account/updated", "params": { "authMode": "apikey" } }
```

### 3) Log in with ChatGPT (browser flow)

1. Start:
```json
{ "method": "account/login/start", "id": 3, "params": { "type": "chatgpt" } }
{ "id": 3, "result": { "type": "chatgpt", "loginId": "<uuid>", "authUrl": "https://chatgpt.com/…&redirect_uri=http%3A%2F%2Flocalhost%3A<port>%2Fauth%2Fcallback" } }
```
2. Open `authUrl` in a browser; the app-server hosts the local callback.
3. Wait for notifications:
```json
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": true, "error": null } }
{ "method": "account/updated", "params": { "authMode": "chatgpt" } }
```

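Putting the three steps together from a client's perspective (a hedged sketch; `openInBrowser` and `onNotification` are placeholders for whatever your UI provides):

```ts
// Drive the browser-based ChatGPT login end to end.
declare function request(method: string, params?: unknown): Promise<any>;
declare function onNotification(handler: (msg: { method: string; params: any }) => void): void;
declare function openInBrowser(url: string): void;

async function loginWithChatGpt(): Promise<void> {
  const { loginId, authUrl } = await request("account/login/start", { type: "chatgpt" });
  openInBrowser(authUrl);

  // Resolve once the matching account/login/completed notification arrives.
  await new Promise<void>((resolve, reject) => {
    onNotification(({ method, params }) => {
      if (method !== "account/login/completed" || params.loginId !== loginId) return;
      if (params.success) {
        resolve();
      } else {
        reject(new Error(params.error ?? "login failed"));
      }
    });
  });
}
```
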
### 4) Cancel a ChatGPT login

```json
{ "method": "account/login/cancel", "id": 4, "params": { "loginId": "<uuid>" } }
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": false, "error": "…" } }
```

### 5) Logout

```json
{ "method": "account/logout", "id": 5 }
{ "id": 5, "result": {} }
{ "method": "account/updated", "params": { "authMode": null } }
```

### 6) Rate limits (ChatGPT)

```json
{ "method": "account/rateLimits/read", "id": 6 }
{ "id": 6, "result": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 1730947200 }, "secondary": null } } }
{ "method": "account/rateLimits/updated", "params": { "rateLimits": { … } } }
```

Field notes:
- `usedPercent` is current usage within the OpenAI quota window.
- `windowDurationMins` is the quota window length.
- `resetsAt` is a Unix timestamp (seconds) for the next reset.

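For display purposes a client usually just needs to format a window. A small sketch using the field semantics above; the formatting itself is only an example:

```ts
// Turn a rate limit window into a human-readable string.
type RateLimitWindow = {
  usedPercent: number;
  windowDurationMins: number | null;
  resetsAt: number | null; // Unix timestamp, seconds
};

function describeWindow(w: RateLimitWindow): string {
  const reset = w.resetsAt ? new Date(w.resetsAt * 1000).toLocaleTimeString() : "unknown";
  const span = w.windowDurationMins ? `${w.windowDurationMins}-minute window` : "window";
  return `${w.usedPercent}% of the ${span} used, resets at ${reset}`;
}
```
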
### Dev notes

- `codex app-server generate-ts --out <dir>` emits v2 types under `v2/`.
- `codex app-server generate-json-schema --out <dir>` outputs `codex_app_server_protocol.schemas.json`.
- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.

File diff suppressed because it is too large
@@ -46,6 +46,7 @@ pub(crate) async fn run_fuzzy_file_search(
|
||||
threads,
|
||||
cancel_flag,
|
||||
COMPUTE_INDICES,
|
||||
true,
|
||||
) {
|
||||
Ok(res) => Ok((root, res)),
|
||||
Err(err) => Err((root, err)),
|
||||
|
||||
@@ -1,32 +1,38 @@
|
||||
#![deny(clippy::print_stdout, clippy::print_stderr)]
|
||||
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Result as IoResult;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Result as IoResult;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::io::{self};
|
||||
use tokio::sync::mpsc;
|
||||
use tracing::Level;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use tracing_subscriber::Layer;
|
||||
use tracing_subscriber::filter::Targets;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
mod codex_message_processor;
|
||||
mod error_code;
|
||||
mod fuzzy_file_search;
|
||||
mod message_processor;
|
||||
mod models;
|
||||
mod outgoing_message;
|
||||
|
||||
/// Size of the bounded channels used to communicate between tasks. The value
|
||||
@@ -38,13 +44,6 @@ pub async fn run_main(
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
) -> IoResult<()> {
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
tracing_subscriber::fmt()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_env_filter(EnvFilter::from_default_env())
|
||||
.init();
|
||||
|
||||
// Set up channels.
|
||||
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
|
||||
@@ -86,6 +85,38 @@ pub async fn run_main(
|
||||
std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
|
||||
})?;
|
||||
|
||||
let feedback = CodexFeedback::new();
|
||||
|
||||
let otel =
|
||||
codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading otel config: {e}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
let stderr_fmt = tracing_subscriber::fmt::layer()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_filter(EnvFilter::from_default_env());
|
||||
|
||||
let feedback_layer = tracing_subscriber::fmt::layer()
|
||||
.with_writer(feedback.make_writer())
|
||||
.with_ansi(false)
|
||||
.with_target(false)
|
||||
.with_filter(Targets::new().with_default(Level::TRACE));
|
||||
|
||||
let _ = tracing_subscriber::registry()
|
||||
.with(stderr_fmt)
|
||||
.with(feedback_layer)
|
||||
.with(otel.as_ref().map(|provider| {
|
||||
OpenTelemetryTracingBridge::new(&provider.logger).with_filter(
|
||||
tracing_subscriber::filter::filter_fn(codex_core::otel_init::codex_export_filter),
|
||||
)
|
||||
}))
|
||||
.try_init();
|
||||
|
||||
// Task: process incoming messages.
|
||||
let processor_handle = tokio::spawn({
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
@@ -93,6 +124,7 @@ pub async fn run_main(
|
||||
outgoing_message_sender,
|
||||
codex_linux_sandbox_exe,
|
||||
std::sync::Arc::new(config),
|
||||
feedback.clone(),
|
||||
);
|
||||
async move {
|
||||
while let Some(msg) = incoming_rx.recv().await {
|
||||
|
||||
@@ -17,6 +17,7 @@ use codex_core::ConversationManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -33,9 +34,14 @@ impl MessageProcessor {
|
||||
outgoing: OutgoingMessageSender,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
config: Arc<Config>,
|
||||
feedback: CodexFeedback,
|
||||
) -> Self {
|
||||
let outgoing = Arc::new(outgoing);
|
||||
let auth_manager = AuthManager::shared(config.codex_home.clone(), false);
|
||||
let auth_manager = AuthManager::shared(
|
||||
config.codex_home.clone(),
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
let conversation_manager = Arc::new(ConversationManager::new(
|
||||
auth_manager.clone(),
|
||||
SessionSource::VSCode,
|
||||
@@ -46,6 +52,7 @@ impl MessageProcessor {
|
||||
outgoing.clone(),
|
||||
codex_linux_sandbox_exe,
|
||||
config,
|
||||
feedback,
|
||||
);
|
||||
|
||||
Self {
|
||||
@@ -57,64 +64,79 @@ impl MessageProcessor {
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
let request_json = match serde_json::to_value(&request) {
|
||||
Ok(request_json) => request_json,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
let codex_request = match serde_json::from_value::<ClientRequest>(request_json) {
|
||||
Ok(codex_request) => codex_request,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Invalid request".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
}
|
||||
|
||||
pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
|
||||
|
||||
codex-rs/app-server/src/models.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_common::model_presets::ModelPreset;
|
||||
use codex_common::model_presets::ReasoningEffortPreset;
|
||||
use codex_common::model_presets::builtin_model_presets;
|
||||
|
||||
pub fn supported_models(auth_mode: Option<AuthMode>) -> Vec<Model> {
|
||||
builtin_model_presets(auth_mode)
|
||||
.into_iter()
|
||||
.map(model_from_preset)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn model_from_preset(preset: ModelPreset) -> Model {
|
||||
Model {
|
||||
id: preset.id.to_string(),
|
||||
model: preset.model.to_string(),
|
||||
display_name: preset.display_name.to_string(),
|
||||
description: preset.description.to_string(),
|
||||
supported_reasoning_efforts: reasoning_efforts_from_preset(
|
||||
preset.supported_reasoning_efforts,
|
||||
),
|
||||
default_reasoning_effort: preset.default_reasoning_effort,
|
||||
is_default: preset.is_default,
|
||||
}
|
||||
}
|
||||
|
||||
fn reasoning_efforts_from_preset(
|
||||
efforts: &'static [ReasoningEffortPreset],
|
||||
) -> Vec<ReasoningEffortOption> {
|
||||
efforts
|
||||
.iter()
|
||||
.map(|preset| ReasoningEffortOption {
|
||||
reasoning_effort: preset.effort,
|
||||
description: preset.description.to_string(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@@ -141,7 +141,13 @@ pub(crate) struct OutgoingError {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use codex_app_server_protocol::AccountLoginCompletedNotification;
|
||||
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
|
||||
use codex_app_server_protocol::AccountUpdatedNotification;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
@@ -164,6 +170,7 @@ mod tests {
|
||||
"params": {
|
||||
"loginId": Uuid::nil(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
@@ -171,4 +178,84 @@ mod tests {
|
||||
"ensure the strum macros serialize the method field correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_login_completed_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification {
|
||||
login_id: Some(Uuid::nil().to_string()),
|
||||
success: true,
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/completed",
|
||||
"params": {
|
||||
"loginId": Uuid::nil().to_string(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25,
|
||||
window_duration_mins: Some(15),
|
||||
resets_at: Some(123),
|
||||
}),
|
||||
secondary: None,
|
||||
},
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/updated",
|
||||
"params": {
|
||||
"rateLimits": {
|
||||
"primary": {
|
||||
"usedPercent": 25,
|
||||
"windowDurationMins": 15,
|
||||
"resetsAt": 123
|
||||
},
|
||||
"secondary": null
|
||||
}
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_updated_notification_serialization() {
|
||||
let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/updated",
|
||||
"params": {
|
||||
"authMode": "apikey"
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,11 @@ path = "lib.rs"
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
assert_cmd = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
@@ -18,4 +22,5 @@ tokio = { workspace = true, features = [
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
] }
|
||||
uuid = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
|
||||
codex-rs/app-server/tests/common/auth_fixtures.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use base64::Engine;
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::auth::AuthDotJson;
|
||||
use codex_core::auth::save_auth;
|
||||
use codex_core::token_data::TokenData;
|
||||
use codex_core::token_data::parse_id_token;
|
||||
use serde_json::json;
|
||||
|
||||
/// Builder for writing a fake ChatGPT auth.json in tests.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ChatGptAuthFixture {
|
||||
access_token: String,
|
||||
refresh_token: String,
|
||||
account_id: Option<String>,
|
||||
claims: ChatGptIdTokenClaims,
|
||||
last_refresh: Option<Option<DateTime<Utc>>>,
|
||||
}
|
||||
|
||||
impl ChatGptAuthFixture {
|
||||
pub fn new(access_token: impl Into<String>) -> Self {
|
||||
Self {
|
||||
access_token: access_token.into(),
|
||||
refresh_token: "refresh-token".to_string(),
|
||||
account_id: None,
|
||||
claims: ChatGptIdTokenClaims::default(),
|
||||
last_refresh: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn refresh_token(mut self, refresh_token: impl Into<String>) -> Self {
|
||||
self.refresh_token = refresh_token.into();
|
||||
self
|
||||
}
|
||||
|
||||
pub fn account_id(mut self, account_id: impl Into<String>) -> Self {
|
||||
self.account_id = Some(account_id.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
|
||||
self.claims.plan_type = Some(plan_type.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn email(mut self, email: impl Into<String>) -> Self {
|
||||
self.claims.email = Some(email.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn last_refresh(mut self, last_refresh: Option<DateTime<Utc>>) -> Self {
|
||||
self.last_refresh = Some(last_refresh);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn claims(mut self, claims: ChatGptIdTokenClaims) -> Self {
|
||||
self.claims = claims;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ChatGptIdTokenClaims {
|
||||
pub email: Option<String>,
|
||||
pub plan_type: Option<String>,
|
||||
}
|
||||
|
||||
impl ChatGptIdTokenClaims {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn email(mut self, email: impl Into<String>) -> Self {
|
||||
self.email = Some(email.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
|
||||
self.plan_type = Some(plan_type.into());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
|
||||
let header = json!({ "alg": "none", "typ": "JWT" });
|
||||
let mut payload = serde_json::Map::new();
|
||||
if let Some(email) = &claims.email {
|
||||
payload.insert("email".to_string(), json!(email));
|
||||
}
|
||||
if let Some(plan_type) = &claims.plan_type {
|
||||
payload.insert(
|
||||
"https://api.openai.com/auth".to_string(),
|
||||
json!({ "chatgpt_plan_type": plan_type }),
|
||||
);
|
||||
}
|
||||
let payload = serde_json::Value::Object(payload);
|
||||
|
||||
let header_b64 =
|
||||
URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header).context("serialize jwt header")?);
|
||||
let payload_b64 =
|
||||
URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload).context("serialize jwt payload")?);
|
||||
let signature_b64 = URL_SAFE_NO_PAD.encode(b"signature");
|
||||
Ok(format!("{header_b64}.{payload_b64}.{signature_b64}"))
|
||||
}
|
||||
|
||||
pub fn write_chatgpt_auth(
|
||||
codex_home: &Path,
|
||||
fixture: ChatGptAuthFixture,
|
||||
cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> Result<()> {
|
||||
let id_token_raw = encode_id_token(&fixture.claims)?;
|
||||
let id_token = parse_id_token(&id_token_raw).context("parse id token")?;
|
||||
let tokens = TokenData {
|
||||
id_token,
|
||||
access_token: fixture.access_token,
|
||||
refresh_token: fixture.refresh_token,
|
||||
account_id: fixture.account_id,
|
||||
};
|
||||
|
||||
let last_refresh = fixture.last_refresh.unwrap_or_else(|| Some(Utc::now()));
|
||||
|
||||
let auth = AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens: Some(tokens),
|
||||
last_refresh,
|
||||
};
|
||||
|
||||
save_auth(codex_home, &auth, cli_auth_credentials_store_mode).context("write auth.json")
|
||||
}
|
||||
@@ -1,13 +1,21 @@
|
||||
mod auth_fixtures;
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod responses;
|
||||
mod rollout;
|
||||
|
||||
pub use auth_fixtures::ChatGptAuthFixture;
|
||||
pub use auth_fixtures::ChatGptIdTokenClaims;
|
||||
pub use auth_fixtures::encode_id_token;
|
||||
pub use auth_fixtures::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use mcp_process::McpProcess;
|
||||
pub use mock_model_server::create_mock_chat_completions_server;
|
||||
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
|
||||
pub use responses::create_apply_patch_sse_response;
|
||||
pub use responses::create_final_assistant_message_sse_response;
|
||||
pub use responses::create_shell_sse_response;
|
||||
pub use rollout::create_fake_rollout;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
|
||||
|
||||
@@ -14,28 +14,37 @@ use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::FeedbackUploadParams;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
@@ -236,6 +245,29 @@ impl McpProcess {
|
||||
self.send_request("getUserAgent", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/rateLimits/read` JSON-RPC request.
|
||||
pub async fn send_get_account_rate_limits_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("account/rateLimits/read", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/read` JSON-RPC request.
|
||||
pub async fn send_get_account_request(
|
||||
&mut self,
|
||||
params: GetAccountParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/read", params).await
|
||||
}
|
||||
|
||||
/// Send a `feedback/upload` JSON-RPC request.
|
||||
    pub async fn send_feedback_upload_request(
        &mut self,
        params: FeedbackUploadParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("feedback/upload", params).await
    }

    /// Send a `userInfo` JSON-RPC request.
    pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("userInfo", None).await
@@ -259,6 +291,51 @@ impl McpProcess {
        self.send_request("listConversations", params).await
    }

    /// Send a `thread/start` JSON-RPC request.
    pub async fn send_thread_start_request(
        &mut self,
        params: ThreadStartParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/start", params).await
    }

    /// Send a `thread/resume` JSON-RPC request.
    pub async fn send_thread_resume_request(
        &mut self,
        params: ThreadResumeParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/resume", params).await
    }

    /// Send a `thread/archive` JSON-RPC request.
    pub async fn send_thread_archive_request(
        &mut self,
        params: ThreadArchiveParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/archive", params).await
    }

    /// Send a `thread/list` JSON-RPC request.
    pub async fn send_thread_list_request(
        &mut self,
        params: ThreadListParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/list", params).await
    }

    /// Send a `model/list` JSON-RPC request.
    pub async fn send_list_models_request(
        &mut self,
        params: ModelListParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("model/list", params).await
    }

    /// Send a `resumeConversation` JSON-RPC request.
    pub async fn send_resume_conversation_request(
        &mut self,
@@ -282,6 +359,24 @@ impl McpProcess {
        self.send_request("loginChatGpt", None).await
    }

    /// Send a `turn/start` JSON-RPC request (v2).
    pub async fn send_turn_start_request(
        &mut self,
        params: TurnStartParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("turn/start", params).await
    }

    /// Send a `turn/interrupt` JSON-RPC request (v2).
    pub async fn send_turn_interrupt_request(
        &mut self,
        params: TurnInterruptParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("turn/interrupt", params).await
    }

    /// Send a `cancelLoginChatGpt` JSON-RPC request.
    pub async fn send_cancel_login_chat_gpt_request(
        &mut self,
@@ -296,6 +391,40 @@ impl McpProcess {
        self.send_request("logoutChatGpt", None).await
    }

    /// Send an `account/logout` JSON-RPC request.
    pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("account/logout", None).await
    }

    /// Send an `account/login/start` JSON-RPC request for API key login.
    pub async fn send_login_account_api_key_request(
        &mut self,
        api_key: &str,
    ) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "type": "apiKey",
            "apiKey": api_key,
        });
        self.send_request("account/login/start", Some(params)).await
    }

    /// Send an `account/login/start` JSON-RPC request for ChatGPT login.
    pub async fn send_login_account_chatgpt_request(&mut self) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "type": "chatgpt"
        });
        self.send_request("account/login/start", Some(params)).await
    }

    /// Send an `account/login/cancel` JSON-RPC request.
    pub async fn send_cancel_login_account_request(
        &mut self,
        params: CancelLoginAccountParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("account/login/cancel", params).await
    }

    /// Send a `fuzzyFileSearch` JSON-RPC request.
    pub async fn send_fuzzy_file_search_request(
        &mut self,
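The new `thread/*`, `turn/*`, and `account/*` helpers above all share the same thin-wrapper shape: serialize the typed params, then delegate to `send_request` with the JSON-RPC method name. A minimal usage sketch, assuming `ThreadStartParams` implements `Default` (not shown in this diff) and borrowing the read-until-response pattern and `DEFAULT_READ_TIMEOUT` constant from the test files below:

// Illustrative only: issue `thread/start` and wait for its response by request id.
let request_id = mcp
    .send_thread_start_request(ThreadStartParams::default())
    .await?;
let response: JSONRPCResponse = timeout(
    DEFAULT_READ_TIMEOUT,
    mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;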
@@ -29,6 +29,25 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
    server
}

/// Same as `create_mock_chat_completions_server` but does not enforce an
/// expectation on the number of calls.
pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
    let server = MockServer::start().await;

    let seq_responder = SeqResponder {
        num_calls: AtomicUsize::new(0),
        responses,
    };

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
        .respond_with(seq_responder)
        .mount(&server)
        .await;

    server
}

struct SeqResponder {
    num_calls: AtomicUsize,
    responses: Vec<String>,
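A short usage sketch for the unchecked variant; it fits tests that may finish before every queued response is consumed, where a strict call-count expectation would fail spuriously. The response builder and config helper named here are the ones used by the integration tests in this PR and appear purely as an illustration:

// Illustrative only: queue one assistant reply without requiring that it is fetched.
let responses = vec![create_final_assistant_message_sse_response("Done")?];
let server = create_mock_chat_completions_server_unchecked(responses).await;
create_config_toml(&codex_home, &server.uri())?;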
codex-rs/app-server/tests/common/rollout.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
use anyhow::Result;
use codex_protocol::ConversationId;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionSource;
use serde_json::json;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use uuid::Uuid;

/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
///
/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.
/// - `meta_rfc3339` is the envelope timestamp used in JSON lines.
/// - `preview` is the user message preview text.
/// - `model_provider` optionally sets the provider in the session meta payload.
///
/// Returns the generated conversation/session UUID as a string.
pub fn create_fake_rollout(
    codex_home: &Path,
    filename_ts: &str,
    meta_rfc3339: &str,
    preview: &str,
    model_provider: Option<&str>,
) -> Result<String> {
    let uuid = Uuid::new_v4();
    let uuid_str = uuid.to_string();
    let conversation_id = ConversationId::from_string(&uuid_str)?;

    // sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
    let year = &filename_ts[0..4];
    let month = &filename_ts[5..7];
    let day = &filename_ts[8..10];
    let dir = codex_home.join("sessions").join(year).join(month).join(day);
    fs::create_dir_all(&dir)?;

    let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));

    // Build JSONL lines
    let payload = serde_json::to_value(SessionMeta {
        id: conversation_id,
        timestamp: meta_rfc3339.to_string(),
        cwd: PathBuf::from("/"),
        originator: "codex".to_string(),
        cli_version: "0.0.0".to_string(),
        instructions: None,
        source: SessionSource::Cli,
        model_provider: model_provider.map(str::to_string),
    })?;

    let lines = [
        json!({
            "timestamp": meta_rfc3339,
            "type": "session_meta",
            "payload": payload
        })
        .to_string(),
        json!({
            "timestamp": meta_rfc3339,
            "type":"response_item",
            "payload": {
                "type":"message",
                "role":"user",
                "content":[{"type":"input_text","text": preview}]
            }
        })
        .to_string(),
        json!({
            "timestamp": meta_rfc3339,
            "type":"event_msg",
            "payload": {
                "type":"user_message",
                "message": preview,
                "kind": "plain"
            }
        })
        .to_string(),
    ];

    fs::write(file_path, lines.join("\n") + "\n")?;
    Ok(uuid_str)
}
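A hedged sketch of seeding history with this helper before exercising `listConversations` or `resumeConversation`; the timestamps and previews are arbitrary example values, and `codex_home` is assumed to be a `TempDir` as in the surrounding tests:

// Illustrative only: write two fake rollouts under CODEX_HOME/sessions/.
let _first = create_fake_rollout(
    codex_home.path(),
    "2025-01-01T12-00-00",
    "2025-01-01T12:00:00Z",
    "first prompt",
    None,
)?;
let _second = create_fake_rollout(
    codex_home.path(),
    "2025-01-02T09-30-00",
    "2025-01-02T09:30:00Z",
    "second prompt",
    Some("mock_provider"),
)?;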
@@ -1,5 +1,4 @@
use std::path::Path;

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::ArchiveConversationParams;
@@ -9,45 +8,37 @@ use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(20);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn archive_conversation_moves_rollout_into_archived_directory() {
    let codex_home = TempDir::new().expect("create temp dir");
    create_config_toml(codex_home.path()).expect("write config.toml");
async fn archive_conversation_moves_rollout_into_archived_directory() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("initialize timeout")
        .expect("initialize request");
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let new_request_id = mcp
        .send_new_conversation_request(NewConversationParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await
        .expect("send newConversation");
        .await?;
    let new_response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(new_request_id)),
    )
    .await
    .expect("newConversation timeout")
    .expect("newConversation response");
    .await??;

    let NewConversationResponse {
        conversation_id,
        rollout_path,
        ..
    } = to_response::<NewConversationResponse>(new_response)
        .expect("deserialize newConversation response");
    } = to_response::<NewConversationResponse>(new_response)?;

    assert!(
        rollout_path.exists(),
@@ -60,19 +51,15 @@ async fn archive_conversation_moves_rollout_into_archived_directory() {
            conversation_id,
            rollout_path: rollout_path.clone(),
        })
        .await
        .expect("send archiveConversation");
        .await?;
    let archive_response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(archive_request_id)),
    )
    .await
    .expect("archiveConversation timeout")
    .expect("archiveConversation response");
    .await??;

    let _: ArchiveConversationResponse =
        to_response::<ArchiveConversationResponse>(archive_response)
            .expect("deserialize archiveConversation response");
        to_response::<ArchiveConversationResponse>(archive_response)?;

    let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
    let archived_rollout_path =
@@ -90,6 +77,8 @@ async fn archive_conversation_moves_rollout_into_archived_directory() {
        "expected archived rollout path {} to exist",
        archived_rollout_path.display()
    );

    Ok(())
}

fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
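The recurring refactor in these test diffs is that each test now returns `anyhow::Result<()>`, so the `.expect(...)` chains collapse into `?`. The double `?` after `timeout(...).await` is deliberate: the outer one handles the `Elapsed` timeout error and the inner one handles the `anyhow::Result` produced by the awaited helper, for example:

// timeout(...).await yields Result<anyhow::Result<T>, Elapsed>: one `?` per layer.
let resp: JSONRPCResponse = timeout(
    DEFAULT_READ_TIMEOUT,
    mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;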
@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
@@ -11,6 +10,7 @@ use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::LoginApiKeyResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -71,125 +71,99 @@ forced_login_method = "{forced_method}"
|
||||
std::fs::write(config_toml, contents)
|
||||
}
|
||||
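Only the tail of the forced-login config helper is visible in the hunk above; a hypothetical reconstruction, with every detail other than the `forced_login_method` line and the final write treated as an assumption, would look roughly like:

// Hypothetical sketch of the helper whose closing lines appear above; not part of the diff.
fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let contents = format!(
        r#"
forced_login_method = "{forced_method}"
"#
    );
    std::fs::write(config_toml, contents)
}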
|
||||
async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
|
||||
async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) -> Result<()> {
|
||||
let request_id = mcp
|
||||
.send_login_api_key_request(LoginApiKeyParams {
|
||||
api_key: api_key.to_string(),
|
||||
})
|
||||
.await
|
||||
.unwrap_or_else(|e| panic!("send loginApiKey: {e}"));
|
||||
.await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.unwrap_or_else(|e| panic!("loginApiKey timeout: {e}"))
|
||||
.unwrap_or_else(|e| panic!("loginApiKey response: {e}"));
|
||||
let _: LoginApiKeyResponse =
|
||||
to_response(resp).unwrap_or_else(|e| panic!("deserialize login response: {e}"));
|
||||
.await??;
|
||||
let _: LoginApiKeyResponse = to_response(resp)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_no_auth() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn get_auth_status_no_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(GetAuthStatusParams {
|
||||
include_token: Some(true),
|
||||
refresh_token: Some(false),
|
||||
})
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
.await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
.await??;
|
||||
let status: GetAuthStatusResponse = to_response(resp)?;
|
||||
assert_eq!(status.auth_method, None, "expected no auth method");
|
||||
assert_eq!(status.auth_token, None, "expected no token");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn get_auth_status_with_api_key() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(GetAuthStatusParams {
|
||||
include_token: Some(true),
|
||||
refresh_token: Some(false),
|
||||
})
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
.await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
.await??;
|
||||
let status: GetAuthStatusResponse = to_response(resp)?;
|
||||
assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
|
||||
assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key_when_auth_not_required() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml_custom_provider(codex_home.path(), false)
|
||||
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn get_auth_status_with_api_key_when_auth_not_required() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml_custom_provider(codex_home.path(), false)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(GetAuthStatusParams {
|
||||
include_token: Some(true),
|
||||
refresh_token: Some(false),
|
||||
})
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
.await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
.await??;
|
||||
let status: GetAuthStatusResponse = to_response(resp)?;
|
||||
assert_eq!(status.auth_method, None, "expected no auth method");
|
||||
assert_eq!(status.auth_token, None, "expected no token");
|
||||
assert_eq!(
|
||||
@@ -197,76 +171,60 @@ async fn get_auth_status_with_api_key_when_auth_not_required() {
|
||||
Some(false),
|
||||
"requires_openai_auth should be false",
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key_no_include_token() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn get_auth_status_with_api_key_no_include_token() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;
|
||||
|
||||
// Build params via struct so None field is omitted in wire JSON.
|
||||
let params = GetAuthStatusParams {
|
||||
include_token: None,
|
||||
refresh_token: Some(false),
|
||||
};
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(params)
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
let request_id = mcp.send_get_auth_status_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
.await??;
|
||||
let status: GetAuthStatusResponse = to_response(resp)?;
|
||||
assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
|
||||
assert!(status.auth_token.is_none(), "token must be omitted");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn login_api_key_rejected_when_forced_chatgpt() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml_forced_login(codex_home.path(), "chatgpt")
|
||||
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn login_api_key_rejected_when_forced_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml_forced_login(codex_home.path(), "chatgpt")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_api_key_request(LoginApiKeyParams {
|
||||
api_key: "sk-test-key".to_string(),
|
||||
})
|
||||
.await
|
||||
.expect("send loginApiKey");
|
||||
.await?;
|
||||
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("loginApiKey error timeout")
|
||||
.expect("loginApiKey error");
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"API key login is disabled. Use ChatGPT login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
@@ -30,29 +29,29 @@ use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::InputMessageKind;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
|
||||
async fn test_codex_jsonrpc_conversation_flow() {
|
||||
async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
|
||||
if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let tmp = TempDir::new().expect("tmp dir");
|
||||
let tmp = TempDir::new()?;
|
||||
// Temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home).expect("create codex home dir");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let working_directory = tmp.path().join("workdir");
|
||||
std::fs::create_dir(&working_directory).expect("create working directory");
|
||||
std::fs::create_dir(&working_directory)?;
|
||||
|
||||
// Create a mock model server that immediately ends each turn.
|
||||
// Two turns are expected: initial session configure + one user message.
|
||||
@@ -62,20 +61,15 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
Some(&working_directory),
|
||||
Some(5000),
|
||||
"call1234",
|
||||
)
|
||||
.expect("create shell sse response"),
|
||||
create_final_assistant_message_sse_response("Enjoy your new git repo!")
|
||||
.expect("create final assistant message"),
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("Enjoy your new git repo!")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri()).expect("write config");
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
// Start MCP server and initialize.
|
||||
let mut mcp = McpProcess::new(&codex_home).await.expect("spawn mcp");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init error");
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// 1) newConversation
|
||||
let new_conv_id = mcp
|
||||
@@ -83,17 +77,13 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
cwd: Some(working_directory.to_string_lossy().into_owned()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
let new_conv_resp = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
.await??;
|
||||
let new_conv_resp = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
@@ -104,19 +94,18 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
|
||||
// 2) addConversationListener
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
let add_listener_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp");
|
||||
.await??;
|
||||
let AddConversationSubscriptionResponse { subscription_id } =
|
||||
to_response::<AddConversationSubscriptionResponse>(add_listener_resp)
|
||||
.expect("deserialize addConversationListener response");
|
||||
to_response::<AddConversationSubscriptionResponse>(add_listener_resp)?;
|
||||
|
||||
// 3) sendUserMessage (should trigger notifications; we only validate an OK response)
|
||||
let send_user_id = mcp
|
||||
@@ -126,17 +115,13 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
text: "text".to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
let send_user_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_user_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserMessage timeout")
|
||||
.expect("sendUserMessage resp");
|
||||
let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)
|
||||
.expect("deserialize sendUserMessage response");
|
||||
.await??;
|
||||
let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)?;
|
||||
|
||||
// Verify the task_finished notification is received.
|
||||
// Note this also ensures that the final request to the server was made.
|
||||
@@ -144,9 +129,7 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_finished_notification timeout")
|
||||
.expect("task_finished_notification resp");
|
||||
.await??;
|
||||
let serde_json::Value::Object(map) = task_finished_notification
|
||||
.params
|
||||
.expect("notification should have params")
|
||||
@@ -164,33 +147,31 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
.send_remove_conversation_listener_request(RemoveConversationListenerParams {
|
||||
subscription_id,
|
||||
})
|
||||
.await
|
||||
.expect("send removeConversationListener");
|
||||
.await?;
|
||||
let remove_listener_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(remove_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("removeConversationListener timeout")
|
||||
.expect("removeConversationListener resp");
|
||||
let RemoveConversationSubscriptionResponse {} =
|
||||
to_response(remove_listener_resp).expect("deserialize removeConversationListener response");
|
||||
.await??;
|
||||
let RemoveConversationSubscriptionResponse {} = to_response(remove_listener_resp)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
|
||||
async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let tmp = TempDir::new().expect("tmp dir");
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home).expect("create codex home dir");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let working_directory = tmp.path().join("workdir");
|
||||
std::fs::create_dir(&working_directory).expect("create working directory");
|
||||
std::fs::create_dir(&working_directory)?;
|
||||
|
||||
// Mock server will request a python shell call for the first and second turn, then finish.
|
||||
let responses = vec![
|
||||
@@ -203,10 +184,8 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
Some(&working_directory),
|
||||
Some(5000),
|
||||
"call1",
|
||||
)
|
||||
.expect("create first shell sse response"),
|
||||
create_final_assistant_message_sse_response("done 1")
|
||||
.expect("create final assistant message 1"),
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
@@ -216,20 +195,15 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
Some(&working_directory),
|
||||
Some(5000),
|
||||
"call2",
|
||||
)
|
||||
.expect("create second shell sse response"),
|
||||
create_final_assistant_message_sse_response("done 2")
|
||||
.expect("create final assistant message 2"),
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri()).expect("write config");
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
// Start MCP server and initialize.
|
||||
let mut mcp = McpProcess::new(&codex_home).await.expect("spawn mcp");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init error");
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// 1) Start conversation with approval_policy=untrusted
|
||||
let new_conv_id = mcp
|
||||
@@ -237,36 +211,30 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
cwd: Some(working_directory.to_string_lossy().into_owned()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
|
||||
// 2) addConversationListener
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
let _: AddConversationSubscriptionResponse =
|
||||
to_response::<AddConversationSubscriptionResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp"),
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
let _: AddConversationSubscriptionResponse = to_response::<AddConversationSubscriptionResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.expect("deserialize addConversationListener response");
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
// 3) sendUserMessage triggers a shell call; approval policy is Untrusted so we should get an elicitation
|
||||
let send_user_id = mcp
|
||||
@@ -276,27 +244,21 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
text: "run python".to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
let _send_user_resp: SendUserMessageResponse = to_response::<SendUserMessageResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_user_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserMessage timeout")
|
||||
.expect("sendUserMessage resp"),
|
||||
)
|
||||
.expect("deserialize sendUserMessage response");
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
// Expect an ExecCommandApproval request (elicitation)
|
||||
let request = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await
|
||||
.expect("waiting for exec approval request timeout")
|
||||
.expect("exec approval request");
|
||||
.await??;
|
||||
let ServerRequest::ExecCommandApproval { request_id, params } = request else {
|
||||
panic!("expected ExecCommandApproval request, got: {request:?}");
|
||||
};
|
||||
@@ -312,6 +274,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
],
|
||||
cwd: working_directory.clone(),
|
||||
reason: None,
|
||||
risk: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "python3 -c 'print(42)'".to_string()
|
||||
}],
|
||||
@@ -324,17 +287,14 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
request_id,
|
||||
serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
|
||||
)
|
||||
.await
|
||||
.expect("send approval response");
|
||||
.await?;
|
||||
|
||||
// Wait for first TaskComplete
|
||||
let _ = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 1 timeout")
|
||||
.expect("task_complete 1 notification");
|
||||
.await??;
|
||||
|
||||
// 4) sendUserTurn with approval_policy=never should run without elicitation
|
||||
let send_turn_id = mcp
|
||||
@@ -350,19 +310,15 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserTurn");
|
||||
.await?;
|
||||
// Acknowledge sendUserTurn
|
||||
let _send_turn_resp: SendUserTurnResponse = to_response::<SendUserTurnResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_turn_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserTurn timeout")
|
||||
.expect("sendUserTurn resp"),
|
||||
)
|
||||
.expect("deserialize sendUserTurn response");
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
// Ensure we do NOT receive an ExecCommandApproval request before the task completes.
|
||||
// If any Request is seen while waiting for task_complete, the helper will error and the test fails.
|
||||
@@ -370,31 +326,31 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 2 timeout")
|
||||
.expect("task_complete 2 notification");
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper: minimal config.toml pointing at mock provider.
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
|
||||
async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<()> {
|
||||
if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let tmp = TempDir::new().expect("tmp dir");
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home).expect("create codex home dir");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace_root = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace_root).expect("create workspace root");
|
||||
std::fs::create_dir(&workspace_root)?;
|
||||
let first_cwd = workspace_root.join("turn1");
|
||||
let second_cwd = workspace_root.join("turn2");
|
||||
std::fs::create_dir(&first_cwd).expect("create first cwd");
|
||||
std::fs::create_dir(&second_cwd).expect("create second cwd");
|
||||
std::fs::create_dir(&first_cwd)?;
|
||||
std::fs::create_dir(&second_cwd)?;
|
||||
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
@@ -406,10 +362,8 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
None,
|
||||
Some(5000),
|
||||
"call-first",
|
||||
)
|
||||
.expect("create first shell response"),
|
||||
create_final_assistant_message_sse_response("done first")
|
||||
.expect("create first final assistant message"),
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done first")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
@@ -419,21 +373,14 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
None,
|
||||
Some(5000),
|
||||
"call-second",
|
||||
)
|
||||
.expect("create second shell response"),
|
||||
create_final_assistant_message_sse_response("done second")
|
||||
.expect("create second final assistant message"),
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done second")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri()).expect("write config");
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home)
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
@@ -442,33 +389,29 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
sandbox: Some(SandboxMode::WorkspaceWrite),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
..
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp");
|
||||
.await??;
|
||||
|
||||
let first_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
@@ -488,22 +431,17 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
.expect("send first sendUserTurn");
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserTurn 1 timeout")
|
||||
.expect("sendUserTurn 1 resp");
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 1 timeout")
|
||||
.expect("task_complete 1 notification");
|
||||
.await??;
|
||||
|
||||
let second_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
@@ -518,60 +456,18 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
.expect("send second sendUserTurn");
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserTurn 2 timeout")
|
||||
.expect("sendUserTurn 2 resp");
|
||||
|
||||
let mut env_message: Option<String> = None;
|
||||
let second_cwd_str = second_cwd.to_string_lossy().into_owned();
|
||||
for _ in 0..10 {
|
||||
let notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/user_message"),
|
||||
)
|
||||
.await
|
||||
.expect("user_message timeout")
|
||||
.expect("user_message notification");
|
||||
let params = notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("user_message should include params");
|
||||
let event: Event = serde_json::from_value(params).expect("deserialize user_message event");
|
||||
if let EventMsg::UserMessage(user) = event.msg
|
||||
&& matches!(user.kind, Some(InputMessageKind::EnvironmentContext))
|
||||
&& user.message.contains(&second_cwd_str)
|
||||
{
|
||||
env_message = Some(user.message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
let env_message = env_message.expect("expected environment context update");
|
||||
assert!(
|
||||
env_message.contains("<sandbox_mode>danger-full-access</sandbox_mode>"),
|
||||
"env context should reflect new sandbox mode: {env_message}"
|
||||
);
|
||||
assert!(
|
||||
env_message.contains("<network_access>enabled</network_access>"),
|
||||
"env context should enable network access for danger-full-access policy: {env_message}"
|
||||
);
|
||||
assert!(
|
||||
env_message.contains(&second_cwd_str),
|
||||
"env context should include updated cwd: {env_message}"
|
||||
);
|
||||
.await??;
|
||||
|
||||
let exec_begin_notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
|
||||
)
|
||||
.await
|
||||
.expect("exec_command_begin timeout")
|
||||
.expect("exec_command_begin notification");
|
||||
.await??;
|
||||
let params = exec_begin_notification
|
||||
.params
|
||||
.clone()
|
||||
@@ -599,9 +495,9 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 2 timeout")
|
||||
.expect("task_complete 2 notification");
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::GetUserSavedConfigResponse;
|
||||
@@ -17,6 +15,8 @@ use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -60,31 +60,21 @@ chatgpt_base_url = "https://api.chatgpt.com"
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
|
||||
async fn get_config_toml_parses_all_fields() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
async fn get_config_toml_parses_all_fields() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_user_saved_config_request()
|
||||
.await
|
||||
.expect("send getUserSavedConfig");
|
||||
let request_id = mcp.send_get_user_saved_config_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getUserSavedConfig timeout")
|
||||
.expect("getUserSavedConfig response");
|
||||
.await??;
|
||||
|
||||
let config: GetUserSavedConfigResponse = to_response(resp).expect("deserialize config");
|
||||
let config: GetUserSavedConfigResponse = to_response(resp)?;
|
||||
let expected = GetUserSavedConfigResponse {
|
||||
config: UserSavedConfig {
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
@@ -122,33 +112,24 @@ async fn get_config_toml_parses_all_fields() {
|
||||
};
|
||||
|
||||
assert_eq!(config, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_config_toml_empty() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
async fn get_config_toml_empty() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_user_saved_config_request()
|
||||
.await
|
||||
.expect("send getUserSavedConfig");
|
||||
let request_id = mcp.send_get_user_saved_config_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getUserSavedConfig timeout")
|
||||
.expect("getUserSavedConfig response");
|
||||
.await??;
|
||||
|
||||
let config: GetUserSavedConfigResponse = to_response(resp).expect("deserialize config");
|
||||
let config: GetUserSavedConfigResponse = to_response(resp)?;
|
||||
let expected = GetUserSavedConfigResponse {
|
||||
config: UserSavedConfig {
|
||||
approval_policy: None,
|
||||
@@ -167,4 +148,5 @@ async fn get_config_toml_empty() {
|
||||
};
|
||||
|
||||
assert_eq!(config, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
@@ -15,31 +14,25 @@ use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_conversation_create_and_send_message_ok() {
|
||||
async fn test_conversation_create_and_send_message_ok() -> Result<()> {
|
||||
// Mock server – we won't strictly rely on it, but provide one to satisfy any model wiring.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
];
|
||||
let responses = vec![create_final_assistant_message_sse_response("Done")?];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
// Temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
// Start MCP server process and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Create a conversation via the new JSON-RPC API.
|
||||
let new_conv_id = mcp
|
||||
@@ -47,40 +40,35 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
reasoning_effort: _,
|
||||
rollout_path: _,
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
assert_eq!(model, "o3");
|
||||
|
||||
// Add a listener so we receive notifications for this conversation (not strictly required for this test).
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
let _sub: AddConversationSubscriptionResponse =
|
||||
to_response::<AddConversationSubscriptionResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp"),
|
||||
)
|
||||
.expect("deserialize addConversationListener response");
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
// Now send a user message via the wire API and expect an OK (empty object) result.
|
||||
let send_id = mcp
|
||||
@@ -90,36 +78,32 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
let send_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserMessage timeout")
|
||||
.expect("sendUserMessage resp");
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)
|
||||
.expect("deserialize sendUserMessage response");
|
||||
.await??;
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)?;
|
||||
|
||||
// avoid race condition by waiting for the mock server to receive the chat.completions request
|
||||
let deadline = std::time::Instant::now() + DEFAULT_READ_TIMEOUT;
|
||||
loop {
|
||||
let requests = loop {
|
||||
let requests = server.received_requests().await.unwrap_or_default();
|
||||
if !requests.is_empty() {
|
||||
break;
|
||||
break requests;
|
||||
}
|
||||
if std::time::Instant::now() >= deadline {
|
||||
panic!("mock server did not receive the chat.completions request in time");
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
|
||||
}
|
||||
};
|
||||
|
||||
// Verify the outbound request body matches expectations for Chat Completions.
|
||||
let request = &server.received_requests().await.unwrap()[0];
|
||||
let body = request
|
||||
.body_json::<serde_json::Value>()
|
||||
.expect("parse request body as JSON");
|
||||
let request = requests
|
||||
.first()
|
||||
.expect("mock server should have received at least one request");
|
||||
let body = request.body_json::<serde_json::Value>()?;
|
||||
assert_eq!(body["model"], json!("o3"));
|
||||
assert!(body["stream"].as_bool().unwrap_or(false));
|
||||
let messages = body["messages"]
|
||||
@@ -130,6 +114,7 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
assert_eq!(last["content"], json!("Hello"));
|
||||
|
||||
drop(server);
|
||||
Ok(())
|
||||
}
|
||||
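The poll-until-deadline loop in the test above could be factored into a small helper; a sketch, reusing the wiremock calls already present in that loop (the helper name and signature are illustrative, not part of the diff):

// Sketch of a reusable wait-for-first-request helper mirroring the inline loop above.
async fn wait_for_first_request(
    server: &wiremock::MockServer,
    deadline: std::time::Instant,
) -> Vec<wiremock::Request> {
    loop {
        let requests = server.received_requests().await.unwrap_or_default();
        if !requests.is_empty() {
            return requests;
        }
        assert!(
            std::time::Instant::now() < deadline,
            "mock server did not receive the chat.completions request in time"
        );
        tokio::time::sleep(std::time::Duration::from_millis(10)).await;
    }
}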
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
use anyhow::anyhow;
use app_test_support::McpProcess;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
@@ -13,48 +13,39 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
// Prepare a temporary Codex home and a separate root with test files.
let codex_home = TempDir::new().context("create temp codex home")?;
let root = TempDir::new().context("create temp search root")?;
let codex_home = TempDir::new()?;
let root = TempDir::new()?;

// Create files designed to have deterministic ordering for query "abe".
std::fs::write(root.path().join("abc"), "x").context("write file abc")?;
std::fs::write(root.path().join("abcde"), "x").context("write file abcde")?;
std::fs::write(root.path().join("abexy"), "x").context("write file abexy")?;
std::fs::write(root.path().join("zzz.txt"), "x").context("write file zzz")?;
std::fs::write(root.path().join("abc"), "x")?;
std::fs::write(root.path().join("abcde"), "x")?;
std::fs::write(root.path().join("abexy"), "x")?;
std::fs::write(root.path().join("zzz.txt"), "x")?;
let sub_dir = root.path().join("sub");
std::fs::create_dir_all(&sub_dir).context("create sub dir")?;
std::fs::create_dir_all(&sub_dir)?;
let sub_abce_path = sub_dir.join("abce");
std::fs::write(&sub_abce_path, "x").context("write file sub/abce")?;
std::fs::write(&sub_abce_path, "x")?;
let sub_abce_rel = sub_abce_path
.strip_prefix(root.path())
.context("strip root prefix from sub/abce")?
.strip_prefix(root.path())?
.to_string_lossy()
.to_string();

// Start MCP server and initialize.
let mut mcp = McpProcess::new(codex_home.path())
.await
.context("spawn mcp")?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.context("init timeout")?
.context("init failed")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let root_path = root.path().to_string_lossy().to_string();
// Send fuzzyFileSearch request.
let request_id = mcp
.send_fuzzy_file_search_request("abe", vec![root_path.clone()], None)
.await
.context("send fuzzyFileSearch")?;
.await?;

// Read response and verify shape and ordering.
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.context("fuzzyFileSearch timeout")?
.context("fuzzyFileSearch resp")?;
.await??;

let value = resp.result;
// The path separator on Windows affects the score.
@@ -94,24 +85,18 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {
let codex_home = TempDir::new().context("create temp codex home")?;
let root = TempDir::new().context("create temp search root")?;
let codex_home = TempDir::new()?;
let root = TempDir::new()?;

std::fs::write(root.path().join("alpha.txt"), "contents").context("write alpha")?;
std::fs::write(root.path().join("alpha.txt"), "contents")?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.context("spawn mcp")?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.context("init timeout")?
.context("init failed")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let root_path = root.path().to_string_lossy().to_string();
let request_id = mcp
.send_fuzzy_file_search_request("alp", vec![root_path.clone()], None)
.await
.context("send fuzzyFileSearch")?;
.await?;

let request_id_2 = mcp
.send_fuzzy_file_search_request(
@@ -119,23 +104,20 @@ async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {
vec![root_path.clone()],
Some(request_id.to_string()),
)
.await
.context("send fuzzyFileSearch")?;
.await?;

let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)),
)
.await
.context("fuzzyFileSearch timeout")?
.context("fuzzyFileSearch resp")?;
.await??;

let files = resp
.result
.get("files")
.context("files key missing")?
.ok_or_else(|| anyhow!("files key missing"))?
.as_array()
.context("files not array")?
.ok_or_else(|| anyhow!("files not array"))?
.clone();

assert_eq!(files.len(), 1);

@@ -88,7 +88,10 @@ async fn shell_command_interruption() -> anyhow::Result<()> {

// 2) addConversationListener
let add_listener_id = mcp
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
.send_add_conversation_listener_request(AddConversationListenerParams {
conversation_id,
experimental_raw_events: false,
})
.await?;
let _add_listener_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
@@ -143,7 +146,7 @@ fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
sandbox_mode = "read-only"

model_provider = "mock_provider"

@@ -1,7 +1,6 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
@@ -13,67 +12,66 @@ use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ResumeConversationResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::SessionConfiguredNotification;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use uuid::Uuid;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_list_and_resume_conversations() {
|
||||
async fn test_list_and_resume_conversations() -> Result<()> {
|
||||
// Prepare a temporary CODEX_HOME with a few fake rollout files.
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
let codex_home = TempDir::new()?;
|
||||
create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T12-00-00",
|
||||
"2025-01-02T12:00:00Z",
|
||||
"Hello A",
|
||||
);
|
||||
Some("openai"),
|
||||
)?;
|
||||
create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T13-00-00",
|
||||
"2025-01-01T13:00:00Z",
|
||||
"Hello B",
|
||||
);
|
||||
Some("openai"),
|
||||
)?;
|
||||
create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T12-00-00",
|
||||
"2025-01-01T12:00:00Z",
|
||||
"Hello C",
|
||||
);
|
||||
None,
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Request first page with size 2
|
||||
let req_id = mcp
|
||||
.send_list_conversations_request(ListConversationsParams {
|
||||
page_size: Some(2),
|
||||
cursor: None,
|
||||
model_providers: None,
|
||||
})
|
||||
.await
|
||||
.expect("send listConversations");
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await
|
||||
.expect("listConversations timeout")
|
||||
.expect("listConversations resp");
|
||||
.await??;
|
||||
let ListConversationsResponse { items, next_cursor } =
|
||||
to_response::<ListConversationsResponse>(resp).expect("deserialize response");
|
||||
to_response::<ListConversationsResponse>(resp)?;
|
||||
|
||||
assert_eq!(items.len(), 2);
|
||||
// Newest first; preview text should match
|
||||
assert_eq!(items[0].preview, "Hello A");
|
||||
assert_eq!(items[1].preview, "Hello B");
|
||||
assert_eq!(items[0].model_provider, "openai");
|
||||
assert_eq!(items[1].model_provider, "openai");
|
||||
assert!(items[0].path.is_absolute());
|
||||
assert!(next_cursor.is_some());
|
||||
|
||||
@@ -82,129 +80,277 @@ async fn test_list_and_resume_conversations() {
|
||||
.send_list_conversations_request(ListConversationsParams {
|
||||
page_size: Some(2),
|
||||
cursor: next_cursor,
|
||||
model_providers: None,
|
||||
})
|
||||
.await
|
||||
.expect("send listConversations page 2");
|
||||
.await?;
|
||||
let resp2: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id2)),
|
||||
)
|
||||
.await
|
||||
.expect("listConversations page 2 timeout")
|
||||
.expect("listConversations page 2 resp");
|
||||
.await??;
|
||||
let ListConversationsResponse {
|
||||
items: items2,
|
||||
next_cursor: next2,
|
||||
..
|
||||
} = to_response::<ListConversationsResponse>(resp2).expect("deserialize response");
|
||||
} = to_response::<ListConversationsResponse>(resp2)?;
|
||||
assert_eq!(items2.len(), 1);
|
||||
assert_eq!(items2[0].preview, "Hello C");
|
||||
assert!(next2.is_some());
|
||||
assert_eq!(items2[0].model_provider, "openai");
|
||||
assert_eq!(next2, None);
|
||||
|
||||
// Now resume one of the sessions and expect a SessionConfigured notification and response.
|
||||
// Add a conversation with an explicit non-OpenAI provider for filter tests.
|
||||
create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T11-30-00",
|
||||
"2025-01-01T11:30:00Z",
|
||||
"Hello TP",
|
||||
Some("test-provider"),
|
||||
)?;
|
||||
|
||||
// Filtering by model provider should return only matching sessions.
|
||||
let filter_req_id = mcp
|
||||
.send_list_conversations_request(ListConversationsParams {
|
||||
page_size: Some(10),
|
||||
cursor: None,
|
||||
model_providers: Some(vec!["test-provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let filter_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(filter_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ListConversationsResponse {
|
||||
items: filtered_items,
|
||||
next_cursor: filtered_next,
|
||||
} = to_response::<ListConversationsResponse>(filter_resp)?;
|
||||
assert_eq!(filtered_items.len(), 1);
|
||||
assert_eq!(filtered_next, None);
|
||||
assert_eq!(filtered_items[0].preview, "Hello TP");
|
||||
assert_eq!(filtered_items[0].model_provider, "test-provider");
|
||||
|
||||
// Empty filter should include every session regardless of provider metadata.
|
||||
let unfiltered_req_id = mcp
|
||||
.send_list_conversations_request(ListConversationsParams {
|
||||
page_size: Some(10),
|
||||
cursor: None,
|
||||
model_providers: Some(Vec::new()),
|
||||
})
|
||||
.await?;
|
||||
let unfiltered_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(unfiltered_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ListConversationsResponse {
|
||||
items: unfiltered_items,
|
||||
next_cursor: unfiltered_next,
|
||||
} = to_response::<ListConversationsResponse>(unfiltered_resp)?;
|
||||
assert_eq!(unfiltered_items.len(), 4);
|
||||
assert!(unfiltered_next.is_none());
|
||||
|
||||
let empty_req_id = mcp
|
||||
.send_list_conversations_request(ListConversationsParams {
|
||||
page_size: Some(10),
|
||||
cursor: None,
|
||||
model_providers: Some(vec!["other".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let empty_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(empty_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ListConversationsResponse {
|
||||
items: empty_items,
|
||||
next_cursor: empty_next,
|
||||
} = to_response::<ListConversationsResponse>(empty_resp)?;
|
||||
assert!(empty_items.is_empty());
|
||||
assert!(empty_next.is_none());
|
||||
|
||||
let first_item = &items[0];
|
||||
|
||||
// Now resume one of the sessions from an explicit rollout path.
|
||||
let resume_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: items[0].path.clone(),
|
||||
path: Some(first_item.path.clone()),
|
||||
conversation_id: None,
|
||||
history: None,
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await
|
||||
.expect("send resumeConversation");
|
||||
.await?;
|
||||
|
||||
// Expect a codex/event notification with msg.type == sessionConfigured
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await
|
||||
.expect("sessionConfigured notification timeout")
|
||||
.expect("sessionConfigured notification");
|
||||
let session_configured: ServerNotification = notification
|
||||
.try_into()
|
||||
.expect("deserialize sessionConfigured notification");
|
||||
// Basic shape assertion: ensure event type is sessionConfigured
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
rollout_path,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert_eq!(items[0].path.clone(), rollout_path);
|
||||
assert_eq!(rollout_path, first_item.path.clone());
|
||||
let session_initial_messages = session_initial_messages
|
||||
.expect("expected initial messages when resuming from rollout path");
|
||||
match session_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages from rollout resume: {other:#?}"),
|
||||
}
|
||||
|
||||
// Then the response for resumeConversation
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_req_id)),
|
||||
)
|
||||
.await
|
||||
.expect("resumeConversation timeout")
|
||||
.expect("resumeConversation resp");
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)
|
||||
.expect("deserialize resumeConversation response");
|
||||
conversation_id,
|
||||
model: resume_model,
|
||||
initial_messages: response_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
// conversation id should be a valid UUID
|
||||
assert!(!conversation_id.to_string().is_empty());
|
||||
}
|
||||
assert_eq!(resume_model, "o3");
|
||||
let response_initial_messages =
|
||||
response_initial_messages.expect("expected initial messages in resume response");
|
||||
match response_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages in resume response: {other:#?}"),
|
||||
}
|
||||
|
||||
fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) {
|
||||
let uuid = Uuid::new_v4();
|
||||
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
let month = &filename_ts[5..7];
|
||||
let day = &filename_ts[8..10];
|
||||
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
||||
fs::create_dir_all(&dir).unwrap_or_else(|e| panic!("create sessions dir: {e}"));
|
||||
// Resuming with only a conversation id should locate the rollout automatically.
|
||||
let resume_by_id_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: None,
|
||||
conversation_id: Some(first_item.conversation_id),
|
||||
history: None,
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await?;
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
rollout_path,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert_eq!(rollout_path, first_item.path.clone());
|
||||
let session_initial_messages = session_initial_messages
|
||||
.expect("expected initial messages when resuming from conversation id");
|
||||
match session_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages from conversation id resume: {other:#?}"),
|
||||
}
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_by_id_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id: by_id_conversation_id,
|
||||
model: by_id_model,
|
||||
initial_messages: by_id_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
assert!(!by_id_conversation_id.to_string().is_empty());
|
||||
assert_eq!(by_id_model, "o3");
|
||||
let by_id_initial_messages = by_id_initial_messages
|
||||
.expect("expected initial messages when resuming from conversation id response");
|
||||
match by_id_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => {
|
||||
panic!("unexpected initial messages in conversation id resume response: {other:#?}")
|
||||
}
|
||||
}
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
let mut lines = Vec::new();
|
||||
// Meta line with timestamp (flattened meta in payload for new schema)
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": {
|
||||
"id": uuid,
|
||||
"timestamp": meta_rfc3339,
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null
|
||||
}
|
||||
// Resuming with explicit history should succeed even without a stored rollout.
|
||||
let fork_history_text = "Hello from history";
|
||||
let history = vec![ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: fork_history_text.to_string(),
|
||||
}],
|
||||
}];
|
||||
let resume_with_history_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: None,
|
||||
conversation_id: None,
|
||||
history: Some(history),
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.to_string(),
|
||||
.await?;
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert!(
|
||||
session_initial_messages.as_ref().is_none_or(Vec::is_empty),
|
||||
"expected no initial messages when resuming from explicit history but got {session_initial_messages:#?}"
|
||||
);
|
||||
// Minimal user message entry as a persisted response item (with envelope timestamp)
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_with_history_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id: history_conversation_id,
|
||||
model: history_model,
|
||||
initial_messages: history_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
assert!(!history_conversation_id.to_string().is_empty());
|
||||
assert_eq!(history_model, "o3");
|
||||
assert!(
|
||||
history_initial_messages.as_ref().is_none_or(Vec::is_empty),
|
||||
"expected no initial messages in resume response when history is provided but got {history_initial_messages:#?}"
|
||||
);
|
||||
// Add a matching user message event line to satisfy filters
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"kind": "plain"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
fs::write(file_path, lines.join("\n") + "\n")
|
||||
.unwrap_or_else(|e| panic!("write rollout file: {e}"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
@@ -12,7 +10,11 @@ use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginChatGptResponse;
|
||||
use codex_app_server_protocol::LogoutChatGptResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_login::login_with_api_key;
|
||||
use serial_test::serial;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -41,32 +43,26 @@ stream_max_retries = 0
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn logout_chatgpt_removes_auth() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
|
||||
async fn logout_chatgpt_removes_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
login_with_api_key(
|
||||
codex_home.path(),
|
||||
"sk-test-key",
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let id = mcp
|
||||
.send_logout_chat_gpt_request()
|
||||
.await
|
||||
.expect("send logoutChatGpt");
|
||||
let id = mcp.send_logout_chat_gpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(id)),
|
||||
)
|
||||
.await
|
||||
.expect("logoutChatGpt timeout")
|
||||
.expect("logoutChatGpt response");
|
||||
let _ok: LogoutChatGptResponse = to_response(resp).expect("deserialize logout response");
|
||||
.await??;
|
||||
let _ok: LogoutChatGptResponse = to_response(resp)?;
|
||||
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
@@ -79,61 +75,47 @@ async fn logout_chatgpt_removes_auth() {
|
||||
include_token: Some(true),
|
||||
refresh_token: Some(false),
|
||||
})
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
.await?;
|
||||
let status_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(status_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(status_resp).expect("deserialize status");
|
||||
.await??;
|
||||
let status: GetAuthStatusResponse = to_response(status_resp)?;
|
||||
assert_eq!(status.auth_method, None);
|
||||
assert_eq!(status.auth_token, None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn login_and_cancel_chatgpt() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_and_cancel_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let login_id = mcp
|
||||
.send_login_chat_gpt_request()
|
||||
.await
|
||||
.expect("send loginChatGpt");
|
||||
let login_id = mcp.send_login_chat_gpt_request().await?;
|
||||
let login_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(login_id)),
|
||||
)
|
||||
.await
|
||||
.expect("loginChatGpt timeout")
|
||||
.expect("loginChatGpt response");
|
||||
let login: LoginChatGptResponse = to_response(login_resp).expect("deserialize login resp");
|
||||
.await??;
|
||||
let login: LoginChatGptResponse = to_response(login_resp)?;
|
||||
|
||||
let cancel_id = mcp
|
||||
.send_cancel_login_chat_gpt_request(CancelLoginChatGptParams {
|
||||
login_id: login.login_id,
|
||||
})
|
||||
.await
|
||||
.expect("send cancelLoginChatGpt");
|
||||
.await?;
|
||||
let cancel_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
|
||||
)
|
||||
.await
|
||||
.expect("cancelLoginChatGpt timeout")
|
||||
.expect("cancelLoginChatGpt response");
|
||||
let _ok: CancelLoginChatGptResponse =
|
||||
to_response(cancel_resp).expect("deserialize cancel response");
|
||||
.await??;
|
||||
let _ok: CancelLoginChatGptResponse = to_response(cancel_resp)?;
|
||||
|
||||
// Optionally observe the completion notification; do not fail if it races.
|
||||
let maybe_note = timeout(
|
||||
@@ -144,6 +126,7 @@ async fn login_and_cancel_chatgpt() {
|
||||
if maybe_note.is_err() {
|
||||
eprintln!("warning: did not observe login_chat_gpt_complete notification after cancel");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
|
||||
@@ -176,66 +159,48 @@ forced_chatgpt_workspace_id = "{workspace_id}"
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn login_chatgpt_rejected_when_forced_api() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml_forced_login(codex_home.path(), "api")
|
||||
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
async fn login_chatgpt_rejected_when_forced_api() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml_forced_login(codex_home.path(), "api")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_chat_gpt_request()
|
||||
.await
|
||||
.expect("send loginChatGpt");
|
||||
let request_id = mcp.send_login_chat_gpt_request().await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("loginChatGpt error timeout")
|
||||
.expect("loginChatGpt error");
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"ChatGPT login is disabled. Use API key login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn login_chatgpt_includes_forced_workspace_query_param() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml_forced_workspace(codex_home.path(), "ws-forced")
|
||||
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml_forced_workspace(codex_home.path(), "ws-forced")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_chat_gpt_request()
|
||||
.await
|
||||
.expect("send loginChatGpt");
|
||||
let request_id = mcp.send_login_chat_gpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("loginChatGpt timeout")
|
||||
.expect("loginChatGpt response");
|
||||
.await??;
|
||||
|
||||
let login: LoginChatGptResponse = to_response(resp).expect("deserialize login resp");
|
||||
let login: LoginChatGptResponse = to_response(resp)?;
|
||||
assert!(
|
||||
login.auth_url.contains("allowed_workspace_id=ws-forced"),
|
||||
"auth URL should include forced workspace"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -11,3 +11,4 @@ mod send_message;
mod set_default_model;
mod user_agent;
mod user_info;
mod v2;

@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
@@ -15,73 +14,76 @@ use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::RawResponseItemEvent;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_message_success() {
|
||||
async fn test_send_message_success() -> Result<()> {
|
||||
// Spin up a mock completions server that immediately ends the Codex turn.
|
||||
// Two Codex turns hit the mock model (session start + send-user-message). Provide two SSE responses.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
// Create a temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
// Start MCP server process and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timed out")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a conversation using the new wire API.
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams::default())
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<_>(new_conv_resp).expect("deserialize newConversation response");
|
||||
} = to_response::<_>(new_conv_resp)?;
|
||||
|
||||
// 2) addConversationListener
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
let add_listener_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp");
|
||||
.await??;
|
||||
let AddConversationSubscriptionResponse { subscription_id: _ } =
|
||||
to_response::<_>(add_listener_resp).expect("deserialize addConversationListener response");
|
||||
to_response::<_>(add_listener_resp)?;
|
||||
|
||||
// Now exercise sendUserMessage twice.
|
||||
send_message("Hello", conversation_id, &mut mcp).await;
|
||||
send_message("Hello again", conversation_id, &mut mcp).await;
|
||||
send_message("Hello", conversation_id, &mut mcp).await?;
|
||||
send_message("Hello again", conversation_id, &mut mcp).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[expect(clippy::expect_used)]
|
||||
async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut McpProcess) {
|
||||
async fn send_message(
|
||||
message: &str,
|
||||
conversation_id: ConversationId,
|
||||
mcp: &mut McpProcess,
|
||||
) -> Result<()> {
|
||||
// Now exercise sendUserMessage.
|
||||
let send_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
@@ -90,19 +92,15 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
|
||||
text: message.to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserMessage response timeout")
|
||||
.expect("sendUserMessage response error");
|
||||
.await??;
|
||||
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)
|
||||
.expect("deserialize sendUserMessage response");
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)?;
|
||||
|
||||
// Verify the task_finished notification is received.
|
||||
// Note this also ensures that the final request to the server was made.
|
||||
@@ -110,9 +108,7 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_finished_notification timeout")
|
||||
.expect("task_finished_notification resp");
|
||||
.await??;
|
||||
let serde_json::Value::Object(map) = task_finished_notification
|
||||
.params
|
||||
.expect("notification should have params")
|
||||
@@ -124,17 +120,105 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
|
||||
.expect("should have conversationId"),
|
||||
&serde_json::Value::String(conversation_id.to_string())
|
||||
);
|
||||
|
||||
let raw_attempt = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(200),
|
||||
mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
raw_attempt.is_err(),
|
||||
"unexpected raw item notification when not opted in"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_message_session_not_found() {
|
||||
async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
|
||||
let responses = vec![create_final_assistant_message_sse_response("Done")?];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
developer_instructions: Some("Use the test harness tools.".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<_>(new_conv_resp)?;
|
||||
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: true,
|
||||
})
|
||||
.await?;
|
||||
let add_listener_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await??;
|
||||
let AddConversationSubscriptionResponse { subscription_id: _ } =
|
||||
to_response::<_>(add_listener_resp)?;
|
||||
|
||||
let send_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
|
||||
let developer = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_developer_message(&developer, "Use the test harness tools.");
|
||||
|
||||
let instructions = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_instructions_message(&instructions);
|
||||
|
||||
let environment = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_environment_message(&environment);
|
||||
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)?;
|
||||
|
||||
let user_message = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_user_message(&user_message, "Hello");
|
||||
|
||||
let assistant_message = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_assistant_message(&assistant_message, "Done");
|
||||
|
||||
let _ = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(250),
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_message_session_not_found() -> Result<()> {
|
||||
// Start MCP without creating a Codex session
|
||||
let codex_home = TempDir::new().expect("tempdir");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("timeout")
|
||||
.expect("init");
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let unknown = ConversationId::new();
|
||||
let req_id = mcp
|
||||
@@ -144,18 +228,16 @@ async fn test_send_message_session_not_found() {
|
||||
text: "ping".to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
|
||||
// Expect an error response for unknown conversation.
|
||||
let err = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await
|
||||
.expect("timeout")
|
||||
.expect("error");
|
||||
.await??;
|
||||
assert_eq!(err.id, RequestId::Integer(req_id));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -184,3 +266,126 @@ stream_max_retries = 0
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[expect(clippy::expect_used)]
|
||||
async fn read_raw_response_item(
|
||||
mcp: &mut McpProcess,
|
||||
conversation_id: ConversationId,
|
||||
) -> ResponseItem {
|
||||
let raw_notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
|
||||
)
|
||||
.await
|
||||
.expect("codex/event/raw_response_item notification timeout")
|
||||
.expect("codex/event/raw_response_item notification resp");
|
||||
|
||||
let serde_json::Value::Object(params) = raw_notification
|
||||
.params
|
||||
.expect("codex/event/raw_response_item should have params")
|
||||
else {
|
||||
panic!("codex/event/raw_response_item should have params");
|
||||
};
|
||||
|
||||
let conversation_id_value = params
|
||||
.get("conversationId")
|
||||
.and_then(|value| value.as_str())
|
||||
.expect("raw response item should include conversationId");
|
||||
|
||||
assert_eq!(
|
||||
conversation_id_value,
|
||||
conversation_id.to_string(),
|
||||
"raw response item conversation mismatch"
|
||||
);
|
||||
|
||||
let msg_value = params
|
||||
.get("msg")
|
||||
.cloned()
|
||||
.expect("raw response item should include msg payload");
|
||||
|
||||
let event: RawResponseItemEvent =
|
||||
serde_json::from_value(msg_value).expect("deserialize raw response item");
|
||||
event.item
|
||||
}
|
||||
|
||||
fn assert_instructions_message(item: &ResponseItem) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "user");
|
||||
let texts = content_texts(content);
|
||||
let is_instructions = texts
|
||||
.iter()
|
||||
.any(|text| text.starts_with("# AGENTS.md instructions for "));
|
||||
assert!(
|
||||
is_instructions,
|
||||
"expected instructions message, got {texts:?}"
|
||||
);
|
||||
}
|
||||
other => panic!("expected instructions message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_developer_message(item: &ResponseItem, expected_text: &str) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "developer");
|
||||
let texts = content_texts(content);
|
||||
assert_eq!(
|
||||
texts,
|
||||
vec![expected_text],
|
||||
"expected developer instructions message, got {texts:?}"
|
||||
);
|
||||
}
|
||||
other => panic!("expected developer instructions message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_environment_message(item: &ResponseItem) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "user");
|
||||
let texts = content_texts(content);
|
||||
assert!(
|
||||
texts
|
||||
.iter()
|
||||
.any(|text| text.contains("<environment_context>")),
|
||||
"expected environment context message, got {texts:?}"
|
||||
);
|
||||
}
|
||||
other => panic!("expected environment message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_user_message(item: &ResponseItem, expected_text: &str) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "user");
|
||||
let texts = content_texts(content);
|
||||
assert_eq!(texts, vec![expected_text]);
|
||||
}
|
||||
other => panic!("expected user message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_assistant_message(item: &ResponseItem, expected_text: &str) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "assistant");
|
||||
let texts = content_texts(content);
|
||||
assert_eq!(texts, vec![expected_text]);
|
||||
}
|
||||
other => panic!("expected assistant message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn content_texts(content: &[ContentItem]) -> Vec<&str> {
|
||||
content
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
ContentItem::InputText { text } | ContentItem::OutputText { text } => {
|
||||
Some(text.as_str())
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
use std::path::Path;

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
@@ -8,50 +7,38 @@ use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
use codex_core::config::ConfigToml;
use pretty_assertions::assert_eq;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn set_default_model_persists_overrides() {
let codex_home = TempDir::new().expect("create tempdir");
create_config_toml(codex_home.path()).expect("write config.toml");
async fn set_default_model_persists_overrides() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path())?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let params = SetDefaultModelParams {
model: Some("gpt-4.1".to_string()),
reasoning_effort: None,
};

let request_id = mcp
.send_set_default_model_request(params)
.await
.expect("send setDefaultModel");
let request_id = mcp.send_set_default_model_request(params).await?;

let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.expect("setDefaultModel timeout")
.expect("setDefaultModel response");
.await??;

let _: SetDefaultModelResponse =
to_response(resp).expect("deserialize setDefaultModel response");
let _: SetDefaultModelResponse = to_response(resp)?;

let config_path = codex_home.path().join("config.toml");
let config_contents = tokio::fs::read_to_string(&config_path)
.await
.expect("read config.toml");
let config_toml: ConfigToml = toml::from_str(&config_contents).expect("parse config.toml");
let config_contents = tokio::fs::read_to_string(&config_path).await?;
let config_toml: ConfigToml = toml::from_str(&config_contents)?;

assert_eq!(
ConfigToml {
@@ -61,6 +48,7 @@ async fn set_default_model_persists_overrides() {
},
config_toml,
);
Ok(())
}

// Helper to create a config.toml; mirrors create_conversation.rs

@@ -1,3 +1,4 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::GetUserAgentResponse;
@@ -10,28 +11,18 @@ use tokio::time::timeout;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_user_agent_returns_current_codex_user_agent() {
let codex_home = TempDir::new().unwrap_or_else(|err| panic!("create tempdir: {err}"));
async fn get_user_agent_returns_current_codex_user_agent() -> Result<()> {
let codex_home = TempDir::new()?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("initialize timeout")
.expect("initialize request");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let request_id = mcp
.send_get_user_agent_request()
.await
.expect("send getUserAgent");
let request_id = mcp.send_get_user_agent_request().await?;
let response: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.expect("getUserAgent timeout")
.expect("getUserAgent response");
.await??;

let os_info = os_info::get();
let user_agent = format!(
@@ -42,9 +33,9 @@ async fn get_user_agent_returns_current_codex_user_agent() {
codex_core::terminal::user_agent()
);

let received: GetUserAgentResponse =
to_response(response).expect("deserialize getUserAgent response");
let received: GetUserAgentResponse = to_response(response)?;
let expected = GetUserAgentResponse { user_agent };

assert_eq!(received, expected);
Ok(())
}

@@ -1,78 +1,46 @@
use std::time::Duration;

use anyhow::Context;
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::UserInfoResponse;
use codex_core::auth::AuthDotJson;
use codex_core::auth::get_auth_file;
use codex_core::auth::write_auth_json;
use codex_core::token_data::IdTokenInfo;
use codex_core::token_data::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_info_returns_email_from_auth_json() {
let codex_home = TempDir::new().expect("create tempdir");
async fn user_info_returns_email_from_auth_json() -> Result<()> {
let codex_home = TempDir::new()?;

let auth_path = get_auth_file(codex_home.path());
let mut id_token = IdTokenInfo::default();
id_token.email = Some("user@example.com".to_string());
id_token.raw_jwt = encode_id_token_with_email("user@example.com").expect("encode id token");
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("access")
.refresh_token("refresh")
.email("user@example.com"),
AuthCredentialsStoreMode::File,
)?;

let auth = AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token,
access_token: "access".to_string(),
refresh_token: "refresh".to_string(),
account_id: None,
}),
last_refresh: None,
};
write_auth_json(&auth_path, &auth).expect("write auth.json");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("initialize timeout")
.expect("initialize request");

let request_id = mcp.send_user_info_request().await.expect("send userInfo");
let request_id = mcp.send_user_info_request().await?;
let response: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.expect("userInfo timeout")
.expect("userInfo response");
.await??;

let received: UserInfoResponse = to_response(response).expect("deserialize userInfo response");
let received: UserInfoResponse = to_response(response)?;
let expected = UserInfoResponse {
alleged_user_email: Some("user@example.com".to_string()),
};

assert_eq!(received, expected);
}

fn encode_id_token_with_email(email: &str) -> anyhow::Result<String> {
let header_b64 = URL_SAFE_NO_PAD.encode(
serde_json::to_vec(&json!({ "alg": "none", "typ": "JWT" }))
.context("serialize jwt header")?,
);
let payload =
serde_json::to_vec(&json!({ "email": email })).context("serialize jwt payload")?;
let payload_b64 = URL_SAFE_NO_PAD.encode(payload);
Ok(format!("{header_b64}.{payload_b64}.signature"))
Ok(())
}

492
codex-rs/app-server/tests/suite/v2/account.rs
Normal file
@@ -0,0 +1,492 @@
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::Account;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountResponse;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAccountResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LogoutAccountResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_login::login_with_api_key;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serial_test::serial;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
// Helper to create a minimal config.toml for the app server
|
||||
#[derive(Default)]
|
||||
struct CreateConfigTomlParams {
|
||||
forced_method: Option<String>,
|
||||
forced_workspace_id: Option<String>,
|
||||
requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, params: CreateConfigTomlParams) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
let forced_line = if let Some(method) = params.forced_method {
|
||||
format!("forced_login_method = \"{method}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let forced_workspace_line = if let Some(ws) = params.forced_workspace_id {
|
||||
format!("forced_chatgpt_workspace_id = \"{ws}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let requires_line = match params.requires_openai_auth {
|
||||
Some(true) => "requires_openai_auth = true\n".to_string(),
|
||||
Some(false) => String::new(),
|
||||
None => String::new(),
|
||||
};
|
||||
let contents = format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
{forced_line}
|
||||
{forced_workspace_line}
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "http://127.0.0.1:0/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
"#
|
||||
);
|
||||
std::fs::write(config_toml, contents)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn logout_account_removes_auth_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
login_with_api_key(
|
||||
codex_home.path(),
|
||||
"sk-test-key",
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let id = mcp.send_logout_account_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: LogoutAccountResponse = to_response(resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
assert!(
|
||||
payload.auth_mode.is_none(),
|
||||
"auth_method should be None after logout"
|
||||
);
|
||||
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should be deleted"
|
||||
);
|
||||
|
||||
let get_id = mcp
|
||||
.send_get_account_request(GetAccountParams {
|
||||
refresh_token: false,
|
||||
})
|
||||
.await?;
|
||||
let get_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(get_resp)?;
|
||||
assert_eq!(account.account, None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
assert_eq!(login, LoginAccountResponse::ApiKey {});
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, None);
|
||||
pretty_assertions::assert_eq!(payload.success, true);
|
||||
pretty_assertions::assert_eq!(payload.error, None);
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.auth_mode, Some(AuthMode::ApiKey));
|
||||
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_rejected_when_forced_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("chatgpt".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"API key login is disabled. Use ChatGPT login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_chatgpt_rejected_when_forced_api() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("api".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"ChatGPT login is disabled. Use API key login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_start() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { login_id, auth_url } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("redirect_uri=http%3A%2F%2Flocalhost"),
|
||||
"auth_url should contain a redirect_uri to localhost"
|
||||
);
|
||||
|
||||
let cancel_id = mcp
|
||||
.send_cancel_login_account_request(CancelLoginAccountParams {
|
||||
login_id: login_id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let cancel_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: CancelLoginAccountResponse = to_response(cancel_resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, Some(login_id));
|
||||
pretty_assertions::assert_eq!(payload.success, false);
|
||||
assert!(
|
||||
payload.error.is_some(),
|
||||
"expected a non-empty error on cancel"
|
||||
);
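    // Negative check: a cancelled login must not emit account/updated, so this short wait is expected to time out.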
|
||||
|
||||
let maybe_updated = timeout(
|
||||
Duration::from_millis(500),
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
maybe_updated.is_err(),
|
||||
"account/updated should not be emitted when login is cancelled"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_workspace_id: Some("ws-forced".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { auth_url, .. } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("allowed_workspace_id=ws-forced"),
|
||||
"auth URL should include forced workspace"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_no_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
assert_eq!(account.account, None, "expected no account");
|
||||
assert_eq!(account.requires_openai_auth, true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_api_key() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let _login_ok = to_response::<LoginAccountResponse>(resp)?;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::ApiKey {}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_when_auth_not_required() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(false),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: None,
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("access-chatgpt")
|
||||
.email("user@example.com")
|
||||
.plan_type("pro"),
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
9
codex-rs/app-server/tests/suite/v2/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
mod account;
|
||||
mod model_list;
|
||||
mod rate_limits;
|
||||
mod thread_archive;
|
||||
mod thread_list;
|
||||
mod thread_resume;
|
||||
mod thread_start;
|
||||
mod turn_interrupt;
|
||||
mod turn_start;
|
||||
186
codex-rs/app-server/tests/suite/v2/model_list.rs
Normal file
@@ -0,0 +1,186 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::ModelListResponse;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
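// -32600 is the standard JSON-RPC 2.0 "Invalid Request" error code.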
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(100),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ModelListResponse {
|
||||
data: items,
|
||||
next_cursor,
|
||||
} = to_response::<ModelListResponse>(response)?;
|
||||
|
||||
let expected_models = vec![
|
||||
Model {
|
||||
id: "gpt-5-codex".to_string(),
|
||||
model: "gpt-5-codex".to_string(),
|
||||
display_name: "gpt-5-codex".to_string(),
|
||||
description: "Optimized for codex.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Fastest responses with limited reasoning".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Dynamically adjusts reasoning based on the task".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: true,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
display_name: "gpt-5".to_string(),
|
||||
description: "Broad world knowledge with strong general reasoning.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Minimal,
|
||||
description: "Fastest responses with little reasoning".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Balances speed with some reasoning; useful for straightforward \
|
||||
queries and short explanations"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Provides a solid balance of reasoning depth and latency for \
|
||||
general-purpose tasks"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: false,
|
||||
},
|
||||
];
|
||||
|
||||
assert_eq!(items, expected_models);
|
||||
assert!(next_cursor.is_none());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_models_pagination_works() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let first_request = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let first_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_request)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ModelListResponse {
|
||||
data: first_items,
|
||||
next_cursor: first_cursor,
|
||||
} = to_response::<ModelListResponse>(first_response)?;
|
||||
|
||||
assert_eq!(first_items.len(), 1);
|
||||
assert_eq!(first_items[0].id, "gpt-5-codex");
|
||||
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;
|
||||
|
||||
let second_request = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: Some(next_cursor.clone()),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let second_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_request)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ModelListResponse {
|
||||
data: second_items,
|
||||
next_cursor: second_cursor,
|
||||
} = to_response::<ModelListResponse>(second_response)?;
|
||||
|
||||
assert_eq!(second_items.len(), 1);
|
||||
assert_eq!(second_items[0].id, "gpt-5");
|
||||
assert!(second_cursor.is_none());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_models_rejects_invalid_cursor() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: None,
|
||||
cursor: Some("invalid".to_string()),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(error.id, RequestId::Integer(request_id));
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert_eq!(error.error.message, "invalid cursor: invalid");
|
||||
Ok(())
|
||||
}
|
||||
181
codex-rs/app-server/tests/suite/v2/rate_limits.rs
Normal file
@@ -0,0 +1,181 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::header;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(error.id, RequestId::Integer(request_id));
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert_eq!(
|
||||
error.error.message,
|
||||
"codex account authentication required to read rate limits"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
login_with_api_key(&mut mcp, "sk-test-key").await?;
|
||||
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(error.id, RequestId::Integer(request_id));
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert_eq!(
|
||||
error.error.message,
|
||||
"chatgpt authentication required to read rate limits"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("chatgpt-token")
|
||||
.account_id("account-123")
|
||||
.plan_type("pro"),
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let server = MockServer::start().await;
|
||||
let server_url = server.uri();
|
||||
write_chatgpt_base_url(codex_home.path(), &server_url)?;
|
||||
|
||||
let primary_reset_timestamp = chrono::DateTime::parse_from_rfc3339("2025-01-01T00:02:00Z")
|
||||
.expect("parse primary reset timestamp")
|
||||
.timestamp();
|
||||
let secondary_reset_timestamp = chrono::DateTime::parse_from_rfc3339("2025-01-01T01:00:00Z")
|
||||
.expect("parse secondary reset timestamp")
|
||||
.timestamp();
|
||||
let response_body = json!({
|
||||
"plan_type": "pro",
|
||||
"rate_limit": {
|
||||
"allowed": true,
|
||||
"limit_reached": false,
|
||||
"primary_window": {
|
||||
"used_percent": 42,
|
||||
"limit_window_seconds": 3600,
|
||||
"reset_after_seconds": 120,
|
||||
"reset_at": primary_reset_timestamp,
|
||||
},
|
||||
"secondary_window": {
|
||||
"used_percent": 5,
|
||||
"limit_window_seconds": 86400,
|
||||
"reset_after_seconds": 43200,
|
||||
"reset_at": secondary_reset_timestamp,
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/api/codex/usage"))
|
||||
.and(header("authorization", "Bearer chatgpt-token"))
|
||||
.and(header("chatgpt-account-id", "account-123"))
|
||||
.respond_with(ResponseTemplate::new(200).set_body_json(response_body))
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let received: GetAccountRateLimitsResponse = to_response(response)?;
|
||||
|
||||
let expected = GetAccountRateLimitsResponse {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
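                // The API's 3600-second primary window surfaces here as 60 minutes.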
|
||||
used_percent: 42,
|
||||
window_duration_mins: Some(60),
|
||||
resets_at: Some(primary_reset_timestamp),
|
||||
}),
|
||||
secondary: Some(RateLimitWindow {
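                // The API's 86400-second secondary window surfaces here as 1440 minutes.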
|
||||
used_percent: 5,
|
||||
window_duration_mins: Some(1440),
|
||||
resets_at: Some(secondary_reset_timestamp),
|
||||
}),
|
||||
},
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn login_with_api_key(mcp: &mut McpProcess, api_key: &str) -> Result<()> {
|
||||
let request_id = mcp
|
||||
.send_login_api_key_request(LoginApiKeyParams {
|
||||
api_key: api_key.to_string(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_chatgpt_base_url(codex_home: &Path, base_url: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(config_toml, format!("chatgpt_base_url = \"{base_url}\"\n"))
|
||||
}
|
||||
93
codex-rs/app-server/tests/suite/v2/thread_archive.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadArchiveResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use codex_core::find_conversation_path_by_id_str;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_archive_moves_rollout_into_archived_directory() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread.
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
assert!(!thread.id.is_empty());
|
||||
|
||||
// Locate the rollout path recorded for this thread id.
|
||||
let rollout_path = find_conversation_path_by_id_str(codex_home.path(), &thread.id)
|
||||
.await?
|
||||
.expect("expected rollout path for thread id to exist");
|
||||
assert!(
|
||||
rollout_path.exists(),
|
||||
"expected {} to exist",
|
||||
rollout_path.display()
|
||||
);
|
||||
|
||||
// Archive the thread.
|
||||
let archive_id = mcp
|
||||
.send_thread_archive_request(ThreadArchiveParams {
|
||||
thread_id: thread.id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let archive_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
|
||||
)
|
||||
.await??;
|
||||
let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;
|
||||
|
||||
// Verify file moved.
|
||||
let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
// The archived file keeps the original filename (rollout-...-<id>.jsonl).
|
||||
let archived_rollout_path =
|
||||
archived_directory.join(rollout_path.file_name().expect("rollout file name"));
|
||||
assert!(
|
||||
!rollout_path.exists(),
|
||||
"expected rollout path {} to be moved",
|
||||
rollout_path.display()
|
||||
);
|
||||
assert!(
|
||||
archived_rollout_path.exists(),
|
||||
"expected archived rollout path {} to exist",
|
||||
archived_rollout_path.display()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(config_toml, config_contents())
|
||||
}
|
||||
|
||||
fn config_contents() -> &'static str {
|
||||
r#"model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
"#
|
||||
}
|
||||
220
codex-rs/app-server/tests/suite/v2/thread_list.rs
Normal file
@@ -0,0 +1,220 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadListResponse;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use uuid::Uuid;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_basic_empty() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// List threads in an empty CODEX_HOME; should return an empty page with nextCursor: null.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: None,
|
||||
})
|
||||
.await?;
|
||||
let list_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(list_resp)?;
|
||||
assert!(data.is_empty());
|
||||
assert_eq!(next_cursor, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Minimal config.toml for listing.
|
||||
fn create_minimal_config(codex_home: &std::path::Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create three rollouts so we can paginate with limit=2.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T12-00-00",
|
||||
"2025-01-02T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _b = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T13-00-00",
|
||||
"2025-01-01T13:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _c = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T12-00-00",
|
||||
"2025-01-01T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Page 1: limit 2 → expect next_cursor Some.
|
||||
let page1_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page1_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page1_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data1,
|
||||
next_cursor: cursor1,
|
||||
} = to_response::<ThreadListResponse>(page1_resp)?;
|
||||
assert_eq!(data1.len(), 2);
|
||||
for thread in &data1 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
let cursor1 = cursor1.expect("expected nextCursor on first page");
|
||||
|
||||
// Page 2: with cursor → expect next_cursor None when no more results.
|
||||
let page2_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: Some(cursor1),
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page2_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page2_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data2,
|
||||
next_cursor: cursor2,
|
||||
} = to_response::<ThreadListResponse>(page2_resp)?;
|
||||
assert!(data2.len() <= 2);
|
||||
for thread in &data2 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
assert_eq!(cursor2, None, "expected nextCursor to be null on last page");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_respects_provider_filter() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create rollouts under two providers.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T10-00-00",
|
||||
"2025-01-02T10:00:00Z",
|
||||
"X",
|
||||
Some("mock_provider"),
|
||||
)?; // mock_provider
|
||||
// one with a different provider
|
||||
let uuid = Uuid::new_v4();
|
||||
let dir = codex_home
|
||||
.path()
|
||||
.join("sessions")
|
||||
.join("2025")
|
||||
.join("01")
|
||||
.join("02");
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
let file_path = dir.join(format!("rollout-2025-01-02T11-00-00-{uuid}.jsonl"));
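    // Handcrafted rollout: a session_meta line tagging "other_provider", followed by the
    // response_item/event_msg pair for a single user message "X".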
|
||||
let lines = [
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type": "session_meta",
|
||||
"payload": {
|
||||
"id": uuid,
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null,
|
||||
"source": "vscode",
|
||||
"model_provider": "other_provider"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type":"response_item",
|
||||
"payload": {"type":"message","role":"user","content":[{"type":"input_text","text":"X"}]}
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": "2025-01-02T11:00:00Z",
|
||||
"type":"event_msg",
|
||||
"payload": {"type":"user_message","message":"X","kind":"plain"}
|
||||
})
|
||||
.to_string(),
|
||||
];
|
||||
std::fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Filter to only other_provider; expect 1 item, nextCursor None.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: Some(vec!["other_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;
|
||||
assert_eq!(data.len(), 1);
|
||||
assert_eq!(next_cursor, None);
|
||||
let thread = &data[0];
|
||||
assert_eq!(thread.preview, "X");
|
||||
assert_eq!(thread.model_provider, "other_provider");
|
||||
let expected_ts = chrono::DateTime::parse_from_rfc3339("2025-01-02T11:00:00Z")?.timestamp();
|
||||
assert_eq!(thread.created_at, expected_ts);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
79
codex-rs/app-server/tests/suite/v2/thread_resume.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadResumeResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_returns_existing_thread() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread.
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("gpt-5-codex".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// Resume it via v2 API.
|
||||
let resume_id = mcp
|
||||
.send_thread_resume_request(ThreadResumeParams {
|
||||
thread_id: thread.id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadResumeResponse { thread: resumed } =
|
||||
to_response::<ThreadResumeResponse>(resume_resp)?;
|
||||
assert_eq!(resumed, thread);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
90
codex-rs/app-server/tests/suite/v2/thread_start.rs
Normal file
@@ -0,0 +1,90 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::ThreadStartedNotification;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
|
||||
// Provide a mock server and config so model wiring is valid.
|
||||
let server = create_mock_chat_completions_server(vec![]).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
// Start server and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a v2 thread with an explicit model override.
|
||||
let req_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("gpt-5".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Expect a proper JSON-RPC response with a thread id.
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(resp)?;
|
||||
assert!(!thread.id.is_empty(), "thread id should not be empty");
|
||||
assert!(
|
||||
thread.preview.is_empty(),
|
||||
"new threads should start with an empty preview"
|
||||
);
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(
|
||||
thread.created_at > 0,
|
||||
"created_at should be a positive UNIX timestamp"
|
||||
);
|
||||
|
||||
// A corresponding thread/started notification should arrive.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("thread/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: ThreadStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(started.thread, thread);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
128
codex-rs/app-server/tests/suite/v2/turn_interrupt.rs
Normal file
@@ -0,0 +1,128 @@
|
||||
#![cfg(unix)]
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnInterruptResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_interrupt_aborts_running_turn() -> Result<()> {
|
||||
// Use a portable sleep command to keep the turn running.
|
||||
#[cfg(target_os = "windows")]
|
||||
let shell_command = vec![
|
||||
"powershell".to_string(),
|
||||
"-Command".to_string(),
|
||||
"Start-Sleep -Seconds 10".to_string(),
|
||||
];
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let shell_command = vec!["sleep".to_string(), "10".to_string()];
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let working_directory = tmp.path().join("workdir");
|
||||
std::fs::create_dir(&working_directory)?;
|
||||
|
||||
    // Mock server: serves a single long-running shell command; nothing further is needed after the abort.
|
||||
let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
|
||||
shell_command.clone(),
|
||||
Some(&working_directory),
|
||||
Some(10_000),
|
||||
"call_sleep",
|
||||
)?])
|
||||
.await;
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a v2 thread and capture its id.
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
// Start a turn that triggers a long-running command.
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run sleep".to_string(),
|
||||
}],
|
||||
cwd: Some(working_directory.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
|
||||
// Give the command a brief moment to start.
|
||||
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||
|
||||
// Interrupt the in-progress turn by id (v2 API).
|
||||
let interrupt_id = mcp
|
||||
.send_turn_interrupt_request(TurnInterruptParams {
|
||||
thread_id: thread.id,
|
||||
turn_id: turn.id,
|
||||
})
|
||||
.await?;
|
||||
let interrupt_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(interrupt_id)),
|
||||
)
|
||||
.await??;
|
||||
let _resp: TurnInterruptResponse = to_response::<TurnInterruptResponse>(interrupt_resp)?;
|
||||
|
||||
// No fields to assert on; successful deserialization confirms proper response shape.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "workspace-write"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
486
codex-rs/app-server/tests/suite/v2/turn_start.rs
Normal file
@@ -0,0 +1,486 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {
|
||||
// Provide a mock server and config so model wiring is valid.
|
||||
// Three Codex turns hit the mock model (session start + two turn/start calls).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread (v2) and capture its id.
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
// Start a turn with only input and thread_id set (no overrides).
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// Expect a turn/started notification.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: TurnStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(
|
||||
started.turn.status,
|
||||
codex_app_server_protocol::TurnStatus::InProgress
|
||||
);
|
||||
|
||||
// Send a second turn that exercises the overrides path: change the model.
|
||||
let turn_req2 = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Second".to_string(),
|
||||
}],
|
||||
model: Some("mock-model-override".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp2: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req2)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn: turn2 } = to_response::<TurnStartResponse>(turn_resp2)?;
|
||||
assert!(!turn2.id.is_empty());
|
||||
// Ensure the second turn has a different id than the first.
|
||||
assert_ne!(turn.id, turn2.id);
|
||||
|
||||
// Expect a second turn/started notification as well.
|
||||
let _notif2: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// And we should ultimately get a task_complete without having to add a
|
||||
// legacy conversation listener explicitly (auto-attached by thread/start).
|
||||
let _task_complete: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_accepts_local_image_input() -> Result<()> {
|
||||
// Two Codex turns hit the mock model (session start + turn/start).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
// Use the unchecked variant because the request payload includes a LocalImage
|
||||
// which the strict matcher does not currently cover.
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let image_path = codex_home.path().join("image.png");
|
||||
// No need to actually write the file; we just exercise the input path.
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::LocalImage { path: image_path }],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// This test only validates that turn/start responds and returns a turn.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().to_path_buf();
|
||||
|
||||
// Mock server: first turn requests a shell call (elicitation), then completes.
|
||||
// Second turn same, but we'll set approval_policy=never to avoid elicitation.
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call1",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call2",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
// Default approval is untrusted to force elicitation on first turn.
|
||||
create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.as_path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// turn/start — expect ExecCommandApproval request from server
|
||||
let first_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
// Acknowledge RPC
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Receive elicitation
|
||||
let server_req = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ExecCommandApproval { request_id, params } = server_req else {
|
||||
panic!("expected ExecCommandApproval request");
|
||||
};
|
||||
assert_eq!(params.call_id, "call1");
|
||||
assert_eq!(
|
||||
params.parsed_cmd,
|
||||
vec![ParsedCommand::Unknown {
|
||||
cmd: "python3 -c 'print(42)'".to_string()
|
||||
}]
|
||||
);
|
||||
|
||||
// Approve and wait for task completion
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
|
||||
)
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Second turn with approval_policy=never should not elicit approval
|
||||
let second_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python again".to_string(),
|
||||
}],
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Ensure we do NOT receive an ExecCommandApproval request before task completes
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
|
||||
// When returning Result from a test, pass an Ok(()) to the skip macro
|
||||
// so the early return type matches. The no-arg form returns unit.
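    // A minimal sketch of what such a macro might look like (illustrative only; the env var
    // name and the exact shape of the real macro in this repo are assumed):
    //
    //     macro_rules! skip_if_no_network {
    //         () => {
    //             if std::env::var_os("CODEX_SANDBOX_NETWORK_DISABLED").is_some() {
    //                 eprintln!("skipping test: network is disabled");
    //                 return;
    //             }
    //         };
    //         ($ret:expr) => {
    //             if std::env::var_os("CODEX_SANDBOX_NETWORK_DISABLED").is_some() {
    //                 eprintln!("skipping test: network is disabled");
    //                 return $ret;
    //             }
    //         };
    //     }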
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace_root = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace_root)?;
|
||||
let first_cwd = workspace_root.join("turn1");
|
||||
let second_cwd = workspace_root.join("turn2");
|
||||
std::fs::create_dir(&first_cwd)?;
|
||||
std::fs::create_dir(&second_cwd)?;
|
||||
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo first turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-first",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done first")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-second",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done second")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// first turn with workspace-write sandbox and first_cwd
|
||||
let first_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "first turn".to_string(),
|
||||
}],
|
||||
cwd: Some(first_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: vec![first_cwd.clone()],
|
||||
network_access: false,
|
||||
exclude_tmpdir_env_var: false,
|
||||
exclude_slash_tmp: false,
|
||||
}),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn)),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// second turn with workspace-write and second_cwd, ensure exec begins in second_cwd
|
||||
let second_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "second turn".to_string(),
|
||||
}],
|
||||
cwd: Some(second_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let exec_begin_notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
|
||||
)
|
||||
.await??;
|
||||
let params = exec_begin_notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("exec_command_begin params");
|
||||
let event: Event = serde_json::from_value(params).expect("deserialize exec begin event");
|
||||
let exec_begin = match event.msg {
|
||||
EventMsg::ExecCommandBegin(exec_begin) => exec_begin,
|
||||
other => panic!("expected ExecCommandBegin event, got {other:?}"),
|
||||
};
|
||||
assert_eq!(exec_begin.cwd, second_cwd);
|
||||
assert_eq!(
|
||||
exec_begin.command,
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string()
|
||||
]
|
||||
);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
    codex_home: &Path,
    server_uri: &str,
    approval_policy: &str,
) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "{approval_policy}"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
@@ -288,7 +288,7 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
                path,
                ApplyPatchFileChange::Update {
                    unified_diff,
                    move_path: move_path.map(|p| cwd.join(p)),
                    move_path: move_path.map(|p| effective_cwd.join(p)),
                    new_content: contents,
                },
            );
@@ -1603,6 +1603,53 @@ g
    );
}

#[test]
fn test_apply_patch_resolves_move_path_with_effective_cwd() {
    let session_dir = tempdir().unwrap();
    let worktree_rel = "alt";
    let worktree_dir = session_dir.path().join(worktree_rel);
    fs::create_dir_all(&worktree_dir).unwrap();

    let source_name = "old.txt";
    let dest_name = "renamed.txt";
    let source_path = worktree_dir.join(source_name);
    fs::write(&source_path, "before\n").unwrap();

    let patch = wrap_patch(&format!(
        r#"*** Update File: {source_name}
*** Move to: {dest_name}
@@
-before
+after"#
    ));

    let shell_script = format!("cd {worktree_rel} && apply_patch <<'PATCH'\n{patch}\nPATCH");
    let argv = vec!["bash".into(), "-lc".into(), shell_script];

    let result = maybe_parse_apply_patch_verified(&argv, session_dir.path());
    let action = match result {
        MaybeApplyPatchVerified::Body(action) => action,
        other => panic!("expected verified body, got {other:?}"),
    };

    assert_eq!(action.cwd, worktree_dir);

    let change = action
        .changes()
        .get(&worktree_dir.join(source_name))
        .expect("source file change present");

    match change {
        ApplyPatchFileChange::Update { move_path, .. } => {
            assert_eq!(
                move_path.as_deref(),
                Some(worktree_dir.join(dest_name).as_path())
            );
        }
        other => panic!("expected update change, got {other:?}"),
    }
}

#[test]
fn test_apply_patch_fails_on_write_error() {
    let dir = tempdir().unwrap();

@@ -1 +1,3 @@
mod cli;
#[cfg(not(target_os = "windows"))]
mod tool;
codex-rs/apply-patch/tests/suite/tool.rs (new file, 257 lines)
@@ -0,0 +1,257 @@
|
||||
use assert_cmd::Command;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use tempfile::tempdir;
|
||||
|
||||
fn run_apply_patch_in_dir(dir: &Path, patch: &str) -> anyhow::Result<assert_cmd::assert::Assert> {
|
||||
let mut cmd = Command::cargo_bin("apply_patch")?;
|
||||
cmd.current_dir(dir);
|
||||
Ok(cmd.arg(patch).assert())
|
||||
}
|
||||
|
||||
fn apply_patch_command(dir: &Path) -> anyhow::Result<Command> {
|
||||
let mut cmd = Command::cargo_bin("apply_patch")?;
|
||||
cmd.current_dir(dir);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_applies_multiple_operations() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let modify_path = tmp.path().join("modify.txt");
|
||||
let delete_path = tmp.path().join("delete.txt");
|
||||
|
||||
fs::write(&modify_path, "line1\nline2\n")?;
|
||||
fs::write(&delete_path, "obsolete\n")?;
|
||||
|
||||
let patch = "*** Begin Patch\n*** Add File: nested/new.txt\n+created\n*** Delete File: delete.txt\n*** Update File: modify.txt\n@@\n-line2\n+changed\n*** End Patch";
|
||||
|
||||
run_apply_patch_in_dir(tmp.path(), patch)?.success().stdout(
|
||||
"Success. Updated the following files:\nA nested/new.txt\nM modify.txt\nD delete.txt\n",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
fs::read_to_string(tmp.path().join("nested/new.txt"))?,
|
||||
"created\n"
|
||||
);
|
||||
assert_eq!(fs::read_to_string(&modify_path)?, "line1\nchanged\n");
|
||||
assert!(!delete_path.exists());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_applies_multiple_chunks() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let target_path = tmp.path().join("multi.txt");
|
||||
fs::write(&target_path, "line1\nline2\nline3\nline4\n")?;
|
||||
|
||||
let patch = "*** Begin Patch\n*** Update File: multi.txt\n@@\n-line2\n+changed2\n@@\n-line4\n+changed4\n*** End Patch";
|
||||
|
||||
run_apply_patch_in_dir(tmp.path(), patch)?
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nM multi.txt\n");
|
||||
|
||||
assert_eq!(
|
||||
fs::read_to_string(&target_path)?,
|
||||
"line1\nchanged2\nline3\nchanged4\n"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_moves_file_to_new_directory() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let original_path = tmp.path().join("old/name.txt");
|
||||
let new_path = tmp.path().join("renamed/dir/name.txt");
|
||||
fs::create_dir_all(original_path.parent().expect("parent should exist"))?;
|
||||
fs::write(&original_path, "old content\n")?;
|
||||
|
||||
let patch = "*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-old content\n+new content\n*** End Patch";
|
||||
|
||||
run_apply_patch_in_dir(tmp.path(), patch)?
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nM renamed/dir/name.txt\n");
|
||||
|
||||
assert!(!original_path.exists());
|
||||
assert_eq!(fs::read_to_string(&new_path)?, "new content\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_rejects_empty_patch() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("No files were modified.\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_reports_missing_context() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let target_path = tmp.path().join("modify.txt");
|
||||
fs::write(&target_path, "line1\nline2\n")?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Update File: modify.txt\n@@\n-missing\n+changed\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("Failed to find expected lines in modify.txt:\nmissing\n");
|
||||
assert_eq!(fs::read_to_string(&target_path)?, "line1\nline2\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_rejects_missing_file_delete() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Delete File: missing.txt\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("Failed to delete file missing.txt\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_rejects_empty_update_hunk() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Update File: foo.txt\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("Invalid patch hunk on line 2: Update file hunk for path 'foo.txt' is empty\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_requires_existing_file_for_update() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(
|
||||
"Failed to read file to update missing.txt: No such file or directory (os error 2)\n",
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_move_overwrites_existing_destination() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let original_path = tmp.path().join("old/name.txt");
|
||||
let destination = tmp.path().join("renamed/dir/name.txt");
|
||||
fs::create_dir_all(original_path.parent().expect("parent should exist"))?;
|
||||
fs::create_dir_all(destination.parent().expect("parent should exist"))?;
|
||||
fs::write(&original_path, "from\n")?;
|
||||
fs::write(&destination, "existing\n")?;
|
||||
|
||||
run_apply_patch_in_dir(
|
||||
tmp.path(),
|
||||
"*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-from\n+new\n*** End Patch",
|
||||
)?
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nM renamed/dir/name.txt\n");
|
||||
|
||||
assert!(!original_path.exists());
|
||||
assert_eq!(fs::read_to_string(&destination)?, "new\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_add_overwrites_existing_file() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let path = tmp.path().join("duplicate.txt");
|
||||
fs::write(&path, "old content\n")?;
|
||||
|
||||
run_apply_patch_in_dir(
|
||||
tmp.path(),
|
||||
"*** Begin Patch\n*** Add File: duplicate.txt\n+new content\n*** End Patch",
|
||||
)?
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nA duplicate.txt\n");
|
||||
|
||||
assert_eq!(fs::read_to_string(&path)?, "new content\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_delete_directory_fails() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
fs::create_dir(tmp.path().join("dir"))?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Delete File: dir\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("Failed to delete file dir\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_rejects_invalid_hunk_header() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Frobnicate File: foo\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr("Invalid patch hunk on line 2: '*** Frobnicate File: foo' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_updates_file_appends_trailing_newline() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let target_path = tmp.path().join("no_newline.txt");
|
||||
fs::write(&target_path, "no newline at end")?;
|
||||
|
||||
run_apply_patch_in_dir(
|
||||
tmp.path(),
|
||||
"*** Begin Patch\n*** Update File: no_newline.txt\n@@\n-no newline at end\n+first line\n+second line\n*** End Patch",
|
||||
)?
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nM no_newline.txt\n");
|
||||
|
||||
let contents = fs::read_to_string(&target_path)?;
|
||||
assert!(contents.ends_with('\n'));
|
||||
assert_eq!(contents, "first line\nsecond line\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_cli_failure_after_partial_success_leaves_changes() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let new_file = tmp.path().join("created.txt");
|
||||
|
||||
apply_patch_command(tmp.path())?
|
||||
.arg("*** Begin Patch\n*** Add File: created.txt\n+hello\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch")
|
||||
.assert()
|
||||
.failure()
|
||||
.stdout("")
|
||||
.stderr("Failed to read file to update missing.txt: No such file or directory (os error 2)\n");
|
||||
|
||||
assert_eq!(fs::read_to_string(&new_file)?, "hello\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -11,32 +11,7 @@ const LINUX_SANDBOX_ARG0: &str = "codex-linux-sandbox";
|
||||
const APPLY_PATCH_ARG0: &str = "apply_patch";
|
||||
const MISSPELLED_APPLY_PATCH_ARG0: &str = "applypatch";
|
||||
|
||||
/// While we want to deploy the Codex CLI as a single executable for simplicity,
|
||||
/// we also want to expose some of its functionality as distinct CLIs, so we use
|
||||
/// the "arg0 trick" to determine which CLI to dispatch. This effectively allows
|
||||
/// us to simulate deploying multiple executables as a single binary on Mac and
|
||||
/// Linux (but not Windows).
|
||||
///
|
||||
/// When the current executable is invoked through the hard-link or alias named
|
||||
/// `codex-linux-sandbox` we *directly* execute
|
||||
/// [`codex_linux_sandbox::run_main`] (which never returns). Otherwise we:
|
||||
///
|
||||
/// 1. Load `.env` values from `~/.codex/.env` before creating any threads.
|
||||
/// 2. Construct a Tokio multi-thread runtime.
|
||||
/// 3. Derive the path to the current executable (so children can re-invoke the
|
||||
/// sandbox) when running on Linux.
|
||||
/// 4. Execute the provided async `main_fn` inside that runtime, forwarding any
|
||||
/// error. Note that `main_fn` receives `codex_linux_sandbox_exe:
|
||||
/// Option<PathBuf>` as an argument, which is generally needed as part of
|
||||
/// constructing [`codex_core::config::Config`].
|
||||
///
|
||||
/// This function should be used to wrap any `main()` function in binary crates
|
||||
/// in this workspace that depends on these helper CLIs.
|
||||
pub fn arg0_dispatch_or_else<F, Fut>(main_fn: F) -> anyhow::Result<()>
|
||||
where
|
||||
F: FnOnce(Option<PathBuf>) -> Fut,
|
||||
Fut: Future<Output = anyhow::Result<()>>,
|
||||
{
|
||||
pub fn arg0_dispatch() -> Option<TempDir> {
|
||||
// Determine if we were invoked via the special alias.
|
||||
let mut args = std::env::args_os();
|
||||
let argv0 = args.next().unwrap_or_default();
|
||||
@@ -76,10 +51,7 @@ where
|
||||
// before creating any threads/the Tokio runtime.
|
||||
load_dotenv();
|
||||
|
||||
// Retain the TempDir so it exists for the lifetime of the invocation of
|
||||
// this executable. Admittedly, we could invoke `keep()` on it, but it
|
||||
// would be nice to avoid leaving temporary directories behind, if possible.
|
||||
let _path_entry = match prepend_path_entry_for_apply_patch() {
|
||||
match prepend_path_entry_for_codex_aliases() {
|
||||
Ok(path_entry) => Some(path_entry),
|
||||
Err(err) => {
|
||||
// It is possible that Codex will proceed successfully even if
|
||||
@@ -87,7 +59,39 @@ where
|
||||
eprintln!("WARNING: proceeding, even though we could not update PATH: {err}");
|
||||
None
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// While we want to deploy the Codex CLI as a single executable for simplicity,
|
||||
/// we also want to expose some of its functionality as distinct CLIs, so we use
|
||||
/// the "arg0 trick" to determine which CLI to dispatch. This effectively allows
|
||||
/// us to simulate deploying multiple executables as a single binary on Mac and
|
||||
/// Linux (but not Windows).
|
||||
///
|
||||
/// When the current executable is invoked through the hard-link or alias named
|
||||
/// `codex-linux-sandbox` we *directly* execute
|
||||
/// [`codex_linux_sandbox::run_main`] (which never returns). Otherwise we:
|
||||
///
|
||||
/// 1. Load `.env` values from `~/.codex/.env` before creating any threads.
|
||||
/// 2. Construct a Tokio multi-thread runtime.
|
||||
/// 3. Derive the path to the current executable (so children can re-invoke the
|
||||
/// sandbox) when running on Linux.
|
||||
/// 4. Execute the provided async `main_fn` inside that runtime, forwarding any
|
||||
/// error. Note that `main_fn` receives `codex_linux_sandbox_exe:
|
||||
/// Option<PathBuf>` as an argument, which is generally needed as part of
|
||||
/// constructing [`codex_core::config::Config`].
|
||||
///
|
||||
/// This function should be used to wrap any `main()` function in binary crates
|
||||
/// in this workspace that depends on these helper CLIs.
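///
/// A minimal sketch of a wrapping `main()` (illustrative only; real callers pass their own
/// async entry point):
///
/// ```ignore
/// fn main() -> anyhow::Result<()> {
///     arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
///         // Build codex_core::config::Config using `codex_linux_sandbox_exe`, then run the CLI.
///         let _ = codex_linux_sandbox_exe;
///         Ok(())
///     })
/// }
/// ```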
|
||||
pub fn arg0_dispatch_or_else<F, Fut>(main_fn: F) -> anyhow::Result<()>
|
||||
where
|
||||
F: FnOnce(Option<PathBuf>) -> Fut,
|
||||
Fut: Future<Output = anyhow::Result<()>>,
|
||||
{
|
||||
// Retain the TempDir so it exists for the lifetime of the invocation of
|
||||
// this executable. Admittedly, we could invoke `keep()` on it, but it
|
||||
// would be nice to avoid leaving temporary directories behind, if possible.
|
||||
let _path_entry = arg0_dispatch();
|
||||
|
||||
// Regular invocation – create a Tokio runtime and execute the provided
|
||||
// async entry-point.
|
||||
@@ -144,11 +148,16 @@ where
|
||||
///
|
||||
/// IMPORTANT: This function modifies the PATH environment variable, so it MUST
|
||||
/// be called before multiple threads are spawned.
|
||||
fn prepend_path_entry_for_apply_patch() -> std::io::Result<TempDir> {
|
||||
pub fn prepend_path_entry_for_codex_aliases() -> std::io::Result<TempDir> {
|
||||
let temp_dir = TempDir::new()?;
|
||||
let path = temp_dir.path();
|
||||
|
||||
for filename in &[APPLY_PATCH_ARG0, MISSPELLED_APPLY_PATCH_ARG0] {
|
||||
for filename in &[
|
||||
APPLY_PATCH_ARG0,
|
||||
MISSPELLED_APPLY_PATCH_ARG0,
|
||||
#[cfg(target_os = "linux")]
|
||||
LINUX_SANDBOX_ARG0,
|
||||
] {
|
||||
let exe = std::env::current_exe()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
|
||||
@@ -13,6 +13,8 @@ serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "1"
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
use crate::types::CodeTaskDetailsResponse;
|
||||
use crate::types::PaginatedListTaskListItem;
|
||||
use crate::types::RateLimitStatusPayload;
|
||||
use crate::types::RateLimitWindowSnapshot;
|
||||
use crate::types::TurnAttemptsSiblingTurnsResponse;
|
||||
use anyhow::Result;
|
||||
use codex_core::auth::CodexAuth;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
use reqwest::header::CONTENT_TYPE;
|
||||
use reqwest::header::HeaderMap;
|
||||
@@ -64,6 +70,17 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn from_auth(base_url: impl Into<String>, auth: &CodexAuth) -> Result<Self> {
|
||||
let token = auth.get_token().await.map_err(anyhow::Error::from)?;
|
||||
let mut client = Self::new(base_url)?
|
||||
.with_user_agent(get_codex_user_agent())
|
||||
.with_bearer_token(token);
|
||||
if let Some(account_id) = auth.get_account_id() {
|
||||
client = client.with_chatgpt_account_id(account_id);
|
||||
}
|
||||
Ok(client)
|
||||
}
|
||||
|
||||
pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
|
||||
self.bearer_token = Some(token.into());
|
||||
self
|
||||
@@ -138,6 +155,17 @@ impl Client {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!("{}/api/codex/usage", self.base_url),
|
||||
PathStyle::ChatGptApi => format!("{}/wham/usage", self.base_url),
|
||||
};
|
||||
let req = self.http.get(&url).headers(self.headers());
|
||||
let (body, ct) = self.exec_request(req, "GET", &url).await?;
|
||||
let payload: RateLimitStatusPayload = self.decode_json(&url, &ct, &body)?;
|
||||
Ok(Self::rate_limit_snapshot_from_payload(payload))
|
||||
}
|
||||
|
||||
pub async fn list_tasks(
|
||||
&self,
|
||||
limit: Option<i32>,
|
||||
@@ -241,4 +269,49 @@ impl Client {
|
||||
Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
|
||||
}
|
||||
}
|
||||
|
||||
// rate limit helpers
|
||||
fn rate_limit_snapshot_from_payload(payload: RateLimitStatusPayload) -> RateLimitSnapshot {
|
||||
let Some(details) = payload
|
||||
.rate_limit
|
||||
.and_then(|inner| inner.map(|boxed| *boxed))
|
||||
else {
|
||||
return RateLimitSnapshot {
|
||||
primary: None,
|
||||
secondary: None,
|
||||
};
|
||||
};
|
||||
|
||||
RateLimitSnapshot {
|
||||
primary: Self::map_rate_limit_window(details.primary_window),
|
||||
secondary: Self::map_rate_limit_window(details.secondary_window),
|
||||
}
|
||||
}
|
||||
|
||||
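    // Note: the generated OpenAPI models appear to encode "optional and nullable" as a nested
    // Option, which is why the window parameter below is Option<Option<Box<_>>>.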
fn map_rate_limit_window(
|
||||
window: Option<Option<Box<RateLimitWindowSnapshot>>>,
|
||||
) -> Option<RateLimitWindow> {
|
||||
let snapshot = match window {
|
||||
Some(Some(snapshot)) => *snapshot,
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
let used_percent = f64::from(snapshot.used_percent);
|
||||
let window_minutes = Self::window_minutes_from_seconds(snapshot.limit_window_seconds);
|
||||
let resets_at = Some(i64::from(snapshot.reset_at));
|
||||
Some(RateLimitWindow {
|
||||
used_percent,
|
||||
window_minutes,
|
||||
resets_at,
|
||||
})
|
||||
}
|
||||
|
||||
fn window_minutes_from_seconds(seconds: i32) -> Option<i64> {
|
||||
if seconds <= 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let seconds_i64 = i64::from(seconds);
|
||||
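        // Round up to whole minutes (ceiling division) so a partial trailing minute still counts.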
Some((seconds_i64 + 59) / 60)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
|
||||
pub use codex_backend_openapi_models::models::PlanType;
|
||||
pub use codex_backend_openapi_models::models::RateLimitStatusDetails;
|
||||
pub use codex_backend_openapi_models::models::RateLimitStatusPayload;
|
||||
pub use codex_backend_openapi_models::models::RateLimitWindowSnapshot;
|
||||
pub use codex_backend_openapi_models::models::TaskListItem;
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
@@ -14,7 +14,7 @@ codex-core = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
codex-git = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -32,7 +32,8 @@ pub async fn run_apply_command(
|
||||
)
|
||||
.await?;
|
||||
|
||||
init_chatgpt_token_from_auth(&config.codex_home).await?;
|
||||
init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
|
||||
.await?;
|
||||
|
||||
let task_response = get_task(&config, apply_cli.task_id).await?;
|
||||
apply_diff_from_task(task_response, cwd).await
|
||||
@@ -58,13 +59,13 @@ pub async fn apply_diff_from_task(
|
||||
|
||||
async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
let req = codex_git::ApplyGitRequest {
|
||||
cwd,
|
||||
diff: diff.to_string(),
|
||||
revert: false,
|
||||
preflight: false,
|
||||
};
|
||||
let res = codex_git_apply::apply_git_patch(&req)?;
|
||||
let res = codex_git::apply_git_patch(&req)?;
|
||||
if res.exit_code != 0 {
|
||||
anyhow::bail!(
|
||||
"Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",
|
||||
|
||||
@@ -13,7 +13,8 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
|
||||
path: String,
|
||||
) -> anyhow::Result<T> {
|
||||
let chatgpt_base_url = &config.chatgpt_base_url;
|
||||
init_chatgpt_token_from_auth(&config.codex_home).await?;
|
||||
init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
|
||||
.await?;
|
||||
|
||||
// Make direct HTTP request to ChatGPT backend API with the token
|
||||
let client = create_client();
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::path::Path;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::RwLock;
|
||||
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::token_data::TokenData;
|
||||
|
||||
static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));
|
||||
@@ -18,8 +19,11 @@ pub fn set_chatgpt_token_data(value: TokenData) {
|
||||
}
|
||||
|
||||
/// Initialize the ChatGPT token from auth.json file
|
||||
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
|
||||
let auth = CodexAuth::from_codex_home(codex_home)?;
|
||||
pub async fn init_chatgpt_token_from_auth(
|
||||
codex_home: &Path,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<()> {
|
||||
let auth = CodexAuth::from_auth_storage(codex_home, auth_credentials_store_mode)?;
|
||||
if let Some(auth) = auth {
|
||||
let token_data = auth.get_token_data().await?;
|
||||
set_chatgpt_token_data(token_data);
|
||||
|
||||
@@ -30,15 +30,17 @@ codex-login = { workspace = true }
|
||||
codex-mcp-server = { workspace = true }
|
||||
codex-process-hardening = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-protocol-ts = { workspace = true }
|
||||
codex-responses-api-proxy = { workspace = true }
|
||||
codex-rmcp-client = { workspace = true }
|
||||
codex-stdio-to-uds = { workspace = true }
|
||||
codex-tui = { workspace = true }
|
||||
ctor = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
owo-colors = { workspace = true }
|
||||
regex-lite = { workspace = true}
|
||||
serde_json = { workspace = true }
|
||||
supports-color = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
@@ -46,6 +48,10 @@ tokio = { workspace = true, features = [
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = { workspace = true }
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
#[cfg(target_os = "macos")]
|
||||
mod pid_tracker;
|
||||
#[cfg(target_os = "macos")]
|
||||
mod seatbelt;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
@@ -5,20 +10,27 @@ use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::exec_env::create_env;
|
||||
use codex_core::landlock::spawn_command_under_linux_sandbox;
|
||||
#[cfg(target_os = "macos")]
|
||||
use codex_core::seatbelt::spawn_command_under_seatbelt;
|
||||
use codex_core::spawn::StdioPolicy;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
|
||||
use crate::LandlockCommand;
|
||||
use crate::SeatbeltCommand;
|
||||
use crate::WindowsCommand;
|
||||
use crate::exit_status::handle_exit_status;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use seatbelt::DenialLogger;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
command: SeatbeltCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let SeatbeltCommand {
|
||||
full_auto,
|
||||
log_denials,
|
||||
config_overrides,
|
||||
command,
|
||||
} = command;
|
||||
@@ -28,10 +40,19 @@ pub async fn run_command_under_seatbelt(
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Seatbelt,
|
||||
log_denials,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
_command: SeatbeltCommand,
|
||||
_codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
anyhow::bail!("Seatbelt sandbox is only available on macOS");
|
||||
}
|
||||
|
||||
pub async fn run_command_under_landlock(
|
||||
command: LandlockCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
@@ -47,13 +68,36 @@ pub async fn run_command_under_landlock(
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Landlock,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn run_command_under_windows(
|
||||
command: WindowsCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let WindowsCommand {
|
||||
full_auto,
|
||||
config_overrides,
|
||||
command,
|
||||
} = command;
|
||||
run_command_under_sandbox(
|
||||
full_auto,
|
||||
command,
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Windows,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
enum SandboxType {
|
||||
#[cfg(target_os = "macos")]
|
||||
Seatbelt,
|
||||
Landlock,
|
||||
Windows,
|
||||
}
|
||||
|
||||
async fn run_command_under_sandbox(
|
||||
@@ -62,6 +106,7 @@ async fn run_command_under_sandbox(
|
||||
config_overrides: CliConfigOverrides,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
sandbox_type: SandboxType,
|
||||
log_denials: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
let sandbox_mode = create_sandbox_mode(full_auto);
|
||||
let config = Config::load_with_cli_overrides(
|
||||
@@ -87,7 +132,74 @@ async fn run_command_under_sandbox(
|
||||
let stdio_policy = StdioPolicy::Inherit;
|
||||
let env = create_env(&config.shell_environment_policy);
|
||||
|
||||
// Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
|
||||
if let SandboxType::Windows = sandbox_type {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
|
||||
let policy_str = match &config.sandbox_policy {
|
||||
codex_core::protocol::SandboxPolicy::DangerFullAccess => "workspace-write",
|
||||
codex_core::protocol::SandboxPolicy::ReadOnly => "read-only",
|
||||
codex_core::protocol::SandboxPolicy::WorkspaceWrite { .. } => "workspace-write",
|
||||
};
|
||||
|
||||
let sandbox_cwd = sandbox_policy_cwd.clone();
|
||||
let cwd_clone = cwd.clone();
|
||||
let env_map = env.clone();
|
||||
let command_vec = command.clone();
|
||||
let base_dir = config.codex_home.clone();
|
||||
|
||||
// Preflight audit is invoked elsewhere at the appropriate times.
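            // run_windows_sandbox_capture blocks (hence spawn_blocking), so it runs on the
            // blocking thread pool instead of stalling the async runtime.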
|
||||
let res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(
|
||||
policy_str,
|
||||
&sandbox_cwd,
|
||||
command_vec,
|
||||
&cwd_clone,
|
||||
env_map,
|
||||
None,
|
||||
Some(base_dir.as_path()),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
let capture = match res {
|
||||
Ok(Ok(v)) => v,
|
||||
Ok(Err(err)) => {
|
||||
eprintln!("windows sandbox failed: {err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
Err(join_err) => {
|
||||
eprintln!("windows sandbox join error: {join_err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
if !capture.stdout.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stdout().write_all(&capture.stdout);
|
||||
}
|
||||
if !capture.stderr.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stderr().write_all(&capture.stderr);
|
||||
}
|
||||
|
||||
std::process::exit(capture.exit_code);
|
||||
}
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
anyhow::bail!("Windows sandbox is only available on Windows");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let mut denial_logger = log_denials.then(DenialLogger::new).flatten();
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let _ = log_denials;
|
||||
|
||||
let mut child = match sandbox_type {
|
||||
#[cfg(target_os = "macos")]
|
||||
SandboxType::Seatbelt => {
|
||||
spawn_command_under_seatbelt(
|
||||
command,
|
||||
@@ -115,9 +227,31 @@ async fn run_command_under_sandbox(
|
||||
)
|
||||
.await?
|
||||
}
|
||||
SandboxType::Windows => {
|
||||
unreachable!("Windows sandbox should have been handled above");
|
||||
}
|
||||
};
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
if let Some(denial_logger) = &mut denial_logger {
|
||||
denial_logger.on_child_spawn(&child);
|
||||
}
|
||||
|
||||
let status = child.wait().await?;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
if let Some(denial_logger) = denial_logger {
|
||||
let denials = denial_logger.finish().await;
|
||||
eprintln!("\n=== Sandbox denials ===");
|
||||
if denials.is_empty() {
|
||||
eprintln!("None found.");
|
||||
} else {
|
||||
for seatbelt::SandboxDenial { name, capability } in denials {
|
||||
eprintln!("({name}) {capability}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handle_exit_status(status);
|
||||
}
|
||||
|
||||
|
||||
codex-rs/cli/src/debug_sandbox/pid_tracker.rs (new file, 372 lines)
@@ -0,0 +1,372 @@
|
||||
use std::collections::HashSet;
|
||||
use tokio::task::JoinHandle;
|
||||
use tracing::warn;
|
||||
|
||||
/// Tracks the (recursive) descendants of a process by using `kqueue` to watch for fork events, and
|
||||
/// `proc_listchildpids` to list the children of a process.
|
||||
pub(crate) struct PidTracker {
|
||||
kq: libc::c_int,
|
||||
handle: JoinHandle<HashSet<i32>>,
|
||||
}
|
||||
|
||||
impl PidTracker {
|
||||
pub(crate) fn new(root_pid: i32) -> Option<Self> {
|
||||
if root_pid <= 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let kq = unsafe { libc::kqueue() };
|
||||
let handle = tokio::task::spawn_blocking(move || track_descendants(kq, root_pid));
|
||||
|
||||
Some(Self { kq, handle })
|
||||
}
|
||||
|
||||
pub(crate) async fn stop(self) -> HashSet<i32> {
|
||||
trigger_stop_event(self.kq);
|
||||
self.handle.await.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
unsafe extern "C" {
|
||||
fn proc_listchildpids(
|
||||
ppid: libc::c_int,
|
||||
buffer: *mut libc::c_void,
|
||||
buffersize: libc::c_int,
|
||||
) -> libc::c_int;
|
||||
}
|
||||
|
||||
/// Wrap proc_listchildpids.
|
||||
fn list_child_pids(parent: i32) -> Vec<i32> {
|
||||
unsafe {
|
||||
let mut capacity: usize = 16;
|
||||
loop {
|
||||
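            // proc_listchildpids writes at most `buffersize` entries; if it filled the buffer
            // completely, children may have been truncated, so grow the buffer and retry.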
let mut buf: Vec<i32> = vec![0; capacity];
|
||||
let count = proc_listchildpids(
|
||||
parent as libc::c_int,
|
||||
buf.as_mut_ptr() as *mut libc::c_void,
|
||||
(buf.len() * std::mem::size_of::<i32>()) as libc::c_int,
|
||||
);
|
||||
if count <= 0 {
|
||||
return Vec::new();
|
||||
}
|
||||
let returned = count as usize;
|
||||
if returned < capacity {
|
||||
buf.truncate(returned);
|
||||
return buf;
|
||||
}
|
||||
capacity = capacity.saturating_mul(2).max(returned + 16);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pid_is_alive(pid: i32) -> bool {
|
||||
if pid <= 0 {
|
||||
return false;
|
||||
}
|
||||
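    // Signal 0 performs error checking only. EPERM means the process exists but we lack
    // permission to signal it, so treat that case as "alive".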
let res = unsafe { libc::kill(pid as libc::pid_t, 0) };
|
||||
if res == 0 {
|
||||
true
|
||||
} else {
|
||||
matches!(
|
||||
std::io::Error::last_os_error().raw_os_error(),
|
||||
Some(libc::EPERM)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
enum WatchPidError {
|
||||
ProcessGone,
|
||||
Other(std::io::Error),
|
||||
}
|
||||
|
||||
/// Add `pid` to the watch list in `kq`.
|
||||
fn watch_pid(kq: libc::c_int, pid: i32) -> Result<(), WatchPidError> {
|
||||
if pid <= 0 {
|
||||
return Err(WatchPidError::ProcessGone);
|
||||
}
|
||||
|
||||
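    // EV_CLEAR resets the event state after each retrieval; NOTE_FORK lets us discover new
    // children and NOTE_EXIT lets us retire pids that have exited.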
let kev = libc::kevent {
|
||||
ident: pid as libc::uintptr_t,
|
||||
filter: libc::EVFILT_PROC,
|
||||
flags: libc::EV_ADD | libc::EV_CLEAR,
|
||||
fflags: libc::NOTE_FORK | libc::NOTE_EXEC | libc::NOTE_EXIT,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let res = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
if res < 0 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
Err(WatchPidError::ProcessGone)
|
||||
} else {
|
||||
Err(WatchPidError::Other(err))
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn watch_children(
|
||||
kq: libc::c_int,
|
||||
parent: i32,
|
||||
seen: &mut HashSet<i32>,
|
||||
active: &mut HashSet<i32>,
|
||||
) {
|
||||
for child_pid in list_child_pids(parent) {
|
||||
add_pid_watch(kq, child_pid, seen, active);
|
||||
}
|
||||
}
|
||||
|
||||
/// Watch `pid` and its children, updating `seen` and `active` sets.
|
||||
fn add_pid_watch(kq: libc::c_int, pid: i32, seen: &mut HashSet<i32>, active: &mut HashSet<i32>) {
|
||||
if pid <= 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let newly_seen = seen.insert(pid);
|
||||
let mut should_recurse = newly_seen;
|
||||
|
||||
if active.insert(pid) {
|
||||
match watch_pid(kq, pid) {
|
||||
Ok(()) => {
|
||||
should_recurse = true;
|
||||
}
|
||||
Err(WatchPidError::ProcessGone) => {
|
||||
active.remove(&pid);
|
||||
return;
|
||||
}
|
||||
Err(WatchPidError::Other(err)) => {
|
||||
warn!("failed to watch pid {pid}: {err}");
|
||||
active.remove(&pid);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if should_recurse {
|
||||
watch_children(kq, pid, seen, active);
|
||||
}
|
||||
}
|
||||
const STOP_IDENT: libc::uintptr_t = 1;
|
||||
|
||||
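/// Register a user-triggered event (EVFILT_USER) so that `trigger_stop_event` can wake the
/// blocking `kevent` loop in `track_descendants` without signaling any watched process.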
fn register_stop_event(kq: libc::c_int) -> bool {
|
||||
let kev = libc::kevent {
|
||||
ident: STOP_IDENT,
|
||||
filter: libc::EVFILT_USER,
|
||||
flags: libc::EV_ADD | libc::EV_CLEAR,
|
||||
fflags: 0,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let res = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
res >= 0
|
||||
}
|
||||
|
||||
fn trigger_stop_event(kq: libc::c_int) {
|
||||
if kq < 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let kev = libc::kevent {
|
||||
ident: STOP_IDENT,
|
||||
filter: libc::EVFILT_USER,
|
||||
flags: 0,
|
||||
fflags: libc::NOTE_TRIGGER,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let _ = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
}
|
||||
|
||||
/// Put all of the above together to track all the descendants of `root_pid`.
|
||||
fn track_descendants(kq: libc::c_int, root_pid: i32) -> HashSet<i32> {
|
||||
if kq < 0 {
|
||||
let mut seen = HashSet::new();
|
||||
seen.insert(root_pid);
|
||||
return seen;
|
||||
}
|
||||
|
||||
if !register_stop_event(kq) {
|
||||
let mut seen = HashSet::new();
|
||||
seen.insert(root_pid);
|
||||
let _ = unsafe { libc::close(kq) };
|
||||
return seen;
|
||||
}
|
||||
|
||||
let mut seen: HashSet<i32> = HashSet::new();
|
||||
let mut active: HashSet<i32> = HashSet::new();
|
||||
|
||||
add_pid_watch(kq, root_pid, &mut seen, &mut active);
|
||||
|
||||
const EVENTS_CAP: usize = 32;
|
||||
let mut events: [libc::kevent; EVENTS_CAP] =
|
||||
unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
|
||||
|
||||
let mut stop_requested = false;
|
||||
loop {
|
||||
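        // If every watch has been dropped (e.g. a pid disappeared before it could be
        // registered) but the root process is still alive, re-arm the watch on the root.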
if active.is_empty() {
|
||||
if !pid_is_alive(root_pid) {
|
||||
break;
|
||||
}
|
||||
add_pid_watch(kq, root_pid, &mut seen, &mut active);
|
||||
if active.is_empty() {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let nev = unsafe {
|
||||
libc::kevent(
|
||||
kq,
|
||||
std::ptr::null::<libc::kevent>(),
|
||||
0,
|
||||
events.as_mut_ptr(),
|
||||
EVENTS_CAP as libc::c_int,
|
||||
std::ptr::null(),
|
||||
)
|
||||
};
|
||||
|
||||
if nev < 0 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.kind() == std::io::ErrorKind::Interrupted {
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if nev == 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
for ev in events.iter().take(nev as usize) {
|
||||
let pid = ev.ident as i32;
|
||||
|
||||
if ev.filter == libc::EVFILT_USER && ev.ident == STOP_IDENT {
|
||||
stop_requested = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (ev.flags & libc::EV_ERROR) != 0 {
|
||||
if ev.data == libc::ESRCH as isize {
|
||||
active.remove(&pid);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ev.fflags & libc::NOTE_FORK) != 0 {
|
||||
watch_children(kq, pid, &mut seen, &mut active);
|
||||
}
|
||||
|
||||
if (ev.fflags & libc::NOTE_EXIT) != 0 {
|
||||
active.remove(&pid);
|
||||
}
|
||||
}
|
||||
|
||||
if stop_requested {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let _ = unsafe { libc::close(kq) };
|
||||
|
||||
seen
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
|
||||
#[test]
|
||||
fn pid_is_alive_detects_current_process() {
|
||||
let pid = std::process::id() as i32;
|
||||
assert!(pid_is_alive(pid));
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[test]
|
||||
fn list_child_pids_includes_spawned_child() {
|
||||
let mut child = Command::new("/bin/sleep")
|
||||
.arg("5")
|
||||
.stdin(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn child process");
|
||||
|
||||
let child_pid = child.id() as i32;
|
||||
let parent_pid = std::process::id() as i32;
|
||||
|
||||
let mut found = false;
|
||||
for _ in 0..100 {
|
||||
if list_child_pids(parent_pid).contains(&child_pid) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
std::thread::sleep(Duration::from_millis(10));
|
||||
}
|
||||
|
||||
let _ = child.kill();
|
||||
let _ = child.wait();
|
||||
|
||||
assert!(found, "expected to find child pid {child_pid} in list");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[tokio::test]
|
||||
async fn pid_tracker_collects_spawned_children() {
|
||||
let tracker = PidTracker::new(std::process::id() as i32).expect("failed to create tracker");
|
||||
|
||||
let mut child = Command::new("/bin/sleep")
|
||||
.arg("0.1")
|
||||
.stdin(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn child process");
|
||||
|
||||
let child_pid = child.id() as i32;
|
||||
let parent_pid = std::process::id() as i32;
|
||||
|
||||
let _ = child.wait();
|
||||
|
||||
let seen = tracker.stop().await;
|
||||
|
||||
assert!(
|
||||
seen.contains(&parent_pid),
|
||||
"expected tracker to include parent pid {parent_pid}"
|
||||
);
|
||||
assert!(
|
||||
seen.contains(&child_pid),
|
||||
"expected tracker to include child pid {child_pid}"
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[tokio::test]
|
||||
async fn pid_tracker_collects_bash_subshell_descendants() {
|
||||
let tracker = PidTracker::new(std::process::id() as i32).expect("failed to create tracker");
|
||||
|
||||
let child = Command::new("/bin/bash")
|
||||
.arg("-c")
|
||||
.arg("(sleep 0.1 & echo $!; wait)")
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn bash");
|
||||
|
||||
let output = child.wait_with_output().unwrap().stdout;
|
||||
let subshell_pid = String::from_utf8_lossy(&output)
|
||||
.trim()
|
||||
.parse::<i32>()
|
||||
.expect("failed to parse subshell pid");
|
||||
|
||||
let seen = tracker.stop().await;
|
||||
|
||||
assert!(
|
||||
seen.contains(&subshell_pid),
|
||||
"expected tracker to include subshell pid {subshell_pid}"
|
||||
);
|
||||
}
|
||||
}
|
||||
codex-rs/cli/src/debug_sandbox/seatbelt.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
use std::collections::HashSet;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::process::Child;
|
||||
use tokio::task::JoinHandle;
|
||||
|
||||
use super::pid_tracker::PidTracker;
|
||||
|
||||
pub struct SandboxDenial {
|
||||
pub name: String,
|
||||
pub capability: String,
|
||||
}
|
||||
|
||||
pub struct DenialLogger {
|
||||
log_stream: Child,
|
||||
pid_tracker: Option<PidTracker>,
|
||||
log_reader: Option<JoinHandle<Vec<u8>>>,
|
||||
}
|
||||
|
||||
impl DenialLogger {
|
||||
pub(crate) fn new() -> Option<Self> {
|
||||
let mut log_stream = start_log_stream()?;
|
||||
let stdout = log_stream.stdout.take()?;
|
||||
let log_reader = tokio::spawn(async move {
|
||||
let mut reader = tokio::io::BufReader::new(stdout);
|
||||
let mut logs = Vec::new();
|
||||
let mut chunk = Vec::new();
|
||||
loop {
|
||||
match reader.read_until(b'\n', &mut chunk).await {
|
||||
Ok(0) | Err(_) => break,
|
||||
Ok(_) => {
|
||||
logs.extend_from_slice(&chunk);
|
||||
chunk.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
logs
|
||||
});
|
||||
|
||||
Some(Self {
|
||||
log_stream,
|
||||
pid_tracker: None,
|
||||
log_reader: Some(log_reader),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn on_child_spawn(&mut self, child: &Child) {
|
||||
if let Some(root_pid) = child.id() {
|
||||
self.pid_tracker = PidTracker::new(root_pid as i32);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn finish(mut self) -> Vec<SandboxDenial> {
|
||||
let pid_set = match self.pid_tracker {
|
||||
Some(tracker) => tracker.stop().await,
|
||||
None => Default::default(),
|
||||
};
|
||||
|
||||
if pid_set.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let _ = self.log_stream.kill().await;
|
||||
let _ = self.log_stream.wait().await;
|
||||
|
||||
let logs_bytes = match self.log_reader.take() {
|
||||
Some(handle) => handle.await.unwrap_or_default(),
|
||||
None => Vec::new(),
|
||||
};
|
||||
let logs = String::from_utf8_lossy(&logs_bytes);
|
||||
|
||||
let mut seen: HashSet<(String, String)> = HashSet::new();
|
||||
let mut denials: Vec<SandboxDenial> = Vec::new();
|
||||
for line in logs.lines() {
|
||||
if let Ok(json) = serde_json::from_str::<serde_json::Value>(line)
|
||||
&& let Some(msg) = json.get("eventMessage").and_then(|v| v.as_str())
|
||||
&& let Some((pid, name, capability)) = parse_message(msg)
|
||||
&& pid_set.contains(&pid)
|
||||
&& seen.insert((name.clone(), capability.clone()))
|
||||
{
|
||||
denials.push(SandboxDenial { name, capability });
|
||||
}
|
||||
}
|
||||
denials
|
||||
}
|
||||
}
|
||||
|
||||
fn start_log_stream() -> Option<Child> {
|
||||
use std::process::Stdio;
|
||||
|
||||
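    // Restrict `log stream` to entries that look like sandbox denial reports: messages from
    // the kernel sandbox (processID 0 with a sender image under a /Sandbox path) or the
    // com.apple.sandbox.reporting subsystem.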
const PREDICATE: &str = r#"(((processID == 0) AND (senderImagePath CONTAINS "/Sandbox")) OR (subsystem == "com.apple.sandbox.reporting"))"#;
|
||||
|
||||
tokio::process::Command::new("log")
|
||||
.args(["stream", "--style", "ndjson", "--predicate", PREDICATE])
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::null())
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn parse_message(msg: &str) -> Option<(i32, String, String)> {
|
||||
// Example message:
|
||||
// Sandbox: processname(1234) deny(1) capability-name args...
|
||||
static RE: std::sync::OnceLock<regex_lite::Regex> = std::sync::OnceLock::new();
|
||||
let re = RE.get_or_init(|| {
|
||||
#[expect(clippy::unwrap_used)]
|
||||
regex_lite::Regex::new(r"^Sandbox:\s*(.+?)\((\d+)\)\s+deny\(.*?\)\s*(.+)$").unwrap()
|
||||
});
|
||||
|
||||
let (_, [name, pid_str, capability]) = re.captures(msg)?.extract();
|
||||
let pid = pid_str.trim().parse::<i32>().ok()?;
|
||||
Some((pid, name.to_string(), capability.to_string()))
|
||||
}
|
||||
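For reference, a minimal sketch of what `parse_message` extracts from a typical denial line; the sample message and values below are illustrative, not taken from a real log:

    // Illustrative input; the process name, pid, and capability are made up.
    let msg = "Sandbox: my-tool(4242) deny(1) file-read-data /private/etc/hosts";
    if let Some((pid, name, capability)) = parse_message(msg) {
        assert_eq!(pid, 4242);
        assert_eq!(name, "my-tool");
        assert_eq!(capability, "file-read-data /private/etc/hosts");
        // DenialLogger::finish() keeps this entry only if 4242 is in the tracked pid set
        // and the (name, capability) pair has not been seen before.
    }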
@@ -11,6 +11,10 @@ pub struct SeatbeltCommand {
    #[arg(long = "full-auto", default_value_t = false)]
    pub full_auto: bool,

    /// While the command runs, capture macOS sandbox denials via `log stream` and print them after exit
    #[arg(long = "log-denials", default_value_t = false)]
    pub log_denials: bool,

    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,

@@ -32,3 +36,17 @@ pub struct LandlockCommand {
    #[arg(trailing_var_arg = true)]
    pub command: Vec<String>,
}

#[derive(Debug, Parser)]
pub struct WindowsCommand {
    /// Convenience alias for low-friction sandboxed automatic execution (network-disabled sandbox that can write to cwd and TMPDIR)
    #[arg(long = "full-auto", default_value_t = false)]
    pub full_auto: bool,

    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,

    /// Full command args to run under Windows restricted token sandbox.
    #[arg(trailing_var_arg = true)]
    pub command: Vec<String>,
}
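A hedged sketch of how `--log-denials` is meant to tie into the DenialLogger above; the wrapper function below is illustrative, only the DenialLogger methods themselves come from the diff:

    // Illustrative wiring only; the real runner lives in the seatbelt debug path.
    async fn run_with_denial_logging(mut child: tokio::process::Child) -> Vec<SandboxDenial> {
        let mut logger = match DenialLogger::new() {
            Some(logger) => logger,
            None => return Vec::new(), // `log stream` could not be started; run without capture
        };
        logger.on_child_spawn(&child);
        let _ = child.wait().await;
        logger.finish().await
    }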
@@ -1,6 +1,7 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::auth::CLIENT_ID;
|
||||
use codex_core::auth::login_with_api_key;
|
||||
use codex_core::auth::logout;
|
||||
@@ -17,11 +18,13 @@ use std::path::PathBuf;
|
||||
pub async fn login_with_chatgpt(
|
||||
codex_home: PathBuf,
|
||||
forced_chatgpt_workspace_id: Option<String>,
|
||||
cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<()> {
|
||||
let opts = ServerOptions::new(
|
||||
codex_home,
|
||||
CLIENT_ID.to_string(),
|
||||
forced_chatgpt_workspace_id,
|
||||
cli_auth_credentials_store_mode,
|
||||
);
|
||||
let server = run_login_server(opts)?;
|
||||
|
||||
@@ -43,7 +46,13 @@ pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) ->
|
||||
|
||||
let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
|
||||
|
||||
match login_with_chatgpt(config.codex_home, forced_chatgpt_workspace_id).await {
|
||||
match login_with_chatgpt(
|
||||
config.codex_home,
|
||||
forced_chatgpt_workspace_id,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
eprintln!("Successfully logged in");
|
||||
std::process::exit(0);
|
||||
@@ -66,7 +75,11 @@ pub async fn run_login_with_api_key(
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
match login_with_api_key(&config.codex_home, &api_key) {
|
||||
match login_with_api_key(
|
||||
&config.codex_home,
|
||||
&api_key,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
) {
|
||||
Ok(_) => {
|
||||
eprintln!("Successfully logged in");
|
||||
std::process::exit(0);
|
||||
@@ -121,6 +134,7 @@ pub async fn run_login_with_device_code(
|
||||
config.codex_home,
|
||||
client_id.unwrap_or(CLIENT_ID.to_string()),
|
||||
forced_chatgpt_workspace_id,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
if let Some(iss) = issuer_base_url {
|
||||
opts.issuer = iss;
|
||||
@@ -140,7 +154,7 @@ pub async fn run_login_with_device_code(
|
||||
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match CodexAuth::from_codex_home(&config.codex_home) {
|
||||
match CodexAuth::from_auth_storage(&config.codex_home, config.cli_auth_credentials_store_mode) {
|
||||
Ok(Some(auth)) => match auth.mode {
|
||||
AuthMode::ApiKey => match auth.get_token().await {
|
||||
Ok(api_key) => {
|
||||
@@ -171,7 +185,7 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
pub async fn run_logout(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match logout(&config.codex_home) {
|
||||
match logout(&config.codex_home, config.cli_auth_credentials_store_mode) {
|
||||
Ok(true) => {
|
||||
eprintln!("Successfully logged out");
|
||||
std::process::exit(0);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use clap::Args;
|
||||
use clap::CommandFactory;
|
||||
use clap::Parser;
|
||||
use clap_complete::Shell;
|
||||
@@ -7,6 +8,7 @@ use codex_chatgpt::apply_command::ApplyCommand;
|
||||
use codex_chatgpt::apply_command::run_apply_command;
|
||||
use codex_cli::LandlockCommand;
|
||||
use codex_cli::SeatbeltCommand;
|
||||
use codex_cli::WindowsCommand;
|
||||
use codex_cli::login::read_api_key_from_stdin;
|
||||
use codex_cli::login::run_login_status;
|
||||
use codex_cli::login::run_login_with_api_key;
|
||||
@@ -19,16 +21,20 @@ use codex_exec::Cli as ExecCli;
|
||||
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
|
||||
use codex_tui::AppExitInfo;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use codex_tui::UpdateAction;
|
||||
use codex_tui::update_action::UpdateAction;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::path::PathBuf;
|
||||
use supports_color::Stream;
|
||||
|
||||
mod mcp_cmd;
|
||||
#[cfg(not(windows))]
|
||||
mod wsl_paths;
|
||||
|
||||
use crate::mcp_cmd::McpCli;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::features::is_known_feature_key;
|
||||
|
||||
/// Codex CLI
|
||||
///
|
||||
@@ -77,8 +83,8 @@ enum Subcommand {
|
||||
/// [experimental] Run the Codex MCP server (stdio transport).
|
||||
McpServer,
|
||||
|
||||
/// [experimental] Run the app server.
|
||||
AppServer,
|
||||
/// [experimental] Run the app server or related tooling.
|
||||
AppServer(AppServerCommand),
|
||||
|
||||
/// Generate shell completion scripts.
|
||||
Completion(CompletionCommand),
|
||||
@@ -94,9 +100,6 @@ enum Subcommand {
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Internal: generate TypeScript protocol bindings.
|
||||
#[clap(hide = true)]
|
||||
GenerateTs(GenerateTsCommand),
|
||||
/// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
|
||||
#[clap(name = "cloud", alias = "cloud-tasks")]
|
||||
Cloud(CloudTasksCli),
|
||||
@@ -150,6 +153,9 @@ enum SandboxCommand {
|
||||
/// Run a command under Landlock+seccomp (Linux only).
|
||||
#[clap(visible_alias = "landlock")]
|
||||
Linux(LandlockCommand),
|
||||
|
||||
/// Run a command under Windows restricted token (Windows only).
|
||||
Windows(WindowsCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -200,6 +206,22 @@ struct LogoutCommand {
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct AppServerCommand {
|
||||
/// Omit to run the app server; specify a subcommand for tooling.
|
||||
#[command(subcommand)]
|
||||
subcommand: Option<AppServerSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum AppServerSubcommand {
|
||||
/// [experimental] Generate TypeScript bindings for the app server protocol.
|
||||
GenerateTs(GenerateTsCommand),
|
||||
|
||||
/// [experimental] Generate JSON Schema for the app server protocol.
|
||||
GenerateJsonSchema(GenerateJsonSchemaCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateTsCommand {
|
||||
/// Output directory where .ts files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
@@ -210,6 +232,13 @@ struct GenerateTsCommand {
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateJsonSchemaCommand {
|
||||
/// Output directory where the schema bundle will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct StdioToUdsCommand {
|
||||
/// Path to the Unix domain socket to connect to.
|
||||
@@ -262,10 +291,30 @@ fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> {
|
||||
/// Run the update action and print the result.
|
||||
fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
|
||||
println!();
|
||||
let (cmd, args) = action.command_args();
|
||||
let cmd_str = action.command_str();
|
||||
println!("Updating Codex via `{cmd_str}`...");
|
||||
let status = std::process::Command::new(cmd).args(args).status()?;
|
||||
|
||||
let status = {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// On Windows, run via cmd.exe so .CMD/.BAT are correctly resolved (PATHEXT semantics).
|
||||
std::process::Command::new("cmd")
|
||||
.args(["/C", &cmd_str])
|
||||
.status()?
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
let (cmd, args) = action.command_args();
|
||||
let command_path = crate::wsl_paths::normalize_for_wsl(cmd);
|
||||
let normalized_args: Vec<String> = args
|
||||
.iter()
|
||||
.map(crate::wsl_paths::normalize_for_wsl)
|
||||
.collect();
|
||||
std::process::Command::new(&command_path)
|
||||
.args(&normalized_args)
|
||||
.status()?
|
||||
}
|
||||
};
|
||||
if !status.success() {
|
||||
anyhow::bail!("`{cmd_str}` failed with status {status}");
|
||||
}
|
||||
@@ -286,15 +335,25 @@ struct FeatureToggles {
|
||||
}
|
||||
|
||||
impl FeatureToggles {
|
||||
fn to_overrides(&self) -> Vec<String> {
|
||||
fn to_overrides(&self) -> anyhow::Result<Vec<String>> {
|
||||
let mut v = Vec::new();
|
||||
for k in &self.enable {
|
||||
v.push(format!("features.{k}=true"));
|
||||
for feature in &self.enable {
|
||||
Self::validate_feature(feature)?;
|
||||
v.push(format!("features.{feature}=true"));
|
||||
}
|
||||
for k in &self.disable {
|
||||
v.push(format!("features.{k}=false"));
|
||||
for feature in &self.disable {
|
||||
Self::validate_feature(feature)?;
|
||||
v.push(format!("features.{feature}=false"));
|
||||
}
|
||||
Ok(v)
|
||||
}
|
||||
|
||||
fn validate_feature(feature: &str) -> anyhow::Result<()> {
|
||||
if is_known_feature_key(feature) {
|
||||
Ok(())
|
||||
} else {
|
||||
anyhow::bail!("Unknown feature flag: {feature}")
|
||||
}
|
||||
v
|
||||
}
|
||||
}
|
||||
|
||||
@@ -345,9 +404,8 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
} = MultitoolCli::parse();
|
||||
|
||||
// Fold --enable/--disable into config overrides so they flow to all subcommands.
|
||||
root_config_overrides
|
||||
.raw_overrides
|
||||
.extend(feature_toggles.to_overrides());
|
||||
let toggle_overrides = feature_toggles.to_overrides()?;
|
||||
root_config_overrides.raw_overrides.extend(toggle_overrides);
|
||||
|
||||
match subcommand {
|
||||
None => {
|
||||
@@ -373,9 +431,20 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::AppServer(app_server_cli)) => match app_server_cli.subcommand {
|
||||
None => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateTs(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_ts(
|
||||
&gen_cli.out_dir,
|
||||
gen_cli.prettier.as_deref(),
|
||||
)?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateJsonSchema(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_json(&gen_cli.out_dir)?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
@@ -462,6 +531,17 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
SandboxCommand::Windows(mut windows_cli) => {
|
||||
prepend_config_flags(
|
||||
&mut windows_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_windows(
|
||||
windows_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(
|
||||
@@ -479,21 +559,24 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
tokio::task::spawn_blocking(move || codex_stdio_to_uds::run(socket_path.as_path()))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
|
||||
FeaturesSubcommand::List => {
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
let mut cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(|e| anyhow::anyhow!(e))?;
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
// Honor `--search` via the new feature toggle.
|
||||
if interactive.web_search {
|
||||
cli_kv_overrides.push((
|
||||
"features.web_search_request".to_string(),
|
||||
toml::Value::Boolean(true),
|
||||
));
|
||||
}
|
||||
|
||||
// Thread through relevant top-level flags (at minimum, `--profile`).
|
||||
// Also honor `--search` since it maps to a feature toggle.
|
||||
let overrides = ConfigOverrides {
|
||||
config_profile: interactive.config_profile.clone(),
|
||||
tools_web_search_request: interactive.web_search.then_some(true),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
@@ -605,6 +688,7 @@ mod tests {
|
||||
use assert_matches::assert_matches;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use codex_protocol::ConversationId;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn finalize_from_args(args: &[&str]) -> TuiCli {
|
||||
let cli = MultitoolCli::try_parse_from(args).expect("parse");
|
||||
@@ -781,4 +865,32 @@ mod tests {
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn feature_toggles_known_features_generate_overrides() {
|
||||
let toggles = FeatureToggles {
|
||||
enable: vec!["web_search_request".to_string()],
|
||||
disable: vec!["unified_exec".to_string()],
|
||||
};
|
||||
let overrides = toggles.to_overrides().expect("valid features");
|
||||
assert_eq!(
|
||||
overrides,
|
||||
vec![
|
||||
"features.web_search_request=true".to_string(),
|
||||
"features.unified_exec=false".to_string(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn feature_toggles_unknown_feature_errors() {
|
||||
let toggles = FeatureToggles {
|
||||
enable: vec!["does_not_exist".to_string()],
|
||||
disable: Vec::new(),
|
||||
};
|
||||
let err = toggles
|
||||
.to_overrides()
|
||||
.expect_err("feature should be rejected");
|
||||
assert_eq!(err.to_string(), "Unknown feature flag: does_not_exist");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,11 +9,11 @@ use codex_common::CliConfigOverrides;
|
||||
use codex_common::format_env_display::format_env_display;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::edit::ConfigEditsBuilder;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerConfig;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::mcp::auth::compute_auth_statuses;
|
||||
use codex_core::protocol::McpAuthStatus;
|
||||
@@ -150,6 +150,10 @@ pub struct RemoveArgs {
|
||||
pub struct LoginArgs {
|
||||
/// Name of the MCP server to authenticate with oauth.
|
||||
pub name: String,
|
||||
|
||||
/// Comma-separated list of OAuth scopes to request.
|
||||
#[arg(long, value_delimiter = ',', value_name = "SCOPE,SCOPE")]
|
||||
pub scopes: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
@@ -192,7 +196,9 @@ impl McpCli {
|
||||
|
||||
async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
|
||||
// Validate any provided overrides even though they are not currently applied.
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -253,11 +259,16 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
enabled: true,
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
enabled_tools: None,
|
||||
disabled_tools: None,
|
||||
};
|
||||
|
||||
servers.insert(name.clone(), new_entry);
|
||||
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply()
|
||||
.await
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
|
||||
println!("Added global MCP server '{name}'.");
|
||||
@@ -268,25 +279,42 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
http_headers,
|
||||
env_http_headers,
|
||||
} = transport
|
||||
&& matches!(supports_oauth_login(&url).await, Ok(true))
|
||||
{
|
||||
println!("Detected OAuth support. Starting OAuth flow…");
|
||||
perform_oauth_login(
|
||||
&name,
|
||||
&url,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
http_headers.clone(),
|
||||
env_http_headers.clone(),
|
||||
)
|
||||
.await?;
|
||||
println!("Successfully logged in.");
|
||||
match supports_oauth_login(&url).await {
|
||||
Ok(true) => {
|
||||
if !config.features.enabled(Feature::RmcpClient) {
|
||||
println!(
|
||||
"MCP server supports login. Add `experimental_use_rmcp_client = true` \
|
||||
to your config.toml and run `codex mcp login {name}` to login."
|
||||
);
|
||||
} else {
|
||||
println!("Detected OAuth support. Starting OAuth flow…");
|
||||
perform_oauth_login(
|
||||
&name,
|
||||
&url,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
http_headers.clone(),
|
||||
env_http_headers.clone(),
|
||||
&Vec::new(),
|
||||
)
|
||||
.await?;
|
||||
println!("Successfully logged in.");
|
||||
}
|
||||
}
|
||||
Ok(false) => {}
|
||||
Err(_) => println!(
|
||||
"MCP server may or may not require login. Run `codex mcp login {name}` to login."
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
let RemoveArgs { name } = remove_args;
|
||||
|
||||
@@ -300,7 +328,10 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
|
||||
let removed = servers.remove(&name).is_some();
|
||||
|
||||
if removed {
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply()
|
||||
.await
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
}
|
||||
|
||||
@@ -314,18 +345,20 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
|
||||
}
|
||||
|
||||
async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
if !config.features.enabled(Feature::RmcpClient) {
|
||||
bail!(
|
||||
"OAuth login is only supported when experimental_use_rmcp_client is true in config.toml."
|
||||
"OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
);
|
||||
}
|
||||
|
||||
let LoginArgs { name } = login_args;
|
||||
let LoginArgs { name, scopes } = login_args;
|
||||
|
||||
let Some(server) = config.mcp_servers.get(&name) else {
|
||||
bail!("No MCP server named '{name}' found.");
|
||||
@@ -347,6 +380,7 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
http_headers,
|
||||
env_http_headers,
|
||||
&scopes,
|
||||
)
|
||||
.await?;
|
||||
println!("Successfully logged in to MCP server '{name}'.");
|
||||
@@ -354,7 +388,9 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
|
||||
}
|
||||
|
||||
async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -381,7 +417,9 @@ async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutAr
|
||||
}
|
||||
|
||||
async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -400,7 +438,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
.map(|(name, cfg)| {
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.map(|entry| entry.auth_status)
|
||||
.unwrap_or(McpAuthStatus::Unsupported);
|
||||
let transport = match &cfg.transport {
|
||||
McpServerTransportConfig::Stdio {
|
||||
@@ -487,7 +525,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
};
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.map(|entry| entry.auth_status)
|
||||
.unwrap_or(McpAuthStatus::Unsupported)
|
||||
.to_string();
|
||||
stdio_rows.push([
|
||||
@@ -512,13 +550,15 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
};
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.map(|entry| entry.auth_status)
|
||||
.unwrap_or(McpAuthStatus::Unsupported)
|
||||
.to_string();
|
||||
let bearer_token_display =
|
||||
bearer_token_env_var.as_deref().unwrap_or("-").to_string();
|
||||
http_rows.push([
|
||||
name.clone(),
|
||||
url.clone(),
|
||||
bearer_token_env_var.clone().unwrap_or("-".to_string()),
|
||||
bearer_token_display,
|
||||
status,
|
||||
auth_status,
|
||||
]);
|
||||
@@ -634,7 +674,9 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
}
|
||||
|
||||
async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -676,6 +718,8 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
|
||||
"name": get_args.name,
|
||||
"enabled": server.enabled,
|
||||
"transport": transport,
|
||||
"enabled_tools": server.enabled_tools.clone(),
|
||||
"disabled_tools": server.disabled_tools.clone(),
|
||||
"startup_timeout_sec": server
|
||||
.startup_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
@@ -687,8 +731,28 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !server.enabled {
|
||||
println!("{} (disabled)", get_args.name);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("{}", get_args.name);
|
||||
println!(" enabled: {}", server.enabled);
|
||||
let format_tool_list = |tools: &Option<Vec<String>>| -> String {
|
||||
match tools {
|
||||
Some(list) if list.is_empty() => "[]".to_string(),
|
||||
Some(list) => list.join(", "),
|
||||
None => "-".to_string(),
|
||||
}
|
||||
};
|
||||
if server.enabled_tools.is_some() {
|
||||
let enabled_tools_display = format_tool_list(&server.enabled_tools);
|
||||
println!(" enabled_tools: {enabled_tools_display}");
|
||||
}
|
||||
if server.disabled_tools.is_some() {
|
||||
let disabled_tools_display = format_tool_list(&server.disabled_tools);
|
||||
println!(" disabled_tools: {disabled_tools_display}");
|
||||
}
|
||||
match &server.transport {
|
||||
McpServerTransportConfig::Stdio {
|
||||
command,
|
||||
@@ -722,15 +786,15 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
|
||||
} => {
|
||||
println!(" transport: streamable_http");
|
||||
println!(" url: {url}");
|
||||
let env_var = bearer_token_env_var.as_deref().unwrap_or("-");
|
||||
println!(" bearer_token_env_var: {env_var}");
|
||||
let bearer_token_display = bearer_token_env_var.as_deref().unwrap_or("-");
|
||||
println!(" bearer_token_env_var: {bearer_token_display}");
|
||||
let headers_display = match http_headers {
|
||||
Some(map) if !map.is_empty() => {
|
||||
let mut pairs: Vec<_> = map.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
pairs
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.map(|(k, _)| format!("{k}=*****"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}
|
||||
@@ -743,7 +807,7 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
pairs
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.map(|(k, var)| format!("{k}={var}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}
|
||||
|
||||
76  codex-rs/cli/src/wsl_paths.rs  Normal file
@@ -0,0 +1,76 @@
use std::ffi::OsStr;

/// WSL-specific path helpers used by the updater logic.
///
/// See https://github.com/openai/codex/issues/6086.
pub fn is_wsl() -> bool {
    #[cfg(target_os = "linux")]
    {
        if std::env::var_os("WSL_DISTRO_NAME").is_some() {
            return true;
        }
        match std::fs::read_to_string("/proc/version") {
            Ok(version) => version.to_lowercase().contains("microsoft"),
            Err(_) => false,
        }
    }
    #[cfg(not(target_os = "linux"))]
    {
        false
    }
}

/// Convert a Windows absolute path (`C:\foo\bar` or `C:/foo/bar`) to a WSL mount path (`/mnt/c/foo/bar`).
/// Returns `None` if the input does not look like a Windows drive path.
pub fn win_path_to_wsl(path: &str) -> Option<String> {
    let bytes = path.as_bytes();
    if bytes.len() < 3
        || bytes[1] != b':'
        || !(bytes[2] == b'\\' || bytes[2] == b'/')
        || !bytes[0].is_ascii_alphabetic()
    {
        return None;
    }
    let drive = (bytes[0] as char).to_ascii_lowercase();
    let tail = path[3..].replace('\\', "/");
    if tail.is_empty() {
        return Some(format!("/mnt/{drive}"));
    }
    Some(format!("/mnt/{drive}/{tail}"))
}

/// If under WSL and given a Windows-style path, return the equivalent `/mnt/<drive>/…` path.
/// Otherwise returns the input unchanged.
pub fn normalize_for_wsl<P: AsRef<OsStr>>(path: P) -> String {
    let value = path.as_ref().to_string_lossy().to_string();
    if !is_wsl() {
        return value;
    }
    if let Some(mapped) = win_path_to_wsl(&value) {
        return mapped;
    }
    value
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn win_to_wsl_basic() {
        assert_eq!(
            win_path_to_wsl(r"C:\Temp\codex.zip").as_deref(),
            Some("/mnt/c/Temp/codex.zip")
        );
        assert_eq!(
            win_path_to_wsl("D:/Work/codex.tgz").as_deref(),
            Some("/mnt/d/Work/codex.tgz")
        );
        assert!(win_path_to_wsl("/home/user/codex").is_none());
    }

    #[test]
    fn normalize_is_noop_on_unix_paths() {
        assert_eq!(normalize_for_wsl("/home/u/x"), "/home/u/x");
    }
}
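A small usage sketch of how the updater is expected to call these helpers; the Windows path below is illustrative:

    // Outside WSL this is a no-op; under WSL a Windows-style path is remapped onto /mnt/<drive>/.
    let installer = normalize_for_wsl(r"C:\Temp\codex.zip");
    // Under WSL: "/mnt/c/Temp/codex.zip"; elsewhere the input string is returned unchanged.
    assert!(installer == "/mnt/c/Temp/codex.zip" || installer == r"C:\Temp\codex.zip");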
@@ -2,7 +2,7 @@ use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::config::edit::ConfigEditsBuilder;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use predicates::prelude::PredicateBooleanExt;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -59,7 +59,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
|
||||
}
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
}
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
ConfigEditsBuilder::new(codex_home.path())
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply_blocking()?;
|
||||
|
||||
let mut list_cmd = codex_command(codex_home.path())?;
|
||||
let list_output = list_cmd.args(["mcp", "list"]).output()?;
|
||||
@@ -68,9 +70,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
|
||||
assert!(stdout.contains("Name"));
|
||||
assert!(stdout.contains("docs"));
|
||||
assert!(stdout.contains("docs-server"));
|
||||
assert!(stdout.contains("TOKEN=secret"));
|
||||
assert!(stdout.contains("APP_TOKEN=$APP_TOKEN"));
|
||||
assert!(stdout.contains("WORKSPACE_ID=$WORKSPACE_ID"));
|
||||
assert!(stdout.contains("TOKEN=*****"));
|
||||
assert!(stdout.contains("APP_TOKEN=*****"));
|
||||
assert!(stdout.contains("WORKSPACE_ID=*****"));
|
||||
assert!(stdout.contains("Status"));
|
||||
assert!(stdout.contains("Auth"));
|
||||
assert!(stdout.contains("enabled"));
|
||||
@@ -119,9 +121,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
|
||||
assert!(stdout.contains("transport: stdio"));
|
||||
assert!(stdout.contains("command: docs-server"));
|
||||
assert!(stdout.contains("args: --port 4000"));
|
||||
assert!(stdout.contains("env: TOKEN=secret"));
|
||||
assert!(stdout.contains("APP_TOKEN=$APP_TOKEN"));
|
||||
assert!(stdout.contains("WORKSPACE_ID=$WORKSPACE_ID"));
|
||||
assert!(stdout.contains("env: TOKEN=*****"));
|
||||
assert!(stdout.contains("APP_TOKEN=*****"));
|
||||
assert!(stdout.contains("WORKSPACE_ID=*****"));
|
||||
assert!(stdout.contains("enabled: true"));
|
||||
assert!(stdout.contains("remove: codex mcp remove docs"));
|
||||
|
||||
@@ -134,3 +136,30 @@ async fn list_and_get_render_expected_output() -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_disabled_server_shows_single_line() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add = codex_command(codex_home.path())?;
|
||||
add.args(["mcp", "add", "docs", "--", "docs-server"])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let mut servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
let docs = servers
|
||||
.get_mut("docs")
|
||||
.expect("docs server should exist after add");
|
||||
docs.enabled = false;
|
||||
ConfigEditsBuilder::new(codex_home.path())
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply_blocking()?;
|
||||
|
||||
let mut get_cmd = codex_command(codex_home.path())?;
|
||||
let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
|
||||
assert!(get_output.status.success());
|
||||
let stdout = String::from_utf8(get_output.stdout)?;
|
||||
assert_eq!(stdout.trim_end(), "docs (disabled)");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -22,6 +22,6 @@ chrono = { version = "0.4", features = ["serde"] }
|
||||
diffy = "0.4.2"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
thiserror = "2.0.12"
|
||||
thiserror = "2.0.17"
|
||||
codex-backend-client = { path = "../backend-client", optional = true }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
codex-git = { workspace = true }
|
||||
|
||||
@@ -362,13 +362,13 @@ mod api {
|
||||
});
|
||||
}
|
||||
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
let req = codex_git::ApplyGitRequest {
|
||||
cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
|
||||
diff: diff.clone(),
|
||||
revert: false,
|
||||
preflight,
|
||||
};
|
||||
let r = codex_git_apply::apply_git_patch(&req)
|
||||
let r = codex_git::apply_git_patch(&req)
|
||||
.map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;
|
||||
|
||||
let status = if r.exit_code == 0 {
|
||||
|
||||
@@ -26,4 +26,4 @@ pub use mock::MockClient;
|
||||
#[cfg(feature = "online")]
|
||||
pub use http::HttpClient;
|
||||
|
||||
// Reusable apply engine now lives in the shared crate `codex-git-apply`.
|
||||
// Reusable apply engine now lives in the shared crate `codex-git`.
|
||||
|
||||
@@ -8,6 +8,7 @@ pub mod util;
|
||||
pub use cli::Cli;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use codex_login::AuthManager;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
@@ -56,11 +57,8 @@ async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext>
|
||||
};
|
||||
append_error_log(format!("startup: base_url={base_url} path_style={style}"));
|
||||
|
||||
let auth = match codex_core::config::find_codex_home()
|
||||
.ok()
|
||||
.map(|home| codex_login::AuthManager::new(home, false))
|
||||
.and_then(|am| am.auth())
|
||||
{
|
||||
let auth_manager = util::load_auth_manager().await;
|
||||
let auth = match auth_manager.as_ref().and_then(AuthManager::auth) {
|
||||
Some(auth) => auth,
|
||||
None => {
|
||||
eprintln!(
|
||||
@@ -1035,7 +1033,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Close task modal/pending apply if present before opening env modal
|
||||
app.diff_overlay = None;
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch {
|
||||
app.env_loading = true;
|
||||
@@ -1086,7 +1084,19 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
let backend = Arc::clone(&backend);
|
||||
let best_of_n = page.best_of_n;
|
||||
tokio::spawn(async move {
|
||||
let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend, &env, &text, "main", false, best_of_n).await;
|
||||
let git_ref = if let Ok(cwd) = std::env::current_dir() {
|
||||
if let Some(branch) = codex_core::git_info::default_branch_name(&cwd).await {
|
||||
branch
|
||||
} else if let Some(branch) = codex_core::git_info::current_branch_name(&cwd).await {
|
||||
branch
|
||||
} else {
|
||||
"main".to_string()
|
||||
}
|
||||
} else {
|
||||
"main".to_string()
|
||||
};
|
||||
|
||||
let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend, &env, &text, &git_ref, false, best_of_n).await;
|
||||
let evt = match result {
|
||||
Ok(ok) => app::AppEvent::NewTaskSubmitted(Ok(ok)),
|
||||
Err(e) => app::AppEvent::NewTaskSubmitted(Err(format!("{e}"))),
|
||||
@@ -1094,7 +1104,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
let _ = tx.send(evt);
|
||||
});
|
||||
} else {
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
app.status = "No environment selected".to_string();
|
||||
}
|
||||
}
|
||||
needs_redraw = true;
|
||||
@@ -1292,18 +1302,6 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Environment modal key handling
|
||||
match key.code {
|
||||
KeyCode::Esc => { app.env_modal = None; needs_redraw = true; }
|
||||
KeyCode::Char('r') | KeyCode::Char('R') => {
|
||||
// Trigger refresh of environments
|
||||
app.env_loading = true; app.env_error = None; needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
let tx = tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()));
|
||||
let headers = crate::util::build_chatgpt_headers().await;
|
||||
let res = crate::env_detect::list_environments(&base_url, &headers).await;
|
||||
let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res));
|
||||
});
|
||||
}
|
||||
KeyCode::Char(ch) if !key.modifiers.contains(KeyModifiers::CONTROL) && !key.modifiers.contains(KeyModifiers::ALT) => {
|
||||
if let Some(m) = app.env_modal.as_mut() { m.query.push(ch); }
|
||||
needs_redraw = true;
|
||||
@@ -1410,7 +1408,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
}
|
||||
KeyCode::Char('o') | KeyCode::Char('O') => {
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch { app.env_loading = true; app.env_error = None; }
|
||||
needs_redraw = true;
|
||||
|
||||
@@ -945,9 +945,7 @@ pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) {
|
||||
|
||||
// Subheader with usage hints (dim cyan)
|
||||
let subheader = Paragraph::new(Line::from(
|
||||
"Type to search, Enter select, Esc cancel; r refresh"
|
||||
.cyan()
|
||||
.dim(),
|
||||
"Type to search, Enter select, Esc cancel".cyan().dim(),
|
||||
))
|
||||
.wrap(Wrap { trim: true });
|
||||
frame.render_widget(subheader, rows[0]);
|
||||
|
||||
@@ -2,6 +2,10 @@ use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_login::AuthManager;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
guard.replace(suffix.to_string());
|
||||
@@ -54,6 +58,18 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub async fn load_auth_manager() -> Option<AuthManager> {
|
||||
// TODO: pass in cli overrides once cloud tasks properly support them.
|
||||
let config = Config::load_with_cli_overrides(Vec::new(), ConfigOverrides::default())
|
||||
.await
|
||||
.ok()?;
|
||||
Some(AuthManager::new(
|
||||
config.codex_home,
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
))
|
||||
}
|
||||
|
||||
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
|
||||
/// and optional `ChatGPT-Account-Id`.
|
||||
pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
@@ -69,24 +85,22 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
USER_AGENT,
|
||||
HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
|
||||
);
|
||||
if let Ok(home) = codex_core::config::find_codex_home() {
|
||||
let am = codex_login::AuthManager::new(home, false);
|
||||
if let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
if let Some(am) = load_auth_manager().await
|
||||
&& let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
}
|
||||
headers
|
||||
|
||||
@@ -15,3 +15,7 @@ path = "src/lib.rs"
|
||||
[dependencies]
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
serde_with = "3"
|
||||
|
||||
[package.metadata.cargo-shear]
|
||||
ignored = ["serde_with"]
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
// Currently export only the types referenced by the workspace
|
||||
// The process for this will change
|
||||
|
||||
// Cloud Tasks
|
||||
pub mod code_task_details_response;
|
||||
pub use self::code_task_details_response::CodeTaskDetailsResponse;
|
||||
|
||||
@@ -20,3 +21,14 @@ pub use self::task_list_item::TaskListItem;
|
||||
|
||||
pub mod paginated_list_task_list_item_;
|
||||
pub use self::paginated_list_task_list_item_::PaginatedListTaskListItem;
|
||||
|
||||
// Rate Limits
|
||||
pub mod rate_limit_status_payload;
|
||||
pub use self::rate_limit_status_payload::PlanType;
|
||||
pub use self::rate_limit_status_payload::RateLimitStatusPayload;
|
||||
|
||||
pub mod rate_limit_status_details;
|
||||
pub use self::rate_limit_status_details::RateLimitStatusDetails;
|
||||
|
||||
pub mod rate_limit_window_snapshot;
|
||||
pub use self::rate_limit_window_snapshot::RateLimitWindowSnapshot;
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* codex-backend
|
||||
*
|
||||
* codex-backend
|
||||
*
|
||||
* The version of the OpenAPI document: 0.0.1
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
|
||||
use crate::models;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct RateLimitStatusDetails {
|
||||
#[serde(rename = "allowed")]
|
||||
pub allowed: bool,
|
||||
#[serde(rename = "limit_reached")]
|
||||
pub limit_reached: bool,
|
||||
#[serde(
|
||||
rename = "primary_window",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub primary_window: Option<Option<Box<models::RateLimitWindowSnapshot>>>,
|
||||
#[serde(
|
||||
rename = "secondary_window",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub secondary_window: Option<Option<Box<models::RateLimitWindowSnapshot>>>,
|
||||
}
|
||||
|
||||
impl RateLimitStatusDetails {
|
||||
pub fn new(allowed: bool, limit_reached: bool) -> RateLimitStatusDetails {
|
||||
RateLimitStatusDetails {
|
||||
allowed,
|
||||
limit_reached,
|
||||
primary_window: None,
|
||||
secondary_window: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
/*
|
||||
* codex-backend
|
||||
*
|
||||
* codex-backend
|
||||
*
|
||||
* The version of the OpenAPI document: 0.0.1
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
|
||||
use crate::models;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct RateLimitStatusPayload {
|
||||
#[serde(rename = "plan_type")]
|
||||
pub plan_type: PlanType,
|
||||
#[serde(
|
||||
rename = "rate_limit",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub rate_limit: Option<Option<Box<models::RateLimitStatusDetails>>>,
|
||||
}
|
||||
|
||||
impl RateLimitStatusPayload {
|
||||
pub fn new(plan_type: PlanType) -> RateLimitStatusPayload {
|
||||
RateLimitStatusPayload {
|
||||
plan_type,
|
||||
rate_limit: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum PlanType {
|
||||
#[serde(rename = "free")]
|
||||
Free,
|
||||
#[serde(rename = "go")]
|
||||
Go,
|
||||
#[serde(rename = "plus")]
|
||||
Plus,
|
||||
#[serde(rename = "pro")]
|
||||
Pro,
|
||||
#[serde(rename = "team")]
|
||||
Team,
|
||||
#[serde(rename = "business")]
|
||||
Business,
|
||||
#[serde(rename = "education")]
|
||||
Education,
|
||||
#[serde(rename = "quorum")]
|
||||
Quorum,
|
||||
#[serde(rename = "enterprise")]
|
||||
Enterprise,
|
||||
#[serde(rename = "edu")]
|
||||
Edu,
|
||||
}
|
||||
|
||||
impl Default for PlanType {
|
||||
fn default() -> PlanType {
|
||||
Self::Free
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* codex-backend
|
||||
*
|
||||
* codex-backend
|
||||
*
|
||||
* The version of the OpenAPI document: 0.0.1
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct RateLimitWindowSnapshot {
|
||||
#[serde(rename = "used_percent")]
|
||||
pub used_percent: i32,
|
||||
#[serde(rename = "limit_window_seconds")]
|
||||
pub limit_window_seconds: i32,
|
||||
#[serde(rename = "reset_after_seconds")]
|
||||
pub reset_after_seconds: i32,
|
||||
#[serde(rename = "reset_at")]
|
||||
pub reset_at: i32,
|
||||
}
|
||||
|
||||
impl RateLimitWindowSnapshot {
|
||||
pub fn new(
|
||||
used_percent: i32,
|
||||
limit_window_seconds: i32,
|
||||
reset_after_seconds: i32,
|
||||
reset_at: i32,
|
||||
) -> RateLimitWindowSnapshot {
|
||||
RateLimitWindowSnapshot {
|
||||
used_percent,
|
||||
limit_window_seconds,
|
||||
reset_after_seconds,
|
||||
reset_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
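A hedged sketch of how these generated rate-limit models deserialize with serde_json; the import path and the JSON field values are assumptions made for illustration:

    // Assumes RateLimitStatusPayload and PlanType are in scope (e.g. via the crate's `models` module).
    let json = r#"{"plan_type":"plus","rate_limit":{"allowed":true,"limit_reached":false}}"#;
    let payload: RateLimitStatusPayload = serde_json::from_str(json).expect("valid payload");
    assert_eq!(payload.plan_type, PlanType::Plus);
    // `rate_limit` uses serde_with's double_option, so a present object arrives as Some(Some(..)),
    // an explicit null as Some(None), and an absent field as None.
    assert!(matches!(payload.rate_limit, Some(Some(_))));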
@@ -24,21 +24,21 @@ pub fn builtin_approval_presets() -> Vec<ApprovalPreset> {
        ApprovalPreset {
            id: "read-only",
            label: "Read Only",
            description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network",
            description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network.",
            approval: AskForApproval::OnRequest,
            sandbox: SandboxPolicy::ReadOnly,
        },
        ApprovalPreset {
            id: "auto",
            label: "Auto",
            description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network",
            description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network.",
            approval: AskForApproval::OnRequest,
            sandbox: SandboxPolicy::new_workspace_write_policy(),
        },
        ApprovalPreset {
            id: "full-access",
            label: "Full Access",
            description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution",
            description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution.",
            approval: AskForApproval::Never,
            sandbox: SandboxPolicy::DangerFullAccess,
        },
@@ -19,8 +19,8 @@ use toml::Value;
pub struct CliConfigOverrides {
    /// Override a configuration value that would otherwise be loaded from
    /// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
    /// nested values. The `value` portion is parsed as JSON. If it fails to
    /// parse as JSON, the raw string is used as a literal.
    /// nested values. The `value` portion is parsed as TOML. If it fails to
    /// parse as TOML, the raw string is used as a literal.
    ///
    /// Examples:
    /// - `-c model="o3"`
@@ -59,7 +59,7 @@ impl CliConfigOverrides {
            return Err(format!("Empty key in override: {s}"));
        }

        // Attempt to parse as JSON. If that fails, treat it as a raw
        // Attempt to parse as TOML. If that fails, treat it as a raw
        // string. This allows convenient usage such as
        // `-c model=o3` without the quotes.
        let value: Value = match parse_toml_value(value_str) {
@@ -151,6 +151,15 @@ mod tests {
        assert_eq!(v.as_integer(), Some(42));
    }

    #[test]
    fn parses_bool() {
        let true_literal = parse_toml_value("true").expect("parse");
        assert_eq!(true_literal.as_bool(), Some(true));

        let false_literal = parse_toml_value("false").expect("parse");
        assert_eq!(false_literal.as_bool(), Some(false));
    }

    #[test]
    fn fails_on_unquoted_string() {
        assert!(parse_toml_value("hello").is_err());
@@ -6,15 +6,11 @@ pub fn format_env_display(env: Option<&HashMap<String, String>>, env_vars: &[Str
    if let Some(map) = env {
        let mut pairs: Vec<_> = map.iter().collect();
        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
        parts.extend(
            pairs
                .into_iter()
                .map(|(key, value)| format!("{key}={value}")),
        );
        parts.extend(pairs.into_iter().map(|(key, _)| format!("{key}=*****")));
    }

    if !env_vars.is_empty() {
        parts.extend(env_vars.iter().map(|var| format!("{var}=${var}")));
        parts.extend(env_vars.iter().map(|var| format!("{var}=*****")));
    }

    if parts.is_empty() {
@@ -42,14 +38,14 @@ mod tests {
        env.insert("B".to_string(), "two".to_string());
        env.insert("A".to_string(), "one".to_string());

        assert_eq!(format_env_display(Some(&env), &[]), "A=one, B=two");
        assert_eq!(format_env_display(Some(&env), &[]), "A=*****, B=*****");
    }

    #[test]
    fn formats_env_vars_with_dollar_prefix() {
        let vars = vec!["TOKEN".to_string(), "PATH".to_string()];

        assert_eq!(format_env_display(None, &vars), "TOKEN=$TOKEN, PATH=$PATH");
        assert_eq!(format_env_display(None, &vars), "TOKEN=*****, PATH=*****");
    }

    #[test]
@@ -60,7 +56,7 @@ mod tests {

        assert_eq!(
            format_env_display(Some(&env), &vars),
            "HOME=/tmp, TOKEN=$TOKEN"
            "HOME=*****, TOKEN=*****"
        );
    }
}
@@ -1,73 +1,119 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
|
||||
/// A simple preset pairing a model slug with a reasoning effort.
|
||||
/// A reasoning effort option that can be surfaced for a model.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ReasoningEffortPreset {
|
||||
/// Effort level that the model supports.
|
||||
pub effort: ReasoningEffort,
|
||||
/// Short human description shown next to the effort in UIs.
|
||||
pub description: &'static str,
|
||||
}
|
||||
|
||||
/// Metadata describing a Codex-supported model.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ModelPreset {
|
||||
/// Stable identifier for the preset.
|
||||
pub id: &'static str,
|
||||
/// Display label shown in UIs.
|
||||
pub label: &'static str,
|
||||
/// Short human description shown next to the label in UIs.
|
||||
pub description: &'static str,
|
||||
/// Model slug (e.g., "gpt-5").
|
||||
pub model: &'static str,
|
||||
/// Reasoning effort to apply for this preset.
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
/// Display name shown in UIs.
|
||||
pub display_name: &'static str,
|
||||
/// Short human description shown in UIs.
|
||||
pub description: &'static str,
|
||||
/// Reasoning effort applied when none is explicitly chosen.
|
||||
+    pub default_reasoning_effort: ReasoningEffort,
+    /// Supported reasoning effort options.
+    pub supported_reasoning_efforts: &'static [ReasoningEffortPreset],
+    /// Whether this is the default model for new users.
+    pub is_default: bool,
 }

 const PRESETS: &[ModelPreset] = &[
     ModelPreset {
-        id: "gpt-5-codex-low",
-        label: "gpt-5-codex low",
-        description: "Fastest responses with limited reasoning",
+        id: "gpt-5-codex",
         model: "gpt-5-codex",
-        effort: Some(ReasoningEffort::Low),
+        display_name: "gpt-5-codex",
+        description: "Optimized for codex.",
+        default_reasoning_effort: ReasoningEffort::Medium,
+        supported_reasoning_efforts: &[
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Low,
+                description: "Fastest responses with limited reasoning",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Medium,
+                description: "Dynamically adjusts reasoning based on the task",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::High,
+                description: "Maximizes reasoning depth for complex or ambiguous problems",
+            },
+        ],
+        is_default: true,
     },
     ModelPreset {
-        id: "gpt-5-codex-medium",
-        label: "gpt-5-codex medium",
-        description: "Dynamically adjusts reasoning based on the task",
-        model: "gpt-5-codex",
-        effort: Some(ReasoningEffort::Medium),
+        id: "gpt-5-codex-mini",
+        model: "gpt-5-codex-mini",
+        display_name: "gpt-5-codex-mini",
+        description: "Optimized for codex. Cheaper, faster, but less capable.",
+        default_reasoning_effort: ReasoningEffort::Medium,
+        supported_reasoning_efforts: &[
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Medium,
+                description: "Dynamically adjusts reasoning based on the task",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::High,
+                description: "Maximizes reasoning depth for complex or ambiguous problems",
+            },
+        ],
+        is_default: false,
     },
-    ModelPreset {
-        id: "gpt-5-codex-high",
-        label: "gpt-5-codex high",
-        description: "Maximizes reasoning depth for complex or ambiguous problems",
-        model: "gpt-5-codex",
-        effort: Some(ReasoningEffort::High),
-    },
     ModelPreset {
-        id: "gpt-5-minimal",
-        label: "gpt-5 minimal",
-        description: "Fastest responses with little reasoning",
+        id: "gpt-5",
         model: "gpt-5",
-        effort: Some(ReasoningEffort::Minimal),
-    },
-    ModelPreset {
-        id: "gpt-5-low",
-        label: "gpt-5 low",
-        description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
-        model: "gpt-5",
-        effort: Some(ReasoningEffort::Low),
-    },
-    ModelPreset {
-        id: "gpt-5-medium",
-        label: "gpt-5 medium",
-        description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
-        model: "gpt-5",
-        effort: Some(ReasoningEffort::Medium),
-    },
-    ModelPreset {
-        id: "gpt-5-high",
-        label: "gpt-5 high",
-        description: "Maximizes reasoning depth for complex or ambiguous problems",
-        model: "gpt-5",
-        effort: Some(ReasoningEffort::High),
+        display_name: "gpt-5",
+        description: "Broad world knowledge with strong general reasoning.",
+        default_reasoning_effort: ReasoningEffort::Medium,
+        supported_reasoning_efforts: &[
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Minimal,
+                description: "Fastest responses with little reasoning",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Low,
+                description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::Medium,
+                description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
+            },
+            ReasoningEffortPreset {
+                effort: ReasoningEffort::High,
+                description: "Maximizes reasoning depth for complex or ambiguous problems",
+            },
+        ],
+        is_default: false,
     },
 ];

-pub fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
-    PRESETS.to_vec()
+pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
+    let allow_codex_mini = matches!(auth_mode, Some(AuthMode::ChatGPT));
+    PRESETS
+        .iter()
+        .filter(|preset| allow_codex_mini || preset.id != "gpt-5-codex-mini")
+        .copied()
+        .collect()
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn only_one_default_model_is_configured() {
+        let default_models = PRESETS.iter().filter(|preset| preset.is_default).count();
+        assert!(default_models == 1);
+    }
+}
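To make the new gating concrete, here is a small illustrative test (a sketch only, not part of the diff; it assumes it would live in the same `tests` module as `only_one_default_model_is_configured`, with `AuthMode` in scope there): calling `builtin_model_presets(None)` should drop the `gpt-5-codex-mini` preset, while passing `Some(AuthMode::ChatGPT)` should keep it.

#[test]
fn codex_mini_preset_is_gated_on_chatgpt_auth() {
    // Hypothetical test name and placement; the assertions simply mirror
    // the filter inside builtin_model_presets above.
    let without_auth = builtin_model_presets(None);
    assert!(without_auth.iter().all(|preset| preset.id != "gpt-5-codex-mini"));

    let with_chatgpt = builtin_model_presets(Some(AuthMode::ChatGPT));
    assert!(with_chatgpt.iter().any(|preset| preset.id == "gpt-5-codex-mini"));
}

Filtering on the preset `id` keeps the auth policy in one place (`builtin_model_presets`) and leaves `PRESETS` itself a flat, auth-agnostic constant.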
@@ -21,20 +21,31 @@ bytes = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
codex-file-search = { workspace = true }
codex-mcp-client = { workspace = true }
codex-git = { workspace = true }
codex-keyring-store = { workspace = true }
codex-otel = { workspace = true, features = ["otel"] }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-async-utils = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-tokenizer = { workspace = true }
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
dirs = { workspace = true }
dunce = { workspace = true }
env-flags = { workspace = true }
eventsource-stream = { workspace = true }
futures = { workspace = true }
http = { workspace = true }
indexmap = { workspace = true }
keyring = { workspace = true, features = [
    "apple-native",
    "crypto-rust",
    "linux-native-async-persistent",
    "windows-native",
] }
libc = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
@@ -44,6 +55,7 @@ reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
similar = { workspace = true }
strum_macros = { workspace = true }
@@ -69,7 +81,7 @@ toml_edit = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tree-sitter = { workspace = true }
tree-sitter-bash = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4"] }
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
which = { workspace = true }
wildmatch = { workspace = true }
@@ -92,8 +104,11 @@ openssl-sys = { workspace = true, features = ["vendored"] }
[dev-dependencies]
assert_cmd = { workspace = true }
assert_matches = { workspace = true }
codex-arg0 = { workspace = true }
core_test_support = { workspace = true }
ctor = { workspace = true }
escargot = { workspace = true }
image = { workspace = true, features = ["jpeg", "png"] }
maplit = { workspace = true }
predicates = { workspace = true }
pretty_assertions = { workspace = true }
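One change in the manifest above is to the `uuid` features, which now include `v5` alongside `serde` and `v4`. As a rough, self-contained illustration (example code, not from the repository): `v4` produces a random identifier on every call, while `v5` derives a stable identifier by hashing a namespace UUID together with a name.

use uuid::Uuid;

fn main() {
    // v4 (already enabled before this change): random, different every run.
    let random_id = Uuid::new_v4();

    // v5 (newly enabled): deterministic; the same namespace + name always
    // yields the same UUID.
    let stable_id = Uuid::new_v5(&Uuid::NAMESPACE_URL, b"https://example.com/thread/42");

    println!("random: {random_id}");
    println!("stable: {stable_id}");
}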
Some files were not shown because too many files have changed in this diff.