Mirror of https://github.com/openai/codex.git, synced 2026-02-02 06:57:03 +00:00.

Compare commits: centralize...fix-timeou (162 commits)
| SHA1 |
|---|
| 9b7329699a |
| 8307d9bf3b |
| 294dafcacf |
| 0b26c76047 |
| dbad5eeec6 |
| ca9f9c6f5d |
| 4b4252210b |
| 6582554926 |
| 649ce520c4 |
| 667e841d3e |
| 63e1ef25af |
| 229d18f4d2 |
| 4a1a7f9685 |
| 86c149ae8e |
| 05f0b4f590 |
| d4eda9d10b |
| d7953aed74 |
| 2ab1650d4d |
| 79aa83ee39 |
| c4ebe4b078 |
| 1a89f70015 |
| 62474a30e8 |
| 9a10e80ab7 |
| 9b538a8672 |
| 95af417923 |
| fff576cf98 |
| 1575f0504c |
| edf4c3f627 |
| d40a6b7f73 |
| 3a22018edd |
| fe54c216a3 |
| cb6584de46 |
| 7e068e1094 |
| d3187dbc17 |
| dc2f26f7b5 |
| 553db8def1 |
| ab63a47173 |
| e658c6c73b |
| 1e0e553304 |
| 07b7d28937 |
| 6ee7fbcfff |
| 5f3a0473f1 |
| 2eda75a8ee |
| e1f098b9b7 |
| e5e13479d0 |
| 7bc3ca9e40 |
| 4d8b71d412 |
| b484672961 |
| a1ee10b438 |
| dccce34d84 |
| f5945d7c03 |
| 5fcf923c19 |
| 0c7efa0cfd |
| d5853d9c47 |
| d9118c04bf |
| 91e65ac0ce |
| 1ac4fb45d2 |
| 07b8bdfbf1 |
| 0f22067242 |
| d7f8b97541 |
| 611e00c862 |
| c8ebb2a0dc |
| 88e083a9d0 |
| 1c8507b32a |
| 23f31c6bff |
| ff48ae192b |
| a2fe2f9fb1 |
| 01ca2b5df6 |
| 368f7adfc6 |
| 68731ac74d |
| 0508823075 |
| 2ac14d1145 |
| 2371d771cc |
| 9a638dbf4e |
| dc2aeac21f |
| f842849bec |
| dcf73970d2 |
| e761924dc2 |
| cdc3df3790 |
| a3d3719481 |
| 11e5327770 |
| 87cce88f48 |
| ff6d4cec6b |
| 6ef658a9f9 |
| 8b8be343a7 |
| 89c00611c2 |
| 9572cfc782 |
| 4a55646a02 |
| 209af68611 |
| f4f9695978 |
| 5fcc380bd9 |
| aa76003e28 |
| fac548e430 |
| 9bd3453592 |
| b34efde2f3 |
| 7aa46ab5fc |
| bf35105af6 |
| 3429e82e45 |
| 815ae4164a |
| 13e1d0362d |
| db31f6966d |
| 2b20cd66af |
| 39e09c289d |
| 069a38a06c |
| 3183935bd7 |
| 060637b4d4 |
| fa92cd92fa |
| 89591e4246 |
| 802d2440b4 |
| e9135fa7c5 |
| ef3e075ad6 |
| 149e198ce8 |
| 1d76ba5ebe |
| a1635eea25 |
| 36113509f2 |
| ba95d9862c |
| ef55992ab0 |
| e3f913f567 |
| 1b8f2543ac |
| 65107d24a2 |
| 36eb071998 |
| 9b33ce3409 |
| 926c89cb20 |
| 5ba2a17576 |
| 266419217e |
| be4bdfec93 |
| 7ff142d93f |
| 4a42c4e142 |
| 66a4b89822 |
| d7b333be97 |
| 4d6a42a622 |
| b0bdc04c30 |
| 67a219ffc2 |
| 7226365397 |
| 0fc295d958 |
| 3e50f94d76 |
| eb5b1b627f |
| 0c1ff1d3fd |
| aea7610c76 |
| 775fbba6e0 |
| 5ee8a17b4e |
| 81be54b229 |
| 5e8659dcbc |
| 2338294b39 |
| afc4eaab8b |
| e92c4f6561 |
| 15fa2283e7 |
| 5907422d65 |
| f178805252 |
| a55b0c4bcc |
| 224222f09f |
| 7aab45e060 |
| bcd64c7e72 |
| c124f24354 |
| c7e4e6d0ee |
| 88abbf58ce |
| 71f838389b |
| 0533bd2e7c |
| 6af83d86ff |
| e2e1b65da6 |
| 817d1508bc |
| f8af4f5c8d |
.github/workflows/ci.yml (2 changes)

@@ -46,7 +46,7 @@ jobs:
           echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
 
       - name: Upload staged npm package artifact
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: codex-npm-staging
           path: ${{ steps.stage_npm_package.outputs.pack_output }}
.github/workflows/issue-deduplicator.yml (4 changes)

@@ -16,7 +16,7 @@ jobs:
     outputs:
       codex_output: ${{ steps.codex.outputs.final-message }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
 
       - name: Prepare Codex inputs
         env:
@@ -87,7 +87,7 @@ jobs:
       issues: write
     steps:
       - name: Comment on issue
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         env:
           CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
         with:
.github/workflows/issue-labeler.yml (2 changes)

@@ -16,7 +16,7 @@ jobs:
     outputs:
       codex_output: ${{ steps.codex.outputs.final-message }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
 
       - id: codex
         uses: openai/codex-action@main
.github/workflows/rust-ci.yml (297 changes)

@@ -9,7 +9,7 @@ on:
   # CI builds in debug (dev) for faster signal.
 
 jobs:
-  # --- Detect what changed (always runs) -------------------------------------
+  # --- Detect what changed to detect which tests to run (always runs) -------------------------------------
   changed:
     name: Detect changed areas
     runs-on: ubuntu-24.04
@@ -84,8 +84,8 @@ jobs:
         run: cargo shear
 
   # --- CI to validate on different os/targets --------------------------------
-  lint_build_test:
-    name: ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
+  lint_build:
+    name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
     runs-on: ${{ matrix.runner }}
     timeout-minutes: 30
     needs: changed
@@ -94,6 +94,11 @@ jobs:
     defaults:
       run:
         working-directory: codex-rs
+    env:
+      # Speed up repeated builds across CI runs by caching compiled objects.
+      RUSTC_WRAPPER: sccache
+      CARGO_INCREMENTAL: "0"
+      SCCACHE_CACHE_SIZE: 10G
 
     strategy:
       fail-fast: false
@@ -159,20 +164,83 @@ jobs:
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
           restore-keys: |
             cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
 
-      - name: Restore target cache (except gnu-dev)
-        id: cache_target_restore
-        if: ${{ !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release') }}
+      # Install and restore sccache cache
+      - name: Install sccache
+        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        with:
+          tool: sccache
+          version: 0.7.5
+
+      - name: Configure sccache backend
+        shell: bash
+        run: |
+          set -euo pipefail
+          if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
+            echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
+            echo "Using sccache GitHub backend"
+          else
+            echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
+            echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
+            echo "Using sccache local disk + actions/cache fallback"
+          fi
+
+      - name: Restore sccache cache (fallback)
+        if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
+        id: cache_sccache_restore
         uses: actions/cache/restore@v4
         with:
-          path: ${{ github.workspace }}/codex-rs/target/
-          key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+          restore-keys: |
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Prepare APT cache directories (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          sudo mkdir -p /var/cache/apt/archives /var/lib/apt/lists
+          sudo chown -R "$USER:$USER" /var/cache/apt /var/lib/apt/lists
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Restore APT cache (musl)
+        id: cache_apt_restore
+        uses: actions/cache/restore@v4
+        with:
+          path: |
+            /var/cache/apt
+          key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
 
       - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
         name: Install musl build tools
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        shell: bash
         run: |
-          sudo apt install -y musl-tools pkg-config && sudo rm -rf /var/lib/apt/lists/*
+          set -euo pipefail
+          sudo apt-get -y update -o Acquire::Retries=3
+          sudo apt-get -y install --no-install-recommends musl-tools pkg-config
+
+      - name: Install cargo-chef
+        if: ${{ matrix.profile == 'release' }}
+        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        with:
+          tool: cargo-chef
+          version: 0.1.71
+
+      - name: Pre-warm dependency cache (cargo-chef)
+        if: ${{ matrix.profile == 'release' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          RECIPE="${RUNNER_TEMP}/chef-recipe.json"
+          cargo chef prepare --recipe-path "$RECIPE"
+          cargo chef cook --recipe-path "$RECIPE" --target ${{ matrix.target }} --release --all-features
 
       - name: cargo clippy
         id: clippy
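For local debugging of the caching behavior, the CI wiring above can be approximated by hand. A minimal sketch, assuming sccache is already installed and using the same environment variables the workflow sets (this follows the local-disk fallback branch, not the GitHub Actions cache backend):

```bash
# Illustrative local reproduction of the sccache setup above -- not part of
# the workflow itself. Route rustc through sccache with a local disk cache,
# build, then inspect hit rates.
export RUSTC_WRAPPER=sccache
export CARGO_INCREMENTAL=0          # sccache cannot cache incremental artifacts
export SCCACHE_DIR="$PWD/.sccache"  # local disk backend, as in the fallback branch
export SCCACHE_CACHE_SIZE=10G
cargo build
sccache --show-stats                # compile requests, cache hits/misses
```

On a second build after `cargo clean`, the stats should show mostly cache hits, which is the effect the workflow's save/restore steps preserve across CI runs.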
@@ -191,20 +259,6 @@ jobs:
           find . -name Cargo.toml -mindepth 2 -maxdepth 2 -print0 \
             | xargs -0 -n1 -I{} bash -c 'cd "$(dirname "{}")" && cargo check --profile ${{ matrix.profile }}'
 
-      - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
-        with:
-          tool: nextest
-          version: 0.9.103
-
-      - name: tests
-        id: test
-        # Tests take too long for release builds to run them on every PR.
-        if: ${{ matrix.profile != 'release' }}
-        continue-on-error: true
-        run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
-        env:
-          RUST_BACKTRACE: 1
-
       # Save caches explicitly; make non-fatal so cache packaging
       # never fails the overall job. Only save when key wasn't hit.
       - name: Save cargo home cache
@@ -217,33 +271,193 @@ jobs:
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
 
-      - name: Save target cache (except gnu-dev)
-        if: >-
-          always() && !cancelled() &&
-          (steps.cache_target_restore.outputs.cache-hit != 'true') &&
-          !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release')
+      - name: Save sccache cache (fallback)
+        if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
         continue-on-error: true
         uses: actions/cache/save@v4
         with:
-          path: ${{ github.workspace }}/codex-rs/target/
-          key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+
+      - name: sccache stats
+        if: always()
+        continue-on-error: true
+        run: sccache --show-stats || true
+
+      - name: sccache summary
+        if: always()
+        shell: bash
+        run: |
+          {
+            echo "### sccache stats — ${{ matrix.target }} (${{ matrix.profile }})";
+            echo;
+            echo '```';
+            sccache --show-stats || true;
+            echo '```';
+          } >> "$GITHUB_STEP_SUMMARY"
+
+      - name: Save APT cache (musl)
+        if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v4
+        with:
+          path: |
+            /var/cache/apt
+          key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
 
       # Fail the job if any of the previous steps failed.
       - name: verify all steps passed
         if: |
           steps.clippy.outcome == 'failure' ||
-          steps.cargo_check_all_crates.outcome == 'failure' ||
-          steps.test.outcome == 'failure'
+          steps.cargo_check_all_crates.outcome == 'failure'
         run: |
-          echo "One or more checks failed (clippy, cargo_check_all_crates, or test). See logs for details."
+          echo "One or more checks failed (clippy or cargo_check_all_crates). See logs for details."
           exit 1
 
+  tests:
+    name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
+    runs-on: ${{ matrix.runner }}
+    timeout-minutes: 30
+    needs: changed
+    if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
+    defaults:
+      run:
+        working-directory: codex-rs
+    env:
+      RUSTC_WRAPPER: sccache
+      CARGO_INCREMENTAL: "0"
+      SCCACHE_CACHE_SIZE: 10G
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - runner: macos-14
+            target: aarch64-apple-darwin
+            profile: dev
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-gnu
+            profile: dev
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-gnu
+            profile: dev
+          - runner: windows-latest
+            target: x86_64-pc-windows-msvc
+            profile: dev
+          - runner: windows-11-arm
+            target: aarch64-pc-windows-msvc
+            profile: dev
+
+    steps:
+      - uses: actions/checkout@v5
+      - uses: dtolnay/rust-toolchain@1.90
+        with:
+          targets: ${{ matrix.target }}
+
+      - name: Restore cargo home cache
+        id: cache_cargo_home_restore
+        uses: actions/cache/restore@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+          restore-keys: |
+            cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      - name: Install sccache
+        uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        with:
+          tool: sccache
+          version: 0.7.5
+
+      - name: Configure sccache backend
+        shell: bash
+        run: |
+          set -euo pipefail
+          if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
+            echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
+            echo "Using sccache GitHub backend"
+          else
+            echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
+            echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
+            echo "Using sccache local disk + actions/cache fallback"
+          fi
+
+      - name: Restore sccache cache (fallback)
+        if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
+        id: cache_sccache_restore
+        uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+          restore-keys: |
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
+        with:
+          tool: nextest
+          version: 0.9.103
+
+      - name: tests
+        id: test
+        continue-on-error: true
+        run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
+        env:
+          RUST_BACKTRACE: 1
+
+      - name: Save cargo home cache
+        if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+
+      - name: Save sccache cache (fallback)
+        if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v4
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+
+      - name: sccache stats
+        if: always()
+        continue-on-error: true
+        run: sccache --show-stats || true
+
+      - name: sccache summary
+        if: always()
+        shell: bash
+        run: |
+          {
+            echo "### sccache stats — ${{ matrix.target }} (tests)";
+            echo;
+            echo '```';
+            sccache --show-stats || true;
+            echo '```';
+          } >> "$GITHUB_STEP_SUMMARY"
+
+      - name: verify tests passed
+        if: steps.test.outcome == 'failure'
+        run: |
+          echo "Tests failed. See logs for details."
+          exit 1
 
   # --- Gatherer job that you mark as the ONLY required status -----------------
   results:
     name: CI results (required)
-    needs: [changed, general, cargo_shear, lint_build_test]
+    needs: [changed, general, cargo_shear, lint_build, tests]
     if: always()
     runs-on: ubuntu-24.04
     steps:
@@ -252,7 +466,8 @@ jobs:
         run: |
           echo "general: ${{ needs.general.result }}"
           echo "shear : ${{ needs.cargo_shear.result }}"
-          echo "matrix : ${{ needs.lint_build_test.result }}"
+          echo "lint : ${{ needs.lint_build.result }}"
+          echo "tests : ${{ needs.tests.result }}"
 
       # If nothing relevant changed (PR touching only root README, etc.),
       # declare success regardless of other jobs.
@@ -264,4 +479,10 @@ jobs:
           # Otherwise require the jobs to have succeeded
          [[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
          [[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
-         [[ '${{ needs.lint_build_test.result }}' == 'success' ]] || { echo 'matrix failed'; exit 1; }
+         [[ '${{ needs.lint_build.result }}' == 'success' ]] || { echo 'lint_build failed'; exit 1; }
+         [[ '${{ needs.tests.result }}' == 'success' ]] || { echo 'tests failed'; exit 1; }
+
+      - name: sccache summary note
+        if: always()
+        run: |
+          echo "Per-job sccache stats are attached to each matrix job's Step Summary."
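The new tests job's main step can also be run locally with the same flags. A hedged sketch, assuming cargo-nextest is installed and the ci-test profile is defined in codex-rs/Cargo.toml (the --target flag is omitted here so the host's default target is used):

```bash
# Hypothetical local equivalent of the "tests" job's main step.
cd codex-rs
cargo nextest run --all-features --no-fail-fast --cargo-profile ci-test
```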
.github/workflows/rust-release.yml (2 changes)

@@ -350,7 +350,7 @@ jobs:
             fi
           fi
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
         with:
          name: ${{ matrix.target }}
          # Upload the per-binary .zst files as well as the new .tar.gz
@@ -1 +1 @@
-The changelog can be found on the [releases page](https://github.com/openai/codex/releases)
+The changelog can be found on the [releases page](https://github.com/openai/codex/releases).
@@ -33,7 +33,7 @@ Then simply run `codex` to get started:
 codex
 ```
 
-If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-update-codex-isnt-upgrading-me).
+If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).
 
 <details>
 <summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -75,11 +75,13 @@ Codex CLI supports a rich set of configuration options, with preferences stored
 
 - [**Getting started**](./docs/getting-started.md)
   - [CLI usage](./docs/getting-started.md#cli-usage)
+  - [Slash Commands](./docs/slash_commands.md)
   - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
   - [Example prompts](./docs/getting-started.md#example-prompts)
   - [Custom prompts](./docs/prompts.md)
   - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
-  - [Configuration](./docs/config.md)
+- [**Configuration**](./docs/config.md)
+  - [Example config](./docs/example-config.md)
 - [**Sandbox & approvals**](./docs/sandbox.md)
 - [**Authentication**](./docs/authentication.md)
   - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
codex-rs/.cargo/config.toml (new file, 5 lines)

@@ -0,0 +1,5 @@
+[target.'cfg(all(windows, target_env = "msvc"))']
+rustflags = ["-C", "link-arg=/STACK:8388608"]
+
+[target.'cfg(all(windows, target_env = "gnu"))']
+rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
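Both rustflags entries reserve the same stack size for the main thread: 8388608 bytes is 8 * 1024 * 1024, i.e. 8 MiB, versus the roughly 1 MiB Windows default. /STACK is the MSVC linker's flag, and -Wl,--stack is the GNU ld equivalent. A minimal sketch of the same setting applied ad hoc, assuming a Windows MSVC toolchain, without touching .cargo/config.toml:

```bash
# Illustration only: pass the 8 MiB stack reserve for a single build
# via RUSTFLAGS instead of the checked-in .cargo/config.toml.
RUSTFLAGS="-C link-arg=/STACK:8388608" \
  cargo build --target x86_64-pc-windows-msvc
```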
codex-rs/Cargo.lock (generated, 319 changes)

@@ -172,9 +172,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.99"
+version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
 
 [[package]]
 name = "app_test_support"
@@ -186,9 +186,11 @@ dependencies = [
  "chrono",
  "codex-app-server-protocol",
  "codex-core",
+ "codex-protocol",
  "serde",
  "serde_json",
  "tokio",
+ "uuid",
  "wiremock",
 ]
 
@@ -592,9 +594,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.9.1"
+version = "2.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
+checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
 
 [[package]]
 name = "block-buffer"
@@ -843,6 +845,7 @@ dependencies = [
  "codex-backend-client",
  "codex-common",
  "codex-core",
+ "codex-feedback",
  "codex-file-search",
  "codex-login",
  "codex-protocol",
@@ -853,6 +856,7 @@ dependencies = [
  "pretty_assertions",
  "serde",
  "serde_json",
+ "serial_test",
  "tempfile",
  "tokio",
  "toml",
@@ -869,6 +873,7 @@ dependencies = [
  "anyhow",
  "clap",
  "codex-protocol",
  "mcp-types",
  "paste",
  "pretty_assertions",
  "schemars 0.8.22",
@@ -889,7 +894,7 @@ dependencies = [
  "pretty_assertions",
  "similar",
  "tempfile",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tree-sitter",
  "tree-sitter-bash",
 ]
@@ -948,7 +953,7 @@ dependencies = [
  "clap",
  "codex-common",
  "codex-core",
- "codex-git-apply",
+ "codex-git",
  "serde",
  "serde_json",
  "tempfile",
@@ -981,6 +986,7 @@ dependencies = [
  "codex-rmcp-client",
  "codex-stdio-to-uds",
  "codex-tui",
+ "codex-windows-sandbox",
  "ctor 0.5.0",
  "owo-colors",
  "predicates",
@@ -989,6 +995,7 @@ dependencies = [
  "supports-color",
  "tempfile",
  "tokio",
+ "toml",
 ]
 
 [[package]]
@@ -1025,11 +1032,11 @@ dependencies = [
  "async-trait",
  "chrono",
  "codex-backend-client",
- "codex-git-apply",
+ "codex-git",
  "diffy",
  "serde",
  "serde_json",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
 ]
 
 [[package]]
@@ -1061,12 +1068,16 @@ dependencies = [
  "codex-apply-patch",
  "codex-async-utils",
  "codex-file-search",
+ "codex-git",
+ "codex-keyring-store",
  "codex-otel",
  "codex-protocol",
  "codex-rmcp-client",
  "codex-utils-pty",
  "codex-utils-readiness",
  "codex-utils-string",
+ "codex-utils-tokenizer",
+ "codex-windows-sandbox",
  "core-foundation 0.9.4",
  "core_test_support",
  "dirs",
@@ -1076,7 +1087,9 @@ dependencies = [
  "eventsource-stream",
  "futures",
  "http",
- "indexmap 2.10.0",
+ "image",
+ "indexmap 2.12.0",
+ "keyring",
  "landlock",
  "libc",
  "maplit",
@@ -1093,12 +1106,13 @@ dependencies = [
  "serde_json",
  "serial_test",
  "sha1",
  "sha2",
  "shlex",
  "similar",
  "strum_macros 0.27.2",
  "tempfile",
  "test-log",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "time",
  "tokio",
  "tokio-test",
@@ -1194,25 +1208,29 @@ dependencies = [
 ]
 
 [[package]]
-name = "codex-git-apply"
-version = "0.0.0"
-dependencies = [
- "once_cell",
- "regex",
- "tempfile",
-]
-
-[[package]]
-name = "codex-git-tooling"
+name = "codex-git"
 version = "0.0.0"
 dependencies = [
  "assert_matches",
  "once_cell",
  "pretty_assertions",
  "regex",
  "schemars 0.8.22",
  "serde",
  "tempfile",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "ts-rs",
  "walkdir",
 ]
 
 [[package]]
+name = "codex-keyring-store"
+version = "0.0.0"
+dependencies = [
+ "keyring",
+ "tracing",
+]
+
+[[package]]
 name = "codex-linux-sandbox"
 version = "0.0.0"
@@ -1327,8 +1345,11 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "base64",
+ "codex-git",
+ "codex-utils-image",
  "icu_decimal",
  "icu_locale_core",
+ "icu_provider",
  "mcp-types",
  "mime_guess",
  "schemars 0.8.22",
@@ -1376,6 +1397,7 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "axum",
+ "codex-keyring-store",
  "codex-protocol",
  "dirs",
  "escargot",
@@ -1427,7 +1449,6 @@ dependencies = [
  "codex-core",
  "codex-feedback",
  "codex-file-search",
- "codex-git-tooling",
  "codex-login",
  "codex-ollama",
  "codex-protocol",
@@ -1452,6 +1473,7 @@ dependencies = [
  "regex-lite",
  "serde",
  "serde_json",
+ "serial_test",
  "shlex",
  "strum 0.27.2",
  "strum_macros 0.27.2",
@@ -1472,6 +1494,27 @@ dependencies = [
  "vt100",
 ]
 
 [[package]]
+name = "codex-utils-cache"
+version = "0.0.0"
+dependencies = [
+ "lru",
+ "sha1",
+ "tokio",
+]
+
+[[package]]
+name = "codex-utils-image"
+version = "0.0.0"
+dependencies = [
+ "base64",
+ "codex-utils-cache",
+ "image",
+ "tempfile",
+ "thiserror 2.0.17",
+ "tokio",
+]
+
+[[package]]
 name = "codex-utils-json-to-toml"
 version = "0.0.0"
@@ -1496,7 +1539,7 @@ version = "0.0.0"
 dependencies = [
  "assert_matches",
  "async-trait",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "time",
  "tokio",
 ]
@@ -1511,10 +1554,22 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "pretty_assertions",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tiktoken-rs",
 ]
 
 [[package]]
+name = "codex-windows-sandbox"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "dirs-next",
+ "rand 0.8.5",
+ "serde",
+ "serde_json",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "color-eyre"
 version = "0.6.5"
@@ -1700,10 +1755,9 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
 [[package]]
 name = "crossterm"
 version = "0.28.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
+source = "git+https://github.com/nornagon/crossterm?branch=nornagon%2Fcolor-query#87db8bfa6dc99427fd3b071681b07fc31c6ce995"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "crossterm_winapi",
  "futures-core",
  "mio",
@@ -2047,7 +2101,7 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "objc2",
 ]
 
@@ -2672,7 +2726,7 @@ dependencies = [
  "futures-core",
  "futures-sink",
  "http",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "slab",
  "tokio",
  "tokio-util",
@@ -2716,6 +2770,12 @@ dependencies = [
  "foldhash",
 ]
 
+[[package]]
+name = "hashbrown"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
+
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -2949,9 +3009,9 @@ dependencies = [
 
 [[package]]
 name = "icu_collections"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
 dependencies = [
  "displaydoc",
  "potential_utf",
@@ -2962,34 +3022,31 @@ dependencies = [
 
 [[package]]
 name = "icu_decimal"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fec61c43fdc4e368a9f450272833123a8ef0d7083a44597660ce94d791b8a2e2"
+checksum = "a38c52231bc348f9b982c1868a2af3195199623007ba2c7650f432038f5b3e8e"
 dependencies = [
  "displaydoc",
  "fixed_decimal",
  "icu_decimal_data",
  "icu_locale",
  "icu_locale_core",
  "icu_provider",
  "tinystr",
  "writeable",
  "zerovec",
 ]
 
 [[package]]
 name = "icu_decimal_data"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b70963bc35f9bdf1bc66a5c1f458f4991c1dc71760e00fa06016b2c76b2738d5"
+checksum = "2905b4044eab2dd848fe84199f9195567b63ab3a93094711501363f63546fef7"
 
 [[package]]
 name = "icu_locale"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ae5921528335e91da1b6c695dbf1ec37df5ac13faa3f91e5640be93aa2fbefd"
+checksum = "532b11722e350ab6bf916ba6eb0efe3ee54b932666afec989465f9243fe6dd60"
 dependencies = [
  "displaydoc",
  "icu_collections",
  "icu_locale_core",
  "icu_locale_data",
@@ -3001,12 +3058,13 @@ dependencies = [
 
 [[package]]
 name = "icu_locale_core"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
 dependencies = [
  "displaydoc",
  "litemap",
+ "serde",
  "tinystr",
  "writeable",
  "zerovec",
@@ -3014,17 +3072,16 @@ dependencies = [
 
 [[package]]
 name = "icu_locale_data"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fdef0c124749d06a743c69e938350816554eb63ac979166590e2b4ee4252765"
+checksum = "f03e2fcaefecdf05619f3d6f91740e79ab969b4dd54f77cbf546b1d0d28e3147"
 
 [[package]]
 name = "icu_normalizer"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_normalizer_data",
  "icu_properties",
@@ -3035,42 +3092,40 @@ dependencies = [
 
 [[package]]
 name = "icu_normalizer_data"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
 
 [[package]]
 name = "icu_properties"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_locale_core",
  "icu_properties_data",
  "icu_provider",
  "potential_utf",
  "zerotrie",
  "zerovec",
 ]
 
 [[package]]
 name = "icu_properties_data"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
 
 [[package]]
 name = "icu_provider"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
 dependencies = [
- "displaydoc",
  "icu_locale_core",
  "serde",
  "stable_deref_trait",
  "tinystr",
  "writeable",
  "yoke",
  "zerofrom",
@@ -3156,13 +3211,14 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.10.0"
+version = "2.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
+checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
 dependencies = [
  "equivalent",
- "hashbrown 0.15.4",
+ "hashbrown 0.16.0",
  "serde",
+ "serde_core",
 ]
 
 [[package]]
@@ -3177,7 +3233,7 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "inotify-sys",
  "libc",
 ]
@@ -3240,7 +3296,7 @@ version = "0.7.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "libc",
 ]
@@ -3450,7 +3506,7 @@ checksum = "b3d2ef408b88e913bfc6594f5e693d57676f6463ded7d8bf994175364320c706"
 dependencies = [
  "enumflags2",
  "libc",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
 ]
 
 [[package]]
@@ -3486,7 +3542,7 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "libc",
 ]
 
@@ -3496,7 +3552,7 @@ version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "761e49ec5fd8a5a463f9b84e877c373d888935b71c6be78f3767fe2ae6bed18e"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "libc",
 ]
 
@@ -3764,7 +3820,7 @@ version = "0.28.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "cfg_aliases 0.1.1",
  "libc",
@@ -3776,7 +3832,7 @@ version = "0.29.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "cfg_aliases 0.2.1",
  "libc",
@@ -3789,7 +3845,7 @@ version = "0.30.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "cfg_aliases 0.2.1",
  "libc",
@@ -3817,7 +3873,7 @@ version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "fsevent-sys",
  "inotify",
  "kqueue",
@@ -3987,7 +4043,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "objc2",
  "objc2-core-graphics",
  "objc2-foundation",
@@ -3999,7 +4055,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "dispatch2",
  "objc2",
 ]
@@ -4010,7 +4066,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "dispatch2",
  "objc2",
  "objc2-core-foundation",
@@ -4029,7 +4085,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "objc2",
  "objc2-core-foundation",
 ]
@@ -4040,7 +4096,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "objc2",
  "objc2-core-foundation",
 ]
@@ -4072,7 +4128,7 @@ version = "0.10.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "foreign-types",
  "libc",
@@ -4130,7 +4186,7 @@ dependencies = [
  "futures-sink",
  "js-sys",
  "pin-project-lite",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tracing",
 ]
 
@@ -4173,7 +4229,7 @@ dependencies = [
  "prost",
  "reqwest",
  "serde_json",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tokio",
  "tonic",
  "tracing",
@@ -4213,7 +4269,7 @@ dependencies = [
  "percent-encoding",
  "rand 0.9.2",
  "serde_json",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tokio",
  "tokio-stream",
 ]
@@ -4324,7 +4380,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
 dependencies = [
  "fixedbitset",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
 ]
 
 [[package]]
@@ -4392,7 +4448,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1"
 dependencies = [
  "base64",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "quick-xml",
  "serde",
  "time",
@@ -4404,7 +4460,7 @@ version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "crc32fast",
  "fdeflate",
  "flate2",
@@ -4463,11 +4519,12 @@ dependencies = [
 
 [[package]]
 name = "potential_utf"
-version = "0.1.2"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
 dependencies = [
  "serde",
+ "serde_core",
  "writeable",
  "zerovec",
 ]
@@ -4557,7 +4614,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1"
 dependencies = [
  "futures",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "nix 0.30.1",
  "tokio",
  "tracing",
@@ -4593,7 +4650,7 @@ version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "getopts",
  "memchr",
  "pulldown-cmark-escape",
@@ -4644,7 +4701,7 @@ dependencies = [
  "rustc-hash 2.1.1",
  "rustls",
  "socket2 0.6.0",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tokio",
  "tracing",
  "web-time",
@@ -4665,7 +4722,7 @@ dependencies = [
  "rustls",
  "rustls-pki-types",
  "slab",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tinyvec",
  "tracing",
  "web-time",
@@ -4774,7 +4831,7 @@ name = "ratatui"
 version = "0.29.0"
 source = "git+https://github.com/nornagon/ratatui?branch=nornagon-v0.29.0-patch#9b2ad1298408c45918ee9f8241a6f95498cdbed2"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cassowary",
  "compact_str",
  "crossterm",
@@ -4804,7 +4861,7 @@ version = "0.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7e8af0dde094006011e6a740d4879319439489813bd0bcdc7d821beaeeff48ec"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
 ]
 
 [[package]]
@@ -4826,7 +4883,7 @@ checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
 dependencies = [
  "getrandom 0.2.16",
  "libredox",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
 ]
 
 [[package]]
@@ -4955,9 +5012,9 @@ dependencies = [
 
 [[package]]
 name = "rmcp"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fdad1258f7259fdc0f2dfc266939c82c3b5d1fd72bcde274d600cdc27e60243"
+checksum = "e5947688160b56fb6c827e3c20a72c90392a1d7e9dec74749197aa1780ac42ca"
 dependencies = [
  "base64",
  "bytes",
@@ -4977,7 +5034,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sse-stream",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tokio",
  "tokio-stream",
  "tokio-util",
@@ -4989,9 +5046,9 @@ dependencies = [
 
 [[package]]
 name = "rmcp-macros"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ede0589a208cc7ce81d1be68aa7e74b917fcd03c81528408bab0457e187dcd9b"
+checksum = "01263441d3f8635c628e33856c468b96ebbce1af2d3699ea712ca71432d4ee7a"
 dependencies = [
  "darling 0.21.3",
  "proc-macro2",
@@ -5033,7 +5090,7 @@ version = "0.38.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "errno",
  "libc",
  "linux-raw-sys 0.4.15",
@@ -5046,7 +5103,7 @@ version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "errno",
  "libc",
  "linux-raw-sys 0.9.4",
@@ -5112,7 +5169,7 @@ version = "14.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "cfg-if",
  "clipboard-win",
  "fd-lock",
@@ -5311,7 +5368,7 @@ version = "2.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "core-foundation 0.9.4",
  "core-foundation-sys",
  "libc",
@@ -5324,7 +5381,7 @@ version = "3.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "core-foundation 0.10.1",
  "core-foundation-sys",
  "libc",
@@ -5457,9 +5514,9 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.226"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
 dependencies = [
  "serde_core",
  "serde_derive",
@@ -5467,18 +5524,18 @@ dependencies = [
 
 [[package]]
 name = "serde_core"
-version = "1.0.226"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.226"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -5502,7 +5559,7 @@ version = "1.0.145"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
 dependencies = [
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "itoa",
  "memchr",
  "ryu",
@@ -5563,7 +5620,7 @@ dependencies = [
  "chrono",
  "hex",
  "indexmap 1.9.3",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "schemars 0.9.0",
  "schemars 1.0.4",
  "serde",
@@ -5632,6 +5689,12 @@ dependencies = [
  "digest",
 ]
 
+[[package]]
+name = "sha1_smol"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
+
 [[package]]
 name = "sha2"
 version = "0.10.9"
@@ -6023,7 +6086,7 @@ version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "core-foundation 0.9.4",
  "system-configuration-sys",
 ]
@@ -6140,11 +6203,11 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "2.0.16"
+version = "2.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
+checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
 dependencies = [
- "thiserror-impl 2.0.16",
+ "thiserror-impl 2.0.17",
 ]
 
 [[package]]
@@ -6160,9 +6223,9 @@ dependencies = [
 
 [[package]]
 name = "thiserror-impl"
-version = "2.0.16"
+version = "2.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
+checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6381,7 +6444,7 @@ version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8"
 dependencies = [
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "serde",
  "serde_spanned",
  "toml_datetime",
@@ -6405,7 +6468,7 @@ version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7211ff1b8f0d3adae1663b7da9ffe396eabe1ca25f0b0bee42b0da29a9ddce93"
 dependencies = [
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "toml_datetime",
  "toml_parser",
  "toml_writer",
@@ -6464,7 +6527,7 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
 dependencies = [
  "futures-core",
  "futures-util",
- "indexmap 2.10.0",
+ "indexmap 2.12.0",
  "pin-project-lite",
  "slab",
  "sync_wrapper",
@@ -6481,7 +6544,7 @@ version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "bytes",
  "futures-util",
  "http",
@@ -6642,7 +6705,7 @@ checksum = "adc5f880ad8d8f94e88cb81c3557024cf1a8b75e3b504c50481ed4f5a6006ff3"
 dependencies = [
  "regex",
  "streaming-iterator",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "tree-sitter",
 ]
 
@@ -6665,7 +6728,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6ef1b7a6d914a34127ed8e1fa927eb7088903787bcded4fa3eef8f85ee1568be"
 dependencies = [
  "serde_json",
- "thiserror 2.0.16",
+ "thiserror 2.0.17",
  "ts-rs-macros",
  "uuid",
 ]
@@ -6819,6 +6882,7 @@ dependencies = [
  "getrandom 0.3.3",
  "js-sys",
  "serde",
+ "sha1_smol",
  "wasm-bindgen",
 ]
 
@@ -7556,14 +7620,14 @@ version = "0.39.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
 ]
 
 [[package]]
 name = "writeable"
-version = "0.6.1"
+version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
 
 [[package]]
 name = "x11rb"
@@ -7758,10 +7822,11 @@ dependencies = [
 
 [[package]]
 name = "zerovec"
-version = "0.11.2"
+version = "0.11.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
 dependencies = [
+ "serde",
  "yoke",
  "zerofrom",
  "zerovec-derive",
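Most of the lockfile churn above is routine version bumps (bitflags 2.9.1 -> 2.10.0, thiserror 2.0.16 -> 2.0.17, indexmap 2.10.0 -> 2.12.0, serde 1.0.226 -> 1.0.228, the icu crates to 2.1.1). A hedged sketch of the kind of commands that produce such bumps; the actual commits may have used different invocations:

```bash
# Illustrative only: bump individual packages to the pinned versions.
cargo update -p thiserror --precise 2.0.17
cargo update -p serde --precise 1.0.228
cargo update -p indexmap --precise 2.12.0
```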
@@ -16,8 +16,8 @@ members = [
     "core",
     "exec",
     "execpolicy",
+    "keyring-store",
     "file-search",
-    "git-tooling",
     "linux-sandbox",
     "login",
     "mcp-server",
@@ -31,10 +31,12 @@ members = [
     "stdio-to-uds",
     "otel",
     "tui",
-    "git-apply",
+    "utils/git",
+    "utils/cache",
+    "utils/image",
     "utils/json-to-toml",
-    "utils/readiness",
     "utils/pty",
+    "utils/readiness",
     "utils/string",
     "utils/tokenizer",
 ]
@@ -64,7 +66,8 @@ codex-core = { path = "core" }
 codex-exec = { path = "exec" }
 codex-feedback = { path = "feedback" }
 codex-file-search = { path = "file-search" }
-codex-git-tooling = { path = "git-tooling" }
+codex-git = { path = "utils/git" }
+codex-keyring-store = { path = "keyring-store" }
 codex-linux-sandbox = { path = "linux-sandbox" }
 codex-login = { path = "login" }
 codex-mcp-server = { path = "mcp-server" }
@@ -77,11 +80,14 @@ codex-responses-api-proxy = { path = "responses-api-proxy" }
 codex-rmcp-client = { path = "rmcp-client" }
 codex-stdio-to-uds = { path = "stdio-to-uds" }
 codex-tui = { path = "tui" }
+codex-utils-cache = { path = "utils/cache" }
+codex-utils-image = { path = "utils/image" }
 codex-utils-json-to-toml = { path = "utils/json-to-toml" }
 codex-utils-pty = { path = "utils/pty" }
 codex-utils-readiness = { path = "utils/readiness" }
 codex-utils-string = { path = "utils/string" }
 codex-utils-tokenizer = { path = "utils/tokenizer" }
+codex-windows-sandbox = { path = "windows-sandbox" }
 core_test_support = { path = "core/tests/common" }
 mcp-types = { path = "mcp-types" }
 mcp_test_support = { path = "mcp-server/tests/common" }
@@ -117,11 +123,12 @@ escargot = "0.5"
 eventsource-stream = "0.2.3"
 futures = { version = "0.3", default-features = false }
 http = "1.3.1"
-icu_decimal = "2.0.0"
-icu_locale_core = "2.0.0"
+icu_decimal = "2.1"
+icu_provider = { version = "2.1", features = ["sync"] }
+icu_locale_core = "2.1"
 ignore = "0.4.23"
 image = { version = "^0.25.8", default-features = false }
-indexmap = "2.6.0"
+indexmap = "2.12.0"
 insta = "1.43.2"
 itertools = "0.14.0"
 keyring = "3.6"
@@ -129,6 +136,7 @@ landlock = "0.4.1"
 lazy_static = "1"
 libc = "0.2.175"
 log = "0.4"
+lru = "0.12.5"
 maplit = "1.0.2"
 mime_guess = "2.0.5"
 multimap = "0.10.0"
@@ -154,7 +162,7 @@ ratatui = "0.29.0"
 ratatui-macros = "0.6.0"
 regex-lite = "0.1.7"
 reqwest = "0.12"
-rmcp = { version = "0.8.3", default-features = false }
+rmcp = { version = "0.8.5", default-features = false }
 schemars = "0.8.22"
 seccompiler = "0.5.0"
 sentry = "0.34.0"
@@ -174,7 +182,7 @@ sys-locale = "0.3.2"
 tempfile = "3.23.0"
 test-log = "0.2.18"
 textwrap = "0.16.2"
-thiserror = "2.0.16"
+thiserror = "2.0.17"
 time = "0.3"
 tiny_http = "0.12"
 tokio = "1"
@@ -203,6 +211,7 @@ walkdir = "2.5.0"
 webbrowser = "1.0"
 which = "6"
 wildmatch = "2.5.0"
+
 wiremock = "0.6"
 zeroize = "1.8.1"
@@ -247,7 +256,12 @@ unwrap_used = "deny"
 # cargo-shear cannot see the platform-specific openssl-sys usage, so we
 # silence the false positive here instead of deleting a real dependency.
 [workspace.metadata.cargo-shear]
-ignored = ["openssl-sys", "codex-utils-readiness", "codex-utils-tokenizer"]
+ignored = [
+    "icu_provider",
+    "openssl-sys",
+    "codex-utils-readiness",
+    "codex-utils-tokenizer",
+]
 
 [profile.release]
 lto = "fat"
@@ -267,6 +281,7 @@ opt-level = 0
|
||||
# Uncomment to debug local changes.
|
||||
# ratatui = { path = "../../ratatui" }
|
||||
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
|
||||
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
|
||||
|
||||
# Uncomment to debug local changes.
|
||||
# rmcp = { path = "../../rust-sdk/crates/rmcp" }
|
||||
|
||||
@@ -63,6 +63,9 @@ codex sandbox macos [--full-auto] [COMMAND]...
# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Windows
codex sandbox windows [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...

@@ -14,6 +14,7 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
paste = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }

@@ -2,20 +2,28 @@ use crate::ClientNotification
use crate::ClientRequest;
use crate::ServerNotification;
use crate::ServerRequest;
use crate::export_client_notification_schemas;
use crate::export_client_param_schemas;
use crate::export_client_response_schemas;
use crate::export_client_responses;
use crate::export_server_notification_schemas;
use crate::export_server_param_schemas;
use crate::export_server_response_schemas;
use crate::export_server_responses;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::SandboxPolicy;
use schemars::JsonSchema;
use schemars::schema::RootSchema;
use schemars::schema_for;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::fs;
use std::io::Read;
@@ -23,101 +31,33 @@ use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use ts_rs::ExportError;
use ts_rs::TS;

const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";

macro_rules! for_each_schema_type {
    ($macro:ident) => {
        $macro!(crate::RequestId);
        $macro!(crate::JSONRPCMessage);
        $macro!(crate::JSONRPCRequest);
        $macro!(crate::JSONRPCNotification);
        $macro!(crate::JSONRPCResponse);
        $macro!(crate::JSONRPCError);
        $macro!(crate::JSONRPCErrorError);
        $macro!(crate::AddConversationListenerParams);
        $macro!(crate::AddConversationSubscriptionResponse);
        $macro!(crate::ApplyPatchApprovalParams);
        $macro!(crate::ApplyPatchApprovalResponse);
        $macro!(crate::ArchiveConversationParams);
        $macro!(crate::ArchiveConversationResponse);
        $macro!(crate::AuthMode);
        $macro!(crate::AuthStatusChangeNotification);
        $macro!(crate::CancelLoginChatGptParams);
        $macro!(crate::CancelLoginChatGptResponse);
        $macro!(crate::ClientInfo);
        $macro!(crate::ClientNotification);
        $macro!(crate::ClientRequest);
        $macro!(crate::ConversationSummary);
        $macro!(crate::ExecCommandApprovalParams);
        $macro!(crate::ExecCommandApprovalResponse);
        $macro!(crate::ExecOneOffCommandParams);
        $macro!(crate::ExecOneOffCommandResponse);
        $macro!(crate::FuzzyFileSearchParams);
        $macro!(crate::FuzzyFileSearchResponse);
        $macro!(crate::FuzzyFileSearchResult);
        $macro!(crate::GetAuthStatusParams);
        $macro!(crate::GetAuthStatusResponse);
        $macro!(crate::GetUserAgentResponse);
        $macro!(crate::GetUserSavedConfigResponse);
        $macro!(crate::GitDiffToRemoteParams);
        $macro!(crate::GitDiffToRemoteResponse);
        $macro!(crate::GitSha);
        $macro!(crate::InitializeParams);
        $macro!(crate::InitializeResponse);
        $macro!(crate::InputItem);
        $macro!(crate::InterruptConversationParams);
        $macro!(crate::InterruptConversationResponse);
        $macro!(crate::ListConversationsParams);
        $macro!(crate::ListConversationsResponse);
        $macro!(crate::LoginApiKeyParams);
        $macro!(crate::LoginApiKeyResponse);
        $macro!(crate::LoginChatGptCompleteNotification);
        $macro!(crate::LoginChatGptResponse);
        $macro!(crate::LogoutChatGptParams);
        $macro!(crate::LogoutChatGptResponse);
        $macro!(crate::NewConversationParams);
        $macro!(crate::NewConversationResponse);
        $macro!(crate::Profile);
        $macro!(crate::RemoveConversationListenerParams);
        $macro!(crate::RemoveConversationSubscriptionResponse);
        $macro!(crate::ResumeConversationParams);
        $macro!(crate::ResumeConversationResponse);
        $macro!(crate::SandboxSettings);
        $macro!(crate::SendUserMessageParams);
        $macro!(crate::SendUserMessageResponse);
        $macro!(crate::SendUserTurnParams);
        $macro!(crate::SendUserTurnResponse);
        $macro!(crate::ServerNotification);
        $macro!(crate::ServerRequest);
        $macro!(crate::SessionConfiguredNotification);
        $macro!(crate::SetDefaultModelParams);
        $macro!(crate::SetDefaultModelResponse);
        $macro!(crate::Tools);
        $macro!(crate::UserInfoResponse);
        $macro!(crate::UserSavedConfig);
        $macro!(codex_protocol::protocol::EventMsg);
        $macro!(codex_protocol::protocol::FileChange);
        $macro!(codex_protocol::parse_command::ParsedCommand);
        $macro!(codex_protocol::protocol::SandboxPolicy);
    };
#[derive(Clone)]
pub struct GeneratedSchema {
    namespace: Option<String>,
    logical_name: String,
    value: Value,
    in_v1_dir: bool,
}

fn export_ts_with_context<F>(label: &str, export: F) -> Result<()>
where
    F: FnOnce() -> std::result::Result<(), ExportError>,
{
    match export() {
        Ok(()) => Ok(()),
        Err(ExportError::CannotBeExported(ty)) => Err(anyhow!(
            "failed to export {label}: dependency {ty} cannot be exported"
        )),
        Err(err) => Err(err.into()),
impl GeneratedSchema {
    fn namespace(&self) -> Option<&str> {
        self.namespace.as_deref()
    }

    fn logical_name(&self) -> &str {
        &self.logical_name
    }

    fn value(&self) -> &Value {
        &self.value
    }
}

type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts(out_dir, prettier)?;
    generate_json(out_dir)?;
@@ -125,27 +65,28 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
}

pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    let v2_out_dir = out_dir.join("v2");
    ensure_dir(out_dir)?;
    ensure_dir(&v2_out_dir)?;

    export_ts_with_context("ClientRequest", || ClientRequest::export_all_to(out_dir))?;
    export_ts_with_context("client responses", || export_client_responses(out_dir))?;
    export_ts_with_context("ClientNotification", || {
        ClientNotification::export_all_to(out_dir)
    })?;
    ClientRequest::export_all_to(out_dir)?;
    export_client_responses(out_dir)?;
    ClientNotification::export_all_to(out_dir)?;

    export_ts_with_context("ServerRequest", || ServerRequest::export_all_to(out_dir))?;
    export_ts_with_context("server responses", || export_server_responses(out_dir))?;
    export_ts_with_context("ServerNotification", || {
        ServerNotification::export_all_to(out_dir)
    })?;
    ServerRequest::export_all_to(out_dir)?;
    export_server_responses(out_dir)?;
    ServerNotification::export_all_to(out_dir)?;

    generate_index_ts(out_dir)?;
    generate_index_ts(&v2_out_dir)?;

    let ts_files = ts_files_in(out_dir)?;
    // Ensure our header is present on all TS files (root + subdirs like v2/).
    let ts_files = ts_files_in_recursive(out_dir)?;
    for file in &ts_files {
        prepend_header_if_missing(file)?;
    }

    // Optionally run Prettier on all generated TS files.
    if let Some(prettier_bin) = prettier
        && !ts_files.is_empty()
    {
@@ -164,23 +105,47 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {

pub fn generate_json(out_dir: &Path) -> Result<()> {
    ensure_dir(out_dir)?;
    let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();
    let envelope_emitters: &[JsonSchemaEmitter] = &[
        |d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
        |d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
        |d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
        |d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
        |d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
        |d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
        |d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
        |d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
        |d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
        |d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
        |d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
        |d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
        |d| write_json_schema_with_return::<FileChange>(d, "FileChange"),
        |d| write_json_schema_with_return::<crate::protocol::v1::InputItem>(d, "InputItem"),
        |d| write_json_schema_with_return::<ParsedCommand>(d, "ParsedCommand"),
        |d| write_json_schema_with_return::<SandboxPolicy>(d, "SandboxPolicy"),
    ];

    macro_rules! add_schema {
        ($ty:path) => {{
            let name = type_basename(stringify!($ty));
            let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
            bundle.insert(name, schema);
        }};
    let mut schemas: Vec<GeneratedSchema> = Vec::new();
    for emit in envelope_emitters {
        schemas.push(emit(out_dir)?);
    }

    for_each_schema_type!(add_schema);
    schemas.extend(export_client_param_schemas(out_dir)?);
    schemas.extend(export_client_response_schemas(out_dir)?);
    schemas.extend(export_server_param_schemas(out_dir)?);
    schemas.extend(export_server_response_schemas(out_dir)?);
    schemas.extend(export_client_notification_schemas(out_dir)?);
    schemas.extend(export_server_notification_schemas(out_dir)?);

    export_client_response_schemas(out_dir)?;
    export_server_response_schemas(out_dir)?;
    let bundle = build_schema_bundle(schemas)?;
    write_pretty_json(
        out_dir.join("codex_app_server_protocol.schemas.json"),
        &bundle,
    )?;

    let mut definitions = Map::new();
    Ok(())
}

fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
    const SPECIAL_DEFINITIONS: &[&str] = &[
        "ClientNotification",
        "ClientRequest",
@@ -193,30 +158,62 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
        "ServerRequest",
    ];

    for (name, schema) in bundle {
        let mut schema_value = serde_json::to_value(schema)?;
        if let Value::Object(ref mut obj) = schema_value {
            if let Some(defs) = obj.remove("definitions")
                && let Value::Object(defs_obj) = defs
            {
                for (def_name, def_schema) in defs_obj {
                    if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                        definitions.insert(def_name, def_schema);
                    }
                }
            }
    let namespaced_types = collect_namespaced_types(&schemas);
    let mut definitions = Map::new();

            if let Some(Value::Array(one_of)) = obj.get_mut("oneOf") {
                for variant in one_of.iter_mut() {
                    if let Some(variant_name) = variant_definition_name(&name, variant)
                        && let Value::Object(variant_obj) = variant
    for schema in schemas {
        let GeneratedSchema {
            namespace,
            logical_name,
            mut value,
            in_v1_dir,
        } = schema;

        if let Some(ref ns) = namespace {
            rewrite_refs_to_namespace(&mut value, ns);
        }

        let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
        if let Value::Object(ref mut obj) = value
            && let Some(defs) = obj.remove("definitions")
            && let Value::Object(defs_obj) = defs
        {
            for (def_name, mut def_schema) in defs_obj {
                if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                    continue;
                }
                annotate_schema(&mut def_schema, Some(def_name.as_str()));
                let target_namespace = match namespace {
                    Some(ref ns) => Some(ns.clone()),
                    None => namespace_for_definition(&def_name, &namespaced_types)
                        .cloned()
                        .filter(|_| !in_v1_dir),
                };
                if let Some(ref ns) = target_namespace {
                    if namespace.as_deref() == Some(ns.as_str()) {
                        rewrite_refs_to_namespace(&mut def_schema, ns);
                        insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
                    } else if !forced_namespace_refs
                        .iter()
                        .any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
                    {
                        variant_obj.insert("title".into(), Value::String(variant_name));
                        forced_namespace_refs.push((def_name.clone(), ns.clone()));
                    }
                } else {
                    definitions.insert(def_name, def_schema);
                }
            }
        }
        definitions.insert(name, schema_value);

        for (name, ns) in forced_namespace_refs {
            rewrite_named_ref_to_namespace(&mut value, &ns, &name);
        }

        if let Some(ref ns) = namespace {
            insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
        } else {
            definitions.insert(logical_name, value);
        }
    }

    let mut root = Map::new();
@@ -231,30 +228,66 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
    root.insert("type".to_string(), Value::String("object".into()));
    root.insert("definitions".to_string(), Value::Object(definitions));

    write_pretty_json(
        out_dir.join("codex_app_server_protocol.schemas.json"),
        &Value::Object(root),
    )?;

    Ok(())
    Ok(Value::Object(root))
}

fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
fn insert_into_namespace(
    definitions: &mut Map<String, Value>,
    namespace: &str,
    name: String,
    schema: Value,
) -> Result<()> {
    let entry = definitions
        .entry(namespace.to_string())
        .or_insert_with(|| Value::Object(Map::new()));
    match entry {
        Value::Object(map) => {
            map.insert(name, schema);
            Ok(())
        }
        _ => Err(anyhow!("expected namespace {namespace} to be an object")),
    }
}

fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    let file_stem = name.trim();
    let schema = schema_for!(T);
    write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema)
    let mut schema_value = serde_json::to_value(schema)?;
    annotate_schema(&mut schema_value, Some(file_stem));
    // If the name looks like a namespaced path (e.g., "v2::Type"), mirror
    // the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
    // write alongside the legacy files.
    let (raw_namespace, logical_name) = split_namespace(file_stem);
    let out_path = if let Some(ns) = raw_namespace {
        let dir = out_dir.join(ns);
        ensure_dir(&dir)?;
        dir.join(format!("{logical_name}.json"))
    } else {
        out_dir.join(format!("{file_stem}.json"))
    };

    write_pretty_json(out_path, &schema_value)
        .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    Ok(schema)
    let namespace = match raw_namespace {
        Some("v1") | None => None,
        Some(ns) => Some(ns.to_string()),
    };
    Ok(GeneratedSchema {
        in_v1_dir: raw_namespace == Some("v1"),
        namespace,
        logical_name: logical_name.to_string(),
        value: schema_value,
    })
}

pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<()>
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    write_json_schema_with_return::<T>(out_dir, name).map(|_| ())
    write_json_schema_with_return::<T>(out_dir, name)
}

fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
@@ -263,13 +296,73 @@ fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
    fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
    Ok(())
}
fn type_basename(type_path: &str) -> String {
    type_path
        .rsplit_once("::")
        .map(|(_, name)| name)
        .unwrap_or(type_path)
        .trim()
        .to_string()

/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
fn split_namespace(name: &str) -> (Option<&str>, &str) {
    name.split_once("::")
        .map_or((None, name), |(ns, rest)| (Some(ns), rest))
}
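// For example: split_namespace("v2::ThreadItem") yields (Some("v2"), "ThreadItem"),
// while split_namespace("RequestId") yields (None, "RequestId").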

/// Recursively rewrite $ref values that point at "#/definitions/..." so that
/// they point to a namespaced location under the bundle.
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(r)) = obj.get_mut("$ref")
                && let Some(suffix) = r.strip_prefix("#/definitions/")
            {
                let prefix = format!("{ns}/");
                if !suffix.starts_with(&prefix) {
                    *r = format!("#/definitions/{ns}/{suffix}");
                }
            }
            for v in obj.values_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        Value::Array(items) => {
            for v in items.iter_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        _ => {}
    }
}
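// For example, with ns = "v2" a node { "$ref": "#/definitions/ThreadItem" }
// becomes { "$ref": "#/definitions/v2/ThreadItem" }; a ref that already starts
// with "#/definitions/v2/" is left unchanged.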

fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
    let mut types = HashMap::new();
    for schema in schemas {
        if let Some(ns) = schema.namespace() {
            types
                .entry(schema.logical_name().to_string())
                .or_insert_with(|| ns.to_string());
            if let Some(Value::Object(defs)) = schema.value().get("definitions") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
            if let Some(Value::Object(defs)) = schema.value().get("$defs") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
        }
    }
    types
}

fn namespace_for_definition<'a>(
    name: &str,
    types: &'a HashMap<String, String>,
) -> Option<&'a String> {
    if let Some(ns) = types.get(name) {
        return Some(ns);
    }
    let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
    if trimmed != name {
        return types.get(trimmed);
    }
    None
}
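// The digit-trimming fallback handles numbered duplicate definitions: a
// definition emitted as, say, "Usage2" resolves to the same namespace as "Usage".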

fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
@@ -319,11 +412,147 @@ fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
}

fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
    props
        .get(key)
        .and_then(|value| value.get("enum"))
        .and_then(Value::as_array)
        .and_then(|arr| arr.first())
    props.get(key).and_then(string_literal)
}

fn string_literal(value: &Value) -> Option<&str> {
    value.get("const").and_then(Value::as_str).or_else(|| {
        value
            .get("enum")
            .and_then(Value::as_array)
            .and_then(|arr| arr.first())
            .and_then(Value::as_str)
    })
}

fn annotate_schema(value: &mut Value, base: Option<&str>) {
    match value {
        Value::Object(map) => annotate_object(map, base),
        Value::Array(items) => {
            for item in items {
                annotate_schema(item, base);
            }
        }
        _ => {}
    }
}

fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
    let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
    if let Some(owner) = owner.as_deref()
        && let Some(Value::Object(props)) = map.get_mut("properties")
    {
        set_discriminator_titles(props, owner);
    }

    if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
        annotate_variant_list(variants, base);
    }
    if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
        annotate_variant_list(variants, base);
    }

    if let Some(Value::Object(defs)) = map.get_mut("definitions") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(defs)) = map.get_mut("$defs") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(props)) = map.get_mut("properties") {
        for value in props.values_mut() {
            annotate_schema(value, base);
        }
    }

    if let Some(items) = map.get_mut("items") {
        annotate_schema(items, base);
    }

    if let Some(additional) = map.get_mut("additionalProperties") {
        annotate_schema(additional, base);
    }

    for (key, child) in map.iter_mut() {
        match key.as_str() {
            "oneOf"
            | "anyOf"
            | "definitions"
            | "$defs"
            | "properties"
            | "items"
            | "additionalProperties" => {}
            _ => annotate_schema(child, base),
        }
    }
}

fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
    let mut seen = HashSet::new();

    for variant in variants.iter() {
        if let Some(name) = variant_title(variant) {
            seen.insert(name.to_owned());
        }
    }

    for variant in variants.iter_mut() {
        let mut variant_name = variant_title(variant).map(str::to_owned);

        if variant_name.is_none()
            && let Some(base_name) = base
            && let Some(name) = variant_definition_name(base_name, variant)
        {
            let mut candidate = name.clone();
            let mut index = 2;
            while seen.contains(&candidate) {
                candidate = format!("{name}{index}");
                index += 1;
            }
            if let Some(obj) = variant.as_object_mut() {
                obj.insert("title".into(), Value::String(candidate.clone()));
            }
            seen.insert(candidate.clone());
            variant_name = Some(candidate);
        }

        if let Some(name) = variant_name.as_deref()
            && let Some(obj) = variant.as_object_mut()
            && let Some(Value::Object(props)) = obj.get_mut("properties")
        {
            set_discriminator_titles(props, name);
        }

        annotate_schema(variant, base);
    }
}

const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];

fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
    for key in DISCRIMINATOR_KEYS {
        if let Some(prop_schema) = props.get_mut(*key)
            && string_literal(prop_schema).is_some()
            && let Value::Object(prop_obj) = prop_schema
        {
            if prop_obj.contains_key("title") {
                continue;
            }
            let suffix = to_pascal_case(key);
            prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
        }
    }
}
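// For example: given owner "UserMessage" and key "type", a string-literal "type"
// property without an explicit title gains the title "UserMessageType".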

fn variant_title(value: &Value) -> Option<&str> {
    value
        .as_object()
        .and_then(|obj| obj.get("title"))
        .and_then(Value::as_str)
}

@@ -353,6 +582,33 @@ fn ensure_dir(dir: &Path) -> Result<()> {
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}

fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
    let direct = format!("#/definitions/{name}");
    let prefixed = format!("{direct}/");
    let replacement = format!("#/definitions/{ns}/{name}");
    let replacement_prefixed = format!("{replacement}/");
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
                if reference == &direct {
                    *reference = replacement;
                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
                    *reference = format!("{replacement_prefixed}{rest}");
                }
            }
            for child in obj.values_mut() {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        Value::Array(items) => {
            for child in items {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        _ => {}
    }
}

fn prepend_header_if_missing(path: &Path) -> Result<()> {
    let mut content = String::new();
    {
@@ -390,6 +646,26 @@ fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    Ok(files)
}

fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    let mut stack = vec![dir.to_path_buf()];
    while let Some(d) = stack.pop() {
        for entry in
            fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
        {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() {
                stack.push(path);
            } else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
                files.push(path);
            }
        }
    }
    files.sort();
    Ok(files)
}

fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    let mut stems: Vec<String> = ts_files_in(out_dir)?
@@ -406,6 +682,14 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }

    // If this is the root out_dir and a ./v2 folder exists with TS files,
    // expose it as a namespace to avoid symbol collisions at the root.
    let v2_dir = out_dir.join("v2");
    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
    if has_v2_ts {
        entries.push("export * as v2 from \"./v2\";\n".to_string());
    }

    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
@@ -420,3 +704,205 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use std::collections::BTreeSet;
    use std::fs;
    use std::path::PathBuf;
    use uuid::Uuid;

    #[test]
    fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
        // Assert that there are no types of the form "?: T | null" in the generated TS files.
        let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
        fs::create_dir(&output_dir)?;

        struct TempDirGuard(PathBuf);

        impl Drop for TempDirGuard {
            fn drop(&mut self) {
                let _ = fs::remove_dir_all(&self.0);
            }
        }

        let _guard = TempDirGuard(output_dir.clone());

        generate_ts(&output_dir, None)?;

        let mut undefined_offenders = Vec::new();
        let mut optional_nullable_offenders = BTreeSet::new();
        let mut stack = vec![output_dir];
        while let Some(dir) = stack.pop() {
            for entry in fs::read_dir(&dir)? {
                let entry = entry?;
                let path = entry.path();
                if path.is_dir() {
                    stack.push(path);
                    continue;
                }

                if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
                    let contents = fs::read_to_string(&path)?;
                    if contents.contains("| undefined") {
                        undefined_offenders.push(path.clone());
                    }

                    const SKIP_PREFIXES: &[&str] = &[
                        "const ",
                        "let ",
                        "var ",
                        "export const ",
                        "export let ",
                        "export var ",
                    ];

                    let mut search_start = 0;
                    while let Some(idx) = contents[search_start..].find("| null") {
                        let abs_idx = search_start + idx;
                        // Find the property-colon for this field by scanning forward
                        // from the start of the segment and ignoring nested braces,
                        // brackets, and parens. This avoids colons inside nested
                        // type literals like `{ [k in string]?: string }`.

                        let line_start_idx =
                            contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);

                        let mut segment_start_idx = line_start_idx;
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }

                        // Scan forward for the colon that separates the field name from its type.
                        let mut level_brace = 0_i32;
                        let mut level_brack = 0_i32;
                        let mut level_paren = 0_i32;
                        let mut in_single = false;
                        let mut in_double = false;
                        let mut escape = false;
                        let mut prop_colon_idx = None;
                        for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
                            let idx_abs = segment_start_idx + i;
                            if escape {
                                escape = false;
                                continue;
                            }
                            match ch {
                                '\\' => {
                                    // Only treat as escape when inside a string.
                                    if in_single || in_double {
                                        escape = true;
                                    }
                                }
                                '\'' => {
                                    if !in_double {
                                        in_single = !in_single;
                                    }
                                }
                                '"' => {
                                    if !in_single {
                                        in_double = !in_double;
                                    }
                                }
                                '{' if !in_single && !in_double => level_brace += 1,
                                '}' if !in_single && !in_double => level_brace -= 1,
                                '[' if !in_single && !in_double => level_brack += 1,
                                ']' if !in_single && !in_double => level_brack -= 1,
                                '(' if !in_single && !in_double => level_paren += 1,
                                ')' if !in_single && !in_double => level_paren -= 1,
                                ':' if !in_single
                                    && !in_double
                                    && level_brace == 0
                                    && level_brack == 0
                                    && level_paren == 0 =>
                                {
                                    prop_colon_idx = Some(idx_abs);
                                    break;
                                }
                                _ => {}
                            }
                        }

                        let Some(colon_idx) = prop_colon_idx else {
                            search_start = abs_idx + 5;
                            continue;
                        };

                        let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if let Some(comment_idx) = field_prefix.rfind("*/") {
                            field_prefix = field_prefix[comment_idx + 2..].trim_start();
                        }

                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if SKIP_PREFIXES
                            .iter()
                            .any(|prefix| field_prefix.starts_with(prefix))
                        {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if field_prefix.contains('(') {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        // If the last non-whitespace before ':' is '?', then this is an
                        // optional field with a nullable type (i.e., "?: T | null"),
                        // which we explicitly disallow.
                        if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
                            let line_number =
                                contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
                            let offending_line_end = contents[line_start_idx..]
                                .find('\n')
                                .map(|i| line_start_idx + i)
                                .unwrap_or(contents.len());
                            let offending_snippet =
                                contents[line_start_idx..offending_line_end].trim();

                            optional_nullable_offenders.insert(format!(
                                "{}:{}: {offending_snippet}",
                                path.display(),
                                line_number
                            ));
                        }

                        search_start = abs_idx + 5;
                    }
                }
            }
        }

        assert!(
            undefined_offenders.is_empty(),
            "Generated TypeScript still includes unions with `undefined` in {undefined_offenders:?}"
        );

        // If this assertion fails, it means a field was generated as
        // "?: T | null" — i.e., both optional (undefined) and nullable (null).
        // We only want either "?: T" or ": T | null".
        assert!(
            optional_nullable_offenders.is_empty(),
            "Generated TypeScript has optional fields with nullable types (disallowed '?: T | null'), add #[ts(optional)] to fix:\n{optional_nullable_offenders:?}"
        );

        Ok(())
    }
}

@@ -34,6 +34,7 @@ pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

@@ -42,6 +43,7 @@ pub struct JSONRPCRequest {
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

@@ -63,6 +65,7 @@ pub struct JSONRPCError
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub data: Option<serde_json::Value>,
    pub message: String,
}

@@ -6,4 +6,6 @@ pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::*;
pub use protocol::common::*;
pub use protocol::v1::*;
pub use protocol::v2::*;

File diff suppressed because it is too large (Load Diff)
codex-rs/app-server-protocol/src/protocol/common.rs (new file, 821 lines)
@@ -0,0 +1,821 @@
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;

use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use crate::export::GeneratedSchema;
use crate::export::write_json_schema;
use crate::protocol::v1;
use crate::protocol::v2;
use codex_protocol::ConversationId;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxCommandAssessment;
use paste::paste;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[ts(type = "string")]
pub struct GitSha(pub String);

impl GitSha {
    pub fn new(sha: &str) -> Self {
        Self(sha.to_string())
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}

/// Generates an `enum ClientRequest` where each variant is a request that the
/// client can send to the server. Each variant has associated `params` and
/// `response` types. Also generates an `export_client_responses()` function to
/// export all response types to TypeScript.
macro_rules! client_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident {
                params: $(#[$params_meta:meta])* $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request from the client to the server.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ClientRequest {
            $(
                $(#[$variant_meta])*
                $variant {
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    $(#[$params_meta])*
                    params: $params,
                },
            )*
        }

        pub fn export_client_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_response_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
            )*
            Ok(schemas)
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_param_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
            )*
            Ok(schemas)
        }
    };
}
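// Sketch of one expansion (illustrative): a variant declared as
//   ThreadStart { params: v2::ThreadStartParams, response: v2::ThreadStartResponse }
// becomes the enum variant
//   ClientRequest::ThreadStart { request_id: RequestId, params: v2::ThreadStartParams }
// and the exporters emit the TS type and JSON schema for v2::ThreadStartResponse.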

client_request_definitions! {
    /// NEW APIs
    // Thread lifecycle
    #[serde(rename = "thread/start")]
    #[ts(rename = "thread/start")]
    ThreadStart {
        params: v2::ThreadStartParams,
        response: v2::ThreadStartResponse,
    },
    #[serde(rename = "thread/resume")]
    #[ts(rename = "thread/resume")]
    ThreadResume {
        params: v2::ThreadResumeParams,
        response: v2::ThreadResumeResponse,
    },
    #[serde(rename = "thread/archive")]
    #[ts(rename = "thread/archive")]
    ThreadArchive {
        params: v2::ThreadArchiveParams,
        response: v2::ThreadArchiveResponse,
    },
    #[serde(rename = "thread/list")]
    #[ts(rename = "thread/list")]
    ThreadList {
        params: v2::ThreadListParams,
        response: v2::ThreadListResponse,
    },
    #[serde(rename = "thread/compact")]
    #[ts(rename = "thread/compact")]
    ThreadCompact {
        params: v2::ThreadCompactParams,
        response: v2::ThreadCompactResponse,
    },
    #[serde(rename = "turn/start")]
    #[ts(rename = "turn/start")]
    TurnStart {
        params: v2::TurnStartParams,
        response: v2::TurnStartResponse,
    },
    #[serde(rename = "turn/interrupt")]
    #[ts(rename = "turn/interrupt")]
    TurnInterrupt {
        params: v2::TurnInterruptParams,
        response: v2::TurnInterruptResponse,
    },

    #[serde(rename = "model/list")]
    #[ts(rename = "model/list")]
    ModelList {
        params: v2::ModelListParams,
        response: v2::ModelListResponse,
    },

    #[serde(rename = "account/login/start")]
    #[ts(rename = "account/login/start")]
    LoginAccount {
        params: v2::LoginAccountParams,
        response: v2::LoginAccountResponse,
    },

    #[serde(rename = "account/login/cancel")]
    #[ts(rename = "account/login/cancel")]
    CancelLoginAccount {
        params: v2::CancelLoginAccountParams,
        response: v2::CancelLoginAccountResponse,
    },

    #[serde(rename = "account/logout")]
    #[ts(rename = "account/logout")]
    LogoutAccount {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::LogoutAccountResponse,
    },

    #[serde(rename = "account/rateLimits/read")]
    #[ts(rename = "account/rateLimits/read")]
    GetAccountRateLimits {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::GetAccountRateLimitsResponse,
    },

    #[serde(rename = "feedback/upload")]
    #[ts(rename = "feedback/upload")]
    FeedbackUpload {
        params: v2::FeedbackUploadParams,
        response: v2::FeedbackUploadResponse,
    },

    #[serde(rename = "account/read")]
    #[ts(rename = "account/read")]
    GetAccount {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::GetAccountResponse,
    },

    /// DEPRECATED APIs below
    Initialize {
        params: v1::InitializeParams,
        response: v1::InitializeResponse,
    },
    NewConversation {
        params: v1::NewConversationParams,
        response: v1::NewConversationResponse,
    },
    GetConversationSummary {
        params: v1::GetConversationSummaryParams,
        response: v1::GetConversationSummaryResponse,
    },
    /// List recorded Codex conversations (rollouts) with optional pagination and search.
    ListConversations {
        params: v1::ListConversationsParams,
        response: v1::ListConversationsResponse,
    },
    /// Resume a recorded Codex conversation from a rollout file.
    ResumeConversation {
        params: v1::ResumeConversationParams,
        response: v1::ResumeConversationResponse,
    },
    ArchiveConversation {
        params: v1::ArchiveConversationParams,
        response: v1::ArchiveConversationResponse,
    },
    SendUserMessage {
        params: v1::SendUserMessageParams,
        response: v1::SendUserMessageResponse,
    },
    SendUserTurn {
        params: v1::SendUserTurnParams,
        response: v1::SendUserTurnResponse,
    },
    InterruptConversation {
        params: v1::InterruptConversationParams,
        response: v1::InterruptConversationResponse,
    },
    AddConversationListener {
        params: v1::AddConversationListenerParams,
        response: v1::AddConversationSubscriptionResponse,
    },
    RemoveConversationListener {
        params: v1::RemoveConversationListenerParams,
        response: v1::RemoveConversationSubscriptionResponse,
    },
    GitDiffToRemote {
        params: v1::GitDiffToRemoteParams,
        response: v1::GitDiffToRemoteResponse,
    },
    LoginApiKey {
        params: v1::LoginApiKeyParams,
        response: v1::LoginApiKeyResponse,
    },
    LoginChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LoginChatGptResponse,
    },
    // DEPRECATED in favor of CancelLoginAccount
    CancelLoginChatGpt {
        params: v1::CancelLoginChatGptParams,
        response: v1::CancelLoginChatGptResponse,
    },
    LogoutChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LogoutChatGptResponse,
    },
    GetAuthStatus {
        params: v1::GetAuthStatusParams,
        response: v1::GetAuthStatusResponse,
    },
    GetUserSavedConfig {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserSavedConfigResponse,
    },
    SetDefaultModel {
        params: v1::SetDefaultModelParams,
        response: v1::SetDefaultModelResponse,
    },
    GetUserAgent {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserAgentResponse,
    },
    UserInfo {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::UserInfoResponse,
    },
    FuzzyFileSearch {
        params: FuzzyFileSearchParams,
        response: FuzzyFileSearchResponse,
    },
    /// Execute a command (argv vector) under the server's sandbox.
    ExecOneOffCommand {
        params: v1::ExecOneOffCommandParams,
        response: v1::ExecOneOffCommandResponse,
    },
}

/// Generates an `enum ServerRequest` where each variant is a request that the
/// server can send to the client along with the corresponding params and
/// response types. It also generates helper types used by the app/server
/// infrastructure (payload enum, request constructor, and export helpers).
macro_rules! server_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident
        ),* $(,)?
    ) => {
        paste! {
            /// Request initiated from the server and sent to the client.
            #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
            #[serde(tag = "method", rename_all = "camelCase")]
            pub enum ServerRequest {
                $(
                    $(#[$variant_meta])*
                    $variant {
                        #[serde(rename = "id")]
                        request_id: RequestId,
                        params: [<$variant Params>],
                    },
                )*
            }

            #[derive(Debug, Clone, PartialEq, JsonSchema)]
            pub enum ServerRequestPayload {
                $( $variant([<$variant Params>]), )*
            }

            impl ServerRequestPayload {
                pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
                    match self {
                        $(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
                    }
                }
            }
        }

        pub fn export_server_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            paste! {
                $(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
            }
            Ok(())
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_response_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            paste! {
                $(schemas.push(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?);)*
            }
            Ok(schemas)
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_param_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            paste! {
                $(schemas.push(crate::export::write_json_schema::<[<$variant Params>]>(out_dir, stringify!([<$variant Params>]))?);)*
            }
            Ok(schemas)
        }
    };
}
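// Illustrative use of the generated payload helper: wrap the params first and
// attach the JSON-RPC id later, e.g.
//   ServerRequestPayload::ExecCommandApproval(params).request_with_id(RequestId::Integer(1))
// yields ServerRequest::ExecCommandApproval { request_id, params }.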

/// Generates `ServerNotification` enum and helpers, including a JSON Schema
/// exporter for each notification.
macro_rules! server_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $(=> $wire:literal)? ( $payload:ty )
        ),* $(,)?
    ) => {
        /// Notification sent from the server to the client.
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ServerNotification {
            $(
                $(#[$variant_meta])*
                $(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
                $variant($payload),
            )*
        }

        impl ServerNotification {
            pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
                match self {
                    $(Self::$variant(params) => serde_json::to_value(params),)*
                }
            }
        }

        impl TryFrom<JSONRPCNotification> for ServerNotification {
            type Error = serde_json::Error;

            fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
                serde_json::from_value(serde_json::to_value(value)?)
            }
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_notification_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
            Ok(schemas)
        }
    };
}
/// Notifications sent from the client to the server.
macro_rules! client_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $( ( $payload:ty ) )?
        ),* $(,)?
    ) => {
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ClientNotification {
            $(
                $(#[$variant_meta])*
                $variant $( ( $payload ) )?,
            )*
        }

        pub fn export_client_notification_schemas(
            _out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let schemas = Vec::new();
            $( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
            Ok(schemas)
        }
    };
}

impl TryFrom<JSONRPCRequest> for ServerRequest {
    type Error = serde_json::Error;

    fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
        serde_json::from_value(serde_json::to_value(value)?)
    }
}

server_request_definitions! {
    /// Request to approve a patch.
    ApplyPatchApproval,
    /// Request to exec a command.
    ExecCommandApproval,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub risk: Option<SandboxCommandAssessment>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchParams {
    pub query: String,
    pub roots: Vec<String>,
    // if provided, will cancel any previous request that used the same value
    pub cancellation_token: Option<String>,
}

/// Superset of [`codex_file_search::FileMatch`]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResult {
    pub root: String,
    pub path: String,
    pub file_name: String,
    pub score: u32,
    pub indices: Option<Vec<u32>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResponse {
    pub files: Vec<FuzzyFileSearchResult>,
}

server_notification_definitions! {
|
||||
/// NEW NOTIFICATIONS
|
||||
ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
|
||||
TurnStarted => "turn/started" (v2::TurnStartedNotification),
|
||||
TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
|
||||
ItemStarted => "item/started" (v2::ItemStartedNotification),
|
||||
ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
|
||||
AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
|
||||
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
|
||||
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
|
||||
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
|
||||
AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
|
||||
|
||||
#[serde(rename = "account/login/completed")]
|
||||
#[ts(rename = "account/login/completed")]
|
||||
#[strum(serialize = "account/login/completed")]
|
||||
AccountLoginCompleted(v2::AccountLoginCompletedNotification),
|
||||
|
||||
/// DEPRECATED NOTIFICATIONS below
|
||||
AuthStatusChange(v1::AuthStatusChangeNotification),
|
||||
|
||||
/// Deprecated: use `account/login/completed` instead.
|
||||
LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
|
||||
SessionConfigured(v1::SessionConfiguredNotification),
|
||||
}
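// Illustration (not part of the original diff): each entry above binds a
// variant to its wire method string, so a notification like the one below
// would be emitted with `"method": "thread/started"` and the serialized
// `v2::ThreadStartedNotification` as its params. The thread id is made up.
//
// let n = ServerNotification::ThreadStarted(v2::ThreadStartedNotification {
//     thread: v2::Thread { id: "thr_123".to_string() },
// });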
client_notification_definitions! {
    Initialized,
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_protocol::account::PlanType;
    use codex_protocol::protocol::AskForApproval;
    use pretty_assertions::assert_eq;
    use serde_json::json;

    #[test]
    fn serialize_new_conversation() -> Result<()> {
        let request = ClientRequest::NewConversation {
            request_id: RequestId::Integer(42),
            params: v1::NewConversationParams {
                model: Some("gpt-5-codex".to_string()),
                model_provider: None,
                profile: None,
                cwd: None,
                approval_policy: Some(AskForApproval::OnRequest),
                sandbox: None,
                config: None,
                base_instructions: None,
                developer_instructions: None,
                compact_prompt: None,
                include_apply_patch_tool: None,
            },
        };
        assert_eq!(
            json!({
                "method": "newConversation",
                "id": 42,
                "params": {
                    "model": "gpt-5-codex",
                    "modelProvider": null,
                    "profile": null,
                    "cwd": null,
                    "approvalPolicy": "on-request",
                    "sandbox": null,
                    "config": null,
                    "baseInstructions": null,
                    "includeApplyPatchTool": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn conversation_id_serializes_as_plain_string() -> Result<()> {
        let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;

        assert_eq!(
            json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
            serde_json::to_value(id)?
        );
        Ok(())
    }

    #[test]
    fn conversation_id_deserializes_from_plain_string() -> Result<()> {
        let id: ConversationId =
            serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;

        assert_eq!(
            ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            id,
        );
        Ok(())
    }

    #[test]
    fn serialize_client_notification() -> Result<()> {
        let notification = ClientNotification::Initialized;
        // Note there is no "params" field for this notification.
        assert_eq!(
            json!({
                "method": "initialized",
            }),
            serde_json::to_value(&notification)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_server_request() -> Result<()> {
        let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = ExecCommandApprovalParams {
            conversation_id,
            call_id: "call-42".to_string(),
            command: vec!["echo".to_string(), "hello".to_string()],
            cwd: PathBuf::from("/tmp"),
            reason: Some("because tests".to_string()),
            risk: None,
            parsed_cmd: vec![ParsedCommand::Unknown {
                cmd: "echo hello".to_string(),
            }],
        };
        let request = ServerRequest::ExecCommandApproval {
            request_id: RequestId::Integer(7),
            params: params.clone(),
        };

        assert_eq!(
            json!({
                "method": "execCommandApproval",
                "id": 7,
                "params": {
                    "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
                    "callId": "call-42",
                    "command": ["echo", "hello"],
                    "cwd": "/tmp",
                    "reason": "because tests",
                    "risk": null,
                    "parsedCmd": [
                        {
                            "type": "unknown",
                            "cmd": "echo hello"
                        }
                    ]
                }
            }),
            serde_json::to_value(&request)?,
        );

        let payload = ServerRequestPayload::ExecCommandApproval(params);
        assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
        Ok(())
    }

    #[test]
    fn serialize_get_account_rate_limits() -> Result<()> {
        let request = ClientRequest::GetAccountRateLimits {
            request_id: RequestId::Integer(1),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/rateLimits/read",
                "id": 1,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_api_key() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(2),
            params: v2::LoginAccountParams::ApiKey {
                api_key: "secret".to_string(),
            },
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 2,
                "params": {
                    "type": "apiKey",
                    "apiKey": "secret"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_chatgpt() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(3),
            params: v2::LoginAccountParams::Chatgpt,
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 3,
                "params": {
                    "type": "chatgpt"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_logout() -> Result<()> {
        let request = ClientRequest::LogoutAccount {
            request_id: RequestId::Integer(4),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/logout",
                "id": 4,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account() -> Result<()> {
        let request = ClientRequest::GetAccount {
            request_id: RequestId::Integer(5),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/read",
                "id": 5,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn account_serializes_fields_in_camel_case() -> Result<()> {
        let api_key = v2::Account::ApiKey {
            api_key: "secret".to_string(),
        };
        assert_eq!(
            json!({
                "type": "apiKey",
                "apiKey": "secret",
            }),
            serde_json::to_value(&api_key)?,
        );

        let chatgpt = v2::Account::Chatgpt {
            email: Some("user@example.com".to_string()),
            plan_type: PlanType::Plus,
        };
        assert_eq!(
            json!({
                "type": "chatgpt",
                "email": "user@example.com",
                "planType": "plus",
            }),
            serde_json::to_value(&chatgpt)?,
        );

        Ok(())
    }

    #[test]
    fn serialize_list_models() -> Result<()> {
        let request = ClientRequest::ModelList {
            request_id: RequestId::Integer(6),
            params: v2::ModelListParams::default(),
        };
        assert_eq!(
            json!({
                "method": "model/list",
                "id": 6,
                "params": {
                    "limit": null,
                    "cursor": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }
}
6
codex-rs/app-server-protocol/src/protocol/mod.rs
Normal file
@@ -0,0 +1,6 @@
// Module declarations for the app-server protocol namespace.
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.

pub mod common;
pub mod v1;
pub mod v2;

405
codex-rs/app-server-protocol/src/protocol/v1.rs
Normal file
@@ -0,0 +1,405 @@
use std::collections::HashMap;
use std::path::PathBuf;

use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::TurnAbortReason;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
use uuid::Uuid;

// Reuse shared types defined in `common.rs`.
use crate::protocol::common::AuthMode;
use crate::protocol::common::GitSha;

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
    pub title: Option<String>,
    pub version: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub profile: Option<String>,
    pub cwd: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox: Option<SandboxMode>,
    pub config: Option<HashMap<String, serde_json::Value>>,
    pub base_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub developer_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(untagged)]
pub enum GetConversationSummaryParams {
    RolloutPath {
        #[serde(rename = "rolloutPath")]
        rollout_path: PathBuf,
    },
    ConversationId {
        #[serde(rename = "conversationId")]
        conversation_id: ConversationId,
    },
}
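// Illustration (not part of the original diff): with `#[serde(untagged)]`,
// the two variants are distinguished purely by field name, so both of these
// hypothetical payloads deserialize into `GetConversationSummaryParams`:
//
//   { "rolloutPath": "/home/user/.codex/sessions/abc.jsonl" }
//   { "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8" }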
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetConversationSummaryResponse {
    pub summary: ConversationSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsParams {
    pub page_size: Option<usize>,
    pub cursor: Option<String>,
    pub model_providers: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ConversationId,
    pub path: PathBuf,
    pub preview: String,
    pub timestamp: Option<String>,
    pub model_provider: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsResponse {
    pub items: Vec<ConversationSummary>,
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ConversationId>,
    pub history: Option<Vec<ResponseItem>>,
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub auth_url: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteResponse {
    pub sha: GitSha,
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
    #[schemars(with = "String")]
    pub login_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteParams {
    pub cwd: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptParams {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusParams {
    pub include_token: Option<bool>,
    pub refresh_token: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandParams {
    pub command: Vec<String>,
    pub timeout_ms: Option<u64>,
    pub cwd: Option<PathBuf>,
    pub sandbox_policy: Option<SandboxPolicy>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusResponse {
    pub auth_method: Option<AuthMode>,
    pub auth_token: Option<String>,
    pub requires_openai_auth: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserAgentResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    pub alleged_user_email: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    pub model: Option<String>,
    pub reasoning_effort: Option<ReasoningEffort>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserSavedConfig {
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub sandbox_settings: Option<SandboxSettings>,
    pub forced_chatgpt_workspace_id: Option<String>,
    pub forced_login_method: Option<ForcedLoginMethod>,
    pub model: Option<String>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub tools: Option<Tools>,
    pub profile: Option<String>,
    pub profiles: HashMap<String, Profile>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Profile {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Tools {
    pub web_search: Option<bool>,
    pub view_image: Option<bool>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    #[serde(default)]
    pub writable_roots: Vec<PathBuf>,
    pub network_access: Option<bool>,
    pub exclude_tmpdir_env_var: Option<bool>,
    pub exclude_slash_tmp: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
    pub cwd: PathBuf,
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub model: String,
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ConversationId,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationResponse {
    pub abort_reason: TurnAbortReason,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ConversationId,
    #[serde(default)]
    pub experimental_raw_events: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationListenerParams {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
    Text { text: String },
    Image { image_url: String },
    LocalImage { path: PathBuf },
}
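// Illustration (not part of the original diff): `tag = "type"` plus
// `content = "data"` produces adjacently tagged JSON, so a text item
// serializes as:
//
//   { "type": "text", "data": { "text": "hello" } }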
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated in favor of AccountLoginCompletedNotification.
pub struct LoginChatGptCompleteNotification {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub success: bool,
    pub error: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    pub session_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub history_log_id: u64,
    #[ts(type = "number")]
    pub history_entry_count: usize,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated notification. Use AccountUpdatedNotification instead.
pub struct AuthStatusChangeNotification {
    pub auth_method: Option<AuthMode>,
}

699
codex-rs/app-server-protocol/src/protocol/v2.rs
Normal file
@@ -0,0 +1,699 @@
use std::collections::HashMap;
use std::path::PathBuf;

use crate::protocol::common::AuthMode;
use codex_protocol::ConversationId;
use codex_protocol::account::PlanType;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::user_input::UserInput as CoreUserInput;
use mcp_types::ContentBlock as McpContentBlock;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;
use ts_rs::TS;

// Macro to declare a camelCased API v2 enum mirroring a core enum which
// tends to use kebab-case.
macro_rules! v2_enum_from_core {
    (
        pub enum $Name:ident from $Src:path { $( $Variant:ident ),+ $(,)? }
    ) => {
        #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
        #[serde(rename_all = "camelCase")]
        #[ts(export_to = "v2/")]
        pub enum $Name { $( $Variant ),+ }

        impl $Name {
            pub fn to_core(self) -> $Src {
                match self { $( $Name::$Variant => <$Src>::$Variant ),+ }
            }
        }

        impl From<$Src> for $Name {
            fn from(value: $Src) -> Self {
                match value { $( <$Src>::$Variant => $Name::$Variant ),+ }
            }
        }
    };
}

v2_enum_from_core!(
    pub enum AskForApproval from codex_protocol::protocol::AskForApproval {
        UnlessTrusted, OnFailure, OnRequest, Never
    }
);

v2_enum_from_core!(
    pub enum SandboxMode from codex_protocol::config_types::SandboxMode {
        ReadOnly, WorkspaceWrite, DangerFullAccess
    }
);
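// Minimal sketch (not part of the original diff): the macro generates
// conversions in both directions, so the camelCase API enum round-trips
// through its kebab-case core counterpart without loss.
#[cfg(test)]
mod enum_bridge_sketch {
    use super::*;

    #[test]
    fn ask_for_approval_roundtrips_through_core() {
        let api = AskForApproval::OnRequest;
        assert_eq!(api, AskForApproval::from(api.to_core()));
    }
}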
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "mode", rename_all = "camelCase")]
#[ts(tag = "mode")]
#[ts(export_to = "v2/")]
pub enum SandboxPolicy {
    DangerFullAccess,
    ReadOnly,
    WorkspaceWrite {
        #[serde(default)]
        writable_roots: Vec<PathBuf>,
        #[serde(default)]
        network_access: bool,
        #[serde(default)]
        exclude_tmpdir_env_var: bool,
        #[serde(default)]
        exclude_slash_tmp: bool,
    },
}

impl SandboxPolicy {
    pub fn to_core(&self) -> codex_protocol::protocol::SandboxPolicy {
        match self {
            SandboxPolicy::DangerFullAccess => {
                codex_protocol::protocol::SandboxPolicy::DangerFullAccess
            }
            SandboxPolicy::ReadOnly => codex_protocol::protocol::SandboxPolicy::ReadOnly,
            SandboxPolicy::WorkspaceWrite {
                writable_roots,
                network_access,
                exclude_tmpdir_env_var,
                exclude_slash_tmp,
            } => codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
                writable_roots: writable_roots.clone(),
                network_access: *network_access,
                exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
                exclude_slash_tmp: *exclude_slash_tmp,
            },
        }
    }
}

impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
    fn from(value: codex_protocol::protocol::SandboxPolicy) -> Self {
        match value {
            codex_protocol::protocol::SandboxPolicy::DangerFullAccess => {
                SandboxPolicy::DangerFullAccess
            }
            codex_protocol::protocol::SandboxPolicy::ReadOnly => SandboxPolicy::ReadOnly,
            codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
                writable_roots,
                network_access,
                exclude_tmpdir_env_var,
                exclude_slash_tmp,
            } => SandboxPolicy::WorkspaceWrite {
                writable_roots,
                network_access,
                exclude_tmpdir_env_var,
                exclude_slash_tmp,
            },
        }
    }
}
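// Minimal round-trip sketch (not part of the original diff): converting the
// API-facing policy to the core policy and back should preserve every field.
#[cfg(test)]
mod sandbox_policy_sketch {
    use super::*;

    #[test]
    fn workspace_write_roundtrips() {
        let policy = SandboxPolicy::WorkspaceWrite {
            writable_roots: vec![PathBuf::from("/workspace")],
            network_access: false,
            exclude_tmpdir_env_var: true,
            exclude_slash_tmp: false,
        };
        assert_eq!(policy, SandboxPolicy::from(policy.to_core()));
    }
}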
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum Account {
    #[serde(rename = "apiKey", rename_all = "camelCase")]
    #[ts(rename = "apiKey", rename_all = "camelCase")]
    ApiKey { api_key: String },

    #[serde(rename = "chatgpt", rename_all = "camelCase")]
    #[ts(rename = "chatgpt", rename_all = "camelCase")]
    Chatgpt {
        email: Option<String>,
        plan_type: PlanType,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum LoginAccountParams {
    #[serde(rename = "apiKey", rename_all = "camelCase")]
    #[ts(rename = "apiKey", rename_all = "camelCase")]
    ApiKey {
        #[serde(rename = "apiKey")]
        #[ts(rename = "apiKey")]
        api_key: String,
    },
    #[serde(rename = "chatgpt")]
    #[ts(rename = "chatgpt")]
    Chatgpt,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum LoginAccountResponse {
    #[serde(rename = "apiKey", rename_all = "camelCase")]
    #[ts(rename = "apiKey", rename_all = "camelCase")]
    ApiKey {},
    #[serde(rename = "chatgpt", rename_all = "camelCase")]
    #[ts(rename = "chatgpt", rename_all = "camelCase")]
    Chatgpt {
        // Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
        // Convert to/from UUIDs at the application layer as needed.
        login_id: String,
        /// URL the client should open in a browser to initiate the OAuth flow.
        auth_url: String,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CancelLoginAccountParams {
    pub login_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CancelLoginAccountResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct LogoutAccountResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountRateLimitsResponse {
    pub rate_limits: RateLimitSnapshot,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountResponse {
    pub account: Account,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ModelListParams {
    /// Opaque pagination cursor returned by a previous call.
    pub cursor: Option<String>,
    /// Optional page size; defaults to a reasonable server-side value.
    pub limit: Option<u32>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Model {
    pub id: String,
    pub model: String,
    pub display_name: String,
    pub description: String,
    pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
    pub default_reasoning_effort: ReasoningEffort,
    // Only one model should be marked as default.
    pub is_default: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningEffortOption {
    pub reasoning_effort: ReasoningEffort,
    pub description: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ModelListResponse {
    pub data: Vec<Model>,
    /// Opaque cursor to pass to the next call to continue after the last item.
    /// If None, there are no more items to return.
    pub next_cursor: Option<String>,
}
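// Usage sketch (not part of the original diff): clients page through models
// by feeding `next_cursor` back into the next request until it comes back as
// `None`. `fetch_page` is a hypothetical transport helper.
//
// let mut cursor = None;
// loop {
//     let page = fetch_page(ModelListParams { cursor: cursor.take(), limit: Some(50) })?;
//     for model in &page.data { /* render model.display_name, etc. */ }
//     match page.next_cursor {
//         Some(next) => cursor = Some(next),
//         None => break,
//     }
// }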
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FeedbackUploadParams {
    pub classification: String,
    pub reason: Option<String>,
    pub conversation_id: Option<ConversationId>,
    pub include_logs: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FeedbackUploadResponse {
    pub thread_id: String,
}

// === Threads, Turns, and Items ===
// Thread APIs
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartParams {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub cwd: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox: Option<SandboxMode>,
    pub config: Option<HashMap<String, serde_json::Value>>,
    pub base_instructions: Option<String>,
    pub developer_instructions: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartResponse {
    pub thread: Thread,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadResumeParams {
    pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadResumeResponse {
    pub thread: Thread,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadArchiveParams {
    pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadArchiveResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadListParams {
    /// Opaque pagination cursor returned by a previous call.
    pub cursor: Option<String>,
    /// Optional page size; defaults to a reasonable server-side value.
    pub limit: Option<u32>,
    /// Optional provider filter; when set, only sessions recorded under these
    /// providers are returned. When present but empty, includes all providers.
    pub model_providers: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadListResponse {
    pub data: Vec<Thread>,
    /// Opaque cursor to pass to the next call to continue after the last item.
    /// If None, there are no more items to return.
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadCompactParams {
    pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadCompactResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Thread {
    pub id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AccountUpdatedNotification {
    pub auth_mode: Option<AuthMode>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Turn {
    pub id: String,
    pub items: Vec<ThreadItem>,
    pub status: TurnStatus,
    pub error: Option<TurnError>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnError {
    pub message: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum TurnStatus {
    Completed,
    Interrupted,
    Failed,
    InProgress,
}

// Turn APIs
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartParams {
    pub thread_id: String,
    pub input: Vec<UserInput>,
    /// Override the working directory for this turn and subsequent turns.
    pub cwd: Option<PathBuf>,
    /// Override the approval policy for this turn and subsequent turns.
    pub approval_policy: Option<AskForApproval>,
    /// Override the sandbox policy for this turn and subsequent turns.
    pub sandbox_policy: Option<SandboxPolicy>,
    /// Override the model for this turn and subsequent turns.
    pub model: Option<String>,
    /// Override the reasoning effort for this turn and subsequent turns.
    pub effort: Option<ReasoningEffort>,
    /// Override the reasoning summary for this turn and subsequent turns.
    pub summary: Option<ReasoningSummary>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartResponse {
    pub turn: Turn,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnInterruptParams {
    pub thread_id: String,
    pub turn_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnInterruptResponse {}

// User input types
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum UserInput {
    Text { text: String },
    Image { url: String },
    LocalImage { path: PathBuf },
}

impl UserInput {
    pub fn into_core(self) -> CoreUserInput {
        match self {
            UserInput::Text { text } => CoreUserInput::Text { text },
            UserInput::Image { url } => CoreUserInput::Image { image_url: url },
            UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
        }
    }
}
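// Minimal sketch (not part of the original diff): the mapping to the core
// type is 1:1; the only rename is `url` -> `image_url` on the image variant.
#[cfg(test)]
mod user_input_sketch {
    use super::*;

    #[test]
    fn image_maps_to_core_image_url() {
        let core = UserInput::Image {
            url: "https://example.com/a.png".to_string(),
        }
        .into_core();
        match core {
            CoreUserInput::Image { image_url } => {
                assert_eq!(image_url, "https://example.com/a.png");
            }
            _ => panic!("expected the image variant"),
        }
    }
}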
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum ThreadItem {
    UserMessage {
        id: String,
        content: Vec<UserInput>,
    },
    AgentMessage {
        id: String,
        text: String,
    },
    Reasoning {
        id: String,
        text: String,
    },
    CommandExecution {
        id: String,
        command: String,
        aggregated_output: String,
        exit_code: Option<i32>,
        status: CommandExecutionStatus,
        duration_ms: Option<i64>,
    },
    FileChange {
        id: String,
        changes: Vec<FileUpdateChange>,
        status: PatchApplyStatus,
    },
    McpToolCall {
        id: String,
        server: String,
        tool: String,
        status: McpToolCallStatus,
        arguments: JsonValue,
        result: Option<McpToolCallResult>,
        error: Option<McpToolCallError>,
    },
    WebSearch {
        id: String,
        query: String,
    },
    TodoList {
        id: String,
        items: Vec<TodoItem>,
    },
    ImageView {
        id: String,
        path: String,
    },
    CodeReview {
        id: String,
        review: String,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum CommandExecutionStatus {
    InProgress,
    Completed,
    Failed,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FileUpdateChange {
    pub path: String,
    pub kind: PatchChangeKind,
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum PatchChangeKind {
    Add,
    Delete,
    Update,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum PatchApplyStatus {
    Completed,
    Failed,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum McpToolCallStatus {
    InProgress,
    Completed,
    Failed,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallResult {
    pub content: Vec<McpContentBlock>,
    pub structured_content: JsonValue,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallError {
    pub message: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TodoItem {
    pub id: String,
    pub text: String,
    pub completed: bool,
}

// === Server Notifications ===
// Thread/Turn lifecycle notifications and item progress events
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartedNotification {
    pub thread: Thread,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartedNotification {
    pub turn: Turn,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Usage {
    pub input_tokens: i32,
    pub cached_input_tokens: i32,
    pub output_tokens: i32,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnCompletedNotification {
    pub turn: Turn,
    // TODO: should usage be stored on the Turn object, and we return that instead?
    pub usage: Usage,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ItemStartedNotification {
    pub item: ThreadItem,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ItemCompletedNotification {
    pub item: ThreadItem,
}

// Item-specific progress notifications
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AgentMessageDeltaNotification {
    pub item_id: String,
    pub delta: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CommandExecutionOutputDeltaNotification {
    pub item_id: String,
    pub delta: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallProgressNotification {
    pub item_id: String,
    pub message: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AccountRateLimitsUpdatedNotification {
    pub rate_limits: RateLimitSnapshot,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct RateLimitSnapshot {
    pub primary: Option<RateLimitWindow>,
    pub secondary: Option<RateLimitWindow>,
}

impl From<CoreRateLimitSnapshot> for RateLimitSnapshot {
    fn from(value: CoreRateLimitSnapshot) -> Self {
        Self {
            primary: value.primary.map(RateLimitWindow::from),
            secondary: value.secondary.map(RateLimitWindow::from),
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct RateLimitWindow {
    pub used_percent: i32,
    pub window_duration_mins: Option<i64>,
    pub resets_at: Option<i64>,
}

impl From<CoreRateLimitWindow> for RateLimitWindow {
    fn from(value: CoreRateLimitWindow) -> Self {
        Self {
            used_percent: value.used_percent.round() as i32,
            window_duration_mins: value.window_minutes,
            resets_at: value.resets_at,
        }
    }
}
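// Minimal sketch (not part of the original diff): the conversion rounds the
// core percentage to the nearest integer and renames `window_minutes` to
// `window_duration_mins`. This assumes the core window has exactly the three
// fields read by the `From` impl above.
#[cfg(test)]
mod rate_limit_window_sketch {
    use super::*;

    #[test]
    fn rounds_used_percent_to_nearest_integer() {
        let core = CoreRateLimitWindow {
            used_percent: 24.6,
            window_minutes: Some(15),
            resets_at: Some(123),
        };
        let window = RateLimitWindow::from(core);
        assert_eq!(window.used_percent, 25);
        assert_eq!(window.window_duration_mins, Some(15));
    }
}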
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AccountLoginCompletedNotification {
    // Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
    // Convert to/from UUIDs at the application layer as needed.
    pub login_id: Option<String>,
    pub success: bool,
    pub error: Option<String>,
}

@@ -24,6 +24,7 @@ codex-file-search = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-feedback = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, features = ["derive"] }
@@ -47,6 +48,7 @@ base64 = { workspace = true }
core_test_support = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
wiremock = { workspace = true }
File diff suppressed because it is too large
@@ -12,16 +12,19 @@ use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::io::{self};
|
||||
use tokio::sync::mpsc;
|
||||
use tracing::Level;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
use tracing_subscriber::Layer;
|
||||
use tracing_subscriber::filter::Targets;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
@@ -82,6 +85,8 @@ pub async fn run_main(
|
||||
std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
|
||||
})?;
|
||||
|
||||
let feedback = CodexFeedback::new();
|
||||
|
||||
let otel =
|
||||
codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
|
||||
std::io::Error::new(
|
||||
@@ -96,8 +101,15 @@ pub async fn run_main(
|
||||
.with_writer(std::io::stderr)
|
||||
.with_filter(EnvFilter::from_default_env());
|
||||
|
||||
let feedback_layer = tracing_subscriber::fmt::layer()
|
||||
.with_writer(feedback.make_writer())
|
||||
.with_ansi(false)
|
||||
.with_target(false)
|
||||
.with_filter(Targets::new().with_default(Level::TRACE));
|
||||
|
||||
let _ = tracing_subscriber::registry()
|
||||
.with(stderr_fmt)
|
||||
.with(feedback_layer)
|
||||
.with(otel.as_ref().map(|provider| {
|
||||
OpenTelemetryTracingBridge::new(&provider.logger).with_filter(
|
||||
tracing_subscriber::filter::filter_fn(codex_core::otel_init::codex_export_filter),
|
||||
@@ -112,6 +124,7 @@ pub async fn run_main(
|
||||
outgoing_message_sender,
|
||||
codex_linux_sandbox_exe,
|
||||
std::sync::Arc::new(config),
|
||||
feedback.clone(),
|
||||
);
|
||||
async move {
|
||||
while let Some(msg) = incoming_rx.recv().await {
|
||||
|
||||
@@ -17,6 +17,7 @@ use codex_core::ConversationManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -33,9 +34,14 @@ impl MessageProcessor {
|
||||
outgoing: OutgoingMessageSender,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
config: Arc<Config>,
|
||||
feedback: CodexFeedback,
|
||||
) -> Self {
|
||||
let outgoing = Arc::new(outgoing);
|
||||
let auth_manager = AuthManager::shared(config.codex_home.clone(), false);
|
||||
let auth_manager = AuthManager::shared(
|
||||
config.codex_home.clone(),
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
let conversation_manager = Arc::new(ConversationManager::new(
|
||||
auth_manager.clone(),
|
||||
SessionSource::VSCode,
|
||||
@@ -46,6 +52,7 @@ impl MessageProcessor {
|
||||
outgoing.clone(),
|
||||
codex_linux_sandbox_exe,
|
||||
config,
|
||||
feedback,
|
||||
);
|
||||
|
||||
Self {
|
||||
@@ -57,64 +64,79 @@ impl MessageProcessor {
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
        // Handle Initialize internally so CodexMessageProcessor does not have to concern
        // itself with the `initialized` bool.
        ClientRequest::Initialize { request_id, params } => {
            if self.initialized {
                let error = JSONRPCErrorError {
                    code: INVALID_REQUEST_ERROR_CODE,
                    message: "Already initialized".to_string(),
                    data: None,
                };
                self.outgoing.send_error(request_id, error).await;
                return;
            } else {
                let ClientInfo {
                    name,
                    title: _title,
                    version,
                } = params.client_info;
                let user_agent_suffix = format!("{name}; {version}");
                if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
                    *suffix = Some(user_agent_suffix);
                }

                let user_agent = get_codex_user_agent();
                let response = InitializeResponse { user_agent };
                self.outgoing.send_response(request_id, response).await;

                self.initialized = true;
                return;
            }
        }
        _ => {
            if !self.initialized {
                let error = JSONRPCErrorError {
                    code: INVALID_REQUEST_ERROR_CODE,
                    message: "Not initialized".to_string(),
                    data: None,
                };
                self.outgoing.send_error(request_id, error).await;
                return;
            }
        }
    }

    self.codex_message_processor
        .process_request(codex_request)
        .await;
}
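For orientation, a minimal sketch of the handshake this guard enforces, written with serde_json. The wire field names (clientInfo, userAgent) and the user-agent string format are assumptions inferred from the code above, not confirmed protocol:

use serde_json::json;

// Sketch only: what a client's first request and the server's reply might look like.
fn initialize_handshake_sketch() {
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "initialize",
        "params": { "clientInfo": { "name": "example-client", "version": "1.0.0" } }
    });
    // A second `initialize` is answered with "Already initialized";
    // any other method sent before this one gets "Not initialized".
    let response = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "result": { "userAgent": "codex/0.0.0 (example-client; 1.0.0)" }
    });
    let _ = (request, response);
}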
pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
@@ -1,11 +1,12 @@
+use codex_app_server_protocol::AuthMode;
 use codex_app_server_protocol::Model;
 use codex_app_server_protocol::ReasoningEffortOption;
 use codex_common::model_presets::ModelPreset;
 use codex_common::model_presets::ReasoningEffortPreset;
 use codex_common::model_presets::builtin_model_presets;

-pub fn supported_models() -> Vec<Model> {
-    builtin_model_presets(None)
+pub fn supported_models(auth_mode: Option<AuthMode>) -> Vec<Model> {
+    builtin_model_presets(auth_mode)
         .into_iter()
         .map(model_from_preset)
         .collect()
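A hedged usage sketch of the new signature; the `AuthMode::ChatGPT` variant name is an assumption made for illustration:

// The preset list can now vary with how the user authenticated;
// `None` falls back to the default preset set.
fn list_models_sketch() {
    let models = supported_models(Some(AuthMode::ChatGPT)); // assumed variant
    println!("{} models available", models.len());
}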
@@ -141,9 +141,13 @@ pub(crate) struct OutgoingError {

 #[cfg(test)]
 mod tests {
+    use codex_app_server_protocol::AccountLoginCompletedNotification;
+    use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
+    use codex_app_server_protocol::AccountUpdatedNotification;
+    use codex_app_server_protocol::AuthMode;
     use codex_app_server_protocol::LoginChatGptCompleteNotification;
-    use codex_protocol::protocol::RateLimitSnapshot;
-    use codex_protocol::protocol::RateLimitWindow;
+    use codex_app_server_protocol::RateLimitSnapshot;
+    use codex_app_server_protocol::RateLimitWindow;
     use pretty_assertions::assert_eq;
     use serde_json::json;
     use uuid::Uuid;
@@ -166,6 +170,7 @@ mod tests {
             "params": {
                 "loginId": Uuid::nil(),
                 "success": true,
+                "error": null,
             },
         }),
         serde_json::to_value(jsonrpc_notification)
@@ -175,27 +180,77 @@ mod tests {
     }

     #[test]
-    fn verify_account_rate_limits_notification_serialization() {
-        let notification = ServerNotification::AccountRateLimitsUpdated(RateLimitSnapshot {
-            primary: Some(RateLimitWindow {
-                used_percent: 25.0,
-                window_minutes: Some(15),
-                resets_at: Some(123),
-            }),
-            secondary: None,
-        });
+    fn verify_account_login_completed_notification_serialization() {
+        let notification =
+            ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification {
+                login_id: Some(Uuid::nil().to_string()),
+                success: true,
+                error: None,
+            });
+
+        let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
+        assert_eq!(
+            json!({
+                "method": "account/login/completed",
+                "params": {
+                    "loginId": Uuid::nil().to_string(),
+                    "success": true,
+                    "error": null,
+                },
+            }),
+            serde_json::to_value(jsonrpc_notification)
+                .expect("ensure the notification serializes correctly"),
+            "ensure the notification serializes correctly"
+        );
+    }
+
+    #[test]
+    fn verify_account_rate_limits_notification_serialization() {
+        let notification =
+            ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification {
+                rate_limits: RateLimitSnapshot {
+                    primary: Some(RateLimitWindow {
+                        used_percent: 25,
+                        window_duration_mins: Some(15),
+                        resets_at: Some(123),
+                    }),
+                    secondary: None,
+                },
+            });

         let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
         assert_eq!(
             json!({
                 "method": "account/rateLimits/updated",
                 "params": {
-                    "primary": {
-                        "used_percent": 25.0,
-                        "window_minutes": 15,
-                        "resets_at": 123,
-                    },
-                    "secondary": null,
+                    "rateLimits": {
+                        "primary": {
+                            "usedPercent": 25,
+                            "windowDurationMins": 15,
+                            "resetsAt": 123
+                        },
+                        "secondary": null
+                    }
                 },
             }),
             serde_json::to_value(jsonrpc_notification)
                 .expect("ensure the notification serializes correctly"),
             "ensure the notification serializes correctly"
         );
     }
+
+    #[test]
+    fn verify_account_updated_notification_serialization() {
+        let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification {
+            auth_mode: Some(AuthMode::ApiKey),
+        });
+
+        let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
+        assert_eq!(
+            json!({
+                "method": "account/updated",
+                "params": {
+                    "authMode": "apikey"
+                },
+            }),
+            serde_json::to_value(jsonrpc_notification)
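The new wire keys (usedPercent, windowDurationMins, resetsAt) suggest the app-server-protocol structs serialize with a camelCase rename. A minimal sketch of that assumed shape, inferred from the test JSON rather than the actual definition:

use serde::Serialize;

// Assumed shape only: field names are inferred from the expected JSON above.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct RateLimitWindowSketch {
    used_percent: u8,                  // serializes as "usedPercent"
    window_duration_mins: Option<u64>, // serializes as "windowDurationMins"
    resets_at: Option<u64>,            // serializes as "resetsAt"
}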
@@ -13,6 +13,7 @@ base64 = { workspace = true }
 chrono = { workspace = true }
 codex-app-server-protocol = { workspace = true }
 codex-core = { workspace = true }
+codex-protocol = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 tokio = { workspace = true, features = [
@@ -21,4 +22,5 @@ tokio = { workspace = true, features = [
     "process",
     "rt-multi-thread",
 ] }
+uuid = { workspace = true }
 wiremock = { workspace = true }
@@ -6,9 +6,9 @@ use base64::Engine;
 use base64::engine::general_purpose::URL_SAFE_NO_PAD;
 use chrono::DateTime;
 use chrono::Utc;
+use codex_core::auth::AuthCredentialsStoreMode;
 use codex_core::auth::AuthDotJson;
-use codex_core::auth::get_auth_file;
-use codex_core::auth::write_auth_json;
+use codex_core::auth::save_auth;
 use codex_core::token_data::TokenData;
 use codex_core::token_data::parse_id_token;
 use serde_json::json;
@@ -109,7 +109,11 @@ pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
     Ok(format!("{header_b64}.{payload_b64}.{signature_b64}"))
 }

-pub fn write_chatgpt_auth(codex_home: &Path, fixture: ChatGptAuthFixture) -> Result<()> {
+pub fn write_chatgpt_auth(
+    codex_home: &Path,
+    fixture: ChatGptAuthFixture,
+    cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
+) -> Result<()> {
     let id_token_raw = encode_id_token(&fixture.claims)?;
     let id_token = parse_id_token(&id_token_raw).context("parse id token")?;
     let tokens = TokenData {
@@ -127,5 +131,5 @@ pub fn write_chatgpt_auth(codex_home: &Path, fixture: ChatGptAuthFixture) -> Result<()> {
         last_refresh,
     };

-    write_auth_json(&get_auth_file(codex_home), &auth).context("write auth.json")
+    save_auth(codex_home, &auth, cli_auth_credentials_store_mode).context("write auth.json")
 }
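A hedged sketch of calling the updated fixture helper; the `AuthCredentialsStoreMode::File` variant name and the `Default` impl on the fixture are assumptions made for illustration:

// Sketch: write a ChatGPT auth fixture into a temp CODEX_HOME.
fn write_auth_sketch(codex_home: &std::path::Path) -> anyhow::Result<()> {
    write_chatgpt_auth(
        codex_home,
        ChatGptAuthFixture::default(),     // assumed constructor
        AuthCredentialsStoreMode::File,    // assumed variant
    )
}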
@@ -2,6 +2,7 @@ mod auth_fixtures;
 mod mcp_process;
 mod mock_model_server;
 mod responses;
+mod rollout;

 pub use auth_fixtures::ChatGptAuthFixture;
 pub use auth_fixtures::ChatGptIdTokenClaims;
@@ -10,9 +11,11 @@ pub use auth_fixtures::write_chatgpt_auth;
 use codex_app_server_protocol::JSONRPCResponse;
 pub use mcp_process::McpProcess;
 pub use mock_model_server::create_mock_chat_completions_server;
+pub use mock_model_server::create_mock_chat_completions_server_unchecked;
 pub use responses::create_apply_patch_sse_response;
 pub use responses::create_final_assistant_message_sse_response;
 pub use responses::create_shell_sse_response;
+pub use rollout::create_fake_rollout;
 use serde::de::DeserializeOwned;

 pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
@@ -14,29 +14,36 @@ use anyhow::Context;
 use assert_cmd::prelude::*;
 use codex_app_server_protocol::AddConversationListenerParams;
 use codex_app_server_protocol::ArchiveConversationParams;
+use codex_app_server_protocol::CancelLoginAccountParams;
 use codex_app_server_protocol::CancelLoginChatGptParams;
 use codex_app_server_protocol::ClientInfo;
 use codex_app_server_protocol::ClientNotification;
+use codex_app_server_protocol::FeedbackUploadParams;
 use codex_app_server_protocol::GetAuthStatusParams;
 use codex_app_server_protocol::InitializeParams;
 use codex_app_server_protocol::InterruptConversationParams;
 use codex_app_server_protocol::JSONRPCError;
 use codex_app_server_protocol::JSONRPCMessage;
 use codex_app_server_protocol::JSONRPCNotification;
 use codex_app_server_protocol::JSONRPCRequest;
 use codex_app_server_protocol::JSONRPCResponse;
 use codex_app_server_protocol::ListConversationsParams;
-use codex_app_server_protocol::ListModelsParams;
 use codex_app_server_protocol::LoginApiKeyParams;
+use codex_app_server_protocol::ModelListParams;
 use codex_app_server_protocol::NewConversationParams;
 use codex_app_server_protocol::RemoveConversationListenerParams;
 use codex_app_server_protocol::RequestId;
 use codex_app_server_protocol::ResumeConversationParams;
 use codex_app_server_protocol::SendUserMessageParams;
 use codex_app_server_protocol::SendUserTurnParams;
 use codex_app_server_protocol::ServerRequest;
 use codex_app_server_protocol::SetDefaultModelParams;
+use codex_app_server_protocol::ThreadArchiveParams;
+use codex_app_server_protocol::ThreadListParams;
+use codex_app_server_protocol::ThreadResumeParams;
+use codex_app_server_protocol::ThreadStartParams;
+use codex_app_server_protocol::TurnInterruptParams;
+use codex_app_server_protocol::TurnStartParams;
 use std::process::Command as StdCommand;
 use tokio::process::Command;
@@ -242,6 +249,15 @@ impl McpProcess {
         self.send_request("account/rateLimits/read", None).await
     }

+    /// Send a `feedback/upload` JSON-RPC request.
+    pub async fn send_feedback_upload_request(
+        &mut self,
+        params: FeedbackUploadParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("feedback/upload", params).await
+    }
+
     /// Send a `userInfo` JSON-RPC request.
     pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
         self.send_request("userInfo", None).await
@@ -265,10 +281,46 @@ impl McpProcess {
         self.send_request("listConversations", params).await
     }

+    /// Send a `thread/start` JSON-RPC request.
+    pub async fn send_thread_start_request(
+        &mut self,
+        params: ThreadStartParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("thread/start", params).await
+    }
+
+    /// Send a `thread/resume` JSON-RPC request.
+    pub async fn send_thread_resume_request(
+        &mut self,
+        params: ThreadResumeParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("thread/resume", params).await
+    }
+
+    /// Send a `thread/archive` JSON-RPC request.
+    pub async fn send_thread_archive_request(
+        &mut self,
+        params: ThreadArchiveParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("thread/archive", params).await
+    }
+
+    /// Send a `thread/list` JSON-RPC request.
+    pub async fn send_thread_list_request(
+        &mut self,
+        params: ThreadListParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("thread/list", params).await
+    }
+
     /// Send a `model/list` JSON-RPC request.
     pub async fn send_list_models_request(
         &mut self,
-        params: ListModelsParams,
+        params: ModelListParams,
     ) -> anyhow::Result<i64> {
         let params = Some(serde_json::to_value(params)?);
         self.send_request("model/list", params).await
@@ -297,6 +349,24 @@ impl McpProcess {
         self.send_request("loginChatGpt", None).await
     }

+    /// Send a `turn/start` JSON-RPC request (v2).
+    pub async fn send_turn_start_request(
+        &mut self,
+        params: TurnStartParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("turn/start", params).await
+    }
+
+    /// Send a `turn/interrupt` JSON-RPC request (v2).
+    pub async fn send_turn_interrupt_request(
+        &mut self,
+        params: TurnInterruptParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("turn/interrupt", params).await
+    }
+
     /// Send a `cancelLoginChatGpt` JSON-RPC request.
     pub async fn send_cancel_login_chat_gpt_request(
         &mut self,
@@ -311,6 +381,40 @@ impl McpProcess {
         self.send_request("logoutChatGpt", None).await
     }

+    /// Send an `account/logout` JSON-RPC request.
+    pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
+        self.send_request("account/logout", None).await
+    }
+
+    /// Send an `account/login/start` JSON-RPC request for API key login.
+    pub async fn send_login_account_api_key_request(
+        &mut self,
+        api_key: &str,
+    ) -> anyhow::Result<i64> {
+        let params = serde_json::json!({
+            "type": "apiKey",
+            "apiKey": api_key,
+        });
+        self.send_request("account/login/start", Some(params)).await
+    }
+
+    /// Send an `account/login/start` JSON-RPC request for ChatGPT login.
+    pub async fn send_login_account_chatgpt_request(&mut self) -> anyhow::Result<i64> {
+        let params = serde_json::json!({
+            "type": "chatgpt"
+        });
+        self.send_request("account/login/start", Some(params)).await
+    }
+
+    /// Send an `account/login/cancel` JSON-RPC request.
+    pub async fn send_cancel_login_account_request(
+        &mut self,
+        params: CancelLoginAccountParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("account/login/cancel", params).await
+    }
+
     /// Send a `fuzzyFileSearch` JSON-RPC request.
     pub async fn send_fuzzy_file_search_request(
         &mut self,
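A rough sketch of how a test might exercise the new v2 thread lifecycle helpers; the `Default` impls on the params structs are assumptions for illustration:

// Sketch: start a thread, drive a turn, then retire the thread.
async fn thread_lifecycle_sketch(mcp: &mut McpProcess) -> anyhow::Result<()> {
    let _start_id = mcp.send_thread_start_request(ThreadStartParams::default()).await?;
    // ...read the thread/start response and capture the thread id...
    let _turn_id = mcp.send_turn_start_request(TurnStartParams::default()).await?;
    // turn/interrupt cancels a running turn; thread/archive retires the thread.
    Ok(())
}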
@@ -29,6 +29,25 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
     server
 }

+/// Same as `create_mock_chat_completions_server` but does not enforce an
+/// expectation on the number of calls.
+pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
+    let server = MockServer::start().await;
+
+    let seq_responder = SeqResponder {
+        num_calls: AtomicUsize::new(0),
+        responses,
+    };
+
+    Mock::given(method("POST"))
+        .and(path("/v1/chat/completions"))
+        .respond_with(seq_responder)
+        .mount(&server)
+        .await;
+
+    server
+}
+
 struct SeqResponder {
     num_calls: AtomicUsize,
     responses: Vec<String>,
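Usage sketch: the unchecked variant suits tests that may legitimately stop before draining every queued response.

// Each queued body answers one POST /v1/chat/completions, in order.
// Unlike the checked variant, tearing the server down early cannot fail the test.
async fn unchecked_mock_sketch(sse_bodies: Vec<String>) -> String {
    let server = create_mock_chat_completions_server_unchecked(sse_bodies).await;
    server.uri() // point the model provider's base URL here
}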
codex-rs/app-server/tests/common/rollout.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
+use anyhow::Result;
+use codex_protocol::ConversationId;
+use codex_protocol::protocol::SessionMeta;
+use codex_protocol::protocol::SessionSource;
+use serde_json::json;
+use std::fs;
+use std::path::Path;
+use std::path::PathBuf;
+use uuid::Uuid;
+
+/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
+///
+/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.
+/// - `meta_rfc3339` is the envelope timestamp used in JSON lines.
+/// - `preview` is the user message preview text.
+/// - `model_provider` optionally sets the provider in the session meta payload.
+///
+/// Returns the generated conversation/session UUID as a string.
+pub fn create_fake_rollout(
+    codex_home: &Path,
+    filename_ts: &str,
+    meta_rfc3339: &str,
+    preview: &str,
+    model_provider: Option<&str>,
+) -> Result<String> {
+    let uuid = Uuid::new_v4();
+    let uuid_str = uuid.to_string();
+    let conversation_id = ConversationId::from_string(&uuid_str)?;
+
+    // sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
+    let year = &filename_ts[0..4];
+    let month = &filename_ts[5..7];
+    let day = &filename_ts[8..10];
+    let dir = codex_home.join("sessions").join(year).join(month).join(day);
+    fs::create_dir_all(&dir)?;
+
+    let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
+
+    // Build JSONL lines
+    let payload = serde_json::to_value(SessionMeta {
+        id: conversation_id,
+        timestamp: meta_rfc3339.to_string(),
+        cwd: PathBuf::from("/"),
+        originator: "codex".to_string(),
+        cli_version: "0.0.0".to_string(),
+        instructions: None,
+        source: SessionSource::Cli,
+        model_provider: model_provider.map(str::to_string),
+    })?;
+
+    let lines = [
+        json!({
+            "timestamp": meta_rfc3339,
+            "type": "session_meta",
+            "payload": payload
+        })
+        .to_string(),
+        json!({
+            "timestamp": meta_rfc3339,
+            "type": "response_item",
+            "payload": {
+                "type": "message",
+                "role": "user",
+                "content": [{"type": "input_text", "text": preview}]
+            }
+        })
+        .to_string(),
+        json!({
+            "timestamp": meta_rfc3339,
+            "type": "event_msg",
+            "payload": {
+                "type": "user_message",
+                "message": preview,
+                "kind": "plain"
+            }
+        })
+        .to_string(),
+    ];
+
+    fs::write(file_path, lines.join("\n") + "\n")?;
+    Ok(uuid_str)
+}
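Usage sketch for the fixture, with timestamps in the two formats documented above:

// Creates CODEX_HOME/sessions/2025/01/02/rollout-2025-01-02T03-04-05-<uuid>.jsonl
fn fake_rollout_sketch(codex_home: &std::path::Path) -> anyhow::Result<()> {
    let _uuid = create_fake_rollout(
        codex_home,
        "2025-01-02T03-04-05",  // filename timestamp
        "2025-01-02T03:04:05Z", // envelope timestamp
        "hello world",          // user message preview
        Some("mock-provider"),  // optional model provider
    )?;
    Ok(())
}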
@@ -1,5 +1,4 @@
-use std::path::Path;
-
 use anyhow::Result;
 use app_test_support::McpProcess;
 use app_test_support::to_response;
 use codex_app_server_protocol::ArchiveConversationParams;
@@ -9,45 +8,37 @@ use codex_app_server_protocol::NewConversationParams;
 use codex_app_server_protocol::NewConversationResponse;
 use codex_app_server_protocol::RequestId;
 use codex_core::ARCHIVED_SESSIONS_SUBDIR;
+use std::path::Path;
 use tempfile::TempDir;
 use tokio::time::timeout;

-const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
+const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(20);

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn archive_conversation_moves_rollout_into_archived_directory() {
-    let codex_home = TempDir::new().expect("create temp dir");
-    create_config_toml(codex_home.path()).expect("write config.toml");
+async fn archive_conversation_moves_rollout_into_archived_directory() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml(codex_home.path())?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("initialize timeout")
-        .expect("initialize request");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     let new_request_id = mcp
         .send_new_conversation_request(NewConversationParams {
             model: Some("mock-model".to_string()),
             ..Default::default()
         })
-        .await
-        .expect("send newConversation");
+        .await?;
     let new_response: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(new_request_id)),
     )
-    .await
-    .expect("newConversation timeout")
-    .expect("newConversation response");
+    .await??;

     let NewConversationResponse {
         conversation_id,
         rollout_path,
         ..
-    } = to_response::<NewConversationResponse>(new_response)
-        .expect("deserialize newConversation response");
+    } = to_response::<NewConversationResponse>(new_response)?;

     assert!(
         rollout_path.exists(),
@@ -60,19 +51,15 @@ async fn archive_conversation_moves_rollout_into_archived_directory() {
         conversation_id,
         rollout_path: rollout_path.clone(),
     })
-    .await
-    .expect("send archiveConversation");
+    .await?;
     let archive_response: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(archive_request_id)),
     )
-    .await
-    .expect("archiveConversation timeout")
-    .expect("archiveConversation response");
+    .await??;

     let _: ArchiveConversationResponse =
-        to_response::<ArchiveConversationResponse>(archive_response)
-            .expect("deserialize archiveConversation response");
+        to_response::<ArchiveConversationResponse>(archive_response)?;

     let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
     let archived_rollout_path =
@@ -90,6 +77,8 @@ async fn archive_conversation_moves_rollout_into_archived_directory() {
         "expected archived rollout path {} to exist",
         archived_rollout_path.display()
     );
+
+    Ok(())
 }

 fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
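The recurring `.await??` in these refactored tests reads oddly at first: `tokio::time::timeout` yields `Result<T, Elapsed>`, and here `T` is itself a `Result`, so the first `?` surfaces a timeout and the second surfaces the operation's own error. A minimal self-contained sketch:

use tokio::time::{timeout, Duration};

async fn fetch() -> anyhow::Result<u32> {
    Ok(42)
}

async fn double_question_mark_sketch() -> anyhow::Result<u32> {
    // First `?`: Err if the future did not finish within 10s (Elapsed).
    // Second `?`: Err if `fetch` itself returned an error.
    let value = timeout(Duration::from_secs(10), fetch()).await??;
    Ok(value)
}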
@@ -1,5 +1,4 @@
-use std::path::Path;
-
 use anyhow::Result;
 use app_test_support::McpProcess;
 use app_test_support::to_response;
 use codex_app_server_protocol::AuthMode;
@@ -11,6 +10,7 @@ use codex_app_server_protocol::LoginApiKeyParams;
 use codex_app_server_protocol::LoginApiKeyResponse;
 use codex_app_server_protocol::RequestId;
 use pretty_assertions::assert_eq;
+use std::path::Path;
 use tempfile::TempDir;
 use tokio::time::timeout;

@@ -71,125 +71,99 @@ forced_login_method = "{forced_method}"
     std::fs::write(config_toml, contents)
 }

-async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
+async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) -> Result<()> {
     let request_id = mcp
         .send_login_api_key_request(LoginApiKeyParams {
             api_key: api_key.to_string(),
         })
-        .await
-        .unwrap_or_else(|e| panic!("send loginApiKey: {e}"));
+        .await?;

     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .unwrap_or_else(|e| panic!("loginApiKey timeout: {e}"))
-    .unwrap_or_else(|e| panic!("loginApiKey response: {e}"));
-    let _: LoginApiKeyResponse =
-        to_response(resp).unwrap_or_else(|e| panic!("deserialize login response: {e}"));
+    .await??;
+    let _: LoginApiKeyResponse = to_response(resp)?;
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn get_auth_status_no_auth() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
+async fn get_auth_status_no_auth() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml(codex_home.path())?;

-    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     let request_id = mcp
         .send_get_auth_status_request(GetAuthStatusParams {
             include_token: Some(true),
             refresh_token: Some(false),
         })
-        .await
-        .expect("send getAuthStatus");
+        .await?;

     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getAuthStatus timeout")
-    .expect("getAuthStatus response");
-    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    .await??;
+    let status: GetAuthStatusResponse = to_response(resp)?;
     assert_eq!(status.auth_method, None, "expected no auth method");
     assert_eq!(status.auth_token, None, "expected no token");
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn get_auth_status_with_api_key() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
+async fn get_auth_status_with_api_key() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml(codex_home.path())?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

-    login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+    login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;

     let request_id = mcp
         .send_get_auth_status_request(GetAuthStatusParams {
             include_token: Some(true),
             refresh_token: Some(false),
         })
-        .await
-        .expect("send getAuthStatus");
+        .await?;

     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getAuthStatus timeout")
-    .expect("getAuthStatus response");
-    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    .await??;
+    let status: GetAuthStatusResponse = to_response(resp)?;
     assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
     assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn get_auth_status_with_api_key_when_auth_not_required() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml_custom_provider(codex_home.path(), false)
-        .unwrap_or_else(|err| panic!("write config.toml: {err}"));
+async fn get_auth_status_with_api_key_when_auth_not_required() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml_custom_provider(codex_home.path(), false)?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

-    login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+    login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;

     let request_id = mcp
         .send_get_auth_status_request(GetAuthStatusParams {
             include_token: Some(true),
             refresh_token: Some(false),
         })
-        .await
-        .expect("send getAuthStatus");
+        .await?;

     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getAuthStatus timeout")
-    .expect("getAuthStatus response");
-    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    .await??;
+    let status: GetAuthStatusResponse = to_response(resp)?;
     assert_eq!(status.auth_method, None, "expected no auth method");
     assert_eq!(status.auth_token, None, "expected no token");
     assert_eq!(
@@ -197,76 +171,60 @@ async fn get_auth_status_with_api_key_when_auth_not_required() {
         Some(false),
         "requires_openai_auth should be false",
     );
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn get_auth_status_with_api_key_no_include_token() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
+async fn get_auth_status_with_api_key_no_include_token() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml(codex_home.path())?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

-    login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+    login_with_api_key_via_request(&mut mcp, "sk-test-key").await?;

     // Build params via struct so None field is omitted in wire JSON.
     let params = GetAuthStatusParams {
         include_token: None,
         refresh_token: Some(false),
     };
-    let request_id = mcp
-        .send_get_auth_status_request(params)
-        .await
-        .expect("send getAuthStatus");
+    let request_id = mcp.send_get_auth_status_request(params).await?;

     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getAuthStatus timeout")
-    .expect("getAuthStatus response");
-    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    .await??;
+    let status: GetAuthStatusResponse = to_response(resp)?;
     assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
     assert!(status.auth_token.is_none(), "token must be omitted");
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn login_api_key_rejected_when_forced_chatgpt() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml_forced_login(codex_home.path(), "chatgpt")
-        .unwrap_or_else(|err| panic!("write config.toml: {err}"));
+async fn login_api_key_rejected_when_forced_chatgpt() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml_forced_login(codex_home.path(), "chatgpt")?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     let request_id = mcp
         .send_login_api_key_request(LoginApiKeyParams {
             api_key: "sk-test-key".to_string(),
         })
-        .await
-        .expect("send loginApiKey");
+        .await?;

     let err: JSONRPCError = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("loginApiKey error timeout")
-    .expect("loginApiKey error");
+    .await??;

     assert_eq!(
         err.error.message,
         "API key login is disabled. Use ChatGPT login instead."
     );
+    Ok(())
 }
@@ -1,5 +1,4 @@
-use std::path::Path;
-
 use anyhow::Result;
 use app_test_support::McpProcess;
 use app_test_support::create_final_assistant_message_sse_response;
 use app_test_support::create_mock_chat_completions_server;
@@ -32,26 +31,27 @@ use codex_protocol::protocol::Event;
 use codex_protocol::protocol::EventMsg;
 use pretty_assertions::assert_eq;
 use std::env;
+use std::path::Path;
 use tempfile::TempDir;
 use tokio::time::timeout;

 const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

 #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
-async fn test_codex_jsonrpc_conversation_flow() {
+async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
     if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
         println!(
             "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
         );
-        return;
+        return Ok(());
     }

-    let tmp = TempDir::new().expect("tmp dir");
+    let tmp = TempDir::new()?;
     // Temporary Codex home with config pointing at the mock server.
     let codex_home = tmp.path().join("codex_home");
-    std::fs::create_dir(&codex_home).expect("create codex home dir");
+    std::fs::create_dir(&codex_home)?;
     let working_directory = tmp.path().join("workdir");
-    std::fs::create_dir(&working_directory).expect("create working directory");
+    std::fs::create_dir(&working_directory)?;

     // Create a mock model server that immediately ends each turn.
     // Two turns are expected: initial session configure + one user message.
@@ -61,20 +61,15 @@ async fn test_codex_jsonrpc_conversation_flow() {
             Some(&working_directory),
             Some(5000),
             "call1234",
-        )
-        .expect("create shell sse response"),
-        create_final_assistant_message_sse_response("Enjoy your new git repo!")
-            .expect("create final assistant message"),
+        )?,
+        create_final_assistant_message_sse_response("Enjoy your new git repo!")?,
     ];
     let server = create_mock_chat_completions_server(responses).await;
-    create_config_toml(&codex_home, &server.uri()).expect("write config");
+    create_config_toml(&codex_home, &server.uri())?;

     // Start MCP server and initialize.
-    let mut mcp = McpProcess::new(&codex_home).await.expect("spawn mcp");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init error");
+    let mut mcp = McpProcess::new(&codex_home).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     // 1) newConversation
     let new_conv_id = mcp
@@ -82,17 +77,13 @@ async fn test_codex_jsonrpc_conversation_flow() {
             cwd: Some(working_directory.to_string_lossy().into_owned()),
             ..Default::default()
         })
-        .await
-        .expect("send newConversation");
+        .await?;
     let new_conv_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
     )
-    .await
-    .expect("newConversation timeout")
-    .expect("newConversation resp");
-    let new_conv_resp = to_response::<NewConversationResponse>(new_conv_resp)
-        .expect("deserialize newConversation response");
+    .await??;
+    let new_conv_resp = to_response::<NewConversationResponse>(new_conv_resp)?;
     let NewConversationResponse {
         conversation_id,
         model,
@@ -103,19 +94,18 @@ async fn test_codex_jsonrpc_conversation_flow() {

     // 2) addConversationListener
     let add_listener_id = mcp
-        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
-        .await
-        .expect("send addConversationListener");
+        .send_add_conversation_listener_request(AddConversationListenerParams {
+            conversation_id,
+            experimental_raw_events: false,
+        })
+        .await?;
     let add_listener_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
     )
-    .await
-    .expect("addConversationListener timeout")
-    .expect("addConversationListener resp");
+    .await??;
     let AddConversationSubscriptionResponse { subscription_id } =
-        to_response::<AddConversationSubscriptionResponse>(add_listener_resp)
-            .expect("deserialize addConversationListener response");
+        to_response::<AddConversationSubscriptionResponse>(add_listener_resp)?;

     // 3) sendUserMessage (should trigger notifications; we only validate an OK response)
     let send_user_id = mcp
@@ -125,17 +115,13 @@ async fn test_codex_jsonrpc_conversation_flow() {
                 text: "text".to_string(),
             }],
         })
-        .await
-        .expect("send sendUserMessage");
+        .await?;
     let send_user_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(send_user_id)),
     )
-    .await
-    .expect("sendUserMessage timeout")
-    .expect("sendUserMessage resp");
-    let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)
-        .expect("deserialize sendUserMessage response");
+    .await??;
+    let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)?;

     // Verify the task_finished notification is received.
     // Note this also ensures that the final request to the server was made.
@@ -143,9 +129,7 @@ async fn test_codex_jsonrpc_conversation_flow() {
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/task_complete"),
     )
-    .await
-    .expect("task_finished_notification timeout")
-    .expect("task_finished_notification resp");
+    .await??;
     let serde_json::Value::Object(map) = task_finished_notification
         .params
         .expect("notification should have params")
@@ -163,33 +147,31 @@ async fn test_codex_jsonrpc_conversation_flow() {
         .send_remove_conversation_listener_request(RemoveConversationListenerParams {
             subscription_id,
         })
-        .await
-        .expect("send removeConversationListener");
+        .await?;
     let remove_listener_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(remove_listener_id)),
     )
-    .await
-    .expect("removeConversationListener timeout")
-    .expect("removeConversationListener resp");
-    let RemoveConversationSubscriptionResponse {} =
-        to_response(remove_listener_resp).expect("deserialize removeConversationListener response");
+    .await??;
+    let RemoveConversationSubscriptionResponse {} = to_response(remove_listener_resp)?;
+
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
-async fn test_send_user_turn_changes_approval_policy_behavior() {
+async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
     if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
         println!(
             "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
         );
-        return;
+        return Ok(());
     }

-    let tmp = TempDir::new().expect("tmp dir");
+    let tmp = TempDir::new()?;
     let codex_home = tmp.path().join("codex_home");
-    std::fs::create_dir(&codex_home).expect("create codex home dir");
+    std::fs::create_dir(&codex_home)?;
     let working_directory = tmp.path().join("workdir");
-    std::fs::create_dir(&working_directory).expect("create working directory");
+    std::fs::create_dir(&working_directory)?;

     // Mock server will request a python shell call for the first and second turn, then finish.
     let responses = vec![
@@ -202,10 +184,8 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
             Some(&working_directory),
             Some(5000),
             "call1",
-        )
-        .expect("create first shell sse response"),
-        create_final_assistant_message_sse_response("done 1")
-            .expect("create final assistant message 1"),
+        )?,
+        create_final_assistant_message_sse_response("done 1")?,
         create_shell_sse_response(
             vec![
                 "python3".to_string(),
@@ -215,20 +195,15 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
             Some(&working_directory),
             Some(5000),
             "call2",
-        )
-        .expect("create second shell sse response"),
-        create_final_assistant_message_sse_response("done 2")
-            .expect("create final assistant message 2"),
+        )?,
+        create_final_assistant_message_sse_response("done 2")?,
     ];
     let server = create_mock_chat_completions_server(responses).await;
-    create_config_toml(&codex_home, &server.uri()).expect("write config");
+    create_config_toml(&codex_home, &server.uri())?;

     // Start MCP server and initialize.
-    let mut mcp = McpProcess::new(&codex_home).await.expect("spawn mcp");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init error");
+    let mut mcp = McpProcess::new(&codex_home).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     // 1) Start conversation with approval_policy=untrusted
     let new_conv_id = mcp
@@ -236,36 +211,30 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
             cwd: Some(working_directory.to_string_lossy().into_owned()),
             ..Default::default()
         })
-        .await
-        .expect("send newConversation");
+        .await?;
     let new_conv_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
     )
-    .await
-    .expect("newConversation timeout")
-    .expect("newConversation resp");
+    .await??;
     let NewConversationResponse {
         conversation_id, ..
-    } = to_response::<NewConversationResponse>(new_conv_resp)
-        .expect("deserialize newConversation response");
+    } = to_response::<NewConversationResponse>(new_conv_resp)?;

     // 2) addConversationListener
     let add_listener_id = mcp
-        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
-        .await
-        .expect("send addConversationListener");
-    let _: AddConversationSubscriptionResponse =
-        to_response::<AddConversationSubscriptionResponse>(
-            timeout(
-                DEFAULT_READ_TIMEOUT,
-                mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
-            )
-            .await
-            .expect("addConversationListener timeout")
-            .expect("addConversationListener resp"),
-        )
-        .expect("deserialize addConversationListener response");
+        .send_add_conversation_listener_request(AddConversationListenerParams {
+            conversation_id,
+            experimental_raw_events: false,
+        })
+        .await?;
+    let _: AddConversationSubscriptionResponse = to_response::<AddConversationSubscriptionResponse>(
+        timeout(
+            DEFAULT_READ_TIMEOUT,
+            mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
+        )
+        .await??,
+    )?;

     // 3) sendUserMessage triggers a shell call; approval policy is Untrusted so we should get an elicitation
     let send_user_id = mcp
@@ -275,27 +244,21 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
                 text: "run python".to_string(),
             }],
         })
-        .await
-        .expect("send sendUserMessage");
+        .await?;
     let _send_user_resp: SendUserMessageResponse = to_response::<SendUserMessageResponse>(
         timeout(
             DEFAULT_READ_TIMEOUT,
             mcp.read_stream_until_response_message(RequestId::Integer(send_user_id)),
         )
-        .await
-        .expect("sendUserMessage timeout")
-        .expect("sendUserMessage resp"),
-    )
-    .expect("deserialize sendUserMessage response");
+        .await??,
+    )?;

     // Expect an ExecCommandApproval request (elicitation)
     let request = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_request_message(),
     )
-    .await
-    .expect("waiting for exec approval request timeout")
-    .expect("exec approval request");
+    .await??;
     let ServerRequest::ExecCommandApproval { request_id, params } = request else {
         panic!("expected ExecCommandApproval request, got: {request:?}");
     };
@@ -311,6 +274,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
         ],
         cwd: working_directory.clone(),
         reason: None,
+        risk: None,
         parsed_cmd: vec![ParsedCommand::Unknown {
             cmd: "python3 -c 'print(42)'".to_string()
         }],
@@ -323,17 +287,14 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
         request_id,
         serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
     )
-    .await
-    .expect("send approval response");
+    .await?;

     // Wait for first TaskComplete
     let _ = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/task_complete"),
     )
-    .await
-    .expect("task_complete 1 timeout")
-    .expect("task_complete 1 notification");
+    .await??;

     // 4) sendUserTurn with approval_policy=never should run without elicitation
     let send_turn_id = mcp
@@ -349,19 +310,15 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
             effort: Some(ReasoningEffort::Medium),
             summary: ReasoningSummary::Auto,
         })
-        .await
-        .expect("send sendUserTurn");
+        .await?;
     // Acknowledge sendUserTurn
     let _send_turn_resp: SendUserTurnResponse = to_response::<SendUserTurnResponse>(
         timeout(
             DEFAULT_READ_TIMEOUT,
             mcp.read_stream_until_response_message(RequestId::Integer(send_turn_id)),
         )
-        .await
-        .expect("sendUserTurn timeout")
-        .expect("sendUserTurn resp"),
-    )
-    .expect("deserialize sendUserTurn response");
+        .await??,
+    )?;

     // Ensure we do NOT receive an ExecCommandApproval request before the task completes.
     // If any Request is seen while waiting for task_complete, the helper will error and the test fails.
@@ -369,31 +326,31 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/task_complete"),
     )
-    .await
-    .expect("task_complete 2 timeout")
-    .expect("task_complete 2 notification");
+    .await??;
+
+    Ok(())
 }

 // Helper: minimal config.toml pointing at mock provider.

 #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
-async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
+async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<()> {
     if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
         println!(
             "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
         );
-        return;
+        return Ok(());
     }

-    let tmp = TempDir::new().expect("tmp dir");
+    let tmp = TempDir::new()?;
     let codex_home = tmp.path().join("codex_home");
-    std::fs::create_dir(&codex_home).expect("create codex home dir");
+    std::fs::create_dir(&codex_home)?;
     let workspace_root = tmp.path().join("workspace");
-    std::fs::create_dir(&workspace_root).expect("create workspace root");
+    std::fs::create_dir(&workspace_root)?;
     let first_cwd = workspace_root.join("turn1");
     let second_cwd = workspace_root.join("turn2");
-    std::fs::create_dir(&first_cwd).expect("create first cwd");
-    std::fs::create_dir(&second_cwd).expect("create second cwd");
+    std::fs::create_dir(&first_cwd)?;
+    std::fs::create_dir(&second_cwd)?;

     let responses = vec![
         create_shell_sse_response(
@@ -405,10 +362,8 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
             None,
             Some(5000),
             "call-first",
-        )
-        .expect("create first shell response"),
-        create_final_assistant_message_sse_response("done first")
-            .expect("create first final assistant message"),
+        )?,
+        create_final_assistant_message_sse_response("done first")?,
         create_shell_sse_response(
             vec![
                 "bash".to_string(),
@@ -418,21 +373,14 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
             None,
             Some(5000),
             "call-second",
-        )
-        .expect("create second shell response"),
-        create_final_assistant_message_sse_response("done second")
-            .expect("create second final assistant message"),
+        )?,
+        create_final_assistant_message_sse_response("done second")?,
     ];
     let server = create_mock_chat_completions_server(responses).await;
-    create_config_toml(&codex_home, &server.uri()).expect("write config");
+    create_config_toml(&codex_home, &server.uri())?;

-    let mut mcp = McpProcess::new(&codex_home)
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(&codex_home).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

     let new_conv_id = mcp
         .send_new_conversation_request(NewConversationParams {
@@ -441,33 +389,29 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
             sandbox: Some(SandboxMode::WorkspaceWrite),
             ..Default::default()
         })
-        .await
-        .expect("send newConversation");
+        .await?;
     let new_conv_resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
     )
-    .await
-    .expect("newConversation timeout")
-    .expect("newConversation resp");
+    .await??;
     let NewConversationResponse {
         conversation_id,
         model,
         ..
-    } = to_response::<NewConversationResponse>(new_conv_resp)
-        .expect("deserialize newConversation response");
+    } = to_response::<NewConversationResponse>(new_conv_resp)?;

     let add_listener_id = mcp
-        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
-        .await
-        .expect("send addConversationListener");
+        .send_add_conversation_listener_request(AddConversationListenerParams {
+            conversation_id,
+            experimental_raw_events: false,
+        })
+        .await?;
     timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
     )
-    .await
-    .expect("addConversationListener timeout")
-    .expect("addConversationListener resp");
+    .await??;

     let first_turn_id = mcp
         .send_send_user_turn_request(SendUserTurnParams {
@@ -487,22 +431,17 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
             effort: Some(ReasoningEffort::Medium),
             summary: ReasoningSummary::Auto,
         })
-        .await
-        .expect("send first sendUserTurn");
+        .await?;
     timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
     )
-    .await
-    .expect("sendUserTurn 1 timeout")
-    .expect("sendUserTurn 1 resp");
+    .await??;
     timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/task_complete"),
     )
-    .await
-    .expect("task_complete 1 timeout")
-    .expect("task_complete 1 notification");
+    .await??;

     let second_turn_id = mcp
         .send_send_user_turn_request(SendUserTurnParams {
@@ -517,23 +456,18 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
             effort: Some(ReasoningEffort::Medium),
             summary: ReasoningSummary::Auto,
         })
-        .await
-        .expect("send second sendUserTurn");
+        .await?;
     timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
     )
-    .await
-    .expect("sendUserTurn 2 timeout")
-    .expect("sendUserTurn 2 resp");
+    .await??;

     let exec_begin_notification = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
     )
-    .await
-    .expect("exec_command_begin timeout")
-    .expect("exec_command_begin notification");
+    .await??;
     let params = exec_begin_notification
         .params
         .clone()
@@ -561,9 +495,9 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_notification_message("codex/event/task_complete"),
     )
-    .await
-    .expect("task_complete 2 timeout")
-    .expect("task_complete 2 notification");
+    .await??;
+
+    Ok(())
 }

 fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
@@ -1,6 +1,4 @@
-use std::collections::HashMap;
-use std::path::Path;
-
 use anyhow::Result;
 use app_test_support::McpProcess;
 use app_test_support::to_response;
 use codex_app_server_protocol::GetUserSavedConfigResponse;
@@ -17,6 +15,8 @@ use codex_protocol::config_types::ReasoningSummary;
 use codex_protocol::config_types::SandboxMode;
 use codex_protocol::config_types::Verbosity;
 use pretty_assertions::assert_eq;
+use std::collections::HashMap;
+use std::path::Path;
 use tempfile::TempDir;
 use tokio::time::timeout;

@@ -60,31 +60,21 @@ chatgpt_base_url = "https://api.chatgpt.com"
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
-async fn get_config_toml_parses_all_fields() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-    create_config_toml(codex_home.path()).expect("write config.toml");
+async fn get_config_toml_parses_all_fields() -> Result<()> {
+    let codex_home = TempDir::new()?;
+    create_config_toml(codex_home.path())?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

-    let request_id = mcp
-        .send_get_user_saved_config_request()
-        .await
-        .expect("send getUserSavedConfig");
+    let request_id = mcp.send_get_user_saved_config_request().await?;
     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getUserSavedConfig timeout")
-    .expect("getUserSavedConfig response");
+    .await??;

-    let config: GetUserSavedConfigResponse = to_response(resp).expect("deserialize config");
+    let config: GetUserSavedConfigResponse = to_response(resp)?;
     let expected = GetUserSavedConfigResponse {
         config: UserSavedConfig {
             approval_policy: Some(AskForApproval::OnRequest),
@@ -122,33 +112,24 @@ async fn get_config_toml_parses_all_fields() {
     };

     assert_eq!(config, expected);
+    Ok(())
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn get_config_toml_empty() {
-    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
+async fn get_config_toml_empty() -> Result<()> {
+    let codex_home = TempDir::new()?;

-    let mut mcp = McpProcess::new(codex_home.path())
-        .await
-        .expect("spawn mcp process");
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
-        .await
-        .expect("init timeout")
-        .expect("init failed");
+    let mut mcp = McpProcess::new(codex_home.path()).await?;
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

-    let request_id = mcp
-        .send_get_user_saved_config_request()
-        .await
-        .expect("send getUserSavedConfig");
+    let request_id = mcp.send_get_user_saved_config_request().await?;
     let resp: JSONRPCResponse = timeout(
         DEFAULT_READ_TIMEOUT,
         mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
     )
-    .await
-    .expect("getUserSavedConfig timeout")
-    .expect("getUserSavedConfig response");
+    .await??;

-    let config: GetUserSavedConfigResponse = to_response(resp).expect("deserialize config");
+    let config: GetUserSavedConfigResponse = to_response(resp)?;
     let expected = GetUserSavedConfigResponse {
         config: UserSavedConfig {
             approval_policy: None,
@@ -167,4 +148,5 @@ async fn get_config_toml_empty() {
     };

     assert_eq!(config, expected);
+    Ok(())
 }
@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
@@ -15,31 +14,25 @@ use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_conversation_create_and_send_message_ok() {
|
||||
async fn test_conversation_create_and_send_message_ok() -> Result<()> {
|
||||
// Mock server – we won't strictly rely on it, but provide one to satisfy any model wiring.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
];
|
||||
let responses = vec![create_final_assistant_message_sse_response("Done")?];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
// Temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
// Start MCP server process and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Create a conversation via the new JSON-RPC API.
|
||||
let new_conv_id = mcp
|
||||
@@ -47,40 +40,35 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
reasoning_effort: _,
|
||||
rollout_path: _,
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
assert_eq!(model, "o3");
|
||||
|
||||
// Add a listener so we receive notifications for this conversation (not strictly required for this test).
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
let _sub: AddConversationSubscriptionResponse =
|
||||
to_response::<AddConversationSubscriptionResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp"),
|
||||
)
|
||||
.expect("deserialize addConversationListener response");
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
// Now send a user message via the wire API and expect an OK (empty object) result.
|
||||
let send_id = mcp
|
||||
@@ -90,36 +78,32 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.expect("send sendUserMessage");
|
||||
.await?;
|
||||
let send_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserMessage timeout")
|
||||
.expect("sendUserMessage resp");
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)
|
||||
.expect("deserialize sendUserMessage response");
|
||||
.await??;
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)?;
|
||||
|
||||
// avoid race condition by waiting for the mock server to receive the chat.completions request
|
||||
let deadline = std::time::Instant::now() + DEFAULT_READ_TIMEOUT;
|
||||
loop {
|
||||
let requests = loop {
|
||||
let requests = server.received_requests().await.unwrap_or_default();
|
||||
if !requests.is_empty() {
|
||||
break;
|
||||
break requests;
|
||||
}
|
||||
if std::time::Instant::now() >= deadline {
|
||||
panic!("mock server did not receive the chat.completions request in time");
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
|
||||
}
|
||||
};
|
||||
|
||||
// Verify the outbound request body matches expectations for Chat Completions.
|
||||
let request = &server.received_requests().await.unwrap()[0];
|
||||
let body = request
|
||||
.body_json::<serde_json::Value>()
|
||||
.expect("parse request body as JSON");
|
||||
let request = requests
|
||||
.first()
|
||||
.expect("mock server should have received at least one request");
|
||||
let body = request.body_json::<serde_json::Value>()?;
|
||||
assert_eq!(body["model"], json!("o3"));
|
||||
assert!(body["stream"].as_bool().unwrap_or(false));
|
||||
let messages = body["messages"]
|
||||
@@ -130,6 +114,7 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
assert_eq!(last["content"], json!("Hello"));
|
||||
|
||||
drop(server);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
|
@@ -1,5 +1,5 @@
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use app_test_support::McpProcess;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
@@ -13,48 +13,39 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
// Prepare a temporary Codex home and a separate root with test files.
let codex_home = TempDir::new().context("create temp codex home")?;
let root = TempDir::new().context("create temp search root")?;
let codex_home = TempDir::new()?;
let root = TempDir::new()?;

// Create files designed to have deterministic ordering for query "abe".
std::fs::write(root.path().join("abc"), "x").context("write file abc")?;
std::fs::write(root.path().join("abcde"), "x").context("write file abcde")?;
std::fs::write(root.path().join("abexy"), "x").context("write file abexy")?;
std::fs::write(root.path().join("zzz.txt"), "x").context("write file zzz")?;
std::fs::write(root.path().join("abc"), "x")?;
std::fs::write(root.path().join("abcde"), "x")?;
std::fs::write(root.path().join("abexy"), "x")?;
std::fs::write(root.path().join("zzz.txt"), "x")?;
let sub_dir = root.path().join("sub");
std::fs::create_dir_all(&sub_dir).context("create sub dir")?;
std::fs::create_dir_all(&sub_dir)?;
let sub_abce_path = sub_dir.join("abce");
std::fs::write(&sub_abce_path, "x").context("write file sub/abce")?;
std::fs::write(&sub_abce_path, "x")?;
let sub_abce_rel = sub_abce_path
.strip_prefix(root.path())
.context("strip root prefix from sub/abce")?
.strip_prefix(root.path())?
.to_string_lossy()
.to_string();

// Start MCP server and initialize.
let mut mcp = McpProcess::new(codex_home.path())
.await
.context("spawn mcp")?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.context("init timeout")?
.context("init failed")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let root_path = root.path().to_string_lossy().to_string();
// Send fuzzyFileSearch request.
let request_id = mcp
.send_fuzzy_file_search_request("abe", vec![root_path.clone()], None)
.await
.context("send fuzzyFileSearch")?;
.await?;

// Read response and verify shape and ordering.
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.context("fuzzyFileSearch timeout")?
.context("fuzzyFileSearch resp")?;
.await??;

let value = resp.result;
// The path separator on Windows affects the score.
@@ -94,24 +85,18 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {
let codex_home = TempDir::new().context("create temp codex home")?;
let root = TempDir::new().context("create temp search root")?;
let codex_home = TempDir::new()?;
let root = TempDir::new()?;

std::fs::write(root.path().join("alpha.txt"), "contents").context("write alpha")?;
std::fs::write(root.path().join("alpha.txt"), "contents")?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.context("spawn mcp")?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.context("init timeout")?
.context("init failed")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let root_path = root.path().to_string_lossy().to_string();
let request_id = mcp
.send_fuzzy_file_search_request("alp", vec![root_path.clone()], None)
.await
.context("send fuzzyFileSearch")?;
.await?;

let request_id_2 = mcp
.send_fuzzy_file_search_request(
@@ -119,23 +104,20 @@ async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {
vec![root_path.clone()],
Some(request_id.to_string()),
)
.await
.context("send fuzzyFileSearch")?;
.await?;

let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)),
)
.await
.context("fuzzyFileSearch timeout")?
.context("fuzzyFileSearch resp")?;
.await??;

let files = resp
.result
.get("files")
.context("files key missing")?
.ok_or_else(|| anyhow!("files key missing"))?
.as_array()
.context("files not array")?
.ok_or_else(|| anyhow!("files not array"))?
.clone();

assert_eq!(files.len(), 1);

@@ -88,7 +88,10 @@ async fn shell_command_interruption() -> anyhow::Result<()> {

// 2) addConversationListener
let add_listener_id = mcp
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
.send_add_conversation_listener_request(AddConversationListenerParams {
conversation_id,
experimental_raw_events: false,
})
.await?;
let _add_listener_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
@@ -143,7 +146,7 @@ fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
sandbox_mode = "read-only"

model_provider = "mock_provider"


@@ -1,7 +1,6 @@
use std::fs;
use std::path::Path;

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
@@ -13,67 +12,66 @@ use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::ResumeConversationResponse;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::SessionConfiguredNotification;
use codex_core::protocol::EventMsg;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;
use uuid::Uuid;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_list_and_resume_conversations() {
async fn test_list_and_resume_conversations() -> Result<()> {
// Prepare a temporary CODEX_HOME with a few fake rollout files.
let codex_home = TempDir::new().expect("create temp dir");
let codex_home = TempDir::new()?;
create_fake_rollout(
codex_home.path(),
"2025-01-02T12-00-00",
"2025-01-02T12:00:00Z",
"Hello A",
);
Some("openai"),
)?;
create_fake_rollout(
codex_home.path(),
"2025-01-01T13-00-00",
"2025-01-01T13:00:00Z",
"Hello B",
);
Some("openai"),
)?;
create_fake_rollout(
codex_home.path(),
"2025-01-01T12-00-00",
"2025-01-01T12:00:00Z",
"Hello C",
);
None,
)?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

// Request first page with size 2
let req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(2),
cursor: None,
model_providers: None,
})
.await
.expect("send listConversations");
.await?;
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
)
.await
.expect("listConversations timeout")
.expect("listConversations resp");
.await??;
let ListConversationsResponse { items, next_cursor } =
to_response::<ListConversationsResponse>(resp).expect("deserialize response");
to_response::<ListConversationsResponse>(resp)?;

assert_eq!(items.len(), 2);
// Newest first; preview text should match
assert_eq!(items[0].preview, "Hello A");
assert_eq!(items[1].preview, "Hello B");
assert_eq!(items[0].model_provider, "openai");
assert_eq!(items[1].model_provider, "openai");
assert!(items[0].path.is_absolute());
assert!(next_cursor.is_some());

@@ -82,129 +80,277 @@ async fn test_list_and_resume_conversations() {
.send_list_conversations_request(ListConversationsParams {
page_size: Some(2),
cursor: next_cursor,
model_providers: None,
})
.await
.expect("send listConversations page 2");
.await?;
let resp2: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(req_id2)),
)
.await
.expect("listConversations page 2 timeout")
.expect("listConversations page 2 resp");
.await??;
let ListConversationsResponse {
items: items2,
next_cursor: next2,
..
} = to_response::<ListConversationsResponse>(resp2).expect("deserialize response");
} = to_response::<ListConversationsResponse>(resp2)?;
assert_eq!(items2.len(), 1);
assert_eq!(items2[0].preview, "Hello C");
assert!(next2.is_some());
assert_eq!(items2[0].model_provider, "openai");
assert_eq!(next2, None);

// Now resume one of the sessions and expect a SessionConfigured notification and response.
// Add a conversation with an explicit non-OpenAI provider for filter tests.
create_fake_rollout(
codex_home.path(),
"2025-01-01T11-30-00",
"2025-01-01T11:30:00Z",
"Hello TP",
Some("test-provider"),
)?;

// Filtering by model provider should return only matching sessions.
let filter_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(vec!["test-provider".to_string()]),
})
.await?;
let filter_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(filter_req_id)),
)
.await??;
let ListConversationsResponse {
items: filtered_items,
next_cursor: filtered_next,
} = to_response::<ListConversationsResponse>(filter_resp)?;
assert_eq!(filtered_items.len(), 1);
assert_eq!(filtered_next, None);
assert_eq!(filtered_items[0].preview, "Hello TP");
assert_eq!(filtered_items[0].model_provider, "test-provider");

// Empty filter should include every session regardless of provider metadata.
let unfiltered_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(Vec::new()),
})
.await?;
let unfiltered_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(unfiltered_req_id)),
)
.await??;
let ListConversationsResponse {
items: unfiltered_items,
next_cursor: unfiltered_next,
} = to_response::<ListConversationsResponse>(unfiltered_resp)?;
assert_eq!(unfiltered_items.len(), 4);
assert!(unfiltered_next.is_none());

let empty_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(vec!["other".to_string()]),
})
.await?;
let empty_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(empty_req_id)),
)
.await??;
let ListConversationsResponse {
items: empty_items,
next_cursor: empty_next,
} = to_response::<ListConversationsResponse>(empty_resp)?;
assert!(empty_items.is_empty());
assert!(empty_next.is_none());

let first_item = &items[0];

// Now resume one of the sessions from an explicit rollout path.
let resume_req_id = mcp
.send_resume_conversation_request(ResumeConversationParams {
path: items[0].path.clone(),
path: Some(first_item.path.clone()),
conversation_id: None,
history: None,
overrides: Some(NewConversationParams {
model: Some("o3".to_string()),
..Default::default()
}),
})
.await
.expect("send resumeConversation");
.await?;

// Expect a codex/event notification with msg.type == sessionConfigured
let notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("sessionConfigured"),
)
.await
.expect("sessionConfigured notification timeout")
.expect("sessionConfigured notification");
let session_configured: ServerNotification = notification
.try_into()
.expect("deserialize sessionConfigured notification");
// Basic shape assertion: ensure event type is sessionConfigured
.await??;
let session_configured: ServerNotification = notification.try_into()?;
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
model,
rollout_path,
initial_messages: session_initial_messages,
..
}) = session_configured
else {
unreachable!("expected sessionConfigured notification");
};
assert_eq!(model, "o3");
assert_eq!(items[0].path.clone(), rollout_path);
assert_eq!(rollout_path, first_item.path.clone());
let session_initial_messages = session_initial_messages
.expect("expected initial messages when resuming from rollout path");
match session_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, first_item.preview.clone());
}
other => panic!("unexpected initial messages from rollout resume: {other:#?}"),
}

// Then the response for resumeConversation
let resume_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(resume_req_id)),
)
.await
.expect("resumeConversation timeout")
.expect("resumeConversation resp");
.await??;
let ResumeConversationResponse {
conversation_id, ..
} = to_response::<ResumeConversationResponse>(resume_resp)
.expect("deserialize resumeConversation response");
conversation_id,
model: resume_model,
initial_messages: response_initial_messages,
..
} = to_response::<ResumeConversationResponse>(resume_resp)?;
// conversation id should be a valid UUID
assert!(!conversation_id.to_string().is_empty());
}
assert_eq!(resume_model, "o3");
let response_initial_messages =
response_initial_messages.expect("expected initial messages in resume response");
match response_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, first_item.preview.clone());
}
other => panic!("unexpected initial messages in resume response: {other:#?}"),
}

fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) {
let uuid = Uuid::new_v4();
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
let dir = codex_home.join("sessions").join(year).join(month).join(day);
fs::create_dir_all(&dir).unwrap_or_else(|e| panic!("create sessions dir: {e}"));
// Resuming with only a conversation id should locate the rollout automatically.
let resume_by_id_req_id = mcp
.send_resume_conversation_request(ResumeConversationParams {
path: None,
conversation_id: Some(first_item.conversation_id),
history: None,
overrides: Some(NewConversationParams {
model: Some("o3".to_string()),
..Default::default()
}),
})
.await?;
let notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("sessionConfigured"),
)
.await??;
let session_configured: ServerNotification = notification.try_into()?;
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
model,
rollout_path,
initial_messages: session_initial_messages,
..
}) = session_configured
else {
unreachable!("expected sessionConfigured notification");
};
assert_eq!(model, "o3");
assert_eq!(rollout_path, first_item.path.clone());
let session_initial_messages = session_initial_messages
.expect("expected initial messages when resuming from conversation id");
match session_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, first_item.preview.clone());
}
other => panic!("unexpected initial messages from conversation id resume: {other:#?}"),
}
let resume_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(resume_by_id_req_id)),
)
.await??;
let ResumeConversationResponse {
conversation_id: by_id_conversation_id,
model: by_id_model,
initial_messages: by_id_initial_messages,
..
} = to_response::<ResumeConversationResponse>(resume_resp)?;
assert!(!by_id_conversation_id.to_string().is_empty());
assert_eq!(by_id_model, "o3");
let by_id_initial_messages = by_id_initial_messages
.expect("expected initial messages when resuming from conversation id response");
match by_id_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, first_item.preview.clone());
}
other => {
panic!("unexpected initial messages in conversation id resume response: {other:#?}")
}
}

let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
let mut lines = Vec::new();
// Meta line with timestamp (flattened meta in payload for new schema)
lines.push(
json!({
"timestamp": meta_rfc3339,
"type": "session_meta",
"payload": {
"id": uuid,
"timestamp": meta_rfc3339,
"cwd": "/",
"originator": "codex",
"cli_version": "0.0.0",
"instructions": null
}
// Resuming with explicit history should succeed even without a stored rollout.
let fork_history_text = "Hello from history";
let history = vec![ResponseItem::Message {
id: None,
role: "user".to_string(),
content: vec![ContentItem::InputText {
text: fork_history_text.to_string(),
}],
}];
let resume_with_history_req_id = mcp
.send_resume_conversation_request(ResumeConversationParams {
path: None,
conversation_id: None,
history: Some(history),
overrides: Some(NewConversationParams {
model: Some("o3".to_string()),
..Default::default()
}),
})
.to_string(),
.await?;
let notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("sessionConfigured"),
)
.await??;
let session_configured: ServerNotification = notification.try_into()?;
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
model,
initial_messages: session_initial_messages,
..
}) = session_configured
else {
unreachable!("expected sessionConfigured notification");
};
assert_eq!(model, "o3");
assert!(
session_initial_messages.as_ref().is_none_or(Vec::is_empty),
"expected no initial messages when resuming from explicit history but got {session_initial_messages:#?}"
);
// Minimal user message entry as a persisted response item (with envelope timestamp)
lines.push(
json!({
"timestamp": meta_rfc3339,
"type":"response_item",
"payload": {
"type":"message",
"role":"user",
"content":[{"type":"input_text","text": preview}]
}
})
.to_string(),
let resume_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(resume_with_history_req_id)),
)
.await??;
let ResumeConversationResponse {
conversation_id: history_conversation_id,
model: history_model,
initial_messages: history_initial_messages,
..
} = to_response::<ResumeConversationResponse>(resume_resp)?;
assert!(!history_conversation_id.to_string().is_empty());
assert_eq!(history_model, "o3");
assert!(
history_initial_messages.as_ref().is_none_or(Vec::is_empty),
"expected no initial messages in resume response when history is provided but got {history_initial_messages:#?}"
);
// Add a matching user message event line to satisfy filters
lines.push(
json!({
"timestamp": meta_rfc3339,
"type":"event_msg",
"payload": {
"type":"user_message",
"message": preview,
"kind": "plain"
}
})
.to_string(),
);
fs::write(file_path, lines.join("\n") + "\n")
.unwrap_or_else(|e| panic!("write rollout file: {e}"));

Ok(())
}

@@ -1,6 +1,4 @@
use std::path::Path;
use std::time::Duration;

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::CancelLoginChatGptParams;
@@ -12,7 +10,11 @@ use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::LogoutChatGptResponse;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_login::login_with_api_key;
use serial_test::serial;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;

@@ -41,32 +43,26 @@ stream_max_retries = 0
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn logout_chatgpt_removes_auth() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
create_config_toml(codex_home.path()).expect("write config.toml");
login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
async fn logout_chatgpt_removes_auth() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path())?;
login_with_api_key(
codex_home.path(),
"sk-test-key",
AuthCredentialsStoreMode::File,
)?;
assert!(codex_home.path().join("auth.json").exists());

let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let id = mcp
.send_logout_chat_gpt_request()
.await
.expect("send logoutChatGpt");
let id = mcp.send_logout_chat_gpt_request().await?;
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(id)),
)
.await
.expect("logoutChatGpt timeout")
.expect("logoutChatGpt response");
let _ok: LogoutChatGptResponse = to_response(resp).expect("deserialize logout response");
.await??;
let _ok: LogoutChatGptResponse = to_response(resp)?;

assert!(
!codex_home.path().join("auth.json").exists(),
@@ -79,61 +75,47 @@ async fn logout_chatgpt_removes_auth() {
include_token: Some(true),
refresh_token: Some(false),
})
.await
.expect("send getAuthStatus");
.await?;
let status_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(status_id)),
)
.await
.expect("getAuthStatus timeout")
.expect("getAuthStatus response");
let status: GetAuthStatusResponse = to_response(status_resp).expect("deserialize status");
.await??;
let status: GetAuthStatusResponse = to_response(status_resp)?;
assert_eq!(status.auth_method, None);
assert_eq!(status.auth_token, None);
Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_and_cancel_chatgpt() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_and_cancel_chatgpt() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path())?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let login_id = mcp
.send_login_chat_gpt_request()
.await
.expect("send loginChatGpt");
let login_id = mcp.send_login_chat_gpt_request().await?;
let login_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(login_id)),
)
.await
.expect("loginChatGpt timeout")
.expect("loginChatGpt response");
let login: LoginChatGptResponse = to_response(login_resp).expect("deserialize login resp");
.await??;
let login: LoginChatGptResponse = to_response(login_resp)?;

let cancel_id = mcp
.send_cancel_login_chat_gpt_request(CancelLoginChatGptParams {
login_id: login.login_id,
})
.await
.expect("send cancelLoginChatGpt");
.await?;
let cancel_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
)
.await
.expect("cancelLoginChatGpt timeout")
.expect("cancelLoginChatGpt response");
let _ok: CancelLoginChatGptResponse =
to_response(cancel_resp).expect("deserialize cancel response");
.await??;
let _ok: CancelLoginChatGptResponse = to_response(cancel_resp)?;

// Optionally observe the completion notification; do not fail if it races.
let maybe_note = timeout(
@@ -144,6 +126,7 @@ async fn login_and_cancel_chatgpt() {
if maybe_note.is_err() {
eprintln!("warning: did not observe login_chat_gpt_complete notification after cancel");
}
Ok(())
}

fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
@@ -176,66 +159,48 @@ forced_chatgpt_workspace_id = "{workspace_id}"
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_chatgpt_rejected_when_forced_api() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
create_config_toml_forced_login(codex_home.path(), "api")
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
async fn login_chatgpt_rejected_when_forced_api() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml_forced_login(codex_home.path(), "api")?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let request_id = mcp
.send_login_chat_gpt_request()
.await
.expect("send loginChatGpt");
let request_id = mcp.send_login_chat_gpt_request().await?;
let err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await
.expect("loginChatGpt error timeout")
.expect("loginChatGpt error");
.await??;

assert_eq!(
err.error.message,
"ChatGPT login is disabled. Use API key login instead."
);
Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_chatgpt_includes_forced_workspace_query_param() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
create_config_toml_forced_workspace(codex_home.path(), "ws-forced")
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml_forced_workspace(codex_home.path(), "ws-forced")?;

let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let request_id = mcp
.send_login_chat_gpt_request()
.await
.expect("send loginChatGpt");
let request_id = mcp.send_login_chat_gpt_request().await?;
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.expect("loginChatGpt timeout")
.expect("loginChatGpt response");
.await??;

let login: LoginChatGptResponse = to_response(resp).expect("deserialize login resp");
let login: LoginChatGptResponse = to_response(resp)?;
assert!(
login.auth_url.contains("allowed_workspace_id=ws-forced"),
"auth URL should include forced workspace"
);
Ok(())
}

@@ -13,3 +13,4 @@ mod send_message;
mod set_default_model;
mod user_agent;
mod user_info;
mod v2;

@@ -6,9 +6,9 @@ use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::ListModelsResponse;
use codex_app_server_protocol::Model;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_app_server_protocol::RequestId;
use codex_protocol::config_types::ReasoningEffort;
@@ -27,8 +27,8 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

let request_id = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(100),
.send_list_models_request(ModelListParams {
limit: Some(100),
cursor: None,
})
.await?;
@@ -39,14 +39,17 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
)
.await??;

let ListModelsResponse { items, next_cursor } = to_response::<ListModelsResponse>(response)?;
let ModelListResponse {
data: items,
next_cursor,
} = to_response::<ModelListResponse>(response)?;

let expected_models = vec![
Model {
id: "gpt-5-codex".to_string(),
model: "gpt-5-codex".to_string(),
display_name: "gpt-5-codex".to_string(),
description: "Optimized for coding tasks with many tools.".to_string(),
description: "Optimized for codex.".to_string(),
supported_reasoning_efforts: vec![
ReasoningEffortOption {
reasoning_effort: ReasoningEffort::Low,
@@ -111,8 +114,8 @@ async fn list_models_pagination_works() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

let first_request = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(1),
.send_list_models_request(ModelListParams {
limit: Some(1),
cursor: None,
})
.await?;
@@ -123,18 +126,18 @@ async fn list_models_pagination_works() -> Result<()> {
)
.await??;

let ListModelsResponse {
items: first_items,
let ModelListResponse {
data: first_items,
next_cursor: first_cursor,
} = to_response::<ListModelsResponse>(first_response)?;
} = to_response::<ModelListResponse>(first_response)?;

assert_eq!(first_items.len(), 1);
assert_eq!(first_items[0].id, "gpt-5-codex");
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;

let second_request = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(1),
.send_list_models_request(ModelListParams {
limit: Some(1),
cursor: Some(next_cursor.clone()),
})
.await?;
@@ -145,10 +148,10 @@ async fn list_models_pagination_works() -> Result<()> {
)
.await??;

let ListModelsResponse {
items: second_items,
let ModelListResponse {
data: second_items,
next_cursor: second_cursor,
} = to_response::<ListModelsResponse>(second_response)?;
} = to_response::<ModelListResponse>(second_response)?;

assert_eq!(second_items.len(), 1);
assert_eq!(second_items[0].id, "gpt-5");
@@ -164,8 +167,8 @@ async fn list_models_rejects_invalid_cursor() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

let request_id = mcp
.send_list_models_request(ListModelsParams {
page_size: None,
.send_list_models_request(ModelListParams {
limit: None,
cursor: Some("invalid".to_string()),
})
.await?;

@@ -1,4 +1,3 @@
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::McpProcess;
|
||||
@@ -8,9 +7,10 @@ use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
@@ -28,28 +28,18 @@ const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new().context("create codex home tempdir")?;
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.context("spawn mcp process")?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.context("initialize timeout")?
|
||||
.context("initialize request")?;
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_account_rate_limits_request()
|
||||
.await
|
||||
.context("send account/rateLimits/read")?;
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.context("account/rateLimits/read timeout")?
|
||||
.context("account/rateLimits/read error")?;
|
||||
.await??;
|
||||
|
||||
assert_eq!(error.id, RequestId::Integer(request_id));
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
@@ -63,30 +53,20 @@ async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new().context("create codex home tempdir")?;
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.context("spawn mcp process")?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.context("initialize timeout")?
|
||||
.context("initialize request")?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
login_with_api_key(&mut mcp, "sk-test-key").await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_account_rate_limits_request()
|
||||
.await
|
||||
.context("send account/rateLimits/read")?;
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.context("account/rateLimits/read timeout")?
|
||||
.context("account/rateLimits/read error")?;
|
||||
.await??;
|
||||
|
||||
assert_eq!(error.id, RequestId::Integer(request_id));
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
@@ -100,18 +80,18 @@ async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
|
||||
let codex_home = TempDir::new().context("create codex home tempdir")?;
|
||||
let codex_home = TempDir::new()?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("chatgpt-token")
|
||||
.account_id("account-123")
|
||||
.plan_type("pro"),
|
||||
)
|
||||
.context("write chatgpt auth")?;
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let server = MockServer::start().await;
|
||||
let server_url = server.uri();
|
||||
write_chatgpt_base_url(codex_home.path(), &server_url).context("write chatgpt base url")?;
|
||||
write_chatgpt_base_url(codex_home.path(), &server_url)?;
|
||||
|
||||
let primary_reset_timestamp = chrono::DateTime::parse_from_rfc3339("2025-01-01T00:02:00Z")
|
||||
.expect("parse primary reset timestamp")
|
||||
@@ -147,40 +127,29 @@ async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.context("spawn mcp process")?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.context("initialize timeout")?
|
||||
.context("initialize request")?;
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_account_rate_limits_request()
|
||||
.await
|
||||
.context("send account/rateLimits/read")?;
|
||||
let request_id = mcp.send_get_account_rate_limits_request().await?;
|
||||
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.context("account/rateLimits/read timeout")?
|
||||
.context("account/rateLimits/read response")?;
|
||||
.await??;
|
||||
|
||||
let received: GetAccountRateLimitsResponse =
|
||||
to_response(response).context("deserialize rate limit response")?;
|
||||
let received: GetAccountRateLimitsResponse = to_response(response)?;
|
||||
|
||||
let expected = GetAccountRateLimitsResponse {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 42.0,
|
||||
window_minutes: Some(60),
|
||||
used_percent: 42,
|
||||
window_duration_mins: Some(60),
|
||||
resets_at: Some(primary_reset_timestamp),
|
||||
}),
|
||||
secondary: Some(RateLimitWindow {
|
||||
used_percent: 5.0,
|
||||
window_minutes: Some(1440),
|
||||
used_percent: 5,
|
||||
window_duration_mins: Some(1440),
|
||||
resets_at: Some(secondary_reset_timestamp),
|
||||
}),
|
||||
},
|
||||
@@ -195,16 +164,13 @@ async fn login_with_api_key(mcp: &mut McpProcess, api_key: &str) -> Result<()> {
|
||||
.send_login_api_key_request(LoginApiKeyParams {
|
||||
api_key: api_key.to_string(),
|
||||
})
|
||||
.await
|
||||
.context("send loginApiKey")?;
|
||||
.await?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.context("loginApiKey timeout")?
|
||||
.context("loginApiKey response")?;
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_chat_completions_server;
@@ -15,73 +14,76 @@ use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_protocol::ConversationId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::RawResponseItemEvent;
use pretty_assertions::assert_eq;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn test_send_message_success() {
async fn test_send_message_success() -> Result<()> {
    // Spin up a mock completions server that immediately ends the Codex turn.
    // Two Codex turns hit the mock model (session start + send-user-message). Provide two SSE responses.
    let responses = vec![
        create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
        create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;

    // Create a temporary Codex home with config pointing at the mock server.
    let codex_home = TempDir::new().expect("create temp dir");
    create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    // Start MCP server process and initialize.
    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timed out")
        .expect("init failed");
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a conversation using the new wire API.
    let new_conv_id = mcp
        .send_new_conversation_request(NewConversationParams::default())
        .await
        .expect("send newConversation");
        .send_new_conversation_request(NewConversationParams {
            ..Default::default()
        })
        .await?;
    let new_conv_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
    )
    .await
    .expect("newConversation timeout")
    .expect("newConversation resp");
    .await??;
    let NewConversationResponse {
        conversation_id, ..
    } = to_response::<_>(new_conv_resp).expect("deserialize newConversation response");
    } = to_response::<_>(new_conv_resp)?;

    // 2) addConversationListener
    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
        .await
        .expect("send addConversationListener");
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: false,
        })
        .await?;
    let add_listener_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
    )
    .await
    .expect("addConversationListener timeout")
    .expect("addConversationListener resp");
    .await??;
    let AddConversationSubscriptionResponse { subscription_id: _ } =
        to_response::<_>(add_listener_resp).expect("deserialize addConversationListener response");
        to_response::<_>(add_listener_resp)?;

    // Now exercise sendUserMessage twice.
    send_message("Hello", conversation_id, &mut mcp).await;
    send_message("Hello again", conversation_id, &mut mcp).await;
    send_message("Hello", conversation_id, &mut mcp).await?;
    send_message("Hello again", conversation_id, &mut mcp).await?;
    Ok(())
}

#[expect(clippy::expect_used)]
async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut McpProcess) {
async fn send_message(
    message: &str,
    conversation_id: ConversationId,
    mcp: &mut McpProcess,
) -> Result<()> {
    // Now exercise sendUserMessage.
    let send_id = mcp
        .send_send_user_message_request(SendUserMessageParams {
@@ -90,19 +92,15 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
                text: message.to_string(),
            }],
        })
        .await
        .expect("send sendUserMessage");
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
    )
    .await
    .expect("sendUserMessage response timeout")
    .expect("sendUserMessage response error");
    .await??;

    let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)
        .expect("deserialize sendUserMessage response");
    let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)?;

    // Verify the task_finished notification is received.
    // Note this also ensures that the final request to the server was made.
@@ -110,9 +108,7 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await
    .expect("task_finished_notification timeout")
    .expect("task_finished_notification resp");
    .await??;
    let serde_json::Value::Object(map) = task_finished_notification
        .params
        .expect("notification should have params")
@@ -124,17 +120,105 @@ async fn send_message(message: &str, conversation_id: ConversationId, mcp: &mut
            .expect("should have conversationId"),
        &serde_json::Value::String(conversation_id.to_string())
    );

    let raw_attempt = tokio::time::timeout(
        std::time::Duration::from_millis(200),
        mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
    )
    .await;
    assert!(
        raw_attempt.is_err(),
        "unexpected raw item notification when not opted in"
    );
    Ok(())
}

#[tokio::test]
async fn test_send_message_session_not_found() {
async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_chat_completions_server(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let new_conv_id = mcp
        .send_new_conversation_request(NewConversationParams {
            developer_instructions: Some("Use the test harness tools.".to_string()),
            ..Default::default()
        })
        .await?;
    let new_conv_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
    )
    .await??;
    let NewConversationResponse {
        conversation_id, ..
    } = to_response::<_>(new_conv_resp)?;

    let add_listener_id = mcp
        .send_add_conversation_listener_request(AddConversationListenerParams {
            conversation_id,
            experimental_raw_events: true,
        })
        .await?;
    let add_listener_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
    )
    .await??;
    let AddConversationSubscriptionResponse { subscription_id: _ } =
        to_response::<_>(add_listener_resp)?;

    let send_id = mcp
        .send_send_user_message_request(SendUserMessageParams {
            conversation_id,
            items: vec![InputItem::Text {
                text: "Hello".to_string(),
            }],
        })
        .await?;

    let developer = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_developer_message(&developer, "Use the test harness tools.");

    let instructions = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_instructions_message(&instructions);

    let environment = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_environment_message(&environment);

    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(send_id)),
    )
    .await??;
    let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(response)?;

    let user_message = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_user_message(&user_message, "Hello");

    let assistant_message = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_assistant_message(&assistant_message, "Done");

    let _ = tokio::time::timeout(
        std::time::Duration::from_millis(250),
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await;

    Ok(())
}

#[tokio::test]
async fn test_send_message_session_not_found() -> Result<()> {
    // Start MCP without creating a Codex session
    let codex_home = TempDir::new().expect("tempdir");
    let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("timeout")
        .expect("init");
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let unknown = ConversationId::new();
    let req_id = mcp
@@ -144,18 +228,16 @@ async fn test_send_message_session_not_found() {
                text: "ping".to_string(),
            }],
        })
        .await
        .expect("send sendUserMessage");
        .await?;

    // Expect an error response for unknown conversation.
    let err = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(req_id)),
    )
    .await
    .expect("timeout")
    .expect("error");
    .await??;
    assert_eq!(err.id, RequestId::Integer(req_id));
    Ok(())
}

// ---------------------------------------------------------------------------
@@ -184,3 +266,126 @@ stream_max_retries = 0
        ),
    )
}

#[expect(clippy::expect_used)]
async fn read_raw_response_item(
    mcp: &mut McpProcess,
    conversation_id: ConversationId,
) -> ResponseItem {
    let raw_notification: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/raw_response_item"),
    )
    .await
    .expect("codex/event/raw_response_item notification timeout")
    .expect("codex/event/raw_response_item notification resp");

    let serde_json::Value::Object(params) = raw_notification
        .params
        .expect("codex/event/raw_response_item should have params")
    else {
        panic!("codex/event/raw_response_item should have params");
    };

    let conversation_id_value = params
        .get("conversationId")
        .and_then(|value| value.as_str())
        .expect("raw response item should include conversationId");

    assert_eq!(
        conversation_id_value,
        conversation_id.to_string(),
        "raw response item conversation mismatch"
    );

    let msg_value = params
        .get("msg")
        .cloned()
        .expect("raw response item should include msg payload");

    let event: RawResponseItemEvent =
        serde_json::from_value(msg_value).expect("deserialize raw response item");
    event.item
}

fn assert_instructions_message(item: &ResponseItem) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            let is_instructions = texts
                .iter()
                .any(|text| text.starts_with("# AGENTS.md instructions for "));
            assert!(
                is_instructions,
                "expected instructions message, got {texts:?}"
            );
        }
        other => panic!("expected instructions message, got {other:?}"),
    }
}

fn assert_developer_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "developer");
            let texts = content_texts(content);
            assert_eq!(
                texts,
                vec![expected_text],
                "expected developer instructions message, got {texts:?}"
            );
        }
        other => panic!("expected developer instructions message, got {other:?}"),
    }
}

fn assert_environment_message(item: &ResponseItem) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            assert!(
                texts
                    .iter()
                    .any(|text| text.contains("<environment_context>")),
                "expected environment context message, got {texts:?}"
            );
        }
        other => panic!("expected environment message, got {other:?}"),
    }
}

fn assert_user_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            assert_eq!(texts, vec![expected_text]);
        }
        other => panic!("expected user message, got {other:?}"),
    }
}

fn assert_assistant_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "assistant");
            let texts = content_texts(content);
            assert_eq!(texts, vec![expected_text]);
        }
        other => panic!("expected assistant message, got {other:?}"),
    }
}

fn content_texts(content: &[ContentItem]) -> Vec<&str> {
    content
        .iter()
        .filter_map(|item| match item {
            ContentItem::InputText { text } | ContentItem::OutputText { text } => {
                Some(text.as_str())
            }
            _ => None,
        })
        .collect()
}

@@ -1,5 +1,4 @@
use std::path::Path;

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
@@ -8,50 +7,38 @@ use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
use codex_core::config::ConfigToml;
use pretty_assertions::assert_eq;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn set_default_model_persists_overrides() {
    let codex_home = TempDir::new().expect("create tempdir");
    create_config_toml(codex_home.path()).expect("write config.toml");
async fn set_default_model_persists_overrides() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let params = SetDefaultModelParams {
        model: Some("gpt-4.1".to_string()),
        reasoning_effort: None,
    };

    let request_id = mcp
        .send_set_default_model_request(params)
        .await
        .expect("send setDefaultModel");
    let request_id = mcp.send_set_default_model_request(params).await?;

    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("setDefaultModel timeout")
    .expect("setDefaultModel response");
    .await??;

    let _: SetDefaultModelResponse =
        to_response(resp).expect("deserialize setDefaultModel response");
    let _: SetDefaultModelResponse = to_response(resp)?;

    let config_path = codex_home.path().join("config.toml");
    let config_contents = tokio::fs::read_to_string(&config_path)
        .await
        .expect("read config.toml");
    let config_toml: ConfigToml = toml::from_str(&config_contents).expect("parse config.toml");
    let config_contents = tokio::fs::read_to_string(&config_path).await?;
    let config_toml: ConfigToml = toml::from_str(&config_contents)?;

    assert_eq!(
        ConfigToml {
@@ -61,6 +48,7 @@ async fn set_default_model_persists_overrides() {
        },
        config_toml,
    );
    Ok(())
}

// Helper to create a config.toml; mirrors create_conversation.rs

@@ -1,3 +1,4 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::GetUserAgentResponse;
@@ -10,28 +11,18 @@ use tokio::time::timeout;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_user_agent_returns_current_codex_user_agent() {
    let codex_home = TempDir::new().unwrap_or_else(|err| panic!("create tempdir: {err}"));
async fn get_user_agent_returns_current_codex_user_agent() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("initialize timeout")
        .expect("initialize request");
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_get_user_agent_request()
        .await
        .expect("send getUserAgent");
    let request_id = mcp.send_get_user_agent_request().await?;
    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("getUserAgent timeout")
    .expect("getUserAgent response");
    .await??;

    let os_info = os_info::get();
    let user_agent = format!(
@@ -42,9 +33,9 @@ async fn get_user_agent_returns_current_codex_user_agent() {
        codex_core::terminal::user_agent()
    );

    let received: GetUserAgentResponse =
        to_response(response).expect("deserialize getUserAgent response");
    let received: GetUserAgentResponse = to_response(response)?;
    let expected = GetUserAgentResponse { user_agent };

    assert_eq!(received, expected);
    Ok(())
}

@@ -1,5 +1,4 @@
use std::time::Duration;

use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
@@ -7,45 +6,41 @@ use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::UserInfoResponse;
use codex_core::auth::AuthCredentialsStoreMode;
use pretty_assertions::assert_eq;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_info_returns_email_from_auth_json() {
    let codex_home = TempDir::new().expect("create tempdir");
async fn user_info_returns_email_from_auth_json() -> Result<()> {
    let codex_home = TempDir::new()?;

    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("access")
            .refresh_token("refresh")
            .email("user@example.com"),
    )
    .expect("write chatgpt auth");
        AuthCredentialsStoreMode::File,
    )?;

    let mut mcp = McpProcess::new(codex_home.path())
        .await
        .expect("spawn mcp process");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("initialize timeout")
        .expect("initialize request");
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp.send_user_info_request().await.expect("send userInfo");
    let request_id = mcp.send_user_info_request().await?;
    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("userInfo timeout")
    .expect("userInfo response");
    .await??;

    let received: UserInfoResponse = to_response(response).expect("deserialize userInfo response");
    let received: UserInfoResponse = to_response(response)?;
    let expected = UserInfoResponse {
        alleged_user_email: Some("user@example.com".to_string()),
    };

    assert_eq!(received, expected);
    Ok(())
}

309
codex-rs/app-server/tests/suite/v2/account.rs
Normal file
@@ -0,0 +1,309 @@
use anyhow::Result;
use anyhow::bail;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::CancelLoginAccountParams;
use codex_app_server_protocol::CancelLoginAccountResponse;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::GetAuthStatusResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginAccountResponse;
use codex_app_server_protocol::LogoutAccountResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_login::login_with_api_key;
use pretty_assertions::assert_eq;
use serial_test::serial;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

// Helper to create a minimal config.toml for the app server
fn create_config_toml(
    codex_home: &Path,
    forced_method: Option<&str>,
    forced_workspace_id: Option<&str>,
) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let forced_line = if let Some(method) = forced_method {
        format!("forced_login_method = \"{method}\"\n")
    } else {
        String::new()
    };
    let forced_workspace_line = if let Some(ws) = forced_workspace_id {
        format!("forced_chatgpt_workspace_id = \"{ws}\"\n")
    } else {
        String::new()
    };
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
{forced_line}
{forced_workspace_line}

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "http://127.0.0.1:0/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
    );
    std::fs::write(config_toml, contents)
}

#[tokio::test]
async fn logout_account_removes_auth_and_notifies() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), None, None)?;

    login_with_api_key(
        codex_home.path(),
        "sk-test-key",
        AuthCredentialsStoreMode::File,
    )?;
    assert!(codex_home.path().join("auth.json").exists());

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let id = mcp.send_logout_account_request().await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(id)),
    )
    .await??;
    let _ok: LogoutAccountResponse = to_response(resp)?;

    let note = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;
    let parsed: ServerNotification = note.try_into()?;
    let ServerNotification::AccountUpdated(payload) = parsed else {
        bail!("unexpected notification: {parsed:?}");
    };
    assert!(
        payload.auth_mode.is_none(),
        "auth_method should be None after logout"
    );

    assert!(
        !codex_home.path().join("auth.json").exists(),
        "auth.json should be deleted"
    );

    let status_id = mcp
        .send_get_auth_status_request(GetAuthStatusParams {
            include_token: Some(true),
            refresh_token: Some(false),
        })
        .await?;
    let status_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(status_id)),
    )
    .await??;
    let status: GetAuthStatusResponse = to_response(status_resp)?;
    assert_eq!(status.auth_method, None);
    assert_eq!(status.auth_token, None);
    Ok(())
}

#[tokio::test]
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), None, None)?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let req_id = mcp
        .send_login_account_api_key_request("sk-test-key")
        .await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
    )
    .await??;
    let login: LoginAccountResponse = to_response(resp)?;
    assert_eq!(login, LoginAccountResponse::ApiKey {});

    let note = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/login/completed"),
    )
    .await??;
    let parsed: ServerNotification = note.try_into()?;
    let ServerNotification::AccountLoginCompleted(payload) = parsed else {
        bail!("unexpected notification: {parsed:?}");
    };
    pretty_assertions::assert_eq!(payload.login_id, None);
    pretty_assertions::assert_eq!(payload.success, true);
    pretty_assertions::assert_eq!(payload.error, None);

    let note = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;
    let parsed: ServerNotification = note.try_into()?;
    let ServerNotification::AccountUpdated(payload) = parsed else {
        bail!("unexpected notification: {parsed:?}");
    };
    pretty_assertions::assert_eq!(payload.auth_mode, Some(AuthMode::ApiKey));

    assert!(codex_home.path().join("auth.json").exists());
    Ok(())
}

#[tokio::test]
async fn login_account_api_key_rejected_when_forced_chatgpt() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), Some("chatgpt"), None)?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_login_account_api_key_request("sk-test-key")
        .await?;
    let err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await??;

    assert_eq!(
        err.error.message,
        "API key login is disabled. Use ChatGPT login instead."
    );
    Ok(())
}

#[tokio::test]
async fn login_account_chatgpt_rejected_when_forced_api() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), Some("api"), None)?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp.send_login_account_chatgpt_request().await?;
    let err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await??;

    assert_eq!(
        err.error.message,
        "ChatGPT login is disabled. Use API key login instead."
    );
    Ok(())
}

#[tokio::test]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_account_chatgpt_start() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), None, None)?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp.send_login_account_chatgpt_request().await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let login: LoginAccountResponse = to_response(resp)?;
    let LoginAccountResponse::Chatgpt { login_id, auth_url } = login else {
        bail!("unexpected login response: {login:?}");
    };
    assert!(
        auth_url.contains("redirect_uri=http%3A%2F%2Flocalhost"),
        "auth_url should contain a redirect_uri to localhost"
    );

    let cancel_id = mcp
        .send_cancel_login_account_request(CancelLoginAccountParams {
            login_id: login_id.clone(),
        })
        .await?;
    let cancel_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
    )
    .await??;
    let _ok: CancelLoginAccountResponse = to_response(cancel_resp)?;

    let note = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/login/completed"),
    )
    .await??;
    let parsed: ServerNotification = note.try_into()?;
    let ServerNotification::AccountLoginCompleted(payload) = parsed else {
        bail!("unexpected notification: {parsed:?}");
    };
    pretty_assertions::assert_eq!(payload.login_id, Some(login_id));
    pretty_assertions::assert_eq!(payload.success, false);
    assert!(
        payload.error.is_some(),
        "expected a non-empty error on cancel"
    );

    let maybe_updated = timeout(
        Duration::from_millis(500),
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await;
    assert!(
        maybe_updated.is_err(),
        "account/updated should not be emitted when login is cancelled"
    );
    Ok(())
}

#[tokio::test]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn login_account_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), None, Some("ws-forced"))?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp.send_login_account_chatgpt_request().await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let login: LoginAccountResponse = to_response(resp)?;
    let LoginAccountResponse::Chatgpt { auth_url, .. } = login else {
        bail!("unexpected login response: {login:?}");
    };
    assert!(
        auth_url.contains("allowed_workspace_id=ws-forced"),
        "auth URL should include forced workspace"
    );
    Ok(())
}
7
codex-rs/app-server/tests/suite/v2/mod.rs
Normal file
@@ -0,0 +1,7 @@
mod account;
mod thread_archive;
mod thread_list;
mod thread_resume;
mod thread_start;
mod turn_interrupt;
mod turn_start;
93
codex-rs/app-server/tests/suite/v2/thread_archive.rs
Normal file
@@ -0,0 +1,93 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadArchiveResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_core::find_conversation_path_by_id_str;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_archive_moves_rollout_into_archived_directory() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;
    assert!(!thread.id.is_empty());

    // Locate the rollout path recorded for this thread id.
    let rollout_path = find_conversation_path_by_id_str(codex_home.path(), &thread.id)
        .await?
        .expect("expected rollout path for thread id to exist");
    assert!(
        rollout_path.exists(),
        "expected {} to exist",
        rollout_path.display()
    );

    // Archive the thread.
    let archive_id = mcp
        .send_thread_archive_request(ThreadArchiveParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let archive_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
    )
    .await??;
    let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;

    // Verify file moved.
    let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
    // The archived file keeps the original filename (rollout-...-<id>.jsonl).
    let archived_rollout_path =
        archived_directory.join(rollout_path.file_name().expect("rollout file name"));
    assert!(
        !rollout_path.exists(),
        "expected rollout path {} to be moved",
        rollout_path.display()
    );
    assert!(
        archived_rollout_path.exists(),
        "expected archived rollout path {} to exist",
        archived_rollout_path.display()
    );

    Ok(())
}

fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(config_toml, config_contents())
}

fn config_contents() -> &'static str {
    r#"model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
"#
}
205
codex-rs/app-server/tests/suite/v2/thread_list.rs
Normal file
@@ -0,0 +1,205 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadListParams;
use codex_app_server_protocol::ThreadListResponse;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;
use uuid::Uuid;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_list_basic_empty() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // List threads in an empty CODEX_HOME; should return an empty page with nextCursor: null.
    let list_id = mcp
        .send_thread_list_request(ThreadListParams {
            cursor: None,
            limit: Some(10),
            model_providers: None,
        })
        .await?;
    let list_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
    )
    .await??;
    let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(list_resp)?;
    assert!(data.is_empty());
    assert_eq!(next_cursor, None);

    Ok(())
}

// Minimal config.toml for listing.
fn create_minimal_config(codex_home: &std::path::Path) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        r#"
model = "mock-model"
approval_policy = "never"
"#,
    )
}

#[tokio::test]
async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    // Create three rollouts so we can paginate with limit=2.
    let _a = create_fake_rollout(
        codex_home.path(),
        "2025-01-02T12-00-00",
        "2025-01-02T12:00:00Z",
        "Hello",
        Some("mock_provider"),
    )?;
    let _b = create_fake_rollout(
        codex_home.path(),
        "2025-01-01T13-00-00",
        "2025-01-01T13:00:00Z",
        "Hello",
        Some("mock_provider"),
    )?;
    let _c = create_fake_rollout(
        codex_home.path(),
        "2025-01-01T12-00-00",
        "2025-01-01T12:00:00Z",
        "Hello",
        Some("mock_provider"),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Page 1: limit 2 → expect next_cursor Some.
    let page1_id = mcp
        .send_thread_list_request(ThreadListParams {
            cursor: None,
            limit: Some(2),
            model_providers: Some(vec!["mock_provider".to_string()]),
        })
        .await?;
    let page1_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(page1_id)),
    )
    .await??;
    let ThreadListResponse {
        data: data1,
        next_cursor: cursor1,
    } = to_response::<ThreadListResponse>(page1_resp)?;
    assert_eq!(data1.len(), 2);
    let cursor1 = cursor1.expect("expected nextCursor on first page");

    // Page 2: with cursor → expect next_cursor None when no more results.
    let page2_id = mcp
        .send_thread_list_request(ThreadListParams {
            cursor: Some(cursor1),
            limit: Some(2),
            model_providers: Some(vec!["mock_provider".to_string()]),
        })
        .await?;
    let page2_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(page2_id)),
    )
    .await??;
    let ThreadListResponse {
        data: data2,
        next_cursor: cursor2,
    } = to_response::<ThreadListResponse>(page2_resp)?;
    assert!(data2.len() <= 2);
    assert_eq!(cursor2, None, "expected nextCursor to be null on last page");

    Ok(())
}

#[tokio::test]
async fn thread_list_respects_provider_filter() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    // Create rollouts under two providers.
    let _a = create_fake_rollout(
        codex_home.path(),
        "2025-01-02T10-00-00",
        "2025-01-02T10:00:00Z",
        "X",
        Some("mock_provider"),
    )?; // mock_provider
    // one with a different provider
    let uuid = Uuid::new_v4();
    let dir = codex_home
        .path()
        .join("sessions")
        .join("2025")
        .join("01")
        .join("02");
    std::fs::create_dir_all(&dir)?;
    let file_path = dir.join(format!("rollout-2025-01-02T11-00-00-{uuid}.jsonl"));
    let lines = [
        json!({
            "timestamp": "2025-01-02T11:00:00Z",
            "type": "session_meta",
            "payload": {
                "id": uuid,
                "timestamp": "2025-01-02T11:00:00Z",
                "cwd": "/",
                "originator": "codex",
                "cli_version": "0.0.0",
                "instructions": null,
                "source": "vscode",
                "model_provider": "other_provider"
            }
        })
        .to_string(),
        json!({
            "timestamp": "2025-01-02T11:00:00Z",
            "type":"response_item",
            "payload": {"type":"message","role":"user","content":[{"type":"input_text","text":"X"}]}
        })
        .to_string(),
        json!({
            "timestamp": "2025-01-02T11:00:00Z",
            "type":"event_msg",
            "payload": {"type":"user_message","message":"X","kind":"plain"}
        })
        .to_string(),
    ];
    std::fs::write(file_path, lines.join("\n") + "\n")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Filter to only other_provider; expect 1 item, nextCursor None.
    let list_id = mcp
        .send_thread_list_request(ThreadListParams {
            cursor: None,
            limit: Some(10),
            model_providers: Some(vec!["other_provider".to_string()]),
        })
        .await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
    )
    .await??;
    let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;
    assert_eq!(data.len(), 1);
    assert_eq!(next_cursor, None);

    Ok(())
}
79
codex-rs/app-server/tests/suite/v2/thread_resume.rs
Normal file
@@ -0,0 +1,79 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_resume_returns_existing_thread() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5-codex".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;

    // Resume it via v2 API.
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread: resumed } =
        to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(resumed.id, thread.id);

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
81
codex-rs/app-server/tests/suite/v2/thread_start.rs
Normal file
@@ -0,0 +1,81 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
    // Provide a mock server and config so model wiring is valid.
    let server = create_mock_chat_completions_server(vec![]).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    // Start server and initialize.
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a v2 thread with an explicit model override.
    let req_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5".to_string()),
            ..Default::default()
        })
        .await?;

    // Expect a proper JSON-RPC response with a thread id.
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(resp)?;
    assert!(!thread.id.is_empty(), "thread id should not be empty");

    // A corresponding thread/started notification should arrive.
    let notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("thread/started"),
    )
    .await??;
    let started: ThreadStartedNotification =
        serde_json::from_value(notif.params.expect("params must be present"))?;
    assert_eq!(started.thread.id, thread.id);

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
128
codex-rs/app-server/tests/suite/v2/turn_interrupt.rs
Normal file
@@ -0,0 +1,128 @@
#![cfg(unix)]

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnInterruptResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn turn_interrupt_aborts_running_turn() -> Result<()> {
    // Use a portable sleep command to keep the turn running.
    #[cfg(target_os = "windows")]
    let shell_command = vec![
        "powershell".to_string(),
        "-Command".to_string(),
        "Start-Sleep -Seconds 10".to_string(),
    ];
    #[cfg(not(target_os = "windows"))]
    let shell_command = vec!["sleep".to_string(), "10".to_string()];

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let working_directory = tmp.path().join("workdir");
    std::fs::create_dir(&working_directory)?;

    // Mock server: long-running shell command then (after abort) nothing else needed.
    let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),
        "call_sleep",
    )?])
    .await;
    create_config_toml(&codex_home, &server.uri())?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a v2 thread and capture its id.
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn that triggers a long-running command.
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run sleep".to_string(),
            }],
            cwd: Some(working_directory.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    // Give the command a brief moment to start.
    tokio::time::sleep(std::time::Duration::from_secs(1)).await;

    // Interrupt the in-progress turn by id (v2 API).
    let interrupt_id = mcp
        .send_turn_interrupt_request(TurnInterruptParams {
            thread_id: thread.id,
            turn_id: turn.id,
        })
        .await?;
    let interrupt_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(interrupt_id)),
    )
    .await??;
    let _resp: TurnInterruptResponse = to_response::<TurnInterruptResponse>(interrupt_resp)?;

    // No fields to assert on; successful deserialization confirms proper response shape.
    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "workspace-write"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
486
codex-rs/app-server/tests/suite/v2/turn_start.rs
Normal file
@@ -0,0 +1,486 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {
    // Provide a mock server and config so model wiring is valid.
    // Three Codex turns hit the mock model (session start + two turn/start calls).
    let responses = vec![
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
    ];
    let server = create_mock_chat_completions_server_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread (v2) and capture its id.
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn with only input and thread_id set (no overrides).
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
    assert!(!turn.id.is_empty());

    // Expect a turn/started notification.
    let notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/started"),
    )
    .await??;
    let started: TurnStartedNotification =
        serde_json::from_value(notif.params.expect("params must be present"))?;
    assert_eq!(
        started.turn.status,
        codex_app_server_protocol::TurnStatus::InProgress
    );

    // Send a second turn that exercises the overrides path: change the model.
    let turn_req2 = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Second".to_string(),
            }],
            model: Some("mock-model-override".to_string()),
            ..Default::default()
        })
        .await?;
    let turn_resp2: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req2)),
    )
    .await??;
    let TurnStartResponse { turn: turn2 } = to_response::<TurnStartResponse>(turn_resp2)?;
    assert!(!turn2.id.is_empty());
    // Ensure the second turn has a different id than the first.
    assert_ne!(turn.id, turn2.id);

    // Expect a second turn/started notification as well.
    let _notif2: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/started"),
    )
    .await??;

    // And we should ultimately get a task_complete without having to add a
    // legacy conversation listener explicitly (auto-attached by thread/start).
    let _task_complete: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn turn_start_accepts_local_image_input() -> Result<()> {
    // Two Codex turns hit the mock model (session start + turn/start).
    let responses = vec![
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
    ];
    // Use the unchecked variant because the request payload includes a LocalImage
    // which the strict matcher does not currently cover.
    let server = create_mock_chat_completions_server_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(thread_resp)?;

    let image_path = codex_home.path().join("image.png");
    // No need to actually write the file; we just exercise the input path.

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::LocalImage { path: image_path }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
    assert!(!turn.id.is_empty());

    // This test only validates that turn/start responds and returns a turn.
    Ok(())
}

#[tokio::test]
async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().to_path_buf();

    // Mock server: first turn requests a shell call (elicitation), then completes.
    // The second turn is the same, but we set approval_policy=never to avoid elicitation.
    let responses = vec![
        create_shell_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
                "print(42)".to_string(),
            ],
            None,
            Some(5000),
            "call1",
        )?,
        create_final_assistant_message_sse_response("done 1")?,
        create_shell_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
                "print(42)".to_string(),
            ],
            None,
            Some(5000),
            "call2",
        )?,
        create_final_assistant_message_sse_response("done 2")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    // Default approval is untrusted to force elicitation on the first turn.
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(codex_home.as_path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // thread/start
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;

    // turn/start: expect an ExecCommandApproval request from the server.
    let first_turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python".to_string(),
            }],
            ..Default::default()
        })
        .await?;
    // Acknowledge the RPC response.
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
    )
    .await??;

    // Receive the elicitation.
    let server_req = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::ExecCommandApproval { request_id, params } = server_req else {
        panic!("expected ExecCommandApproval request");
    };
    assert_eq!(params.call_id, "call1");
    assert_eq!(
        params.parsed_cmd,
        vec![ParsedCommand::Unknown {
            cmd: "python3 -c 'print(42)'".to_string()
        }]
    );

    // Approve and wait for task completion.
    mcp.send_response(
        request_id,
        serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
    )
    .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    // A second turn with approval_policy=never should not elicit approval.
    let second_turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python again".to_string(),
            }],
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
            sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
    )
    .await??;

    // Ensure we do NOT receive an ExecCommandApproval request before the task completes.
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
    // When returning Result from a test, pass an Ok(()) to the skip macro
    // so the early return type matches. The no-arg form returns unit.
    skip_if_no_network!(Ok(()));

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let workspace_root = tmp.path().join("workspace");
    std::fs::create_dir(&workspace_root)?;
    let first_cwd = workspace_root.join("turn1");
    let second_cwd = workspace_root.join("turn2");
    std::fs::create_dir(&first_cwd)?;
    std::fs::create_dir(&second_cwd)?;

    let responses = vec![
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo first turn".to_string(),
            ],
            None,
            Some(5000),
            "call-first",
        )?,
        create_final_assistant_message_sse_response("done first")?,
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo second turn".to_string(),
            ],
            None,
            Some(5000),
            "call-second",
        )?,
        create_final_assistant_message_sse_response("done second")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // thread/start
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread } = to_response::<ThreadStartResponse>(start_resp)?;

    // First turn with a workspace-write sandbox rooted at first_cwd.
    let first_turn = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "first turn".to_string(),
            }],
            cwd: Some(first_cwd.clone()),
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
            sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::WorkspaceWrite {
                writable_roots: vec![first_cwd.clone()],
                network_access: false,
                exclude_tmpdir_env_var: false,
                exclude_slash_tmp: false,
            }),
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(first_turn)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    // Second turn switches to danger-full-access and second_cwd; ensure exec begins in second_cwd.
    let second_turn = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "second turn".to_string(),
            }],
            cwd: Some(second_cwd.clone()),
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
            sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(second_turn)),
    )
    .await??;

    let exec_begin_notification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
    )
    .await??;
    let params = exec_begin_notification
        .params
        .clone()
        .expect("exec_command_begin params");
    let event: Event = serde_json::from_value(params).expect("deserialize exec begin event");
    let exec_begin = match event.msg {
        EventMsg::ExecCommandBegin(exec_begin) => exec_begin,
        other => panic!("expected ExecCommandBegin event, got {other:?}"),
    };
    assert_eq!(exec_begin.cwd, second_cwd);
    assert_eq!(
        exec_begin.command,
        vec![
            "bash".to_string(),
            "-lc".to_string(),
            "echo second turn".to_string()
        ]
    );

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
    codex_home: &Path,
    server_uri: &str,
    approval_policy: &str,
) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "{approval_policy}"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}

@@ -14,7 +14,7 @@ codex-core = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
codex-git-apply = { path = "../git-apply" }
codex-git = { workspace = true }

[dev-dependencies]
tempfile = { workspace = true }

@@ -32,7 +32,8 @@ pub async fn run_apply_command(
    )
    .await?;

    init_chatgpt_token_from_auth(&config.codex_home).await?;
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
        .await?;

    let task_response = get_task(&config, apply_cli.task_id).await?;
    apply_diff_from_task(task_response, cwd).await

@@ -58,13 +59,13 @@ pub async fn apply_diff_from_task(

async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
    let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
    let req = codex_git_apply::ApplyGitRequest {
    let req = codex_git::ApplyGitRequest {
        cwd,
        diff: diff.to_string(),
        revert: false,
        preflight: false,
    };
    let res = codex_git_apply::apply_git_patch(&req)?;
    let res = codex_git::apply_git_patch(&req)?;
    if res.exit_code != 0 {
        anyhow::bail!(
            "Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",

@@ -13,7 +13,8 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    path: String,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    init_chatgpt_token_from_auth(&config.codex_home).await?;
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
        .await?;

    // Make direct HTTP request to ChatGPT backend API with the token
    let client = create_client();

@@ -3,6 +3,7 @@ use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::token_data::TokenData;

static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));
@@ -18,8 +19,11 @@ pub fn set_chatgpt_token_data(value: TokenData) {
}

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
    let auth = CodexAuth::from_codex_home(codex_home)?;
pub async fn init_chatgpt_token_from_auth(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    let auth = CodexAuth::from_auth_storage(codex_home, auth_credentials_store_mode)?;
    if let Some(auth) = auth {
        let token_data = auth.get_token_data().await?;
        set_chatgpt_token_data(token_data);

@@ -39,6 +39,7 @@ ctor = { workspace = true }
owo-colors = { workspace = true }
serde_json = { workspace = true }
supports-color = { workspace = true }
toml = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
@@ -47,6 +48,9 @@ tokio = { workspace = true, features = [
    "signal",
] }

[target.'cfg(target_os = "windows")'.dependencies]
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }

[dev-dependencies]
assert_cmd = { workspace = true }
assert_matches = { workspace = true }

@@ -11,6 +11,7 @@ use codex_protocol::config_types::SandboxMode;

use crate::LandlockCommand;
use crate::SeatbeltCommand;
use crate::WindowsCommand;
use crate::exit_status::handle_exit_status;

pub async fn run_command_under_seatbelt(
@@ -51,9 +52,29 @@ pub async fn run_command_under_landlock(
    .await
}

pub async fn run_command_under_windows(
    command: WindowsCommand,
    codex_linux_sandbox_exe: Option<PathBuf>,
) -> anyhow::Result<()> {
    let WindowsCommand {
        full_auto,
        config_overrides,
        command,
    } = command;
    run_command_under_sandbox(
        full_auto,
        command,
        config_overrides,
        codex_linux_sandbox_exe,
        SandboxType::Windows,
    )
    .await
}

enum SandboxType {
    Seatbelt,
    Landlock,
    Windows,
}

async fn run_command_under_sandbox(
@@ -87,6 +108,65 @@ async fn run_command_under_sandbox(
    let stdio_policy = StdioPolicy::Inherit;
    let env = create_env(&config.shell_environment_policy);

    // Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
    if let SandboxType::Windows = sandbox_type {
        #[cfg(target_os = "windows")]
        {
            use codex_windows_sandbox::run_windows_sandbox_capture;

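            // Map the sandbox policy onto the mode string the Windows sandbox
            // expects. Note that DangerFullAccess also maps to "workspace-write"
            // here (assumption: the capture-based Windows sandbox has no
            // full-access mode, so workspace-write is the closest behavior).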
            let policy_str = match &config.sandbox_policy {
                codex_core::protocol::SandboxPolicy::DangerFullAccess => "workspace-write",
                codex_core::protocol::SandboxPolicy::ReadOnly => "read-only",
                codex_core::protocol::SandboxPolicy::WorkspaceWrite { .. } => "workspace-write",
            };

            let sandbox_cwd = sandbox_policy_cwd.clone();
            let cwd_clone = cwd.clone();
            let env_map = env.clone();
            let command_vec = command.clone();
            let base_dir = config.codex_home.clone();
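            // The capture call is synchronous, so run it on the blocking thread
            // pool rather than stalling the async runtime.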
            let res = tokio::task::spawn_blocking(move || {
                run_windows_sandbox_capture(
                    policy_str,
                    &sandbox_cwd,
                    command_vec,
                    &cwd_clone,
                    env_map,
                    None,
                    Some(base_dir.as_path()),
                )
            })
            .await;

            let capture = match res {
                Ok(Ok(v)) => v,
                Ok(Err(err)) => {
                    eprintln!("windows sandbox failed: {err}");
                    std::process::exit(1);
                }
                Err(join_err) => {
                    eprintln!("windows sandbox join error: {join_err}");
                    std::process::exit(1);
                }
            };

            if !capture.stdout.is_empty() {
                use std::io::Write;
                let _ = std::io::stdout().write_all(&capture.stdout);
            }
            if !capture.stderr.is_empty() {
                use std::io::Write;
                let _ = std::io::stderr().write_all(&capture.stderr);
            }

            std::process::exit(capture.exit_code);
        }
        #[cfg(not(target_os = "windows"))]
        {
            anyhow::bail!("Windows sandbox is only available on Windows");
        }
    }

    let mut child = match sandbox_type {
        SandboxType::Seatbelt => {
            spawn_command_under_seatbelt(
@@ -115,6 +195,9 @@ async fn run_command_under_sandbox(
            )
            .await?
        }
        SandboxType::Windows => {
            unreachable!("Windows sandbox should have been handled above");
        }
    };
    let status = child.wait().await?;

@@ -32,3 +32,17 @@ pub struct LandlockCommand {
    #[arg(trailing_var_arg = true)]
    pub command: Vec<String>,
}

#[derive(Debug, Parser)]
pub struct WindowsCommand {
    /// Convenience alias for low-friction sandboxed automatic execution (network-disabled sandbox that can write to cwd and TMPDIR)
    #[arg(long = "full-auto", default_value_t = false)]
    pub full_auto: bool,

    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,

    /// Full command args to run under Windows restricted token sandbox.
    #[arg(trailing_var_arg = true)]
    pub command: Vec<String>,
}

@@ -1,6 +1,7 @@
use codex_app_server_protocol::AuthMode;
use codex_common::CliConfigOverrides;
use codex_core::CodexAuth;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::CLIENT_ID;
use codex_core::auth::login_with_api_key;
use codex_core::auth::logout;
@@ -17,11 +18,13 @@ use std::path::PathBuf;
pub async fn login_with_chatgpt(
    codex_home: PathBuf,
    forced_chatgpt_workspace_id: Option<String>,
    cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    let opts = ServerOptions::new(
        codex_home,
        CLIENT_ID.to_string(),
        forced_chatgpt_workspace_id,
        cli_auth_credentials_store_mode,
    );
    let server = run_login_server(opts)?;

@@ -43,7 +46,13 @@ pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) ->

    let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();

    match login_with_chatgpt(config.codex_home, forced_chatgpt_workspace_id).await {
    match login_with_chatgpt(
        config.codex_home,
        forced_chatgpt_workspace_id,
        config.cli_auth_credentials_store_mode,
    )
    .await
    {
        Ok(_) => {
            eprintln!("Successfully logged in");
            std::process::exit(0);
@@ -66,7 +75,11 @@ pub async fn run_login_with_api_key(
        std::process::exit(1);
    }

    match login_with_api_key(&config.codex_home, &api_key) {
    match login_with_api_key(
        &config.codex_home,
        &api_key,
        config.cli_auth_credentials_store_mode,
    ) {
        Ok(_) => {
            eprintln!("Successfully logged in");
            std::process::exit(0);
@@ -121,6 +134,7 @@ pub async fn run_login_with_device_code(
        config.codex_home,
        client_id.unwrap_or(CLIENT_ID.to_string()),
        forced_chatgpt_workspace_id,
        config.cli_auth_credentials_store_mode,
    );
    if let Some(iss) = issuer_base_url {
        opts.issuer = iss;
@@ -140,7 +154,7 @@ pub async fn run_login_with_device_code(
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;

    match CodexAuth::from_codex_home(&config.codex_home) {
    match CodexAuth::from_auth_storage(&config.codex_home, config.cli_auth_credentials_store_mode) {
        Ok(Some(auth)) => match auth.mode {
            AuthMode::ApiKey => match auth.get_token().await {
                Ok(api_key) => {
@@ -171,7 +185,7 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
pub async fn run_logout(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;

    match logout(&config.codex_home) {
    match logout(&config.codex_home, config.cli_auth_credentials_store_mode) {
        Ok(true) => {
            eprintln!("Successfully logged out");
            std::process::exit(0);

@@ -7,6 +7,7 @@ use codex_chatgpt::apply_command::ApplyCommand;
use codex_chatgpt::apply_command::run_apply_command;
use codex_cli::LandlockCommand;
use codex_cli::SeatbeltCommand;
use codex_cli::WindowsCommand;
use codex_cli::login::read_api_key_from_stdin;
use codex_cli::login::run_login_status;
use codex_cli::login::run_login_with_api_key;
@@ -29,6 +30,7 @@ mod mcp_cmd;
use crate::mcp_cmd::McpCli;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::features::is_known_feature_key;

/// Codex CLI
///
@@ -150,6 +152,9 @@ enum SandboxCommand {
    /// Run a command under Landlock+seccomp (Linux only).
    #[clap(visible_alias = "landlock")]
    Linux(LandlockCommand),

    /// Run a command under Windows restricted token (Windows only).
    Windows(WindowsCommand),
}

#[derive(Debug, Parser)]
@@ -286,15 +291,25 @@ struct FeatureToggles {
}

impl FeatureToggles {
    fn to_overrides(&self) -> Vec<String> {
    fn to_overrides(&self) -> anyhow::Result<Vec<String>> {
        let mut v = Vec::new();
        for k in &self.enable {
            v.push(format!("features.{k}=true"));
        for feature in &self.enable {
            Self::validate_feature(feature)?;
            v.push(format!("features.{feature}=true"));
        }
        for k in &self.disable {
            v.push(format!("features.{k}=false"));
        for feature in &self.disable {
            Self::validate_feature(feature)?;
            v.push(format!("features.{feature}=false"));
        }
        Ok(v)
    }

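    // Reject unknown feature keys up front instead of silently forwarding
    // a typo as a config override.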
    fn validate_feature(feature: &str) -> anyhow::Result<()> {
        if is_known_feature_key(feature) {
            Ok(())
        } else {
            anyhow::bail!("Unknown feature flag: {feature}")
        }
        v
    }
}

@@ -345,9 +360,8 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
    } = MultitoolCli::parse();

    // Fold --enable/--disable into config overrides so they flow to all subcommands.
    root_config_overrides
        .raw_overrides
        .extend(feature_toggles.to_overrides());
    let toggle_overrides = feature_toggles.to_overrides()?;
    root_config_overrides.raw_overrides.extend(toggle_overrides);

    match subcommand {
        None => {
@@ -462,6 +476,17 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            )
            .await?;
        }
        SandboxCommand::Windows(mut windows_cli) => {
            prepend_config_flags(
                &mut windows_cli.config_overrides,
                root_config_overrides.clone(),
            );
            codex_cli::debug_sandbox::run_command_under_windows(
                windows_cli,
                codex_linux_sandbox_exe,
            )
            .await?;
        }
    },
    Some(Subcommand::Apply(mut apply_cli)) => {
        prepend_config_flags(
@@ -485,15 +510,21 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
    Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
        FeaturesSubcommand::List => {
            // Respect root-level `-c` overrides plus top-level flags like `--profile`.
            let cli_kv_overrides = root_config_overrides
            let mut cli_kv_overrides = root_config_overrides
                .parse_overrides()
                .map_err(|e| anyhow::anyhow!(e))?;
                .map_err(anyhow::Error::msg)?;

            // Honor `--search` via the new feature toggle.
            if interactive.web_search {
                cli_kv_overrides.push((
                    "features.web_search_request".to_string(),
                    toml::Value::Boolean(true),
                ));
            }

            // Thread through relevant top-level flags (at minimum, `--profile`).
            // Also honor `--search` since it maps to a feature toggle.
            let overrides = ConfigOverrides {
                config_profile: interactive.config_profile.clone(),
                tools_web_search_request: interactive.web_search.then_some(true),
                ..Default::default()
            };

@@ -605,6 +636,7 @@ mod tests {
    use assert_matches::assert_matches;
    use codex_core::protocol::TokenUsage;
    use codex_protocol::ConversationId;
    use pretty_assertions::assert_eq;

    fn finalize_from_args(args: &[&str]) -> TuiCli {
        let cli = MultitoolCli::try_parse_from(args).expect("parse");
@@ -781,4 +813,32 @@ mod tests {
        assert!(!interactive.resume_last);
        assert_eq!(interactive.resume_session_id, None);
    }

    #[test]
    fn feature_toggles_known_features_generate_overrides() {
        let toggles = FeatureToggles {
            enable: vec!["web_search_request".to_string()],
            disable: vec!["unified_exec".to_string()],
        };
        let overrides = toggles.to_overrides().expect("valid features");
        assert_eq!(
            overrides,
            vec![
                "features.web_search_request=true".to_string(),
                "features.unified_exec=false".to_string(),
            ]
        );
    }

    #[test]
    fn feature_toggles_unknown_feature_errors() {
        let toggles = FeatureToggles {
            enable: vec!["does_not_exist".to_string()],
            disable: Vec::new(),
        };
        let err = toggles
            .to_overrides()
            .expect_err("feature should be rejected");
        assert_eq!(err.to_string(), "Unknown feature flag: does_not_exist");
    }
}

@@ -9,11 +9,11 @@ use codex_common::CliConfigOverrides;
use codex_common::format_env_display::format_env_display;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::write_global_mcp_servers;
use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::features::Feature;
use codex_core::mcp::auth::compute_auth_statuses;
use codex_core::protocol::McpAuthStatus;
@@ -196,7 +196,9 @@ impl McpCli {

async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
    // Validate any provided overrides even though they are not currently applied.
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let overrides = config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .await
        .context("failed to load configuration")?;
@@ -263,7 +265,10 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re

    servers.insert(name.clone(), new_entry);

    write_global_mcp_servers(&codex_home, &servers)
    ConfigEditsBuilder::new(&codex_home)
        .replace_mcp_servers(&servers)
        .apply()
        .await
        .with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;

    println!("Added global MCP server '{name}'.");
@@ -274,26 +279,42 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
        http_headers,
        env_http_headers,
    } = transport
        && matches!(supports_oauth_login(&url).await, Ok(true))
    {
        println!("Detected OAuth support. Starting OAuth flow…");
        perform_oauth_login(
            &name,
            &url,
            config.mcp_oauth_credentials_store_mode,
            http_headers.clone(),
            env_http_headers.clone(),
            &Vec::new(),
        )
        .await?;
        println!("Successfully logged in.");
        match supports_oauth_login(&url).await {
            Ok(true) => {
                if !config.features.enabled(Feature::RmcpClient) {
                    println!(
                        "MCP server supports login. Add `experimental_use_rmcp_client = true` \
                         to your config.toml and run `codex mcp login {name}` to login."
                    );
                } else {
                    println!("Detected OAuth support. Starting OAuth flow…");
                    perform_oauth_login(
                        &name,
                        &url,
                        config.mcp_oauth_credentials_store_mode,
                        http_headers.clone(),
                        env_http_headers.clone(),
                        &Vec::new(),
                    )
                    .await?;
                    println!("Successfully logged in.");
                }
            }
            Ok(false) => {}
            Err(_) => println!(
                "MCP server may or may not require login. Run `codex mcp login {name}` to login."
            ),
        }
    }

    Ok(())
}

async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
    config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;

    let RemoveArgs { name } = remove_args;

@@ -307,7 +328,10 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
    let removed = servers.remove(&name).is_some();

    if removed {
        write_global_mcp_servers(&codex_home, &servers)
        ConfigEditsBuilder::new(&codex_home)
            .replace_mcp_servers(&servers)
            .apply()
            .await
            .with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
    }

@@ -321,14 +345,16 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
}

async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs) -> Result<()> {
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let overrides = config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .await
        .context("failed to load configuration")?;

    if !config.features.enabled(Feature::RmcpClient) {
        bail!(
            "OAuth login is only supported when experimental_use_rmcp_client is true in config.toml."
            "OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
        );
    }

@@ -362,7 +388,9 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
}

async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutArgs) -> Result<()> {
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let overrides = config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .await
        .context("failed to load configuration")?;
@@ -389,7 +417,9 @@ async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutAr
}

async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let overrides = config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .await
        .context("failed to load configuration")?;
@@ -523,10 +553,12 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
            .map(|entry| entry.auth_status)
            .unwrap_or(McpAuthStatus::Unsupported)
            .to_string();
        let bearer_token_display =
            bearer_token_env_var.as_deref().unwrap_or("-").to_string();
        http_rows.push([
            name.clone(),
            url.clone(),
            bearer_token_env_var.clone().unwrap_or("-".to_string()),
            bearer_token_display,
            status,
            auth_status,
        ]);
@@ -642,7 +674,9 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
}

async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let overrides = config_overrides
        .parse_overrides()
        .map_err(anyhow::Error::msg)?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .await
        .context("failed to load configuration")?;
@@ -752,15 +786,15 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
        } => {
            println!("  transport: streamable_http");
            println!("  url: {url}");
            let env_var = bearer_token_env_var.as_deref().unwrap_or("-");
            println!("  bearer_token_env_var: {env_var}");
            let bearer_token_display = bearer_token_env_var.as_deref().unwrap_or("-");
            println!("  bearer_token_env_var: {bearer_token_display}");
            let headers_display = match http_headers {
                Some(map) if !map.is_empty() => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .map(|(k, _)| format!("{k}=*****"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
@@ -773,7 +807,7 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .map(|(k, var)| format!("{k}={var}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }

@@ -2,7 +2,7 @@ use std::path::Path;

use anyhow::Result;
use codex_core::config::load_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use codex_core::config::types::McpServerTransportConfig;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use tempfile::TempDir;

@@ -1,9 +1,9 @@
use std::path::Path;

use anyhow::Result;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::write_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use codex_core::config::types::McpServerTransportConfig;
use predicates::prelude::PredicateBooleanExt;
use predicates::str::contains;
use pretty_assertions::assert_eq;
@@ -59,7 +59,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
        }
        other => panic!("unexpected transport: {other:?}"),
    }
    write_global_mcp_servers(codex_home.path(), &servers)?;
    ConfigEditsBuilder::new(codex_home.path())
        .replace_mcp_servers(&servers)
        .apply_blocking()?;

    let mut list_cmd = codex_command(codex_home.path())?;
    let list_output = list_cmd.args(["mcp", "list"]).output()?;
@@ -68,9 +70,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
    assert!(stdout.contains("Name"));
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("docs-server"));
    assert!(stdout.contains("TOKEN=secret"));
    assert!(stdout.contains("APP_TOKEN=$APP_TOKEN"));
    assert!(stdout.contains("WORKSPACE_ID=$WORKSPACE_ID"));
    assert!(stdout.contains("TOKEN=*****"));
    assert!(stdout.contains("APP_TOKEN=*****"));
    assert!(stdout.contains("WORKSPACE_ID=*****"));
    assert!(stdout.contains("Status"));
    assert!(stdout.contains("Auth"));
    assert!(stdout.contains("enabled"));
@@ -119,9 +121,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
    assert!(stdout.contains("transport: stdio"));
    assert!(stdout.contains("command: docs-server"));
    assert!(stdout.contains("args: --port 4000"));
    assert!(stdout.contains("env: TOKEN=secret"));
    assert!(stdout.contains("APP_TOKEN=$APP_TOKEN"));
    assert!(stdout.contains("WORKSPACE_ID=$WORKSPACE_ID"));
    assert!(stdout.contains("env: TOKEN=*****"));
    assert!(stdout.contains("APP_TOKEN=*****"));
    assert!(stdout.contains("WORKSPACE_ID=*****"));
    assert!(stdout.contains("enabled: true"));
    assert!(stdout.contains("remove: codex mcp remove docs"));

@@ -149,7 +151,9 @@ async fn get_disabled_server_shows_single_line() -> Result<()> {
        .get_mut("docs")
        .expect("docs server should exist after add");
    docs.enabled = false;
    write_global_mcp_servers(codex_home.path(), &servers)?;
    ConfigEditsBuilder::new(codex_home.path())
        .replace_mcp_servers(&servers)
        .apply_blocking()?;

    let mut get_cmd = codex_command(codex_home.path())?;
    let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;

@@ -22,6 +22,6 @@ chrono = { version = "0.4", features = ["serde"] }
diffy = "0.4.2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.12"
thiserror = "2.0.17"
codex-backend-client = { path = "../backend-client", optional = true }
codex-git-apply = { path = "../git-apply" }
codex-git = { workspace = true }

@@ -362,13 +362,13 @@ mod api {
        });
    }

    let req = codex_git_apply::ApplyGitRequest {
    let req = codex_git::ApplyGitRequest {
        cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
        diff: diff.clone(),
        revert: false,
        preflight,
    };
    let r = codex_git_apply::apply_git_patch(&req)
    let r = codex_git::apply_git_patch(&req)
        .map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;

    let status = if r.exit_code == 0 {

@@ -26,4 +26,4 @@ pub use mock::MockClient;
#[cfg(feature = "online")]
pub use http::HttpClient;

// Reusable apply engine now lives in the shared crate `codex-git-apply`.
// Reusable apply engine now lives in the shared crate `codex-git`.

@@ -58,7 +58,16 @@ async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext>

    let auth = match codex_core::config::find_codex_home()
        .ok()
        .map(|home| codex_login::AuthManager::new(home, false))
        .map(|home| {
            let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
                codex_core::config::ConfigToml::default(),
                codex_core::config::ConfigOverrides::default(),
                home.clone(),
            )
            .map(|cfg| cfg.cli_auth_credentials_store_mode)
            .unwrap_or_default();
            codex_login::AuthManager::new(home, false, store_mode)
        })
        .and_then(|am| am.auth())
    {
        Some(auth) => auth,
@@ -1035,7 +1044,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
    // Close task modal/pending apply if present before opening env modal
    app.diff_overlay = None;
    app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
    // Cache environments until user explicitly refreshes with 'r' inside the modal.
    // Cache environments while the modal is open to avoid repeated fetches.
    let should_fetch = app.environments.is_empty();
    if should_fetch {
        app.env_loading = true;
@@ -1086,7 +1095,19 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
    let backend = Arc::clone(&backend);
    let best_of_n = page.best_of_n;
    tokio::spawn(async move {
        let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend, &env, &text, "main", false, best_of_n).await;
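        // Pick the git ref for the cloud task: prefer the repo's default
        // branch, then the currently checked-out branch, and only then fall
        // back to "main".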
        let git_ref = if let Ok(cwd) = std::env::current_dir() {
            if let Some(branch) = codex_core::git_info::default_branch_name(&cwd).await {
                branch
            } else if let Some(branch) = codex_core::git_info::current_branch_name(&cwd).await {
                branch
            } else {
                "main".to_string()
            }
        } else {
            "main".to_string()
        };

        let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend, &env, &text, &git_ref, false, best_of_n).await;
        let evt = match result {
            Ok(ok) => app::AppEvent::NewTaskSubmitted(Ok(ok)),
            Err(e) => app::AppEvent::NewTaskSubmitted(Err(format!("{e}"))),
@@ -1094,7 +1115,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
        let _ = tx.send(evt);
    });
    } else {
        app.status = "No environment selected (press 'e' to choose)".to_string();
        app.status = "No environment selected".to_string();
    }
}
needs_redraw = true;
@@ -1292,18 +1313,6 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
    // Environment modal key handling
    match key.code {
        KeyCode::Esc => { app.env_modal = None; needs_redraw = true; }
        KeyCode::Char('r') | KeyCode::Char('R') => {
            // Trigger refresh of environments
            app.env_loading = true; app.env_error = None; needs_redraw = true;
            let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
            let tx = tx.clone();
            tokio::spawn(async move {
                let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()));
                let headers = crate::util::build_chatgpt_headers().await;
                let res = crate::env_detect::list_environments(&base_url, &headers).await;
                let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res));
            });
        }
        KeyCode::Char(ch) if !key.modifiers.contains(KeyModifiers::CONTROL) && !key.modifiers.contains(KeyModifiers::ALT) => {
            if let Some(m) = app.env_modal.as_mut() { m.query.push(ch); }
            needs_redraw = true;
@@ -1410,7 +1419,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
        }
        KeyCode::Char('o') | KeyCode::Char('O') => {
            app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
            // Cache environments until user explicitly refreshes with 'r' inside the modal.
            // Cache environments while the modal is open to avoid repeated fetches.
            let should_fetch = app.environments.is_empty();
            if should_fetch { app.env_loading = true; app.env_error = None; }
            needs_redraw = true;

@@ -945,9 +945,7 @@ pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) {

    // Subheader with usage hints (dim cyan)
    let subheader = Paragraph::new(Line::from(
        "Type to search, Enter select, Esc cancel; r refresh"
            .cyan()
            .dim(),
        "Type to search, Enter select, Esc cancel".cyan().dim(),
    ))
    .wrap(Wrap { trim: true });
    frame.render_widget(subheader, rows[0]);

@@ -70,7 +70,14 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );
    if let Ok(home) = codex_core::config::find_codex_home() {
        let am = codex_login::AuthManager::new(home, false);
        let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
            codex_core::config::ConfigToml::default(),
            codex_core::config::ConfigOverrides::default(),
            home.clone(),
        )
        .map(|cfg| cfg.cli_auth_credentials_store_mode)
        .unwrap_or_default();
        let am = codex_login::AuthManager::new(home, false, store_mode);
        if let Some(auth) = am.auth()
            && let Ok(tok) = auth.get_token().await
            && !tok.is_empty()

@@ -16,3 +16,6 @@ path = "src/lib.rs"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"

[package.metadata.cargo-shear]
ignored = ["serde_with"]

@@ -24,21 +24,21 @@ pub fn builtin_approval_presets() -> Vec<ApprovalPreset> {
    ApprovalPreset {
        id: "read-only",
        label: "Read Only",
        description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network",
        description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network.",
        approval: AskForApproval::OnRequest,
        sandbox: SandboxPolicy::ReadOnly,
    },
    ApprovalPreset {
        id: "auto",
        label: "Auto",
        description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network",
        description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network.",
        approval: AskForApproval::OnRequest,
        sandbox: SandboxPolicy::new_workspace_write_policy(),
    },
    ApprovalPreset {
        id: "full-access",
        label: "Full Access",
        description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution",
        description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution.",
        approval: AskForApproval::Never,
        sandbox: SandboxPolicy::DangerFullAccess,
    },

@@ -6,15 +6,11 @@ pub fn format_env_display(env: Option<&HashMap<String, String>>, env_vars: &[Str
    if let Some(map) = env {
        let mut pairs: Vec<_> = map.iter().collect();
        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
        parts.extend(
            pairs
                .into_iter()
                .map(|(key, value)| format!("{key}={value}")),
        );
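        // Values are masked so env secrets never appear in command output.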
        parts.extend(pairs.into_iter().map(|(key, _)| format!("{key}=*****")));
    }

    if !env_vars.is_empty() {
        parts.extend(env_vars.iter().map(|var| format!("{var}=${var}")));
        parts.extend(env_vars.iter().map(|var| format!("{var}=*****")));
    }

    if parts.is_empty() {
@@ -42,14 +38,14 @@ mod tests {
        env.insert("B".to_string(), "two".to_string());
        env.insert("A".to_string(), "one".to_string());

        assert_eq!(format_env_display(Some(&env), &[]), "A=one, B=two");
        assert_eq!(format_env_display(Some(&env), &[]), "A=*****, B=*****");
    }

    #[test]
    fn formats_env_vars_with_dollar_prefix() {
        let vars = vec!["TOKEN".to_string(), "PATH".to_string()];

        assert_eq!(format_env_display(None, &vars), "TOKEN=$TOKEN, PATH=$PATH");
        assert_eq!(format_env_display(None, &vars), "TOKEN=*****, PATH=*****");
    }

    #[test]
@@ -60,7 +56,7 @@ mod tests {

        assert_eq!(
            format_env_display(Some(&env), &vars),
            "HOME=/tmp, TOKEN=$TOKEN"
            "HOME=*****, TOKEN=*****"
        );
    }
}

@@ -34,7 +34,7 @@ const PRESETS: &[ModelPreset] = &[
        id: "gpt-5-codex",
        model: "gpt-5-codex",
        display_name: "gpt-5-codex",
        description: "Optimized for coding tasks with many tools.",
        description: "Optimized for codex.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
@@ -52,6 +52,24 @@ const PRESETS: &[ModelPreset] = &[
        ],
        is_default: true,
    },
    ModelPreset {
        id: "gpt-5-codex-mini",
        model: "gpt-5-codex-mini",
        display_name: "gpt-5-codex-mini",
        description: "Optimized for codex. Cheaper, faster, but less capable.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Dynamically adjusts reasoning based on the task",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: false,
    },
    ModelPreset {
        id: "gpt-5",
        model: "gpt-5",
@@ -80,8 +98,13 @@ const PRESETS: &[ModelPreset] = &[
    },
];

pub fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    PRESETS.to_vec()
pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
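    // gpt-5-codex-mini is only surfaced to ChatGPT-authenticated users.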
    let allow_codex_mini = matches!(auth_mode, Some(AuthMode::ChatGPT));
    PRESETS
        .iter()
        .filter(|preset| allow_codex_mini || preset.id != "gpt-5-codex-mini")
        .copied()
        .collect()
}

#[cfg(test)]

@@ -21,13 +21,16 @@ bytes = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-keyring-store = { workspace = true }
codex-otel = { workspace = true, features = ["otel"] }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-async-utils = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-tokenizer = { workspace = true }
dirs = { workspace = true }
dunce = { workspace = true }
@@ -36,6 +39,12 @@ eventsource-stream = { workspace = true }
futures = { workspace = true }
http = { workspace = true }
indexmap = { workspace = true }
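# Platform-native credential stores; presumably backing the new
# keyring-based credential storage modes introduced in this change.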
keyring = { workspace = true, features = [
    "apple-native",
    "crypto-rust",
    "linux-native-async-persistent",
    "windows-native",
] }
libc = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
@@ -45,6 +54,7 @@ reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
similar = { workspace = true }
strum_macros = { workspace = true }
@@ -70,9 +80,10 @@ toml_edit = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tree-sitter = { workspace = true }
tree-sitter-bash = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4"] }
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
which = { workspace = true }
wildmatch = { workspace = true }
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }

[target.'cfg(target_os = "linux")'.dependencies]
@@ -95,6 +106,7 @@ assert_cmd = { workspace = true }
assert_matches = { workspace = true }
core_test_support = { workspace = true }
escargot = { workspace = true }
image = { workspace = true, features = ["jpeg", "png"] }
maplit = { workspace = true }
predicates = { workspace = true }
pretty_assertions = { workspace = true }

@@ -82,6 +82,6 @@ OUTPUT FORMAT:

* **Do not** wrap the JSON in markdown fences or extra prose.
* The code_location field is required and must include absolute_file_path and line_range.
*Line ranges must be as short as possible for interpreting the issue (avoid ranges over 5–10 lines; pick the most suitable subrange).
* Line ranges must be as short as possible for interpreting the issue (avoid ranges over 5–10 lines; pick the most suitable subrange).
* The code_location should overlap with the diff.
* Do not generate a PR fix.
* Do not generate a PR fix.

@@ -61,7 +61,13 @@ pub(crate) async fn apply_patch(
    // that similar patches can be auto-approved in the future during
    // this session.
    let rx_approve = sess
        .request_patch_approval(turn_context, call_id.to_owned(), &action, None, None)
        .request_patch_approval(
            turn_context,
            call_id.to_owned(),
            convert_apply_patch_to_protocol(&action),
            None,
            None,
        )
        .await;
    match rx_approve.await.unwrap_or_default() {
        ReviewDecision::Approved | ReviewDecision::ApprovedForSession => {

@@ -1,16 +1,14 @@
use chrono::DateTime;
mod storage;

use chrono::Utc;
use reqwest::StatusCode;
use serde::Deserialize;
use serde::Serialize;
#[cfg(test)]
use serial_test::serial;
use std::env;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
use std::fmt::Debug;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@@ -20,11 +18,20 @@ use std::time::Duration;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;

pub use crate::auth::storage::AuthCredentialsStoreMode;
|
||||
pub use crate::auth::storage::AuthDotJson;
|
||||
use crate::auth::storage::AuthStorageBackend;
|
||||
use crate::auth::storage::create_auth_storage;
|
||||
use crate::config::Config;
|
||||
use crate::default_client::CodexHttpClient;
|
||||
use crate::error::RefreshTokenFailedError;
|
||||
use crate::error::RefreshTokenFailedReason;
|
||||
use crate::token_data::PlanType;
|
||||
use crate::token_data::TokenData;
|
||||
use crate::token_data::parse_id_token;
|
||||
use crate::util::try_parse_error_message;
|
||||
use serde_json::Value;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CodexAuth {
|
||||
@@ -32,7 +39,7 @@ pub struct CodexAuth {
|
||||
|
||||
pub(crate) api_key: Option<String>,
|
||||
pub(crate) auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
|
||||
pub(crate) auth_file: PathBuf,
|
||||
storage: Arc<dyn AuthStorageBackend>,
|
||||
pub(crate) client: CodexHttpClient,
|
||||
}
|
||||
|
||||
@@ -42,26 +49,66 @@ impl PartialEq for CodexAuth {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(pakrym): use token exp field to check for expiration instead
const TOKEN_REFRESH_INTERVAL: i64 = 8; // days between forced refreshes (see chrono::Duration::days below)

const REFRESH_TOKEN_EXPIRED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token has expired. Please log out and sign in again.";
const REFRESH_TOKEN_REUSED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token was already used. Please log out and sign in again.";
const REFRESH_TOKEN_INVALIDATED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token was revoked. Please log out and sign in again.";
const REFRESH_TOKEN_UNKNOWN_MESSAGE: &str =
"Your access token could not be refreshed. Please log out and sign in again.";
const REFRESH_TOKEN_URL: &str = "https://auth.openai.com/oauth/token";
pub const REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR: &str = "CODEX_REFRESH_TOKEN_URL_OVERRIDE";

#[derive(Debug, Error)]
pub enum RefreshTokenError {
#[error("{0}")]
Permanent(#[from] RefreshTokenFailedError),
#[error(transparent)]
Transient(#[from] std::io::Error),
}

impl RefreshTokenError {
pub fn failed_reason(&self) -> Option<RefreshTokenFailedReason> {
match self {
Self::Permanent(error) => Some(error.reason),
Self::Transient(_) => None,
}
}

fn other_with_message(message: impl Into<String>) -> Self {
Self::Transient(std::io::Error::other(message.into()))
}
}

impl From<RefreshTokenError> for std::io::Error {
fn from(err: RefreshTokenError) -> Self {
match err {
RefreshTokenError::Permanent(failed) => std::io::Error::other(failed),
RefreshTokenError::Transient(inner) => inner,
}
}
}

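// Editor's sketch (not part of the patch): how a caller might branch on the
// two RefreshTokenError variants via failed_reason(). `handle_refresh` is
// illustrative, and Debug on RefreshTokenFailedReason is an assumption.
async fn handle_refresh(auth: &CodexAuth) {
    match auth.refresh_token().await {
        Ok(_access) => { /* token refreshed and persisted */ }
        Err(err) => match err.failed_reason() {
            // Permanent failures (expired/reused/revoked refresh token)
            // cannot be retried; the user has to log in again.
            Some(reason) => eprintln!("re-login required: {reason:?}"),
            // Transient failures (network, timeout) are safe to retry.
            None => eprintln!("retry later: {err}"),
        },
    }
}
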
impl CodexAuth {
pub async fn refresh_token(&self) -> Result<String, std::io::Error> {
pub async fn refresh_token(&self) -> Result<String, RefreshTokenError> {
tracing::info!("Refreshing token");

let token_data = self
.get_current_token_data()
.ok_or(std::io::Error::other("Token data is not available."))?;
let token_data = self.get_current_token_data().ok_or_else(|| {
RefreshTokenError::Transient(std::io::Error::other("Token data is not available."))
})?;
let token = token_data.refresh_token;

let refresh_response = try_refresh_token(token, &self.client)
.await
.map_err(std::io::Error::other)?;
let refresh_response = try_refresh_token(token, &self.client).await?;

let updated = update_tokens(
&self.auth_file,
&self.storage,
refresh_response.id_token,
refresh_response.access_token,
refresh_response.refresh_token,
)
.await?;
.await
.map_err(RefreshTokenError::from)?;

if let Ok(mut auth_lock) = self.auth_dot_json.lock() {
*auth_lock = Some(updated.clone());
@@ -70,7 +117,7 @@ impl CodexAuth {
let access = match updated.tokens {
Some(t) => t.access_token,
None => {
return Err(std::io::Error::other(
return Err(RefreshTokenError::other_with_message(
"Token data is not available after refresh.",
));
}
@@ -78,9 +125,12 @@ impl CodexAuth {
Ok(access)
}

/// Loads the available auth information from the auth.json.
pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
load_auth(codex_home, false)
/// Loads the available auth information from auth storage.
pub fn from_auth_storage(
codex_home: &Path,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<CodexAuth>> {
load_auth(codex_home, false, auth_credentials_store_mode)
}

pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
@@ -91,19 +141,25 @@ impl CodexAuth {
last_refresh: Some(last_refresh),
..
}) => {
if last_refresh < Utc::now() - chrono::Duration::days(28) {
let refresh_response = tokio::time::timeout(
if last_refresh < Utc::now() - chrono::Duration::days(TOKEN_REFRESH_INTERVAL) {
let refresh_result = tokio::time::timeout(
Duration::from_secs(60),
try_refresh_token(tokens.refresh_token.clone(), &self.client),
)
.await
.map_err(|_| {
std::io::Error::other("timed out while refreshing OpenAI API key")
})?
.map_err(std::io::Error::other)?;
.await;
let refresh_response = match refresh_result {
Ok(Ok(response)) => response,
Ok(Err(err)) => return Err(err.into()),
Err(_) => {
return Err(std::io::Error::new(
ErrorKind::TimedOut,
"timed out while refreshing OpenAI API key",
));
}
};

let updated_auth_dot_json = update_tokens(
&self.auth_file,
&self.storage,
refresh_response.id_token,
refresh_response.access_token,
refresh_response.refresh_token,
@@ -177,7 +233,7 @@ impl CodexAuth {
Self {
api_key: None,
mode: AuthMode::ChatGPT,
auth_file: PathBuf::new(),
storage: create_auth_storage(PathBuf::new(), AuthCredentialsStoreMode::File),
auth_dot_json,
client: crate::default_client::create_client(),
}
@@ -187,7 +243,7 @@ impl CodexAuth {
Self {
api_key: Some(api_key.to_owned()),
mode: AuthMode::ApiKey,
auth_file: PathBuf::new(),
storage: create_auth_storage(PathBuf::new(), AuthCredentialsStoreMode::File),
auth_dot_json: Arc::new(Mutex::new(None)),
client,
}
@@ -215,33 +271,57 @@ pub fn read_codex_api_key_from_env() -> Option<String> {
.filter(|value| !value.is_empty())
}

pub fn get_auth_file(codex_home: &Path) -> PathBuf {
codex_home.join("auth.json")
}

/// Delete the auth.json file inside `codex_home` if it exists. Returns `Ok(true)`
/// if a file was removed, `Ok(false)` if no auth file was present.
pub fn logout(codex_home: &Path) -> std::io::Result<bool> {
let auth_file = get_auth_file(codex_home);
match std::fs::remove_file(&auth_file) {
Ok(_) => Ok(true),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(false),
Err(err) => Err(err),
}
pub fn logout(
codex_home: &Path,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<bool> {
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
storage.delete()
}

/// Writes an `auth.json` that contains only the API key.
pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<()> {
pub fn login_with_api_key(
codex_home: &Path,
api_key: &str,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson {
openai_api_key: Some(api_key.to_string()),
tokens: None,
last_refresh: None,
};
write_auth_json(&get_auth_file(codex_home), &auth_dot_json)
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}

/// Persist the provided auth payload using the specified backend.
pub fn save_auth(
codex_home: &Path,
auth: &AuthDotJson,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
storage.save(auth)
}

/// Load CLI auth data using the configured credential store backend.
/// Returns `None` when no credentials are stored.
pub fn load_auth_dot_json(
codex_home: &Path,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<AuthDotJson>> {
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
storage.load()
}

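// Editor's sketch (not part of the patch): a minimal round trip through the
// public helpers above, using the file backend; the key value is illustrative.
fn example(codex_home: &Path) -> std::io::Result<()> {
    // Persist an API-key-only credential via the file backend.
    login_with_api_key(codex_home, "sk-example", AuthCredentialsStoreMode::File)?;

    // Read it back through the same backend abstraction.
    let auth = load_auth_dot_json(codex_home, AuthCredentialsStoreMode::File)?;
    assert!(auth.is_some());

    // Remove stored credentials; returns whether anything was deleted.
    let removed = logout(codex_home, AuthCredentialsStoreMode::File)?;
    assert!(removed);
    Ok(())
}
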
pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
let Some(auth) = load_auth(&config.codex_home, true)? else {
let Some(auth) = load_auth(
&config.codex_home,
true,
config.cli_auth_credentials_store_mode,
)?
else {
return Ok(());
};

@@ -260,7 +340,11 @@ pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()>
};

if let Some(message) = method_violation {
return logout_with_message(&config.codex_home, message);
return logout_with_message(
&config.codex_home,
message,
config.cli_auth_credentials_store_mode,
);
}
}

@@ -277,6 +361,7 @@ pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()>
format!(
"Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
),
config.cli_auth_credentials_store_mode,
);
}
};
@@ -292,15 +377,23 @@ pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()>
"Login is restricted to workspace {expected_account_id}, but current credentials lack a workspace identifier. Logging out."
),
};
return logout_with_message(&config.codex_home, message);
return logout_with_message(
&config.codex_home,
message,
config.cli_auth_credentials_store_mode,
);
}
}

Ok(())
}

fn logout_with_message(codex_home: &Path, message: String) -> std::io::Result<()> {
match logout(codex_home) {
fn logout_with_message(
codex_home: &Path,
message: String,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
match logout(codex_home, auth_credentials_store_mode) {
Ok(_) => Err(std::io::Error::other(message)),
Err(err) => Err(std::io::Error::other(format!(
"{message}. Failed to remove auth.json: {err}"
@@ -311,6 +404,7 @@ fn logout_with_message(codex_home: &Path, message: String) -> std::io::Result<()
fn load_auth(
codex_home: &Path,
enable_codex_api_key_env: bool,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<CodexAuth>> {
if enable_codex_api_key_env && let Some(api_key) = read_codex_api_key_from_env() {
let client = crate::default_client::create_client();
@@ -320,12 +414,12 @@ fn load_auth(
)));
}

let auth_file = get_auth_file(codex_home);
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);

let client = crate::default_client::create_client();
let auth_dot_json = match try_read_auth_json(&auth_file) {
Ok(auth) => auth,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(err),
let auth_dot_json = match storage.load()? {
Some(auth) => auth,
None => return Ok(None),
};

let AuthDotJson {
@@ -342,7 +436,7 @@ fn load_auth(
Ok(Some(CodexAuth {
api_key: None,
mode: AuthMode::ChatGPT,
auth_file,
storage: storage.clone(),
auth_dot_json: Arc::new(Mutex::new(Some(AuthDotJson {
openai_api_key: None,
tokens,
@@ -352,44 +446,20 @@ fn load_auth(
}))
}

/// Attempt to read and refresh the `auth.json` file in the given `CODEX_HOME` directory.
/// Returns the full AuthDotJson structure after refreshing if necessary.
pub fn try_read_auth_json(auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;

Ok(auth_dot_json)
}

pub fn write_auth_json(auth_file: &Path, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
if let Some(parent) = auth_file.parent() {
std::fs::create_dir_all(parent)?;
}
let json_data = serde_json::to_string_pretty(auth_dot_json)?;
let mut options = OpenOptions::new();
options.truncate(true).write(true).create(true);
#[cfg(unix)]
{
options.mode(0o600);
}
let mut file = options.open(auth_file)?;
file.write_all(json_data.as_bytes())?;
file.flush()?;
Ok(())
}

async fn update_tokens(
auth_file: &Path,
id_token: String,
storage: &Arc<dyn AuthStorageBackend>,
id_token: Option<String>,
access_token: Option<String>,
refresh_token: Option<String>,
) -> std::io::Result<AuthDotJson> {
let mut auth_dot_json = try_read_auth_json(auth_file)?;
let mut auth_dot_json = storage
.load()?
.ok_or(std::io::Error::other("Token data is not available."))?;

let tokens = auth_dot_json.tokens.get_or_insert_with(TokenData::default);
tokens.id_token = parse_id_token(&id_token).map_err(std::io::Error::other)?;
if let Some(id_token) = id_token {
tokens.id_token = parse_id_token(&id_token).map_err(std::io::Error::other)?;
}
if let Some(access_token) = access_token {
tokens.access_token = access_token;
}
@@ -397,14 +467,14 @@ async fn update_tokens(
tokens.refresh_token = refresh_token;
}
auth_dot_json.last_refresh = Some(Utc::now());
write_auth_json(auth_file, &auth_dot_json)?;
storage.save(&auth_dot_json)?;
Ok(auth_dot_json)
}

async fn try_refresh_token(
refresh_token: String,
client: &CodexHttpClient,
) -> std::io::Result<RefreshResponse> {
) -> Result<RefreshResponse, RefreshTokenError> {
let refresh_request = RefreshRequest {
client_id: CLIENT_ID,
grant_type: "refresh_token",
@@ -412,29 +482,93 @@ async fn try_refresh_token(
scope: "openid profile email",
};

let endpoint = refresh_token_endpoint();

// Use shared client factory to include standard headers
let response = client
.post("https://auth.openai.com/oauth/token")
.post(endpoint.as_str())
.header("Content-Type", "application/json")
.json(&refresh_request)
.send()
.await
.map_err(std::io::Error::other)?;
.map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;

if response.status().is_success() {
let status = response.status();
if status.is_success() {
let refresh_response = response
.json::<RefreshResponse>()
.await
.map_err(std::io::Error::other)?;
.map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;
Ok(refresh_response)
} else {
Err(std::io::Error::other(format!(
"Failed to refresh token: {}",
response.status()
)))
let body = response.text().await.unwrap_or_default();
if status == StatusCode::UNAUTHORIZED {
let failed = classify_refresh_token_failure(&body);
Err(RefreshTokenError::Permanent(failed))
} else {
let message = try_parse_error_message(&body);
Err(RefreshTokenError::Transient(std::io::Error::other(
format!("Failed to refresh token: {status}: {message}"),
)))
}
}
}

fn classify_refresh_token_failure(body: &str) -> RefreshTokenFailedError {
let code = extract_refresh_token_error_code(body);

let normalized_code = code.as_deref().map(str::to_ascii_lowercase);
let reason = match normalized_code.as_deref() {
Some("refresh_token_expired") => RefreshTokenFailedReason::Expired,
Some("refresh_token_reused") => RefreshTokenFailedReason::Exhausted,
Some("refresh_token_invalidated") => RefreshTokenFailedReason::Revoked,
_ => RefreshTokenFailedReason::Other,
};

if reason == RefreshTokenFailedReason::Other {
tracing::warn!(
backend_code = normalized_code.as_deref(),
backend_body = body,
"Encountered unknown 401 response while refreshing token"
);
}

let message = match reason {
RefreshTokenFailedReason::Expired => REFRESH_TOKEN_EXPIRED_MESSAGE.to_string(),
RefreshTokenFailedReason::Exhausted => REFRESH_TOKEN_REUSED_MESSAGE.to_string(),
RefreshTokenFailedReason::Revoked => REFRESH_TOKEN_INVALIDATED_MESSAGE.to_string(),
RefreshTokenFailedReason::Other => REFRESH_TOKEN_UNKNOWN_MESSAGE.to_string(),
};

RefreshTokenFailedError::new(reason, message)
}

fn extract_refresh_token_error_code(body: &str) -> Option<String> {
if body.trim().is_empty() {
return None;
}

let Value::Object(map) = serde_json::from_str::<Value>(body).ok()? else {
return None;
};

if let Some(error_value) = map.get("error") {
match error_value {
Value::Object(obj) => {
if let Some(code) = obj.get("code").and_then(Value::as_str) {
return Some(code.to_string());
}
}
Value::String(code) => {
return Some(code.to_string());
}
_ => {}
}
}

map.get("code").and_then(Value::as_str).map(str::to_string)
}

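// Editor's sketch (not part of the patch): the body shapes the extractor
// above accepts. The concrete 401 payloads the auth backend emits are not
// shown in this diff, so these inputs are illustrative.
#[cfg(test)]
mod error_code_examples {
    use super::extract_refresh_token_error_code;

    #[test]
    fn accepted_shapes() {
        // {"error": {"code": "..."}}
        assert_eq!(
            extract_refresh_token_error_code(r#"{"error":{"code":"refresh_token_expired"}}"#),
            Some("refresh_token_expired".to_string())
        );
        // {"error": "..."}
        assert_eq!(
            extract_refresh_token_error_code(r#"{"error":"refresh_token_reused"}"#),
            Some("refresh_token_reused".to_string())
        );
        // {"code": "..."}
        assert_eq!(
            extract_refresh_token_error_code(r#"{"code":"refresh_token_invalidated"}"#),
            Some("refresh_token_invalidated".to_string())
        );
        // Non-JSON or empty bodies yield None.
        assert_eq!(extract_refresh_token_error_code(""), None);
    }
}
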
#[derive(Serialize)]
struct RefreshRequest {
client_id: &'static str,
@@ -445,27 +579,19 @@ struct RefreshRequest {

#[derive(Deserialize, Clone)]
struct RefreshResponse {
id_token: String,
id_token: Option<String>,
access_token: Option<String>,
refresh_token: Option<String>,
}

/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
#[serde(rename = "OPENAI_API_KEY")]
pub openai_api_key: Option<String>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub tokens: Option<TokenData>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_refresh: Option<DateTime<Utc>>,
}

// Shared constant for token refresh (client id used for oauth token refresh flow)
pub const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";

fn refresh_token_endpoint() -> String {
std::env::var(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR)
.unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
}

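// Editor's sketch (not part of the patch): the override hook above makes the
// refresh endpoint testable. The env var name comes from
// REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR; the mock URL is illustrative.
fn point_refresh_at_mock() {
    // Mutating process environment affects every thread, so this belongs in
    // serial tests only (this file already imports serial_test under cfg(test)).
    unsafe {
        std::env::set_var(
            "CODEX_REFRESH_TOKEN_URL_OVERRIDE",
            "http://127.0.0.1:8080/oauth/token",
        );
    }
}
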
use std::sync::RwLock;

/// Internal cached auth state.
@@ -477,12 +603,15 @@ struct CachedAuth {
#[cfg(test)]
mod tests {
use super::*;
use crate::auth::storage::FileAuthStorage;
use crate::auth::storage::get_auth_file;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;

use base64::Engine;
use codex_protocol::config_types::ForcedLoginMethod;
use pretty_assertions::assert_eq;
@@ -491,9 +620,9 @@ mod tests {
use tempfile::tempdir;

#[tokio::test]
async fn roundtrip_auth_dot_json() {
async fn refresh_without_id_token() {
let codex_home = tempdir().unwrap();
let _ = write_auth_file(
let fake_jwt = write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
@@ -503,12 +632,23 @@ mod tests {
)
.expect("failed to write auth file");

let file = get_auth_file(codex_home.path());
let auth_dot_json = try_read_auth_json(&file).unwrap();
write_auth_json(&file, &auth_dot_json).unwrap();
let storage = create_auth_storage(
codex_home.path().to_path_buf(),
AuthCredentialsStoreMode::File,
);
let updated = super::update_tokens(
&storage,
None,
Some("new-access-token".to_string()),
Some("new-refresh-token".to_string()),
)
.await
.expect("update_tokens should succeed");

let same_auth_dot_json = try_read_auth_json(&file).unwrap();
assert_eq!(auth_dot_json, same_auth_dot_json);
let tokens = updated.tokens.expect("tokens should exist");
assert_eq!(tokens.id_token.raw_jwt, fake_jwt);
assert_eq!(tokens.access_token, "new-access-token");
assert_eq!(tokens.refresh_token, "new-refresh-token");
}

#[test]
@@ -530,9 +670,13 @@ mod tests {
)
.unwrap();

super::login_with_api_key(dir.path(), "sk-new").expect("login_with_api_key should succeed");
super::login_with_api_key(dir.path(), "sk-new", AuthCredentialsStoreMode::File)
.expect("login_with_api_key should succeed");

let auth = super::try_read_auth_json(&auth_path).expect("auth.json should parse");
let storage = FileAuthStorage::new(dir.path().to_path_buf());
let auth = storage
.try_read_auth_json(&auth_path)
.expect("auth.json should parse");
assert_eq!(auth.openai_api_key.as_deref(), Some("sk-new"));
assert!(auth.tokens.is_none(), "tokens should be cleared");
}
@@ -540,7 +684,8 @@ mod tests {
#[test]
fn missing_auth_json_returns_none() {
let dir = tempdir().unwrap();
let auth = CodexAuth::from_codex_home(dir.path()).expect("call should succeed");
let auth = CodexAuth::from_auth_storage(dir.path(), AuthCredentialsStoreMode::File)
.expect("call should succeed");
assert_eq!(auth, None);
}

@@ -562,9 +707,11 @@ mod tests {
api_key,
mode,
auth_dot_json,
auth_file: _,
storage: _,
..
} = super::load_auth(codex_home.path(), false).unwrap().unwrap();
} = super::load_auth(codex_home.path(), false, AuthCredentialsStoreMode::File)
.unwrap()
.unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);

@@ -605,7 +752,9 @@ mod tests {
)
.unwrap();

let auth = super::load_auth(dir.path(), false).unwrap().unwrap();
let auth = super::load_auth(dir.path(), false, AuthCredentialsStoreMode::File)
.unwrap()
.unwrap();
assert_eq!(auth.mode, AuthMode::ApiKey);
assert_eq!(auth.api_key, Some("sk-test-key".to_string()));

@@ -620,11 +769,11 @@ mod tests {
tokens: None,
last_refresh: None,
};
write_auth_json(&get_auth_file(dir.path()), &auth_dot_json)?;
assert!(dir.path().join("auth.json").exists());
let removed = logout(dir.path())?;
assert!(removed);
assert!(!dir.path().join("auth.json").exists());
super::save_auth(dir.path(), &auth_dot_json, AuthCredentialsStoreMode::File)?;
let auth_file = get_auth_file(dir.path());
assert!(auth_file.exists());
assert!(logout(dir.path(), AuthCredentialsStoreMode::File)?);
assert!(!auth_file.exists());
Ok(())
}

@@ -732,7 +881,8 @@ mod tests {
#[tokio::test]
async fn enforce_login_restrictions_logs_out_for_method_mismatch() {
let codex_home = tempdir().unwrap();
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
login_with_api_key(codex_home.path(), "sk-test", AuthCredentialsStoreMode::File)
.expect("seed api key");

let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);

@@ -801,7 +951,8 @@ mod tests {
async fn enforce_login_restrictions_allows_api_key_if_login_method_not_set_but_forced_chatgpt_workspace_id_is_set()
{
let codex_home = tempdir().unwrap();
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
login_with_api_key(codex_home.path(), "sk-test", AuthCredentialsStoreMode::File)
.expect("seed api key");

let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));

@@ -845,6 +996,7 @@ pub struct AuthManager {
codex_home: PathBuf,
inner: RwLock<CachedAuth>,
enable_codex_api_key_env: bool,
auth_credentials_store_mode: AuthCredentialsStoreMode,
}

impl AuthManager {
@@ -852,14 +1004,23 @@ impl AuthManager {
/// preferred auth method. Errors loading auth are swallowed; `auth()` will
/// simply return `None` in that case so callers can treat it as an
/// unauthenticated state.
pub fn new(codex_home: PathBuf, enable_codex_api_key_env: bool) -> Self {
let auth = load_auth(&codex_home, enable_codex_api_key_env)
.ok()
.flatten();
pub fn new(
codex_home: PathBuf,
enable_codex_api_key_env: bool,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> Self {
let auth = load_auth(
&codex_home,
enable_codex_api_key_env,
auth_credentials_store_mode,
)
.ok()
.flatten();
Self {
codex_home,
inner: RwLock::new(CachedAuth { auth }),
enable_codex_api_key_env,
auth_credentials_store_mode,
}
}

@@ -870,6 +1031,7 @@ impl AuthManager {
codex_home: PathBuf::new(),
inner: RwLock::new(cached),
enable_codex_api_key_env: false,
auth_credentials_store_mode: AuthCredentialsStoreMode::File,
})
}

@@ -881,9 +1043,13 @@ impl AuthManager {
/// Force a reload of the auth information from auth.json. Returns
/// whether the auth value changed.
pub fn reload(&self) -> bool {
let new_auth = load_auth(&self.codex_home, self.enable_codex_api_key_env)
.ok()
.flatten();
let new_auth = load_auth(
&self.codex_home,
self.enable_codex_api_key_env,
self.auth_credentials_store_mode,
)
.ok()
.flatten();
if let Ok(mut guard) = self.inner.write() {
let changed = !AuthManager::auths_equal(&guard.auth, &new_auth);
guard.auth = new_auth;
@@ -902,13 +1068,23 @@ impl AuthManager {
}

/// Convenience constructor returning an `Arc` wrapper.
pub fn shared(codex_home: PathBuf, enable_codex_api_key_env: bool) -> Arc<Self> {
Arc::new(Self::new(codex_home, enable_codex_api_key_env))
pub fn shared(
codex_home: PathBuf,
enable_codex_api_key_env: bool,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> Arc<Self> {
Arc::new(Self::new(
codex_home,
enable_codex_api_key_env,
auth_credentials_store_mode,
))
}

/// Attempt to refresh the current auth token (if any). On success, reload
/// the auth state from disk so other components observe refreshed token.
pub async fn refresh_token(&self) -> std::io::Result<Option<String>> {
/// If the token refresh fails in a permanent (non‑transient) way, logs out
/// to clear invalid auth state.
pub async fn refresh_token(&self) -> Result<Option<String>, RefreshTokenError> {
let auth = match self.auth() {
Some(a) => a,
None => return Ok(None),
@@ -931,7 +1107,7 @@ impl AuthManager {
/// reloads the in‑memory auth cache so callers immediately observe the
/// unauthenticated state.
pub fn logout(&self) -> std::io::Result<bool> {
let removed = super::auth::logout(&self.codex_home)?;
let removed = super::auth::logout(&self.codex_home, self.auth_credentials_store_mode)?;
// Always reload to clear any cached auth (even if file absent).
self.reload();
Ok(removed)

codex-rs/core/src/auth/storage.rs (new file, 672 lines)
@@ -0,0 +1,672 @@
use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use sha2::Sha256;
use std::fmt::Debug;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use tracing::warn;

use crate::token_data::TokenData;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;

/// Determine where Codex should store CLI auth credentials.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AuthCredentialsStoreMode {
#[default]
/// Persist credentials in CODEX_HOME/auth.json.
File,
/// Persist credentials in the keyring. Fail if unavailable.
Keyring,
/// Use keyring when available; otherwise, fall back to a file in CODEX_HOME.
Auto,
}

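// Editor's sketch (not part of the patch): what each mode resolves to,
// mirroring create_auth_storage further down; `describe` is illustrative.
fn describe(mode: AuthCredentialsStoreMode) -> &'static str {
    match mode {
        // Plain JSON file at CODEX_HOME/auth.json (0o600 on Unix).
        AuthCredentialsStoreMode::File => "file",
        // OS keyring only; keyring errors surface to the caller.
        AuthCredentialsStoreMode::Keyring => "keyring",
        // Keyring first, silently falling back to the file on failure.
        AuthCredentialsStoreMode::Auto => "keyring-with-file-fallback",
    }
}
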
/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
#[serde(rename = "OPENAI_API_KEY")]
pub openai_api_key: Option<String>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub tokens: Option<TokenData>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_refresh: Option<DateTime<Utc>>,
}

pub(super) fn get_auth_file(codex_home: &Path) -> PathBuf {
codex_home.join("auth.json")
}

pub(super) fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool> {
let auth_file = get_auth_file(codex_home);
match std::fs::remove_file(&auth_file) {
Ok(()) => Ok(true),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(false),
Err(err) => Err(err),
}
}

pub(super) trait AuthStorageBackend: Debug + Send + Sync {
fn load(&self) -> std::io::Result<Option<AuthDotJson>>;
fn save(&self, auth: &AuthDotJson) -> std::io::Result<()>;
fn delete(&self) -> std::io::Result<bool>;
}

#[derive(Clone, Debug)]
pub(super) struct FileAuthStorage {
codex_home: PathBuf,
}

impl FileAuthStorage {
pub(super) fn new(codex_home: PathBuf) -> Self {
Self { codex_home }
}

/// Attempt to read and refresh the `auth.json` file in the given `CODEX_HOME` directory.
/// Returns the full AuthDotJson structure after refreshing if necessary.
pub(super) fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;

Ok(auth_dot_json)
}
}

impl AuthStorageBackend for FileAuthStorage {
fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
let auth_file = get_auth_file(&self.codex_home);
let auth_dot_json = match self.try_read_auth_json(&auth_file) {
Ok(auth) => auth,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(err),
};
Ok(Some(auth_dot_json))
}

fn save(&self, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
let auth_file = get_auth_file(&self.codex_home);

if let Some(parent) = auth_file.parent() {
std::fs::create_dir_all(parent)?;
}
let json_data = serde_json::to_string_pretty(auth_dot_json)?;
let mut options = OpenOptions::new();
options.truncate(true).write(true).create(true);
#[cfg(unix)]
{
options.mode(0o600);
}
let mut file = options.open(auth_file)?;
file.write_all(json_data.as_bytes())?;
file.flush()?;
Ok(())
}

fn delete(&self) -> std::io::Result<bool> {
delete_file_if_exists(&self.codex_home)
}
}

const KEYRING_SERVICE: &str = "Codex Auth";

// turns codex_home path into a stable, short key string
fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
let canonical = codex_home
.canonicalize()
.unwrap_or_else(|_| codex_home.to_path_buf());
let path_str = canonical.to_string_lossy();
let mut hasher = Sha256::new();
hasher.update(path_str.as_bytes());
let digest = hasher.finalize();
let hex = format!("{digest:x}");
let truncated = hex.get(..16).unwrap_or(&hex);
Ok(format!("cli|{truncated}"))
}

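// Editor's sketch (not part of the patch): the key derivation above, minus
// the canonicalization step. Each CODEX_HOME gets its own keyring entry:
// SHA-256 of the path, first 16 hex chars, prefixed with "cli|".
fn key_for(path: &str) -> String {
    let digest = Sha256::digest(path.as_bytes());
    let hex = format!("{digest:x}");
    format!("cli|{}", &hex[..16])
}
// key_for("~/.codex") == "cli|940db7b1d0e4eb40" (see the unit test below),
// assuming the path does not canonicalize to something else first.
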
#[derive(Clone, Debug)]
struct KeyringAuthStorage {
codex_home: PathBuf,
keyring_store: Arc<dyn KeyringStore>,
}

impl KeyringAuthStorage {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
codex_home,
keyring_store,
}
}

fn load_from_keyring(&self, key: &str) -> std::io::Result<Option<AuthDotJson>> {
match self.keyring_store.load(KEYRING_SERVICE, key) {
Ok(Some(serialized)) => serde_json::from_str(&serialized).map(Some).map_err(|err| {
std::io::Error::other(format!(
"failed to deserialize CLI auth from keyring: {err}"
))
}),
Ok(None) => Ok(None),
Err(error) => Err(std::io::Error::other(format!(
"failed to load CLI auth from keyring: {}",
error.message()
))),
}
}

fn save_to_keyring(&self, key: &str, value: &str) -> std::io::Result<()> {
match self.keyring_store.save(KEYRING_SERVICE, key, value) {
Ok(()) => Ok(()),
Err(error) => {
let message = format!(
"failed to write OAuth tokens to keyring: {}",
error.message()
);
warn!("{message}");
Err(std::io::Error::other(message))
}
}
}
}

impl AuthStorageBackend for KeyringAuthStorage {
fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
let key = compute_store_key(&self.codex_home)?;
self.load_from_keyring(&key)
}

fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
let key = compute_store_key(&self.codex_home)?;
// Simpler error mapping per style: prefer method reference over closure
let serialized = serde_json::to_string(auth).map_err(std::io::Error::other)?;
self.save_to_keyring(&key, &serialized)?;
if let Err(err) = delete_file_if_exists(&self.codex_home) {
warn!("failed to remove CLI auth fallback file: {err}");
}
Ok(())
}

fn delete(&self) -> std::io::Result<bool> {
let key = compute_store_key(&self.codex_home)?;
let keyring_removed = self
.keyring_store
.delete(KEYRING_SERVICE, &key)
.map_err(|err| {
std::io::Error::other(format!("failed to delete auth from keyring: {err}"))
})?;
let file_removed = delete_file_if_exists(&self.codex_home)?;
Ok(keyring_removed || file_removed)
}
}

#[derive(Clone, Debug)]
struct AutoAuthStorage {
keyring_storage: Arc<KeyringAuthStorage>,
file_storage: Arc<FileAuthStorage>,
}

impl AutoAuthStorage {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
keyring_storage: Arc::new(KeyringAuthStorage::new(codex_home.clone(), keyring_store)),
file_storage: Arc::new(FileAuthStorage::new(codex_home)),
}
}
}

impl AuthStorageBackend for AutoAuthStorage {
fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
match self.keyring_storage.load() {
Ok(Some(auth)) => Ok(Some(auth)),
Ok(None) => self.file_storage.load(),
Err(err) => {
warn!("failed to load CLI auth from keyring, falling back to file storage: {err}");
self.file_storage.load()
}
}
}

fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
match self.keyring_storage.save(auth) {
Ok(()) => Ok(()),
Err(err) => {
warn!("failed to save auth to keyring, falling back to file storage: {err}");
self.file_storage.save(auth)
}
}
}

fn delete(&self) -> std::io::Result<bool> {
// Keyring storage will delete from disk as well
self.keyring_storage.delete()
}
}

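// Editor's sketch (not part of the patch): an Auto-mode round trip via
// create_auth_storage below. Note the drift-prevention detail: a successful
// keyring save deletes the stale auth.json, and load only consults the file
// when the keyring is empty or errors.
fn auto_roundtrip(codex_home: PathBuf, auth: &AuthDotJson) -> std::io::Result<()> {
    let storage = create_auth_storage(codex_home, AuthCredentialsStoreMode::Auto);
    storage.save(auth)?; // keyring first, file fallback on error
    let loaded = storage.load()?; // keyring hit wins; file only if keyring empty/errored
    assert_eq!(loaded.as_ref(), Some(auth));
    storage.delete()?; // removes the keyring entry and any fallback file
    Ok(())
}
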
pub(super) fn create_auth_storage(
codex_home: PathBuf,
mode: AuthCredentialsStoreMode,
) -> Arc<dyn AuthStorageBackend> {
let keyring_store: Arc<dyn KeyringStore> = Arc::new(DefaultKeyringStore);
create_auth_storage_with_keyring_store(codex_home, mode, keyring_store)
}

fn create_auth_storage_with_keyring_store(
codex_home: PathBuf,
mode: AuthCredentialsStoreMode,
keyring_store: Arc<dyn KeyringStore>,
) -> Arc<dyn AuthStorageBackend> {
match mode {
AuthCredentialsStoreMode::File => Arc::new(FileAuthStorage::new(codex_home)),
AuthCredentialsStoreMode::Keyring => {
Arc::new(KeyringAuthStorage::new(codex_home, keyring_store))
}
AuthCredentialsStoreMode::Auto => Arc::new(AutoAuthStorage::new(codex_home, keyring_store)),
}
}

#[cfg(test)]
mod tests {
use super::*;
use crate::token_data::IdTokenInfo;
use anyhow::Context;
use base64::Engine;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::tempdir;

use codex_keyring_store::tests::MockKeyringStore;
use keyring::Error as KeyringError;

#[tokio::test]
async fn file_storage_load_returns_auth_dot_json() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
let auth_dot_json = AuthDotJson {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
};

storage
.save(&auth_dot_json)
.context("failed to save auth file")?;

let loaded = storage.load().context("failed to load auth file")?;
assert_eq!(Some(auth_dot_json), loaded);
Ok(())
}

#[tokio::test]
async fn file_storage_save_persists_auth_dot_json() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
let auth_dot_json = AuthDotJson {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
};

let file = get_auth_file(codex_home.path());
storage
.save(&auth_dot_json)
.context("failed to save auth file")?;

let same_auth_dot_json = storage
.try_read_auth_json(&file)
.context("failed to read auth file after save")?;
assert_eq!(auth_dot_json, same_auth_dot_json);
Ok(())
}

#[test]
fn file_storage_delete_removes_auth_file() -> anyhow::Result<()> {
let dir = tempdir()?;
let auth_dot_json = AuthDotJson {
openai_api_key: Some("sk-test-key".to_string()),
tokens: None,
last_refresh: None,
};
let storage = create_auth_storage(dir.path().to_path_buf(), AuthCredentialsStoreMode::File);
storage.save(&auth_dot_json)?;
assert!(dir.path().join("auth.json").exists());
let storage = FileAuthStorage::new(dir.path().to_path_buf());
let removed = storage.delete()?;
assert!(removed);
assert!(!dir.path().join("auth.json").exists());
Ok(())
}

fn seed_keyring_and_fallback_auth_file_for_delete<F>(
mock_keyring: &MockKeyringStore,
codex_home: &Path,
compute_key: F,
) -> anyhow::Result<(String, PathBuf)>
where
F: FnOnce() -> std::io::Result<String>,
{
let key = compute_key()?;
mock_keyring.save(KEYRING_SERVICE, &key, "{}")?;
let auth_file = get_auth_file(codex_home);
std::fs::write(&auth_file, "stale")?;
Ok((key, auth_file))
}

fn seed_keyring_with_auth<F>(
mock_keyring: &MockKeyringStore,
compute_key: F,
auth: &AuthDotJson,
) -> anyhow::Result<()>
where
F: FnOnce() -> std::io::Result<String>,
{
let key = compute_key()?;
let serialized = serde_json::to_string(auth)?;
mock_keyring.save(KEYRING_SERVICE, &key, &serialized)?;
Ok(())
}

fn assert_keyring_saved_auth_and_removed_fallback(
mock_keyring: &MockKeyringStore,
key: &str,
codex_home: &Path,
expected: &AuthDotJson,
) {
let saved_value = mock_keyring
.saved_value(key)
.expect("keyring entry should exist");
let expected_serialized = serde_json::to_string(expected).expect("serialize expected auth");
assert_eq!(saved_value, expected_serialized);
let auth_file = get_auth_file(codex_home);
assert!(
!auth_file.exists(),
"fallback auth.json should be removed after keyring save"
);
}

fn id_token_with_prefix(prefix: &str) -> IdTokenInfo {
#[derive(Serialize)]
struct Header {
alg: &'static str,
typ: &'static str,
}

let header = Header {
alg: "none",
typ: "JWT",
};
let payload = json!({
"email": format!("{prefix}@example.com"),
"https://api.openai.com/auth": {
"chatgpt_account_id": format!("{prefix}-account"),
},
});
let encode = |bytes: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes);
let header_b64 = encode(&serde_json::to_vec(&header).expect("serialize header"));
let payload_b64 = encode(&serde_json::to_vec(&payload).expect("serialize payload"));
let signature_b64 = encode(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");

crate::token_data::parse_id_token(&fake_jwt).expect("fake JWT should parse")
}

fn auth_with_prefix(prefix: &str) -> AuthDotJson {
AuthDotJson {
openai_api_key: Some(format!("{prefix}-api-key")),
tokens: Some(TokenData {
id_token: id_token_with_prefix(prefix),
access_token: format!("{prefix}-access"),
refresh_token: format!("{prefix}-refresh"),
account_id: Some(format!("{prefix}-account-id")),
}),
last_refresh: None,
}
}

#[test]
fn keyring_auth_storage_load_returns_deserialized_auth() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = KeyringAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let expected = AuthDotJson {
openai_api_key: Some("sk-test".to_string()),
tokens: None,
last_refresh: None,
};
seed_keyring_with_auth(
&mock_keyring,
|| compute_store_key(codex_home.path()),
&expected,
)?;

let loaded = storage.load()?;
assert_eq!(Some(expected), loaded);
Ok(())
}

#[test]
fn keyring_auth_storage_compute_store_key_for_home_directory() -> anyhow::Result<()> {
let codex_home = PathBuf::from("~/.codex");

let key = compute_store_key(codex_home.as_path())?;

assert_eq!(key, "cli|940db7b1d0e4eb40");
Ok(())
}

#[test]
fn keyring_auth_storage_save_persists_and_removes_fallback_file() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = KeyringAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let auth_file = get_auth_file(codex_home.path());
std::fs::write(&auth_file, "stale")?;
let auth = AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: Default::default(),
access_token: "access".to_string(),
refresh_token: "refresh".to_string(),
account_id: Some("account".to_string()),
}),
last_refresh: Some(Utc::now()),
};

storage.save(&auth)?;

let key = compute_store_key(codex_home.path())?;
assert_keyring_saved_auth_and_removed_fallback(
&mock_keyring,
&key,
codex_home.path(),
&auth,
);
Ok(())
}

#[test]
fn keyring_auth_storage_delete_removes_keyring_and_file() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = KeyringAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let (key, auth_file) = seed_keyring_and_fallback_auth_file_for_delete(
&mock_keyring,
codex_home.path(),
|| compute_store_key(codex_home.path()),
)?;

let removed = storage.delete()?;

assert!(removed, "delete should report removal");
assert!(
!mock_keyring.contains(&key),
"keyring entry should be removed"
);
assert!(
!auth_file.exists(),
"fallback auth.json should be removed after keyring delete"
);
Ok(())
}

#[test]
fn auto_auth_storage_load_prefers_keyring_value() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let keyring_auth = auth_with_prefix("keyring");
seed_keyring_with_auth(
&mock_keyring,
|| compute_store_key(codex_home.path()),
&keyring_auth,
)?;

let file_auth = auth_with_prefix("file");
storage.file_storage.save(&file_auth)?;

let loaded = storage.load()?;
assert_eq!(loaded, Some(keyring_auth));
Ok(())
}

#[test]
fn auto_auth_storage_load_uses_file_when_keyring_empty() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(codex_home.path().to_path_buf(), Arc::new(mock_keyring));

let expected = auth_with_prefix("file-only");
storage.file_storage.save(&expected)?;

let loaded = storage.load()?;
assert_eq!(loaded, Some(expected));
Ok(())
}

#[test]
fn auto_auth_storage_load_falls_back_when_keyring_errors() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let key = compute_store_key(codex_home.path())?;
mock_keyring.set_error(&key, KeyringError::Invalid("error".into(), "load".into()));

let expected = auth_with_prefix("fallback");
storage.file_storage.save(&expected)?;

let loaded = storage.load()?;
assert_eq!(loaded, Some(expected));
Ok(())
}

#[test]
fn auto_auth_storage_save_prefers_keyring() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let key = compute_store_key(codex_home.path())?;

let stale = auth_with_prefix("stale");
storage.file_storage.save(&stale)?;

let expected = auth_with_prefix("to-save");
storage.save(&expected)?;

assert_keyring_saved_auth_and_removed_fallback(
&mock_keyring,
&key,
codex_home.path(),
&expected,
);
Ok(())
}

#[test]
fn auto_auth_storage_save_falls_back_when_keyring_errors() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let key = compute_store_key(codex_home.path())?;
mock_keyring.set_error(&key, KeyringError::Invalid("error".into(), "save".into()));

let auth = auth_with_prefix("fallback");
storage.save(&auth)?;

let auth_file = get_auth_file(codex_home.path());
assert!(
auth_file.exists(),
"fallback auth.json should be created when keyring save fails"
);
let saved = storage
.file_storage
.load()?
.context("fallback auth should exist")?;
assert_eq!(saved, auth);
assert!(
mock_keyring.saved_value(&key).is_none(),
"keyring should not contain value when save fails"
);
Ok(())
}

#[test]
fn auto_auth_storage_delete_removes_keyring_and_file() -> anyhow::Result<()> {
let codex_home = tempdir()?;
let mock_keyring = MockKeyringStore::default();
let storage = AutoAuthStorage::new(
codex_home.path().to_path_buf(),
Arc::new(mock_keyring.clone()),
);
let (key, auth_file) = seed_keyring_and_fallback_auth_file_for_delete(
&mock_keyring,
codex_home.path(),
|| compute_store_key(codex_home.path()),
)?;

let removed = storage.delete()?;

assert!(removed, "delete should report removal");
assert!(
!mock_keyring.contains(&key),
"keyring entry should be removed"
);
assert!(
!auth_file.exists(),
"fallback auth.json should be removed after delete"
);
Ok(())
}
}
@@ -17,8 +17,11 @@ use crate::util::backoff;
use bytes::Bytes;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::models::ContentItem;
use codex_protocol::models::FunctionCallOutputContentItem;
use codex_protocol::models::ReasoningItemContent;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use eventsource_stream::Eventsource;
use futures::Stream;
use futures::StreamExt;
@@ -40,6 +43,7 @@ pub(crate) async fn stream_chat_completions(
client: &CodexHttpClient,
provider: &ModelProviderInfo,
otel_event_manager: &OtelEventManager,
session_source: &SessionSource,
) -> Result<ResponseStream> {
if prompt.output_schema.is_some() {
return Err(CodexErr::UnsupportedOperation(
@@ -76,6 +80,7 @@ pub(crate) async fn stream_chat_completions(
ResponseItem::CustomToolCall { .. } => {}
ResponseItem::CustomToolCallOutput { .. } => {}
ResponseItem::WebSearchCall { .. } => {}
ResponseItem::GhostSnapshot { .. } => {}
}
}

@@ -158,16 +163,26 @@ pub(crate) async fn stream_chat_completions(
for (idx, item) in input.iter().enumerate() {
match item {
ResponseItem::Message { role, content, .. } => {
// Build content either as a plain string (typical for assistant text)
// or as an array of content items when images are present (user/tool multimodal).
let mut text = String::new();
let mut items: Vec<serde_json::Value> = Vec::new();
let mut saw_image = false;

for c in content {
match c {
ContentItem::InputText { text: t }
| ContentItem::OutputText { text: t } => {
text.push_str(t);
items.push(json!({"type":"text","text": t}));
}
ContentItem::InputImage { image_url } => {
saw_image = true;
items.push(json!({"type":"image_url","image_url": {"url": image_url}}));
}
_ => {}
}
}

// Skip exact-duplicate assistant messages.
if role == "assistant" {
if let Some(prev) = &last_assistant_text
@@ -178,7 +193,17 @@ pub(crate) async fn stream_chat_completions(
last_assistant_text = Some(text.clone());
}

let mut msg = json!({"role": role, "content": text});
// For assistant messages, always send a plain string for compatibility.
// For user messages, if an image is present, send an array of content items.
let content_value = if role == "assistant" {
json!(text)
} else if saw_image {
json!(items)
} else {
json!(text)
};

let mut msg = json!({"role": role, "content": content_value});
if role == "assistant"
&& let Some(reasoning) = reasoning_by_anchor_index.get(&idx)
&& let Some(obj) = msg.as_object_mut()
@@ -237,10 +262,29 @@ pub(crate) async fn stream_chat_completions(
|
||||
messages.push(msg);
|
||||
}
|
||||
ResponseItem::FunctionCallOutput { call_id, output } => {
|
||||
// Prefer structured content items when available (e.g., images)
|
||||
// otherwise fall back to the legacy plain-string content.
|
||||
let content_value = if let Some(items) = &output.content_items {
|
||||
let mapped: Vec<serde_json::Value> = items
|
||||
.iter()
|
||||
.map(|it| match it {
|
||||
FunctionCallOutputContentItem::InputText { text } => {
|
||||
json!({"type":"text","text": text})
|
||||
}
|
||||
FunctionCallOutputContentItem::InputImage { image_url } => {
|
||||
json!({"type":"image_url","image_url": {"url": image_url}})
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
json!(mapped)
|
||||
} else {
|
||||
json!(output.content)
|
||||
};
|
||||
|
||||
messages.push(json!({
|
||||
"role": "tool",
|
||||
"tool_call_id": call_id,
|
||||
"content": output.content,
|
||||
"content": content_value,
|
||||
}));
|
||||
}
|
||||
ResponseItem::CustomToolCall {
|
||||
@@ -270,6 +314,10 @@ pub(crate) async fn stream_chat_completions(
|
||||
"content": output,
|
||||
}));
|
||||
}
|
||||
ResponseItem::GhostSnapshot { .. } => {
|
||||
// Ghost snapshots annotate history but are not sent to the model.
|
||||
continue;
|
||||
}
|
||||
ResponseItem::Reasoning { .. }
|
||||
| ResponseItem::WebSearchCall { .. }
|
||||
| ResponseItem::Other => {
|
||||
@@ -298,7 +346,20 @@ pub(crate) async fn stream_chat_completions(
|
||||
loop {
|
||||
attempt += 1;
|
||||
|
||||
let req_builder = provider.create_request_builder(client, &None).await?;
|
||||
let mut req_builder = provider.create_request_builder(client, &None).await?;
|
||||
|
||||
// Include subagent header only for subagent sessions.
|
||||
if let SessionSource::SubAgent(sub) = session_source.clone() {
|
||||
let subagent = if let SubAgentSource::Other(label) = sub {
|
||||
label
|
||||
} else {
|
||||
serde_json::to_value(&sub)
|
||||
.ok()
|
||||
.and_then(|v| v.as_str().map(std::string::ToString::to_string))
|
||||
.unwrap_or_else(|| "other".to_string())
|
||||
};
|
||||
req_builder = req_builder.header("x-openai-subagent", subagent);
|
||||
}
|
||||
|
||||
let res = otel_event_manager
|
||||
.log_request(attempt, || {
|
||||
@@ -368,6 +429,61 @@ pub(crate) async fn stream_chat_completions(
|
||||
}
|
||||
}
|
||||
|
||||
async fn append_assistant_text(
|
||||
tx_event: &mpsc::Sender<Result<ResponseEvent>>,
|
||||
assistant_item: &mut Option<ResponseItem>,
|
||||
text: String,
|
||||
) {
|
||||
if assistant_item.is_none() {
|
||||
let item = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![],
|
||||
};
|
||||
*assistant_item = Some(item.clone());
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::OutputItemAdded(item)))
|
||||
.await;
|
||||
}
|
||||
|
||||
if let Some(ResponseItem::Message { content, .. }) = assistant_item {
|
||||
content.push(ContentItem::OutputText { text: text.clone() });
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::OutputTextDelta(text.clone())))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn append_reasoning_text(
|
||||
tx_event: &mpsc::Sender<Result<ResponseEvent>>,
|
||||
reasoning_item: &mut Option<ResponseItem>,
|
||||
text: String,
|
||||
) {
|
||||
if reasoning_item.is_none() {
|
||||
let item = ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: Vec::new(),
|
||||
content: Some(vec![]),
|
||||
encrypted_content: None,
|
||||
};
|
||||
*reasoning_item = Some(item.clone());
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::OutputItemAdded(item)))
|
||||
.await;
|
||||
}
|
||||
|
||||
if let Some(ResponseItem::Reasoning {
|
||||
content: Some(content),
|
||||
..
|
||||
}) = reasoning_item
|
||||
{
|
||||
content.push(ReasoningItemContent::ReasoningText { text: text.clone() });
|
||||
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::ReasoningContentDelta(text.clone())))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
/// Lightweight SSE processor for the Chat Completions streaming format. The
|
||||
/// output is mapped onto Codex's internal [`ResponseEvent`] so that the rest
|
||||
/// of the pipeline can stay agnostic of the underlying wire format.
|
||||
@@ -395,8 +511,8 @@ async fn process_chat_sse<S>(
|
||||
}
|
||||
|
||||
let mut fn_call_state = FunctionCallState::default();
|
||||
let mut assistant_text = String::new();
|
||||
let mut reasoning_text = String::new();
|
||||
let mut assistant_item: Option<ResponseItem> = None;
|
||||
let mut reasoning_item: Option<ResponseItem> = None;
|
||||
|
||||
loop {
|
||||
let start = std::time::Instant::now();
|
||||
@@ -437,26 +553,11 @@ async fn process_chat_sse<S>(
|
||||
if sse.data.trim() == "[DONE]" {
|
||||
// Emit any finalized items before closing so downstream consumers receive
|
||||
// terminal events for both assistant content and raw reasoning.
|
||||
if !assistant_text.is_empty() {
|
||||
let item = ResponseItem::Message {
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: std::mem::take(&mut assistant_text),
|
||||
}],
|
||||
id: None,
|
||||
};
|
||||
if let Some(item) = assistant_item {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
|
||||
}
|
||||
|
||||
if !reasoning_text.is_empty() {
|
||||
let item = ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: Vec::new(),
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: std::mem::take(&mut reasoning_text),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
};
|
||||
if let Some(item) = reasoning_item {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
|
||||
}
|
||||
|
||||
@@ -486,10 +587,7 @@ async fn process_chat_sse<S>(
|
||||
.and_then(|c| c.as_str())
|
||||
&& !content.is_empty()
|
||||
{
|
||||
assistant_text.push_str(content);
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
|
||||
.await;
|
||||
append_assistant_text(&tx_event, &mut assistant_item, content.to_string()).await;
|
||||
}
|
||||
|
||||
// Forward any reasoning/thinking deltas if present.
|
||||
@@ -519,10 +617,7 @@ async fn process_chat_sse<S>(
|
||||
|
||||
if let Some(reasoning) = maybe_text {
|
||||
// Accumulate so we can emit a terminal Reasoning item at the end.
|
||||
reasoning_text.push_str(&reasoning);
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::ReasoningContentDelta(reasoning)))
|
||||
.await;
|
||||
append_reasoning_text(&tx_event, &mut reasoning_item, reasoning).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -532,10 +627,7 @@ async fn process_chat_sse<S>(
|
||||
// Accept either a plain string or an object with { text | content }
|
||||
if let Some(s) = message_reasoning.as_str() {
|
||||
if !s.is_empty() {
|
||||
reasoning_text.push_str(s);
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::ReasoningContentDelta(s.to_string())))
|
||||
.await;
|
||||
append_reasoning_text(&tx_event, &mut reasoning_item, s.to_string()).await;
|
||||
}
|
||||
} else if let Some(obj) = message_reasoning.as_object()
|
||||
&& let Some(s) = obj
|
||||
@@ -544,10 +636,7 @@ async fn process_chat_sse<S>(
|
||||
.or_else(|| obj.get("content").and_then(|v| v.as_str()))
|
||||
&& !s.is_empty()
|
||||
{
|
||||
reasoning_text.push_str(s);
|
||||
let _ = tx_event
|
||||
.send(Ok(ResponseEvent::ReasoningContentDelta(s.to_string())))
|
||||
.await;
|
||||
append_reasoning_text(&tx_event, &mut reasoning_item, s.to_string()).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -585,15 +674,7 @@ async fn process_chat_sse<S>(
|
||||
"tool_calls" if fn_call_state.active => {
|
||||
// First, flush the terminal raw reasoning so UIs can finalize
|
||||
// the reasoning stream before any exec/tool events begin.
|
||||
if !reasoning_text.is_empty() {
|
||||
let item = ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: Vec::new(),
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: std::mem::take(&mut reasoning_text),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
};
|
||||
if let Some(item) = reasoning_item.take() {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
|
||||
}
|
||||
|
||||
@@ -610,26 +691,11 @@ async fn process_chat_sse<S>(
|
||||
"stop" => {
|
||||
// Regular turn without tool-call. Emit the final assistant message
|
||||
// as a single OutputItemDone so non-delta consumers see the result.
|
||||
if !assistant_text.is_empty() {
|
||||
let item = ResponseItem::Message {
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: std::mem::take(&mut assistant_text),
|
||||
}],
|
||||
id: None,
|
||||
};
|
||||
if let Some(item) = assistant_item.take() {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
|
||||
}
|
||||
// Also emit a terminal Reasoning item so UIs can finalize raw reasoning.
|
||||
if !reasoning_text.is_empty() {
|
||||
let item = ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: Vec::new(),
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: std::mem::take(&mut reasoning_text),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
};
|
||||
if let Some(item) = reasoning_item.take() {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
|
||||
}
|
||||
}
|
||||
@@ -848,8 +914,8 @@ where
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryPartAdded))) => {
|
||||
continue;
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::WebSearchCallBegin { call_id }))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::WebSearchCallBegin { call_id })));
|
||||
Poll::Ready(Some(Ok(ResponseEvent::OutputItemAdded(item)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::OutputItemAdded(item))));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
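The chat_completions.rs hunks above move the Chat path from buffering whole strings to emitting item-lifecycle events: an OutputItemAdded when a message or reasoning item first appears, per-chunk deltas while it grows, and an OutputItemDone when it is finalized. A minimal consumer sketch, assuming only the crate's `Result` alias and the `ResponseEvent` variants shown in this diff (the `consume` function itself is hypothetical):

    use futures::StreamExt;

    async fn consume(
        mut stream: impl futures::Stream<Item = Result<ResponseEvent>> + Unpin,
    ) -> Result<()> {
        while let Some(event) = stream.next().await {
            match event? {
                // A new assistant message or reasoning item has started streaming.
                ResponseEvent::OutputItemAdded(item) => println!("started: {item:?}"),
                // Incremental text for the in-flight assistant message.
                ResponseEvent::OutputTextDelta(delta) => print!("{delta}"),
                // Incremental raw reasoning text.
                ResponseEvent::ReasoningContentDelta(delta) => print!("{delta}"),
                // The item reached its terminal state.
                ResponseEvent::OutputItemDone(item) => println!("done: {item:?}"),
                _ => {}
            }
        }
        Ok(())
    }
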
@@ -13,6 +13,7 @@ use codex_protocol::ConversationId;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::SessionSource;
use eventsource_stream::Eventsource;
use futures::prelude::*;
use regex_lite::Regex;
@@ -30,6 +31,7 @@ use tracing::warn;

use crate::AuthManager;
use crate::auth::CodexAuth;
use crate::auth::RefreshTokenError;
use crate::chat_completions::AggregateStreamExt;
use crate::chat_completions::stream_chat_completions;
use crate::client_common::Prompt;
@@ -56,7 +58,6 @@ use crate::openai_model_info::get_model_info;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::state::TaskKind;
use crate::token_data::PlanType;
use crate::tools::spec::create_tools_json_for_responses_api;
use crate::util::backoff;
@@ -87,8 +88,10 @@ pub struct ModelClient {
    conversation_id: ConversationId,
    effort: Option<ReasoningEffortConfig>,
    summary: ReasoningSummaryConfig,
    session_source: SessionSource,
}

#[allow(clippy::too_many_arguments)]
impl ModelClient {
    pub fn new(
        config: Arc<Config>,
@@ -98,6 +101,7 @@ impl ModelClient {
        effort: Option<ReasoningEffortConfig>,
        summary: ReasoningSummaryConfig,
        conversation_id: ConversationId,
        session_source: SessionSource,
    ) -> Self {
        let client = create_client();

@@ -110,6 +114,7 @@ impl ModelClient {
            conversation_id,
            effort,
            summary,
            session_source,
        }
    }

@@ -127,20 +132,17 @@ impl ModelClient {
        })
    }

    /// Dispatches to either the Responses or Chat implementation depending on
    /// the provider config. Public callers always invoke `stream()` – the
    /// specialised helpers are private to avoid accidental misuse.
    pub async fn stream(&self, prompt: &Prompt) -> Result<ResponseStream> {
        self.stream_with_task_kind(prompt, TaskKind::Regular).await
    pub fn config(&self) -> Arc<Config> {
        Arc::clone(&self.config)
    }

    pub(crate) async fn stream_with_task_kind(
        &self,
        prompt: &Prompt,
        task_kind: TaskKind,
    ) -> Result<ResponseStream> {
    pub fn provider(&self) -> &ModelProviderInfo {
        &self.provider
    }

    pub async fn stream(&self, prompt: &Prompt) -> Result<ResponseStream> {
        match self.provider.wire_api {
            WireApi::Responses => self.stream_responses(prompt, task_kind).await,
            WireApi::Responses => self.stream_responses(prompt).await,
            WireApi::Chat => {
                // Create the raw streaming connection first.
                let response_stream = stream_chat_completions(
@@ -149,6 +151,7 @@ impl ModelClient {
                    &self.client,
                    &self.provider,
                    &self.otel_event_manager,
                    &self.session_source,
                )
                .await?;

@@ -181,11 +184,7 @@ impl ModelClient {
    }

    /// Implementation for the OpenAI *Responses* experimental API.
    async fn stream_responses(
        &self,
        prompt: &Prompt,
        task_kind: TaskKind,
    ) -> Result<ResponseStream> {
    async fn stream_responses(&self, prompt: &Prompt) -> Result<ResponseStream> {
        if let Some(path) = &*CODEX_RS_SSE_FIXTURE {
            // short circuit for tests
            warn!(path, "Streaming from fixture");
@@ -215,18 +214,16 @@ impl ModelClient {

        let input_with_instructions = prompt.get_formatted_input();

        let verbosity = match &self.config.model_family.family {
            family if family == "gpt-5" => self.config.model_verbosity,
            _ => {
                if self.config.model_verbosity.is_some() {
                    warn!(
                        "model_verbosity is set but ignored for non-gpt-5 model family: {}",
                        self.config.model_family.family
                    );
                }

                None
        let verbosity = if self.config.model_family.support_verbosity {
            self.config.model_verbosity
        } else {
            if self.config.model_verbosity.is_some() {
                warn!(
                    "model_verbosity is set but ignored as the model does not support verbosity: {}",
                    self.config.model_family.family
                );
            }
            None
        };

        // Only include `text.verbosity` for GPT-5 family models
@@ -264,7 +261,7 @@ impl ModelClient {
        let max_attempts = self.provider.request_max_retries();
        for attempt in 0..=max_attempts {
            match self
                .attempt_stream_responses(attempt, &payload_json, &auth_manager, task_kind)
                .attempt_stream_responses(attempt, &payload_json, &auth_manager)
                .await
            {
                Ok(stream) => {
@@ -292,7 +289,6 @@ impl ModelClient {
        attempt: u64,
        payload_json: &Value,
        auth_manager: &Option<Arc<AuthManager>>,
        task_kind: TaskKind,
    ) -> std::result::Result<ResponseStream, StreamAttemptError> {
        // Always fetch the latest auth in case a prior attempt refreshed the token.
        let auth = auth_manager.as_ref().and_then(|m| m.auth());
@@ -310,13 +306,24 @@ impl ModelClient {
            .await
            .map_err(StreamAttemptError::Fatal)?;

        // Include subagent header only for subagent sessions.
        if let SessionSource::SubAgent(sub) = &self.session_source {
            let subagent = if let crate::protocol::SubAgentSource::Other(label) = sub {
                label.clone()
            } else {
                serde_json::to_value(sub)
                    .ok()
                    .and_then(|v| v.as_str().map(std::string::ToString::to_string))
                    .unwrap_or_else(|| "other".to_string())
            };
            req_builder = req_builder.header("x-openai-subagent", subagent);
        }

        req_builder = req_builder
            .header("OpenAI-Beta", "responses=experimental")
            // Send session_id for compatibility.
            .header("conversation_id", self.conversation_id.to_string())
            .header("session_id", self.conversation_id.to_string())
            .header(reqwest::header::ACCEPT, "text/event-stream")
            .header("Codex-Task-Type", task_kind.header_value())
            .json(payload_json);

        if let Some(auth) = auth.as_ref()
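
Both the Chat and Responses paths derive the `x-openai-subagent` header value the same way: a `SubAgentSource::Other(label)` passes its label through, while a unit variant round-trips through serde so the header matches the wire spelling of the variant. A standalone sketch of that conversion (the enum shape and serde attributes here are assumptions inferred from this diff, not the full protocol definition):

    use serde::Serialize;

    #[derive(Serialize, Clone)]
    #[serde(rename_all = "snake_case")]
    enum SubAgentSource {
        Review,
        Other(String),
    }

    fn subagent_header_value(sub: &SubAgentSource) -> String {
        if let SubAgentSource::Other(label) = sub {
            return label.clone();
        }
        // A unit variant serializes to a bare JSON string such as "review";
        // fall back to "other" if serialization yields anything else.
        serde_json::to_value(sub)
            .ok()
            .and_then(|v| v.as_str().map(str::to_string))
            .unwrap_or_else(|| "other".to_string())
    }
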
@@ -381,9 +388,19 @@ impl ModelClient {

        if status == StatusCode::UNAUTHORIZED
            && let Some(manager) = auth_manager.as_ref()
            && manager.auth().is_some()
            && let Some(auth) = auth.as_ref()
            && auth.mode == AuthMode::ChatGPT
            && let Err(err) = manager.refresh_token().await
        {
            let _ = manager.refresh_token().await;
            let stream_error = match err {
                RefreshTokenError::Permanent(failed) => {
                    StreamAttemptError::Fatal(CodexErr::RefreshTokenFailed(failed))
                }
                RefreshTokenError::Transient(other) => {
                    StreamAttemptError::RetryableTransportError(CodexErr::Io(other))
                }
            };
            return Err(stream_error);
        }

        // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx
@@ -454,6 +471,10 @@ impl ModelClient {
        self.otel_event_manager.clone()
    }

    pub fn get_session_source(&self) -> SessionSource {
        self.session_source.clone()
    }

    /// Returns the currently configured model slug.
    pub fn get_model(&self) -> String {
        self.config.model.clone()
@@ -659,6 +680,33 @@ fn parse_header_str<'a>(headers: &'a HeaderMap, name: &str) -> Option<&'a str> {
    headers.get(name)?.to_str().ok()
}

async fn emit_completed(
    tx_event: &mpsc::Sender<Result<ResponseEvent>>,
    otel_event_manager: &OtelEventManager,
    completed: ResponseCompleted,
) {
    if let Some(token_usage) = &completed.usage {
        otel_event_manager.sse_event_completed(
            token_usage.input_tokens,
            token_usage.output_tokens,
            token_usage
                .input_tokens_details
                .as_ref()
                .map(|d| d.cached_tokens),
            token_usage
                .output_tokens_details
                .as_ref()
                .map(|d| d.reasoning_tokens),
            token_usage.total_tokens,
        );
    }
    let event = ResponseEvent::Completed {
        response_id: completed.id.clone(),
        token_usage: completed.usage.map(Into::into),
    };
    let _ = tx_event.send(Ok(event)).await;
}

async fn process_sse<S>(
    stream: S,
    tx_event: mpsc::Sender<Result<ResponseEvent>>,
@@ -671,7 +719,7 @@ async fn process_sse<S>(

    // If the stream stays completely silent for an extended period treat it as disconnected.
    // The response id returned from the "complete" message.
    let mut response_completed: Option<ResponseCompleted> = None;
    let response_completed: Option<ResponseCompleted> = None;
    let mut response_error: Option<CodexErr> = None;

    loop {
@@ -690,30 +738,8 @@ async fn process_sse<S>(
            }
            Ok(None) => {
                match response_completed {
                    Some(ResponseCompleted {
                        id: response_id,
                        usage,
                    }) => {
                        if let Some(token_usage) = &usage {
                            otel_event_manager.sse_event_completed(
                                token_usage.input_tokens,
                                token_usage.output_tokens,
                                token_usage
                                    .input_tokens_details
                                    .as_ref()
                                    .map(|d| d.cached_tokens),
                                token_usage
                                    .output_tokens_details
                                    .as_ref()
                                    .map(|d| d.reasoning_tokens),
                                token_usage.total_tokens,
                            );
                        }
                        let event = ResponseEvent::Completed {
                            response_id,
                            token_usage: usage.map(Into::into),
                        };
                        let _ = tx_event.send(Ok(event)).await;
                    Some(completed) => {
                        emit_completed(&tx_event, &otel_event_manager, completed).await
                    }
                    None => {
                        let error = response_error.unwrap_or(CodexErr::Stream(
@@ -843,7 +869,8 @@ async fn process_sse<S>(
                if let Some(resp_val) = event.response {
                    match serde_json::from_value::<ResponseCompleted>(resp_val) {
                        Ok(r) => {
                            response_completed = Some(r);
                            emit_completed(&tx_event, &otel_event_manager, r).await;
                            return;
                        }
                        Err(e) => {
                            let error = format!("failed to parse ResponseCompleted: {e}");
@@ -861,21 +888,15 @@ async fn process_sse<S>(
                | "response.in_progress"
                | "response.output_text.done" => {}
                "response.output_item.added" => {
                    if let Some(item) = event.item.as_ref() {
                        // Detect web_search_call begin and forward a synthetic event upstream.
                        if let Some(ty) = item.get("type").and_then(|v| v.as_str())
                            && ty == "web_search_call"
                        {
                            let call_id = item
                                .get("id")
                                .and_then(|v| v.as_str())
                                .unwrap_or("")
                                .to_string();
                            let ev = ResponseEvent::WebSearchCallBegin { call_id };
                            if tx_event.send(Ok(ev)).await.is_err() {
                                return;
                            }
                        }
                    let Some(item_val) = event.item else { continue };
                    let Ok(item) = serde_json::from_value::<ResponseItem>(item_val) else {
                        debug!("failed to parse ResponseItem from output_item.done");
                        continue;
                    };

                    let event = ResponseEvent::OutputItemAdded(item);
                    if tx_event.send(Ok(event)).await.is_err() {
                        return;
                    }
                }
                "response.reasoning_summary_part.added" => {
@@ -922,8 +943,10 @@ async fn stream_from_fixture(
fn rate_limit_regex() -> &'static Regex {
    static RE: OnceLock<Regex> = OnceLock::new();

    // Match both OpenAI-style messages like "Please try again in 1.898s"
    // and Azure OpenAI-style messages like "Try again in 35 seconds".
    #[expect(clippy::unwrap_used)]
    RE.get_or_init(|| Regex::new(r"Please try again in (\d+(?:\.\d+)?)(s|ms)").unwrap())
    RE.get_or_init(|| Regex::new(r"(?i)try again in\s*(\d+(?:\.\d+)?)\s*(s|ms|seconds?)").unwrap())
}

fn try_parse_retry_after(err: &Error) -> Option<Duration> {
@@ -931,7 +954,8 @@ fn try_parse_retry_after(err: &Error) -> Option<Duration> {
        return None;
    }

    // parse the Please try again in 1.898s format using regex
    // parse retry hints like "try again in 1.898s" or
    // "Try again in 35 seconds" using regex
    let re = rate_limit_regex();
    if let Some(message) = &err.message
        && let Some(captures) = re.captures(message)
@@ -941,9 +965,9 @@ fn try_parse_retry_after(err: &Error) -> Option<Duration> {

    if let (Some(value), Some(unit)) = (seconds, unit) {
        let value = value.as_str().parse::<f64>().ok()?;
        let unit = unit.as_str();
        let unit = unit.as_str().to_ascii_lowercase();

        if unit == "s" {
        if unit == "s" || unit.starts_with("second") {
            return Some(Duration::from_secs_f64(value));
        } else if unit == "ms" {
            return Some(Duration::from_millis(value as u64));
@@ -1418,6 +1442,19 @@ mod tests {
        assert_eq!(delay, Some(Duration::from_secs_f64(1.898)));
    }

    #[test]
    fn test_try_parse_retry_after_azure() {
        let err = Error {
            r#type: None,
            message: Some("Rate limit exceeded. Try again in 35 seconds.".to_string()),
            code: Some("rate_limit_exceeded".to_string()),
            plan_type: None,
            resets_at: None,
        };
        let delay = try_parse_retry_after(&err);
        assert_eq!(delay, Some(Duration::from_secs(35)));
    }

    #[test]
    fn error_response_deserializes_schema_known_plan_type_and_serializes_back() {
        use crate::token_data::KnownPlan;

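The widened rate-limit regex above is case-insensitive and accepts both the OpenAI wording ("Please try again in 1.898s") and the Azure wording ("Try again in 35 seconds"). A quick standalone check of the pattern itself, using the same `regex_lite` API the file already imports:

    use regex_lite::Regex;

    fn main() {
        let re = Regex::new(r"(?i)try again in\s*(\d+(?:\.\d+)?)\s*(s|ms|seconds?)").unwrap();
        for msg in [
            "Please try again in 1.898s",
            "Rate limit exceeded. Try again in 35 seconds.",
        ] {
            let caps = re.captures(msg).expect("pattern should match");
            // Alternation prefers the first branch, so even for "35 seconds"
            // the captured unit is just "s"; the caller treats "s" (or anything
            // starting with "second") as whole seconds.
            println!(
                "value={} unit={}",
                caps.get(1).unwrap().as_str(),
                caps.get(2).unwrap().as_str()
            );
        }
    }
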
@@ -23,6 +23,11 @@ use tokio::sync::mpsc;
/// Review thread system prompt. Edit `core/src/review_prompt.md` to customize.
pub const REVIEW_PROMPT: &str = include_str!("../review_prompt.md");

// Centralized templates for review-related user messages
pub const REVIEW_EXIT_SUCCESS_TMPL: &str = include_str!("../templates/review/exit_success.xml");
pub const REVIEW_EXIT_INTERRUPTED_TMPL: &str =
    include_str!("../templates/review/exit_interrupted.xml");

/// API request payload for a single model turn
#[derive(Default, Debug, Clone)]
pub struct Prompt {
@@ -192,6 +197,7 @@ fn strip_total_output_header(output: &str) -> Option<&str> {
pub enum ResponseEvent {
    Created,
    OutputItemDone(ResponseItem),
    OutputItemAdded(ResponseItem),
    Completed {
        response_id: String,
        token_usage: Option<TokenUsage>,
@@ -200,9 +206,6 @@ pub enum ResponseEvent {
    ReasoningSummaryDelta(String),
    ReasoningContentDelta(String),
    ReasoningSummaryPartAdded,
    WebSearchCallBegin {
        call_id: String,
    },
    RateLimits(RateLimitSnapshot),
}

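With `WebSearchCallBegin` removed from `ResponseEvent`, the start of a web search now arrives as an `OutputItemAdded` carrying the parsed `ResponseItem::WebSearchCall`, so the begin signal and the item payload travel together. A migration sketch for a consumer that keyed on the old variant (the structural match on `WebSearchCall` is an assumption; its fields are intentionally not named here):

    /// Returns true when an event marks the start of a web search call.
    fn is_web_search_begin(event: &ResponseEvent) -> bool {
        matches!(
            event,
            // Previously: ResponseEvent::WebSearchCallBegin { .. }
            ResponseEvent::OutputItemAdded(ResponseItem::WebSearchCall { .. })
        )
    }
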
File diff suppressed because it is too large
@@ -13,10 +13,9 @@ use crate::protocol::ErrorEvent;
use crate::protocol::EventMsg;
use crate::protocol::TaskStartedEvent;
use crate::protocol::TurnContextItem;
use crate::state::TaskKind;
use crate::protocol::WarningEvent;
use crate::truncate::truncate_middle;
use crate::util::backoff;
use askama::Template;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
@@ -29,20 +28,12 @@ use tracing::error;
pub const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;

#[derive(Template)]
#[template(path = "compact/history_bridge.md", escape = "none")]
struct HistoryBridgeTemplate<'a> {
    user_messages_text: &'a str,
    summary_text: &'a str,
}

pub(crate) async fn run_inline_auto_compact_task(
    sess: Arc<Session>,
    turn_context: Arc<TurnContext>,
) {
    let input = vec![UserInput::Text {
        text: SUMMARIZATION_PROMPT.to_string(),
    }];
    let prompt = turn_context.compact_prompt().to_string();
    let input = vec![UserInput::Text { text: prompt }];
    run_compact_task_inner(sess, turn_context, input).await;
}

@@ -85,7 +76,7 @@ async fn run_compact_task_inner(
    sess.persist_rollout_items(&[rollout_item]).await;

    loop {
        let turn_input = history.get_history();
        let turn_input = history.get_history_for_prompt();
        let prompt = Prompt {
            input: turn_input.clone(),
            ..Default::default()
@@ -132,7 +123,7 @@ async fn run_compact_task_inner(
                let delay = backoff(retries);
                sess.notify_stream_error(
                    turn_context.as_ref(),
                    format!("Re-connecting... {retries}/{max_retries}"),
                    format!("Reconnecting... {retries}/{max_retries}"),
                )
                .await;
                tokio::time::sleep(delay).await;
@@ -148,11 +139,18 @@ async fn run_compact_task_inner(
        }
    }

    let history_snapshot = sess.history_snapshot().await;
    let history_snapshot = sess.clone_history().await.get_history();
    let summary_text = get_last_assistant_message_from_turn(&history_snapshot).unwrap_or_default();
    let user_messages = collect_user_messages(&history_snapshot);

    let initial_context = sess.build_initial_context(turn_context.as_ref());
    let new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
    let mut new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
    let ghost_snapshots: Vec<ResponseItem> = history_snapshot
        .iter()
        .filter(|item| matches!(item, ResponseItem::GhostSnapshot { .. }))
        .cloned()
        .collect();
    new_history.extend(ghost_snapshots);
    sess.replace_history(new_history).await;

    let rollout_item = RolloutItem::Compacted(CompactedItem {
@@ -164,6 +162,11 @@ async fn run_compact_task_inner(
        message: "Compact task completed".to_string(),
    });
    sess.send_event(&turn_context, event).await;

    let warning = EventMsg::Warning(WarningEvent {
        message: "Heads up: Long conversations and multiple compactions can cause the model to be less accurate. Start a new conversation when possible to keep conversations small and targeted.".to_string(),
    });
    sess.send_event(&turn_context, warning).await;
}

pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
@@ -200,35 +203,61 @@ pub(crate) fn build_compacted_history(
    user_messages: &[String],
    summary_text: &str,
) -> Vec<ResponseItem> {
    let mut history = initial_context;
    let mut user_messages_text = if user_messages.is_empty() {
        "(none)".to_string()
    } else {
        user_messages.join("\n\n")
    };
    // Truncate the concatenated prior user messages so the bridge message
    // stays well under the context window (approx. 4 bytes/token).
    let max_bytes = COMPACT_USER_MESSAGE_MAX_TOKENS * 4;
    if user_messages_text.len() > max_bytes {
        user_messages_text = truncate_middle(&user_messages_text, max_bytes).0;
    build_compacted_history_with_limit(
        initial_context,
        user_messages,
        summary_text,
        COMPACT_USER_MESSAGE_MAX_TOKENS * 4,
    )
}

fn build_compacted_history_with_limit(
    mut history: Vec<ResponseItem>,
    user_messages: &[String],
    summary_text: &str,
    max_bytes: usize,
) -> Vec<ResponseItem> {
    let mut selected_messages: Vec<String> = Vec::new();
    if max_bytes > 0 {
        let mut remaining = max_bytes;
        for message in user_messages.iter().rev() {
            if remaining == 0 {
                break;
            }
            if message.len() <= remaining {
                selected_messages.push(message.clone());
                remaining = remaining.saturating_sub(message.len());
            } else {
                let (truncated, _) = truncate_middle(message, remaining);
                selected_messages.push(truncated);
                break;
            }
        }
        selected_messages.reverse();
    }

    for message in &selected_messages {
        history.push(ResponseItem::Message {
            id: None,
            role: "user".to_string(),
            content: vec![ContentItem::InputText {
                text: message.clone(),
            }],
        });
    }

    let summary_text = if summary_text.is_empty() {
        "(no summary available)".to_string()
    } else {
        summary_text.to_string()
    };
    let Ok(bridge) = HistoryBridgeTemplate {
        user_messages_text: &user_messages_text,
        summary_text: &summary_text,
    }
    .render() else {
        return vec![];
    };

    history.push(ResponseItem::Message {
        id: None,
        role: "user".to_string(),
        content: vec![ContentItem::InputText { text: bridge }],
        content: vec![ContentItem::InputText { text: summary_text }],
    });

    history
}

@@ -237,11 +266,7 @@ async fn drain_to_completed(
    turn_context: &TurnContext,
    prompt: &Prompt,
) -> CodexResult<()> {
    let mut stream = turn_context
        .client
        .clone()
        .stream_with_task_kind(prompt, TaskKind::Compact)
        .await?;
    let mut stream = turn_context.client.clone().stream(prompt).await?;
    loop {
        let maybe_event = stream.next().await;
        let Some(event) = maybe_event else {
@@ -335,7 +360,8 @@ mod tests {
            id: None,
            role: "user".to_string(),
            content: vec![ContentItem::InputText {
                text: "<user_instructions>do things</user_instructions>".to_string(),
                text: "# AGENTS.md instructions for project\n\n<INSTRUCTIONS>\ndo things\n</INSTRUCTIONS>"
                    .to_string(),
            }],
        },
        ResponseItem::Message {
@@ -361,35 +387,65 @@ mod tests {

    #[test]
    fn build_compacted_history_truncates_overlong_user_messages() {
        // Prepare a very large prior user message so the aggregated
        // `user_messages_text` exceeds the truncation threshold used by
        // `build_compacted_history` (80k bytes).
        let big = "X".repeat(200_000);
        let history = build_compacted_history(Vec::new(), std::slice::from_ref(&big), "SUMMARY");
        // Use a small truncation limit so the test remains fast while still validating
        // that oversized user content is truncated.
        let max_bytes = 128;
        let big = "X".repeat(max_bytes + 50);
        let history = super::build_compacted_history_with_limit(
            Vec::new(),
            std::slice::from_ref(&big),
            "SUMMARY",
            max_bytes,
        );
        assert_eq!(history.len(), 2);

        // Expect exactly one bridge message added to history (plus any initial context we provided, which is none).
        assert_eq!(history.len(), 1);
        let truncated_message = &history[0];
        let summary_message = &history[1];

        // Extract the text content of the bridge message.
        let bridge_text = match &history[0] {
        let truncated_text = match truncated_message {
            ResponseItem::Message { role, content, .. } if role == "user" => {
                content_items_to_text(content).unwrap_or_default()
            }
            other => panic!("unexpected item in history: {other:?}"),
        };

        // The bridge should contain the truncation marker and not the full original payload.
        assert!(
            bridge_text.contains("tokens truncated"),
            "expected truncation marker in bridge message"
            truncated_text.contains("tokens truncated"),
            "expected truncation marker in truncated user message"
        );
        assert!(
            !bridge_text.contains(&big),
            "bridge should not include the full oversized user text"
            !truncated_text.contains(&big),
            "truncated user message should not include the full oversized user text"
        );

        let summary_text = match summary_message {
            ResponseItem::Message { role, content, .. } if role == "user" => {
                content_items_to_text(content).unwrap_or_default()
            }
            other => panic!("unexpected item in history: {other:?}"),
        };
        assert_eq!(summary_text, "SUMMARY");
    }

    #[test]
    fn build_compacted_history_appends_summary_message() {
        let initial_context: Vec<ResponseItem> = Vec::new();
        let user_messages = vec!["first user message".to_string()];
        let summary_text = "summary text";

        let history = build_compacted_history(initial_context, &user_messages, summary_text);
        assert!(
            bridge_text.contains("SUMMARY"),
            "bridge should include the provided summary text"
            !history.is_empty(),
            "expected compacted history to include summary"
        );

        let last = history.last().expect("history should have a summary entry");
        let summary = match last {
            ResponseItem::Message { role, content, .. } if role == "user" => {
                content_items_to_text(content).unwrap_or_default()
            }
            other => panic!("expected summary message, found {other:?}"),
        };
        assert_eq!(summary, summary_text);
    }
}

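The new `build_compacted_history_with_limit` walks prior user messages newest-first, keeping whole messages while the byte budget lasts and middle-truncating the first one that no longer fits. The selection logic in isolation, with a simplified ASCII-only stand-in for `crate::truncate::truncate_middle` so the sketch runs on its own (the real helper's marker text and second return value may differ):

    /// Simplified stand-in for `truncate_middle`: keep the head and tail,
    /// drop the middle. Assumes ASCII input; the real helper handles
    /// char boundaries and reports a truncated-token estimate.
    fn truncate_middle(s: &str, max_bytes: usize) -> (String, usize) {
        if s.len() <= max_bytes {
            return (s.to_string(), 0);
        }
        let keep = max_bytes / 2;
        let out = format!(
            "{}[...tokens truncated...]{}",
            &s[..keep],
            &s[s.len() - keep..]
        );
        (out, s.len() - max_bytes)
    }

    fn select_messages(user_messages: &[String], max_bytes: usize) -> Vec<String> {
        let mut selected = Vec::new();
        let mut remaining = max_bytes;
        // Newest messages are most relevant, so iterate in reverse.
        for message in user_messages.iter().rev() {
            if remaining == 0 {
                break;
            }
            if message.len() <= remaining {
                selected.push(message.clone());
                remaining -= message.len();
            } else {
                // Partially keep the first message that exceeds the budget, then stop.
                selected.push(truncate_middle(message, remaining).0);
                break;
            }
        }
        selected.reverse(); // restore chronological order
        selected
    }
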
@@ -3,16 +3,21 @@ use crate::error::Result as CodexResult;
use crate::protocol::Event;
use crate::protocol::Op;
use crate::protocol::Submission;
use std::path::PathBuf;

pub struct CodexConversation {
    codex: Codex,
    rollout_path: PathBuf,
}

/// Conduit for the bidirectional stream of messages that compose a conversation
/// in Codex.
impl CodexConversation {
    pub(crate) fn new(codex: Codex) -> Self {
        Self { codex }
    pub(crate) fn new(codex: Codex, rollout_path: PathBuf) -> Self {
        Self {
            codex,
            rollout_path,
        }
    }

    pub async fn submit(&self, op: Op) -> CodexResult<String> {
@@ -27,4 +32,8 @@ impl CodexConversation {
    pub async fn next_event(&self) -> CodexResult<Event> {
        self.codex.next_event().await
    }

    pub fn rollout_path(&self) -> PathBuf {
        self.rollout_path.clone()
    }
}

codex-rs/core/src/codex_delegate.rs (new file, 300 lines)
@@ -0,0 +1,300 @@
use std::sync::Arc;
use std::sync::atomic::AtomicU64;

use async_channel::Receiver;
use async_channel::Sender;
use codex_async_utils::OrCancelExt;
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::ExecApprovalRequestEvent;
use codex_protocol::protocol::Op;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use codex_protocol::protocol::Submission;
use codex_protocol::user_input::UserInput;
use tokio_util::sync::CancellationToken;

use crate::AuthManager;
use crate::codex::Codex;
use crate::codex::CodexSpawnOk;
use crate::codex::SUBMISSION_CHANNEL_CAPACITY;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::config::Config;
use crate::error::CodexErr;
use codex_protocol::protocol::InitialHistory;

/// Start an interactive sub-Codex conversation and return IO channels.
///
/// The returned `events_rx` yields non-approval events emitted by the sub-agent.
/// Approval requests are handled via `parent_session` and are not surfaced.
/// The returned `ops_tx` allows the caller to submit additional `Op`s to the sub-agent.
pub(crate) async fn run_codex_conversation_interactive(
    config: Config,
    auth_manager: Arc<AuthManager>,
    parent_session: Arc<Session>,
    parent_ctx: Arc<TurnContext>,
    cancel_token: CancellationToken,
    initial_history: Option<InitialHistory>,
) -> Result<Codex, CodexErr> {
    let (tx_sub, rx_sub) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY);
    let (tx_ops, rx_ops) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY);

    let CodexSpawnOk { codex, .. } = Codex::spawn(
        config,
        auth_manager,
        initial_history.unwrap_or(InitialHistory::New),
        SessionSource::SubAgent(SubAgentSource::Review),
    )
    .await?;
    let codex = Arc::new(codex);

    // Use a child token so parent cancel cascades but we can scope it to this task
    let cancel_token_events = cancel_token.child_token();
    let cancel_token_ops = cancel_token.child_token();

    // Forward events from the sub-agent to the consumer, filtering approvals and
    // routing them to the parent session for decisions.
    let parent_session_clone = Arc::clone(&parent_session);
    let parent_ctx_clone = Arc::clone(&parent_ctx);
    let codex_for_events = Arc::clone(&codex);
    tokio::spawn(async move {
        let _ = forward_events(
            codex_for_events,
            tx_sub,
            parent_session_clone,
            parent_ctx_clone,
            cancel_token_events.clone(),
        )
        .or_cancel(&cancel_token_events)
        .await;
    });

    // Forward ops from the caller to the sub-agent.
    let codex_for_ops = Arc::clone(&codex);
    tokio::spawn(async move {
        forward_ops(codex_for_ops, rx_ops, cancel_token_ops).await;
    });

    Ok(Codex {
        next_id: AtomicU64::new(0),
        tx_sub: tx_ops,
        rx_event: rx_sub,
    })
}

/// Convenience wrapper for one-time use with an initial prompt.
///
/// Internally calls the interactive variant, then immediately submits the provided input.
pub(crate) async fn run_codex_conversation_one_shot(
    config: Config,
    auth_manager: Arc<AuthManager>,
    input: Vec<UserInput>,
    parent_session: Arc<Session>,
    parent_ctx: Arc<TurnContext>,
    cancel_token: CancellationToken,
    initial_history: Option<InitialHistory>,
) -> Result<Codex, CodexErr> {
    // Use a child token so we can stop the delegate after completion without
    // requiring the caller to cancel the parent token.
    let child_cancel = cancel_token.child_token();
    let io = run_codex_conversation_interactive(
        config,
        auth_manager,
        parent_session,
        parent_ctx,
        child_cancel.clone(),
        initial_history,
    )
    .await?;

    // Send the initial input to kick off the one-shot turn.
    io.submit(Op::UserInput { items: input }).await?;

    // Bridge events so we can observe completion and shut down automatically.
    let (tx_bridge, rx_bridge) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY);
    let ops_tx = io.tx_sub.clone();
    let io_for_bridge = io;
    tokio::spawn(async move {
        while let Ok(event) = io_for_bridge.next_event().await {
            let should_shutdown = matches!(
                event.msg,
                EventMsg::TaskComplete(_) | EventMsg::TurnAborted(_)
            );
            let _ = tx_bridge.send(event).await;
            if should_shutdown {
                let _ = ops_tx
                    .send(Submission {
                        id: "shutdown".to_string(),
                        op: Op::Shutdown {},
                    })
                    .await;
                child_cancel.cancel();
                break;
            }
        }
    });

    // For one-shot usage, return a closed `tx_sub` so callers cannot submit
    // additional ops after the initial request. Create a channel and drop the
    // receiver to close it immediately.
    let (tx_closed, rx_closed) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY);
    drop(rx_closed);

    Ok(Codex {
        next_id: AtomicU64::new(0),
        rx_event: rx_bridge,
        tx_sub: tx_closed,
    })
}

async fn forward_events(
    codex: Arc<Codex>,
    tx_sub: Sender<Event>,
    parent_session: Arc<Session>,
    parent_ctx: Arc<TurnContext>,
    cancel_token: CancellationToken,
) {
    while let Ok(event) = codex.next_event().await {
        match event {
            // ignore all legacy delta events
            Event {
                id: _,
                msg: EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_),
            } => continue,
            Event {
                id: _,
                msg: EventMsg::SessionConfigured(_),
            } => continue,
            Event {
                id,
                msg: EventMsg::ExecApprovalRequest(event),
            } => {
                // Initiate approval via parent session; do not surface to consumer.
                handle_exec_approval(
                    &codex,
                    id,
                    &parent_session,
                    &parent_ctx,
                    event,
                    &cancel_token,
                )
                .await;
            }
            Event {
                id,
                msg: EventMsg::ApplyPatchApprovalRequest(event),
            } => {
                handle_patch_approval(
                    &codex,
                    id,
                    &parent_session,
                    &parent_ctx,
                    event,
                    &cancel_token,
                )
                .await;
            }
            other => {
                let _ = tx_sub.send(other).await;
            }
        }
    }
}

/// Forward ops from a caller to a sub-agent, respecting cancellation.
async fn forward_ops(
    codex: Arc<Codex>,
    rx_ops: Receiver<Submission>,
    cancel_token_ops: CancellationToken,
) {
    loop {
        let op: Op = match rx_ops.recv().or_cancel(&cancel_token_ops).await {
            Ok(Ok(Submission { id: _, op })) => op,
            Ok(Err(_)) | Err(_) => break,
        };
        let _ = codex.submit(op).await;
    }
}

/// Handle an ExecApprovalRequest by consulting the parent session and replying.
async fn handle_exec_approval(
    codex: &Codex,
    id: String,
    parent_session: &Session,
    parent_ctx: &TurnContext,
    event: ExecApprovalRequestEvent,
    cancel_token: &CancellationToken,
) {
    // Race approval with cancellation and timeout to avoid hangs.
    let approval_fut = parent_session.request_command_approval(
        parent_ctx,
        parent_ctx.sub_id.clone(),
        event.command,
        event.cwd,
        event.reason,
        event.risk,
    );
    let decision = await_approval_with_cancel(
        approval_fut,
        parent_session,
        &parent_ctx.sub_id,
        cancel_token,
    )
    .await;

    let _ = codex.submit(Op::ExecApproval { id, decision }).await;
}

/// Handle an ApplyPatchApprovalRequest by consulting the parent session and replying.
async fn handle_patch_approval(
    codex: &Codex,
    id: String,
    parent_session: &Session,
    parent_ctx: &TurnContext,
    event: ApplyPatchApprovalRequestEvent,
    cancel_token: &CancellationToken,
) {
    let decision_rx = parent_session
        .request_patch_approval(
            parent_ctx,
            parent_ctx.sub_id.clone(),
            event.changes,
            event.reason,
            event.grant_root,
        )
        .await;
    let decision = await_approval_with_cancel(
        async move { decision_rx.await.unwrap_or_default() },
        parent_session,
        &parent_ctx.sub_id,
        cancel_token,
    )
    .await;
    let _ = codex.submit(Op::PatchApproval { id, decision }).await;
}

/// Await an approval decision, aborting on cancellation.
async fn await_approval_with_cancel<F>(
    fut: F,
    parent_session: &Session,
    sub_id: &str,
    cancel_token: &CancellationToken,
) -> codex_protocol::protocol::ReviewDecision
where
    F: core::future::Future<Output = codex_protocol::protocol::ReviewDecision>,
{
    tokio::select! {
        biased;
        _ = cancel_token.cancelled() => {
            parent_session
                .notify_approval(sub_id, codex_protocol::protocol::ReviewDecision::Abort)
                .await;
            codex_protocol::protocol::ReviewDecision::Abort
        }
        decision = fut => {
            decision
        }
    }
}
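
`await_approval_with_cancel` above is an instance of a general pattern: race a future against a `CancellationToken`, with `biased;` so cancellation is checked first and a deterministic fallback value is returned. A self-contained sketch of the same shape (the function name here is illustrative, not part of the crate):

    use tokio_util::sync::CancellationToken;

    /// Resolve `fut`, but fall back to `on_cancel` if the token fires first.
    async fn race_with_cancel<T, F>(fut: F, token: &CancellationToken, on_cancel: T) -> T
    where
        F: std::future::Future<Output = T>,
    {
        tokio::select! {
            // `biased` makes select! poll branches top-down, so a token that is
            // already cancelled wins even if `fut` is also ready.
            biased;
            _ = token.cancelled() => on_cancel,
            value = fut => value,
        }
    }
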
954
codex-rs/core/src/config/edit.rs
Normal file
954
codex-rs/core/src/config/edit.rs
Normal file
@@ -0,0 +1,954 @@
|
||||
use crate::config::CONFIG_TOML_FILE;
|
||||
use crate::config::types::McpServerConfig;
|
||||
use crate::config::types::Notice;
|
||||
use anyhow::Context;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tempfile::NamedTempFile;
|
||||
use tokio::task;
|
||||
use toml_edit::DocumentMut;
|
||||
use toml_edit::Item as TomlItem;
|
||||
use toml_edit::Table as TomlTable;
|
||||
use toml_edit::value;
|
||||
|
||||
/// Discrete config mutations supported by the persistence engine.
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ConfigEdit {
|
||||
/// Update the active (or default) model selection and optional reasoning effort.
|
||||
SetModel {
|
||||
model: Option<String>,
|
||||
effort: Option<ReasoningEffort>,
|
||||
},
|
||||
/// Toggle the acknowledgement flag under `[notice]`.
|
||||
SetNoticeHideFullAccessWarning(bool),
|
||||
/// Toggle the Windows onboarding acknowledgement flag.
|
||||
SetWindowsWslSetupAcknowledged(bool),
|
||||
/// Replace the entire `[mcp_servers]` table.
|
||||
ReplaceMcpServers(BTreeMap<String, McpServerConfig>),
|
||||
/// Set trust_level = "trusted" under `[projects."<path>"]`,
|
||||
/// migrating inline tables to explicit tables.
|
||||
SetProjectTrusted(PathBuf),
|
||||
/// Set the value stored at the exact dotted path.
|
||||
SetPath {
|
||||
segments: Vec<String>,
|
||||
value: TomlItem,
|
||||
},
|
||||
/// Remove the value stored at the exact dotted path.
|
||||
ClearPath { segments: Vec<String> },
|
||||
}
|
||||
|
||||
// TODO(jif) move to a dedicated file
|
||||
mod document_helpers {
|
||||
use crate::config::types::McpServerConfig;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use toml_edit::Array as TomlArray;
|
||||
use toml_edit::InlineTable;
|
||||
use toml_edit::Item as TomlItem;
|
||||
use toml_edit::Table as TomlTable;
|
||||
use toml_edit::value;
|
||||
|
||||
pub(super) fn ensure_table_for_write(item: &mut TomlItem) -> Option<&mut TomlTable> {
|
||||
match item {
|
||||
TomlItem::Table(table) => Some(table),
|
||||
TomlItem::Value(value) => {
|
||||
if let Some(inline) = value.as_inline_table() {
|
||||
*item = TomlItem::Table(table_from_inline(inline));
|
||||
item.as_table_mut()
|
||||
} else {
|
||||
*item = TomlItem::Table(new_implicit_table());
|
||||
item.as_table_mut()
|
||||
}
|
||||
}
|
||||
TomlItem::None => {
|
||||
*item = TomlItem::Table(new_implicit_table());
|
||||
item.as_table_mut()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn ensure_table_for_read(item: &mut TomlItem) -> Option<&mut TomlTable> {
|
||||
match item {
|
||||
TomlItem::Table(table) => Some(table),
|
||||
TomlItem::Value(value) => {
|
||||
let inline = value.as_inline_table()?;
|
||||
*item = TomlItem::Table(table_from_inline(inline));
|
||||
item.as_table_mut()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn serialize_mcp_server(config: &McpServerConfig) -> TomlItem {
|
||||
let mut entry = TomlTable::new();
|
||||
entry.set_implicit(false);
|
||||
|
||||
match &config.transport {
|
||||
McpServerTransportConfig::Stdio {
|
||||
command,
|
||||
args,
|
||||
env,
|
||||
env_vars,
|
||||
cwd,
|
||||
} => {
|
||||
entry["command"] = value(command.clone());
|
||||
if !args.is_empty() {
|
||||
entry["args"] = array_from_iter(args.iter().cloned());
|
||||
}
|
||||
if let Some(env) = env
|
||||
&& !env.is_empty()
|
||||
{
|
||||
entry["env"] = table_from_pairs(env.iter());
|
||||
}
|
||||
if !env_vars.is_empty() {
|
||||
entry["env_vars"] = array_from_iter(env_vars.iter().cloned());
|
||||
}
|
||||
if let Some(cwd) = cwd {
|
||||
entry["cwd"] = value(cwd.to_string_lossy().to_string());
|
||||
}
|
||||
}
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
http_headers,
|
||||
env_http_headers,
|
||||
} => {
|
||||
entry["url"] = value(url.clone());
|
||||
if let Some(env_var) = bearer_token_env_var {
|
||||
entry["bearer_token_env_var"] = value(env_var.clone());
|
||||
}
|
||||
if let Some(headers) = http_headers
|
||||
&& !headers.is_empty()
|
||||
{
|
||||
entry["http_headers"] = table_from_pairs(headers.iter());
|
||||
}
|
||||
if let Some(headers) = env_http_headers
|
||||
&& !headers.is_empty()
|
||||
{
|
||||
entry["env_http_headers"] = table_from_pairs(headers.iter());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !config.enabled {
|
||||
entry["enabled"] = value(false);
|
||||
}
|
||||
if let Some(timeout) = config.startup_timeout_sec {
|
||||
entry["startup_timeout_sec"] = value(timeout.as_secs_f64());
|
||||
}
|
||||
if let Some(timeout) = config.tool_timeout_sec {
|
||||
entry["tool_timeout_sec"] = value(timeout.as_secs_f64());
|
||||
}
|
||||
if let Some(enabled_tools) = &config.enabled_tools
|
||||
&& !enabled_tools.is_empty()
|
||||
{
|
||||
entry["enabled_tools"] = array_from_iter(enabled_tools.iter().cloned());
|
||||
}
|
||||
if let Some(disabled_tools) = &config.disabled_tools
|
||||
&& !disabled_tools.is_empty()
|
||||
{
|
||||
entry["disabled_tools"] = array_from_iter(disabled_tools.iter().cloned());
|
||||
}
|
||||
|
||||
TomlItem::Table(entry)
|
||||
}
|
||||
|
||||
fn table_from_inline(inline: &InlineTable) -> TomlTable {
|
||||
let mut table = new_implicit_table();
|
||||
for (key, value) in inline.iter() {
|
||||
let mut value = value.clone();
|
||||
let decor = value.decor_mut();
|
||||
decor.set_suffix("");
|
||||
table.insert(key, TomlItem::Value(value));
|
||||
}
|
||||
table
|
||||
}
|
||||
|
||||
pub(super) fn new_implicit_table() -> TomlTable {
|
||||
let mut table = TomlTable::new();
|
||||
table.set_implicit(true);
|
||||
table
|
||||
}
|
||||
|
||||
fn array_from_iter<I>(iter: I) -> TomlItem
|
||||
where
|
||||
I: Iterator<Item = String>,
|
||||
{
|
||||
let mut array = TomlArray::new();
|
||||
for value in iter {
|
||||
array.push(value);
|
||||
}
|
||||
TomlItem::Value(array.into())
|
||||
}
|
||||
|
||||
fn table_from_pairs<'a, I>(pairs: I) -> TomlItem
|
||||
where
|
||||
I: IntoIterator<Item = (&'a String, &'a String)>,
|
||||
{
|
||||
let mut entries: Vec<_> = pairs.into_iter().collect();
|
||||
entries.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
let mut table = TomlTable::new();
|
||||
table.set_implicit(false);
|
||||
for (key, val) in entries {
|
||||
table.insert(key, value(val.clone()));
|
||||
}
|
||||
TomlItem::Table(table)
|
||||
}
|
||||
}
|
||||
|
||||
struct ConfigDocument {
|
||||
doc: DocumentMut,
|
||||
profile: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
enum Scope {
|
||||
Global,
|
||||
Profile,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
enum TraversalMode {
|
||||
Create,
|
||||
Existing,
|
||||
}

impl ConfigDocument {
    fn new(doc: DocumentMut, profile: Option<String>) -> Self {
        Self { doc, profile }
    }

    fn apply(&mut self, edit: &ConfigEdit) -> anyhow::Result<bool> {
        match edit {
            ConfigEdit::SetModel { model, effort } => Ok({
                let mut mutated = false;
                mutated |= self.write_profile_value(
                    &["model"],
                    model.as_ref().map(|model_value| value(model_value.clone())),
                );
                mutated |= self.write_profile_value(
                    &["model_reasoning_effort"],
                    effort.map(|effort| value(effort.to_string())),
                );
                mutated
            }),
            ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged) => Ok(self.write_value(
                Scope::Global,
                &[Notice::TABLE_KEY, "hide_full_access_warning"],
                value(*acknowledged),
            )),
            ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged) => Ok(self.write_value(
                Scope::Global,
                &["windows_wsl_setup_acknowledged"],
                value(*acknowledged),
            )),
            ConfigEdit::ReplaceMcpServers(servers) => Ok(self.replace_mcp_servers(servers)),
            ConfigEdit::SetPath { segments, value } => Ok(self.insert(segments, value.clone())),
            ConfigEdit::ClearPath { segments } => Ok(self.clear_owned(segments)),
            ConfigEdit::SetProjectTrusted(project_path) => {
                // Delegate to the existing, tested logic in config.rs to
                // ensure tables are explicit and migration is preserved.
                crate::config::set_project_trusted_inner(&mut self.doc, project_path.as_path())?;
                Ok(true)
            }
        }
    }

    fn write_profile_value(&mut self, segments: &[&str], value: Option<TomlItem>) -> bool {
        match value {
            Some(item) => self.write_value(Scope::Profile, segments, item),
            None => self.clear(Scope::Profile, segments),
        }
    }

    fn write_value(&mut self, scope: Scope, segments: &[&str], value: TomlItem) -> bool {
        let resolved = self.scoped_segments(scope, segments);
        self.insert(&resolved, value)
    }

    fn clear(&mut self, scope: Scope, segments: &[&str]) -> bool {
        let resolved = self.scoped_segments(scope, segments);
        self.remove(&resolved)
    }

    fn clear_owned(&mut self, segments: &[String]) -> bool {
        self.remove(segments)
    }

    fn replace_mcp_servers(&mut self, servers: &BTreeMap<String, McpServerConfig>) -> bool {
        if servers.is_empty() {
            return self.clear(Scope::Global, &["mcp_servers"]);
        }

        let mut table = TomlTable::new();
        table.set_implicit(true);

        for (name, config) in servers {
            table.insert(name, document_helpers::serialize_mcp_server(config));
        }

        let item = TomlItem::Table(table);
        self.write_value(Scope::Global, &["mcp_servers"], item)
    }

    fn scoped_segments(&self, scope: Scope, segments: &[&str]) -> Vec<String> {
        let resolved: Vec<String> = segments
            .iter()
            .map(|segment| (*segment).to_string())
            .collect();

        if matches!(scope, Scope::Profile)
            && resolved.first().is_none_or(|segment| segment != "profiles")
            && let Some(profile) = self.profile.as_deref()
        {
            let mut scoped = Vec::with_capacity(resolved.len() + 2);
            scoped.push("profiles".to_string());
            scoped.push(profile.to_string());
            scoped.extend(resolved);
            return scoped;
        }

        resolved
    }

    fn insert(&mut self, segments: &[String], value: TomlItem) -> bool {
        let Some((last, parents)) = segments.split_last() else {
            return false;
        };

        let Some(parent) = self.descend(parents, TraversalMode::Create) else {
            return false;
        };

        parent[last] = value;
        true
    }

    fn remove(&mut self, segments: &[String]) -> bool {
        let Some((last, parents)) = segments.split_last() else {
            return false;
        };

        let Some(parent) = self.descend(parents, TraversalMode::Existing) else {
            return false;
        };

        parent.remove(last).is_some()
    }

    fn descend(&mut self, segments: &[String], mode: TraversalMode) -> Option<&mut TomlTable> {
        let mut current = self.doc.as_table_mut();

        for segment in segments {
            match mode {
                TraversalMode::Create => {
                    if !current.contains_key(segment.as_str()) {
                        current.insert(
                            segment.as_str(),
                            TomlItem::Table(document_helpers::new_implicit_table()),
                        );
                    }

                    let item = current.get_mut(segment.as_str())?;
                    current = document_helpers::ensure_table_for_write(item)?;
                }
                TraversalMode::Existing => {
                    let item = current.get_mut(segment.as_str())?;
                    current = document_helpers::ensure_table_for_read(item)?;
                }
            }
        }

        Some(current)
    }
}

/// Persist edits using a blocking strategy.
pub fn apply_blocking(
    codex_home: &Path,
    profile: Option<&str>,
    edits: &[ConfigEdit],
) -> anyhow::Result<()> {
    if edits.is_empty() {
        return Ok(());
    }

    let config_path = codex_home.join(CONFIG_TOML_FILE);
    let serialized = match std::fs::read_to_string(&config_path) {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => String::new(),
        Err(err) => return Err(err.into()),
    };

    let doc = if serialized.is_empty() {
        DocumentMut::new()
    } else {
        serialized.parse::<DocumentMut>()?
    };

    let profile = profile.map(ToOwned::to_owned).or_else(|| {
        doc.get("profile")
            .and_then(|item| item.as_str())
            .map(ToOwned::to_owned)
    });

    let mut document = ConfigDocument::new(doc, profile);
    let mut mutated = false;

    for edit in edits {
        mutated |= document.apply(edit)?;
    }

    if !mutated {
        return Ok(());
    }

    std::fs::create_dir_all(codex_home).with_context(|| {
        format!(
            "failed to create Codex home directory at {}",
            codex_home.display()
        )
    })?;

    let tmp = NamedTempFile::new_in(codex_home)?;
    std::fs::write(tmp.path(), document.doc.to_string()).with_context(|| {
        format!(
            "failed to write temporary config file at {}",
            tmp.path().display()
        )
    })?;
    tmp.persist(config_path)?;

    Ok(())
}
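
// Illustrative only: driving `apply_blocking` directly with a single edit, as the
// first test below does. The `/tmp/codex-home` path is a hypothetical example.
//
//     use std::path::Path;
//
//     apply_blocking(
//         Path::new("/tmp/codex-home"),
//         None,
//         &[ConfigEdit::SetModel {
//             model: Some("gpt-5-codex".to_string()),
//             effort: Some(ReasoningEffort::High),
//         }],
//     )?;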

/// Persist edits asynchronously by offloading the blocking writer.
pub async fn apply(
    codex_home: &Path,
    profile: Option<&str>,
    edits: Vec<ConfigEdit>,
) -> anyhow::Result<()> {
    let codex_home = codex_home.to_path_buf();
    let profile = profile.map(ToOwned::to_owned);
    task::spawn_blocking(move || apply_blocking(&codex_home, profile.as_deref(), &edits))
        .await
        .context("config persistence task panicked")?
}

/// Fluent builder to batch config edits and apply them atomically.
#[derive(Default)]
pub struct ConfigEditsBuilder {
    codex_home: PathBuf,
    profile: Option<String>,
    edits: Vec<ConfigEdit>,
}

impl ConfigEditsBuilder {
    pub fn new(codex_home: &Path) -> Self {
        Self {
            codex_home: codex_home.to_path_buf(),
            profile: None,
            edits: Vec::new(),
        }
    }

    pub fn with_profile(mut self, profile: Option<&str>) -> Self {
        self.profile = profile.map(ToOwned::to_owned);
        self
    }

    pub fn set_model(mut self, model: Option<&str>, effort: Option<ReasoningEffort>) -> Self {
        self.edits.push(ConfigEdit::SetModel {
            model: model.map(ToOwned::to_owned),
            effort,
        });
        self
    }

    pub fn set_hide_full_access_warning(mut self, acknowledged: bool) -> Self {
        self.edits
            .push(ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged));
        self
    }

    pub fn set_windows_wsl_setup_acknowledged(mut self, acknowledged: bool) -> Self {
        self.edits
            .push(ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged));
        self
    }

    pub fn replace_mcp_servers(mut self, servers: &BTreeMap<String, McpServerConfig>) -> Self {
        self.edits
            .push(ConfigEdit::ReplaceMcpServers(servers.clone()));
        self
    }

    pub fn set_project_trusted<P: Into<PathBuf>>(mut self, project_path: P) -> Self {
        self.edits
            .push(ConfigEdit::SetProjectTrusted(project_path.into()));
        self
    }

    /// Apply edits on a blocking thread.
    pub fn apply_blocking(self) -> anyhow::Result<()> {
        apply_blocking(&self.codex_home, self.profile.as_deref(), &self.edits)
    }

    /// Apply edits asynchronously via a blocking offload.
    pub async fn apply(self) -> anyhow::Result<()> {
        task::spawn_blocking(move || {
            apply_blocking(&self.codex_home, self.profile.as_deref(), &self.edits)
        })
        .await
        .context("config persistence task panicked")?
    }
}
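
// Illustrative only: batching several edits through the builder so they land in
// one atomic write (the async builder test below exercises the same shape).
//
//     ConfigEditsBuilder::new(codex_home)
//         .with_profile(Some("team"))
//         .set_model(Some("gpt-5-codex"), Some(ReasoningEffort::High))
//         .set_hide_full_access_warning(true)
//         .apply()
//         .await?;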

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::types::McpServerTransportConfig;
    use codex_protocol::config_types::ReasoningEffort;
    use pretty_assertions::assert_eq;
    use tempfile::tempdir;
    use tokio::runtime::Builder;
    use toml::Value as TomlValue;

    #[test]
    fn blocking_set_model_top_level() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetModel {
                model: Some("gpt-5-codex".to_string()),
                effort: Some(ReasoningEffort::High),
            }],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"model = "gpt-5-codex"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_set_model_preserves_inline_table_contents() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        // Seed with inline tables for profiles to simulate common user config.
        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            r#"profile = "fast"

profiles = { fast = { model = "gpt-4o", sandbox_mode = "strict" } }
"#,
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetModel {
                model: Some("o4-mini".to_string()),
                effort: None,
            }],
        )
        .expect("persist");

        let raw = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let value: TomlValue = toml::from_str(&raw).expect("parse config");

        // Ensure sandbox_mode is preserved under profiles.fast and model updated.
        let profiles_tbl = value
            .get("profiles")
            .and_then(|v| v.as_table())
            .expect("profiles table");
        let fast_tbl = profiles_tbl
            .get("fast")
            .and_then(|v| v.as_table())
            .expect("fast table");
        assert_eq!(
            fast_tbl.get("sandbox_mode").and_then(|v| v.as_str()),
            Some("strict")
        );
        assert_eq!(
            fast_tbl.get("model").and_then(|v| v.as_str()),
            Some("o4-mini")
        );
    }

    #[test]
    fn blocking_clear_model_removes_inline_table_entry() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            r#"profile = "fast"

profiles = { fast = { model = "gpt-4o", sandbox_mode = "strict" } }
"#,
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetModel {
                model: None,
                effort: Some(ReasoningEffort::High),
            }],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"profile = "fast"

[profiles.fast]
sandbox_mode = "strict"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_set_model_scopes_to_active_profile() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();
        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            r#"profile = "team"

[profiles.team]
model_reasoning_effort = "low"
"#,
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetModel {
                model: Some("o5-preview".to_string()),
                effort: Some(ReasoningEffort::Minimal),
            }],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"profile = "team"

[profiles.team]
model_reasoning_effort = "minimal"
model = "o5-preview"
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_set_model_with_explicit_profile() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();
        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            r#"[profiles."team a"]
model = "gpt-5-codex"
"#,
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            Some("team a"),
            &[ConfigEdit::SetModel {
                model: Some("o4-mini".to_string()),
                effort: None,
            }],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"[profiles."team a"]
model = "o4-mini"
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_set_hide_full_access_warning_preserves_table() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();
        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            r#"# Global comment

[notice]
# keep me
existing = "value"
"#,
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetNoticeHideFullAccessWarning(true)],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"# Global comment

[notice]
# keep me
existing = "value"
hide_full_access_warning = true
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_replace_mcp_servers_round_trips() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        let mut servers = BTreeMap::new();
        servers.insert(
            "stdio".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::Stdio {
                    command: "cmd".to_string(),
                    args: vec!["--flag".to_string()],
                    env: Some(
                        [
                            ("B".to_string(), "2".to_string()),
                            ("A".to_string(), "1".to_string()),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    env_vars: vec!["FOO".to_string()],
                    cwd: None,
                },
                enabled: true,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: Some(vec!["one".to_string(), "two".to_string()]),
                disabled_tools: None,
            },
        );

        servers.insert(
            "http".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url: "https://example.com".to_string(),
                    bearer_token_env_var: Some("TOKEN".to_string()),
                    http_headers: Some(
                        [("Z-Header".to_string(), "z".to_string())]
                            .into_iter()
                            .collect(),
                    ),
                    env_http_headers: None,
                },
                enabled: false,
                startup_timeout_sec: Some(std::time::Duration::from_secs(5)),
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: Some(vec!["forbidden".to_string()]),
            },
        );

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::ReplaceMcpServers(servers.clone())],
        )
        .expect("persist");

        let raw = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = "\
[mcp_servers.http]
url = \"https://example.com\"
bearer_token_env_var = \"TOKEN\"
enabled = false
startup_timeout_sec = 5.0
disabled_tools = [\"forbidden\"]

[mcp_servers.http.http_headers]
Z-Header = \"z\"

[mcp_servers.stdio]
command = \"cmd\"
args = [\"--flag\"]
env_vars = [\"FOO\"]
enabled_tools = [\"one\", \"two\"]

[mcp_servers.stdio.env]
A = \"1\"
B = \"2\"
";
        assert_eq!(raw, expected);
    }

    #[test]
    fn blocking_clear_path_noop_when_missing() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::ClearPath {
                segments: vec!["missing".to_string()],
            }],
        )
        .expect("apply");

        assert!(
            !codex_home.join(CONFIG_TOML_FILE).exists(),
            "config.toml should not be created on noop"
        );
    }

    #[test]
    fn blocking_set_path_updates_notifications() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        let item = value(false);
        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::SetPath {
                segments: vec!["tui".to_string(), "notifications".to_string()],
                value: item,
            }],
        )
        .expect("apply");

        let raw = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let config: TomlValue = toml::from_str(&raw).expect("parse config");
        let notifications = config
            .get("tui")
            .and_then(|item| item.as_table())
            .and_then(|tbl| tbl.get("notifications"))
            .and_then(toml::Value::as_bool);
        assert_eq!(notifications, Some(false));
    }

    #[tokio::test]
    async fn async_builder_set_model_persists() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path().to_path_buf();

        ConfigEditsBuilder::new(&codex_home)
            .set_model(Some("gpt-5-codex"), Some(ReasoningEffort::High))
            .apply()
            .await
            .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let expected = r#"model = "gpt-5-codex"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    #[test]
    fn blocking_builder_set_model_round_trips_back_and_forth() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();

        let initial_expected = r#"model = "o4-mini"
model_reasoning_effort = "low"
"#;
        ConfigEditsBuilder::new(codex_home)
            .set_model(Some("o4-mini"), Some(ReasoningEffort::Low))
            .apply_blocking()
            .expect("persist initial");
        let mut contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        assert_eq!(contents, initial_expected);

        let updated_expected = r#"model = "gpt-5-codex"
model_reasoning_effort = "high"
"#;
        ConfigEditsBuilder::new(codex_home)
            .set_model(Some("gpt-5-codex"), Some(ReasoningEffort::High))
            .apply_blocking()
            .expect("persist update");
        contents = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        assert_eq!(contents, updated_expected);

        ConfigEditsBuilder::new(codex_home)
            .set_model(Some("o4-mini"), Some(ReasoningEffort::Low))
            .apply_blocking()
            .expect("persist revert");
        contents = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        assert_eq!(contents, initial_expected);
    }

    #[test]
    fn blocking_set_asynchronous_helpers_available() {
        let rt = Builder::new_current_thread()
            .enable_all()
            .build()
            .expect("runtime");
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path().to_path_buf();

        rt.block_on(async {
            ConfigEditsBuilder::new(&codex_home)
                .set_hide_full_access_warning(true)
                .apply()
                .await
                .expect("persist");
        });

        let raw = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        let notice = toml::from_str::<TomlValue>(&raw)
            .expect("parse config")
            .get("notice")
            .and_then(|item| item.as_table())
            .and_then(|tbl| tbl.get("hide_full_access_warning"))
            .and_then(toml::Value::as_bool);
        assert_eq!(notice, Some(true));
    }

    #[test]
    fn replace_mcp_servers_blocking_clears_table_when_empty() {
        let tmp = tempdir().expect("tmpdir");
        let codex_home = tmp.path();
        std::fs::write(
            codex_home.join(CONFIG_TOML_FILE),
            "[mcp_servers]\nfoo = { command = \"cmd\" }\n",
        )
        .expect("seed");

        apply_blocking(
            codex_home,
            None,
            &[ConfigEdit::ReplaceMcpServers(BTreeMap::new())],
        )
        .expect("persist");

        let contents =
            std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
        assert!(!contents.contains("mcp_servers"));
    }
}
File diff suppressed because it is too large
@@ -4,6 +4,7 @@ use std::path::PathBuf;
use crate::protocol::AskForApproval;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;

/// Collection of common configuration options that a user can define as a unit
@@ -15,17 +16,19 @@ pub struct ConfigProfile {
    /// [`ModelProviderInfo`] to use.
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
    pub experimental_instructions_file: Option<PathBuf>,
    pub experimental_compact_prompt_file: Option<PathBuf>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
    pub experimental_sandbox_command_assessment: Option<bool>,
    pub tools_web_search: Option<bool>,
    pub tools_view_image: Option<bool>,
    /// Optional feature toggles scoped to this profile.
@@ -361,7 +361,7 @@ pub struct Notice {
}

impl Notice {
    /// used by set_hide_full_access_warning until we refactor config updates
    /// referenced by config_edit helpers when writing notice flags
    pub(crate) const TABLE_KEY: &'static str = "notice";
}

@@ -1,748 +0,0 @@
use crate::config::CONFIG_TOML_FILE;
use anyhow::Result;
use std::path::Path;
use tempfile::NamedTempFile;
use toml_edit::DocumentMut;

pub const CONFIG_KEY_MODEL: &str = "model";
pub const CONFIG_KEY_EFFORT: &str = "model_reasoning_effort";

#[derive(Copy, Clone)]
enum NoneBehavior {
    Skip,
    Remove,
}
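
// Note on the two behaviors as the functions below use them: `Skip` leaves existing
// keys untouched when an override's value is `None`, while `Remove` treats `None` as
// a request to delete the key (see `persist_overrides_and_clear_if_none`).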

/// Persist overrides into `config.toml` using explicit key segments per
/// override. This avoids ambiguity with keys that contain dots or spaces.
pub async fn persist_overrides(
    codex_home: &Path,
    profile: Option<&str>,
    overrides: &[(&[&str], &str)],
) -> Result<()> {
    let with_options: Vec<(&[&str], Option<&str>)> = overrides
        .iter()
        .map(|(segments, value)| (*segments, Some(*value)))
        .collect();

    persist_overrides_with_behavior(codex_home, profile, &with_options, NoneBehavior::Skip).await
}
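
// Illustrative only: writing two segmented overrides in one call (the first test
// below exercises exactly this shape).
//
//     persist_overrides(
//         codex_home,
//         None,
//         &[(&[CONFIG_KEY_MODEL], "gpt-5-codex"), (&[CONFIG_KEY_EFFORT], "high")],
//     )
//     .await?;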

/// Persist overrides where values may be optional. Any entries with `None`
/// values are skipped. If all values are `None`, this becomes a no-op and
/// returns `Ok(())` without touching the file.
pub async fn persist_non_null_overrides(
    codex_home: &Path,
    profile: Option<&str>,
    overrides: &[(&[&str], Option<&str>)],
) -> Result<()> {
    persist_overrides_with_behavior(codex_home, profile, overrides, NoneBehavior::Skip).await
}

/// Persist overrides where `None` values clear any existing values from the
/// configuration file.
pub async fn persist_overrides_and_clear_if_none(
    codex_home: &Path,
    profile: Option<&str>,
    overrides: &[(&[&str], Option<&str>)],
) -> Result<()> {
    persist_overrides_with_behavior(codex_home, profile, overrides, NoneBehavior::Remove).await
}

/// Apply a single override onto a `toml_edit` document while preserving
/// existing formatting/comments.
/// The key is expressed as explicit segments to correctly handle keys that
/// contain dots or spaces.
fn apply_toml_edit_override_segments(
    doc: &mut DocumentMut,
    segments: &[&str],
    value: toml_edit::Item,
) {
    use toml_edit::Item;

    if segments.is_empty() {
        return;
    }

    let mut current = doc.as_table_mut();
    for seg in &segments[..segments.len() - 1] {
        if !current.contains_key(seg) {
            current[*seg] = Item::Table(toml_edit::Table::new());
            if let Some(t) = current[*seg].as_table_mut() {
                t.set_implicit(true);
            }
        }

        let maybe_item = current.get_mut(seg);
        let Some(item) = maybe_item else { return };

        if !item.is_table() {
            *item = Item::Table(toml_edit::Table::new());
            if let Some(t) = item.as_table_mut() {
                t.set_implicit(true);
            }
        }

        let Some(tbl) = item.as_table_mut() else {
            return;
        };
        current = tbl;
    }

    let last = segments[segments.len() - 1];
    current[last] = value;
}
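
// Illustrative only: how explicit segments map onto TOML paths, including profile
// names that contain dots or spaces (mirroring the tests below).
//
//     ["model"]                          -> model = "..."            (top level)
//     ["profiles", "my.team name", "model"]
//                                        -> [profiles."my.team name"]
//                                           model = "..."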

async fn persist_overrides_with_behavior(
    codex_home: &Path,
    profile: Option<&str>,
    overrides: &[(&[&str], Option<&str>)],
    none_behavior: NoneBehavior,
) -> Result<()> {
    if overrides.is_empty() {
        return Ok(());
    }

    let should_skip = match none_behavior {
        NoneBehavior::Skip => overrides.iter().all(|(_, value)| value.is_none()),
        NoneBehavior::Remove => false,
    };

    if should_skip {
        return Ok(());
    }

    let config_path = codex_home.join(CONFIG_TOML_FILE);

    let read_result = tokio::fs::read_to_string(&config_path).await;
    let mut doc = match read_result {
        Ok(contents) => contents.parse::<DocumentMut>()?,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            if overrides
                .iter()
                .all(|(_, value)| value.is_none() && matches!(none_behavior, NoneBehavior::Remove))
            {
                return Ok(());
            }

            tokio::fs::create_dir_all(codex_home).await?;
            DocumentMut::new()
        }
        Err(e) => return Err(e.into()),
    };

    let effective_profile = if let Some(p) = profile {
        Some(p.to_owned())
    } else {
        doc.get("profile")
            .and_then(|i| i.as_str())
            .map(str::to_string)
    };

    let mut mutated = false;

    for (segments, value) in overrides.iter().copied() {
        let mut seg_buf: Vec<&str> = Vec::new();
        let segments_to_apply: &[&str];

        if let Some(ref name) = effective_profile {
            if segments.first().copied() == Some("profiles") {
                segments_to_apply = segments;
            } else {
                seg_buf.reserve(2 + segments.len());
                seg_buf.push("profiles");
                seg_buf.push(name.as_str());
                seg_buf.extend_from_slice(segments);
                segments_to_apply = seg_buf.as_slice();
            }
        } else {
            segments_to_apply = segments;
        }

        match value {
            Some(v) => {
                let item_value = toml_edit::value(v);
                apply_toml_edit_override_segments(&mut doc, segments_to_apply, item_value);
                mutated = true;
            }
            None => {
                if matches!(none_behavior, NoneBehavior::Remove)
                    && remove_toml_edit_segments(&mut doc, segments_to_apply)
                {
                    mutated = true;
                }
            }
        }
    }

    if !mutated {
        return Ok(());
    }

    let tmp_file = NamedTempFile::new_in(codex_home)?;
    tokio::fs::write(tmp_file.path(), doc.to_string()).await?;
    tmp_file.persist(config_path)?;

    Ok(())
}

fn remove_toml_edit_segments(doc: &mut DocumentMut, segments: &[&str]) -> bool {
    use toml_edit::Item;

    if segments.is_empty() {
        return false;
    }

    let mut current = doc.as_table_mut();
    for seg in &segments[..segments.len() - 1] {
        let Some(item) = current.get_mut(seg) else {
            return false;
        };

        match item {
            Item::Table(table) => {
                current = table;
            }
            _ => {
                return false;
            }
        }
    }

    current.remove(segments[segments.len() - 1]).is_some()
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use tempfile::tempdir;

    /// Verifies model and effort are written at top level when no profile is set.
    #[tokio::test]
    async fn set_default_model_and_effort_top_level_when_no_profile() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_overrides(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], "gpt-5-codex"),
                (&[CONFIG_KEY_EFFORT], "high"),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"model = "gpt-5-codex"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies values are written under the active profile when `profile` is set.
    #[tokio::test]
    async fn set_defaults_update_profile_when_profile_set() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed config with a profile selection but without a profiles table.
        let seed = "profile = \"o3\"\n";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], "o3"),
                (&[CONFIG_KEY_EFFORT], "minimal"),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"profile = "o3"

[profiles.o3]
model = "o3"
model_reasoning_effort = "minimal"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies profile names with dots/spaces are preserved via explicit segments.
    #[tokio::test]
    async fn set_defaults_update_profile_with_dot_and_space() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed config with a profile name that contains a dot and a space.
        let seed = "profile = \"my.team name\"\n";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], "o3"),
                (&[CONFIG_KEY_EFFORT], "minimal"),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"profile = "my.team name"

[profiles."my.team name"]
model = "o3"
model_reasoning_effort = "minimal"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies an explicit profile override writes under that profile even without an active profile.
    #[tokio::test]
    async fn set_defaults_update_when_profile_override_supplied() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // No profile key in config.toml.
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), "")
            .await
            .expect("seed write");

        // Persist with an explicit profile override.
        persist_overrides(
            codex_home,
            Some("o3"),
            &[(&[CONFIG_KEY_MODEL], "o3"), (&[CONFIG_KEY_EFFORT], "high")],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"[profiles.o3]
model = "o3"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies nested tables are created as needed when applying overrides.
    #[tokio::test]
    async fn persist_overrides_creates_nested_tables() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_overrides(
            codex_home,
            None,
            &[
                (&["a", "b", "c"], "v"),
                (&["x"], "y"),
                (&["profiles", "p1", CONFIG_KEY_MODEL], "gpt-5-codex"),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"x = "y"

[a.b]
c = "v"

[profiles.p1]
model = "gpt-5-codex"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies a scalar key becomes a table when nested keys are written.
    #[tokio::test]
    async fn persist_overrides_replaces_scalar_with_table() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();
        let seed = "foo = \"bar\"\n";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides(codex_home, None, &[(&["foo", "bar", "baz"], "ok")])
            .await
            .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"[foo.bar]
baz = "ok"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies comments and spacing are preserved when writing under the active profile.
    #[tokio::test]
    async fn set_defaults_preserve_comments() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed a config with comments and spacing we expect to preserve.
        let seed = r#"# Global comment
# Another line

profile = "o3"

# Profile settings
[profiles.o3]
# keep me
existing = "keep"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        // Apply defaults; since a profile is set, they should land under [profiles.o3].
        persist_overrides(
            codex_home,
            None,
            &[(&[CONFIG_KEY_MODEL], "o3"), (&[CONFIG_KEY_EFFORT], "high")],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"# Global comment
# Another line

profile = "o3"

# Profile settings
[profiles.o3]
# keep me
existing = "keep"
model = "o3"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies comments and spacing are preserved when writing at top level.
    #[tokio::test]
    async fn set_defaults_preserve_global_comments() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed a config WITHOUT a profile, containing comments and spacing.
        let seed = r#"# Top-level comments
# should be preserved

existing = "keep"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        // Since there is no profile, the defaults should be written at top level.
        persist_overrides(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], "gpt-5-codex"),
                (&[CONFIG_KEY_EFFORT], "minimal"),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"# Top-level comments
# should be preserved

existing = "keep"
model = "gpt-5-codex"
model_reasoning_effort = "minimal"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies errors on invalid TOML propagate and the file is not clobbered.
    #[tokio::test]
    async fn persist_overrides_errors_on_parse_failure() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Write an intentionally invalid TOML file.
        let invalid = "invalid = [unclosed";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), invalid)
            .await
            .expect("seed write");

        // Attempting to persist should return an error and must not clobber the file.
        let res = persist_overrides(codex_home, None, &[(&["x"], "y")]).await;
        assert!(res.is_err(), "expected parse error to propagate");

        // File should be unchanged.
        let contents = read_config(codex_home).await;
        assert_eq!(contents, invalid);
    }

    /// Verifies changing only the model preserves the existing effort at top level.
    #[tokio::test]
    async fn changing_only_model_preserves_existing_effort_top_level() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed with an effort value only.
        let seed = "model_reasoning_effort = \"minimal\"\n";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        // Change only the model.
        persist_overrides(codex_home, None, &[(&[CONFIG_KEY_MODEL], "o3")])
            .await
            .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"model_reasoning_effort = "minimal"
model = "o3"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies changing only the effort preserves the existing model at top level.
    #[tokio::test]
    async fn changing_only_effort_preserves_existing_model_top_level() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed with a model value only.
        let seed = "model = \"gpt-5-codex\"\n";
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        // Change only the effort.
        persist_overrides(codex_home, None, &[(&[CONFIG_KEY_EFFORT], "high")])
            .await
            .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"model = "gpt-5-codex"
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies changing only the model preserves the existing effort in the active profile.
    #[tokio::test]
    async fn changing_only_model_preserves_effort_in_active_profile() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // Seed with an active profile and an existing effort under that profile.
        let seed = r#"profile = "p1"

[profiles.p1]
model_reasoning_effort = "low"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides(codex_home, None, &[(&[CONFIG_KEY_MODEL], "o4-mini")])
            .await
            .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"profile = "p1"

[profiles.p1]
model_reasoning_effort = "low"
model = "o4-mini"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies changing only the effort preserves the existing model in a profile override.
    #[tokio::test]
    async fn changing_only_effort_preserves_model_in_profile_override() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        // No active profile key; target an explicit override instead.
        let seed = r#"[profiles.team]
model = "gpt-5-codex"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides(
            codex_home,
            Some("team"),
            &[(&[CONFIG_KEY_EFFORT], "minimal")],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"[profiles.team]
model = "gpt-5-codex"
model_reasoning_effort = "minimal"
"#;
        assert_eq!(contents, expected);
    }

    /// Verifies `persist_non_null_overrides` skips `None` entries and writes only present values at top level.
    #[tokio::test]
    async fn persist_non_null_skips_none_top_level() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_non_null_overrides(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], Some("gpt-5-codex")),
                (&[CONFIG_KEY_EFFORT], None),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = "model = \"gpt-5-codex\"\n";
        assert_eq!(contents, expected);
    }

    /// Verifies no-op behavior when all provided overrides are `None` (no file created or modified).
    #[tokio::test]
    async fn persist_non_null_noop_when_all_none() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_non_null_overrides(
            codex_home,
            None,
            &[(&["a"], None), (&["profiles", "p", "x"], None)],
        )
        .await
        .expect("persist");

        // Should not create config.toml on a pure no-op.
        assert!(!codex_home.join(CONFIG_TOML_FILE).exists());
    }

    /// Verifies entries are written under the specified profile and `None` entries are skipped.
    #[tokio::test]
    async fn persist_non_null_respects_profile_override() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_non_null_overrides(
            codex_home,
            Some("team"),
            &[
                (&[CONFIG_KEY_MODEL], Some("o3")),
                (&[CONFIG_KEY_EFFORT], None),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"[profiles.team]
model = "o3"
"#;
        assert_eq!(contents, expected);
    }

    #[tokio::test]
    async fn persist_clear_none_removes_top_level_value() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        let seed = r#"model = "gpt-5-codex"
model_reasoning_effort = "medium"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides_and_clear_if_none(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], None),
                (&[CONFIG_KEY_EFFORT], Some("high")),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = "model_reasoning_effort = \"high\"\n";
        assert_eq!(contents, expected);
    }

    #[tokio::test]
    async fn persist_clear_none_respects_active_profile() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        let seed = r#"profile = "team"

[profiles.team]
model = "gpt-4"
model_reasoning_effort = "minimal"
"#;
        tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), seed)
            .await
            .expect("seed write");

        persist_overrides_and_clear_if_none(
            codex_home,
            None,
            &[
                (&[CONFIG_KEY_MODEL], None),
                (&[CONFIG_KEY_EFFORT], Some("high")),
            ],
        )
        .await
        .expect("persist");

        let contents = read_config(codex_home).await;
        let expected = r#"profile = "team"

[profiles.team]
model_reasoning_effort = "high"
"#;
        assert_eq!(contents, expected);
    }

    #[tokio::test]
    async fn persist_clear_none_noop_when_file_missing() {
        let tmpdir = tempdir().expect("tmp");
        let codex_home = tmpdir.path();

        persist_overrides_and_clear_if_none(codex_home, None, &[(&[CONFIG_KEY_MODEL], None)])
            .await
            .expect("persist");

        assert!(!codex_home.join(CONFIG_TOML_FILE).exists());
    }

    // Test helper moved to bottom per review guidance.
    async fn read_config(codex_home: &Path) -> String {
        let p = codex_home.join(CONFIG_TOML_FILE);
        tokio::fs::read_to_string(p).await.unwrap_or_default()
    }
}
174
codex-rs/core/src/context_manager/history.rs
Normal file
@@ -0,0 +1,174 @@
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::TokenUsage;
use codex_protocol::protocol::TokenUsageInfo;
use std::ops::Deref;

use crate::context_manager::normalize;
use crate::context_manager::truncate::format_output_for_model_body;
use crate::context_manager::truncate::globally_truncate_function_output_items;

/// Transcript of conversation history
#[derive(Debug, Clone, Default)]
pub(crate) struct ContextManager {
    /// The oldest items are at the beginning of the vector.
    items: Vec<ResponseItem>,
    token_info: Option<TokenUsageInfo>,
}

impl ContextManager {
    pub(crate) fn new() -> Self {
        Self {
            items: Vec::new(),
            token_info: TokenUsageInfo::new_or_append(&None, &None, None),
        }
    }

    pub(crate) fn token_info(&self) -> Option<TokenUsageInfo> {
        self.token_info.clone()
    }

    pub(crate) fn set_token_usage_full(&mut self, context_window: i64) {
        match &mut self.token_info {
            Some(info) => info.fill_to_context_window(context_window),
            None => {
                self.token_info = Some(TokenUsageInfo::full_context_window(context_window));
            }
        }
    }

    /// `items` is ordered from oldest to newest.
    pub(crate) fn record_items<I>(&mut self, items: I)
    where
        I: IntoIterator,
        I::Item: std::ops::Deref<Target = ResponseItem>,
    {
        for item in items {
            let item_ref = item.deref();
            let is_ghost_snapshot = matches!(item_ref, ResponseItem::GhostSnapshot { .. });
            if !is_api_message(item_ref) && !is_ghost_snapshot {
                continue;
            }

            let processed = Self::process_item(&item);
            self.items.push(processed);
        }
    }

    pub(crate) fn get_history(&mut self) -> Vec<ResponseItem> {
        self.normalize_history();
        self.contents()
    }

    // Returns the history prepared for sending to the model, with extra
    // response items filtered out and ghost snapshots removed.
    pub(crate) fn get_history_for_prompt(&mut self) -> Vec<ResponseItem> {
        let mut history = self.get_history();
        Self::remove_ghost_snapshots(&mut history);
        history
    }

    pub(crate) fn remove_first_item(&mut self) {
        if !self.items.is_empty() {
            // Remove the oldest item (front of the list). Items are ordered from
            // oldest → newest, so index 0 is the first entry recorded.
            let removed = self.items.remove(0);
            // If the removed item participates in a call/output pair, also remove
            // its corresponding counterpart to keep the invariants intact without
            // running a full normalization pass.
            normalize::remove_corresponding_for(&mut self.items, &removed);
        }
    }

    pub(crate) fn replace(&mut self, items: Vec<ResponseItem>) {
        self.items = items;
    }

    pub(crate) fn update_token_info(
        &mut self,
        usage: &TokenUsage,
        model_context_window: Option<i64>,
    ) {
        self.token_info = TokenUsageInfo::new_or_append(
            &self.token_info,
            &Some(usage.clone()),
            model_context_window,
        );
    }

    /// This function enforces a couple of invariants on the in-memory history:
    /// 1. every call (function/custom) has a corresponding output entry
    /// 2. every output has a corresponding call entry
    fn normalize_history(&mut self) {
        // all function/tool calls must have a corresponding output
        normalize::ensure_call_outputs_present(&mut self.items);

        // all outputs must have a corresponding function/tool call
        normalize::remove_orphan_outputs(&mut self.items);
    }

    /// Returns a clone of the contents in the transcript.
    fn contents(&self) -> Vec<ResponseItem> {
        self.items.clone()
    }

    fn remove_ghost_snapshots(items: &mut Vec<ResponseItem>) {
        items.retain(|item| !matches!(item, ResponseItem::GhostSnapshot { .. }));
    }

    fn process_item(item: &ResponseItem) -> ResponseItem {
        match item {
            ResponseItem::FunctionCallOutput { call_id, output } => {
                let truncated = format_output_for_model_body(output.content.as_str());
                let truncated_items = output
                    .content_items
                    .as_ref()
                    .map(|items| globally_truncate_function_output_items(items));
                ResponseItem::FunctionCallOutput {
                    call_id: call_id.clone(),
                    output: FunctionCallOutputPayload {
                        content: truncated,
                        content_items: truncated_items,
                        success: output.success,
                    },
                }
            }
            ResponseItem::CustomToolCallOutput { call_id, output } => {
                let truncated = format_output_for_model_body(output);
                ResponseItem::CustomToolCallOutput {
                    call_id: call_id.clone(),
                    output: truncated,
                }
            }
            ResponseItem::Message { .. }
            | ResponseItem::Reasoning { .. }
            | ResponseItem::LocalShellCall { .. }
            | ResponseItem::FunctionCall { .. }
            | ResponseItem::WebSearchCall { .. }
            | ResponseItem::CustomToolCall { .. }
            | ResponseItem::GhostSnapshot { .. }
            | ResponseItem::Other => item.clone(),
        }
    }
}
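
// Illustrative only: the expected call pattern around a turn, as the tests below
// exercise it. `item` stands for any `ResponseItem` produced during the turn.
//
//     let mut manager = ContextManager::new();
//     manager.record_items([&item]);                 // tool outputs are truncated on entry
//     let prompt = manager.get_history_for_prompt(); // normalized, ghost snapshots dropped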

/// API messages include every non-system item (user/assistant messages, reasoning,
/// tool calls, tool outputs, shell calls, and web-search calls).
fn is_api_message(message: &ResponseItem) -> bool {
    match message {
        ResponseItem::Message { role, .. } => role.as_str() != "system",
        ResponseItem::FunctionCallOutput { .. }
        | ResponseItem::FunctionCall { .. }
        | ResponseItem::CustomToolCall { .. }
        | ResponseItem::CustomToolCallOutput { .. }
        | ResponseItem::LocalShellCall { .. }
        | ResponseItem::Reasoning { .. }
        | ResponseItem::WebSearchCall { .. } => true,
        ResponseItem::GhostSnapshot { .. } => false,
        ResponseItem::Other => false,
    }
}

#[cfg(test)]
#[path = "history_tests.rs"]
mod tests;
841
codex-rs/core/src/context_manager/history_tests.rs
Normal file
@@ -0,0 +1,841 @@
use super::*;
use crate::context_manager::truncate;
use codex_git::GhostCommit;
use codex_protocol::models::ContentItem;
use codex_protocol::models::FunctionCallOutputContentItem;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::LocalShellAction;
use codex_protocol::models::LocalShellExecAction;
use codex_protocol::models::LocalShellStatus;
use codex_protocol::models::ReasoningItemContent;
use codex_protocol::models::ReasoningItemReasoningSummary;
use pretty_assertions::assert_eq;
use regex_lite::Regex;

fn assistant_msg(text: &str) -> ResponseItem {
    ResponseItem::Message {
        id: None,
        role: "assistant".to_string(),
        content: vec![ContentItem::OutputText {
            text: text.to_string(),
        }],
    }
}

fn create_history_with_items(items: Vec<ResponseItem>) -> ContextManager {
    let mut h = ContextManager::new();
    h.record_items(items.iter());
    h
}

fn user_msg(text: &str) -> ResponseItem {
    ResponseItem::Message {
        id: None,
        role: "user".to_string(),
        content: vec![ContentItem::OutputText {
            text: text.to_string(),
        }],
    }
}

fn reasoning_msg(text: &str) -> ResponseItem {
    ResponseItem::Reasoning {
        id: String::new(),
        summary: vec![ReasoningItemReasoningSummary::SummaryText {
            text: "summary".to_string(),
        }],
        content: Some(vec![ReasoningItemContent::ReasoningText {
            text: text.to_string(),
        }]),
        encrypted_content: None,
    }
}

#[test]
fn filters_non_api_messages() {
    let mut h = ContextManager::default();
    // System messages are not API messages; Other is ignored.
    let system = ResponseItem::Message {
        id: None,
        role: "system".to_string(),
        content: vec![ContentItem::OutputText {
            text: "ignored".to_string(),
        }],
    };
    let reasoning = reasoning_msg("thinking...");
    h.record_items([&system, &reasoning, &ResponseItem::Other]);

    // User and assistant should be retained.
    let u = user_msg("hi");
    let a = assistant_msg("hello");
    h.record_items([&u, &a]);

    let items = h.contents();
    assert_eq!(
        items,
        vec![
            ResponseItem::Reasoning {
                id: String::new(),
                summary: vec![ReasoningItemReasoningSummary::SummaryText {
                    text: "summary".to_string(),
                }],
                content: Some(vec![ReasoningItemContent::ReasoningText {
                    text: "thinking...".to_string(),
                }]),
                encrypted_content: None,
            },
            ResponseItem::Message {
                id: None,
                role: "user".to_string(),
                content: vec![ContentItem::OutputText {
                    text: "hi".to_string()
                }]
            },
            ResponseItem::Message {
                id: None,
                role: "assistant".to_string(),
                content: vec![ContentItem::OutputText {
                    text: "hello".to_string()
                }]
            }
        ]
    );
}

#[test]
fn get_history_for_prompt_drops_ghost_commits() {
    let items = vec![ResponseItem::GhostSnapshot {
        ghost_commit: GhostCommit::new("ghost-1".to_string(), None, Vec::new(), Vec::new()),
    }];
    let mut history = create_history_with_items(items);
    let filtered = history.get_history_for_prompt();
    assert_eq!(filtered, vec![]);
}

#[test]
fn remove_first_item_removes_matching_output_for_function_call() {
    let items = vec![
        ResponseItem::FunctionCall {
            id: None,
            name: "do_it".to_string(),
            arguments: "{}".to_string(),
            call_id: "call-1".to_string(),
        },
        ResponseItem::FunctionCallOutput {
            call_id: "call-1".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
    ];
    let mut h = create_history_with_items(items);
    h.remove_first_item();
    assert_eq!(h.contents(), vec![]);
}

#[test]
fn remove_first_item_removes_matching_call_for_output() {
    let items = vec![
        ResponseItem::FunctionCallOutput {
            call_id: "call-2".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
        ResponseItem::FunctionCall {
            id: None,
            name: "do_it".to_string(),
            arguments: "{}".to_string(),
            call_id: "call-2".to_string(),
        },
    ];
    let mut h = create_history_with_items(items);
    h.remove_first_item();
    assert_eq!(h.contents(), vec![]);
}

#[test]
fn remove_first_item_handles_local_shell_pair() {
    let items = vec![
        ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("call-3".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string(), "hi".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        },
        ResponseItem::FunctionCallOutput {
            call_id: "call-3".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
    ];
    let mut h = create_history_with_items(items);
    h.remove_first_item();
    assert_eq!(h.contents(), vec![]);
}

#[test]
fn remove_first_item_handles_custom_tool_pair() {
    let items = vec![
        ResponseItem::CustomToolCall {
            id: None,
            status: None,
            call_id: "tool-1".to_string(),
            name: "my_tool".to_string(),
            input: "{}".to_string(),
        },
        ResponseItem::CustomToolCallOutput {
            call_id: "tool-1".to_string(),
            output: "ok".to_string(),
        },
    ];
    let mut h = create_history_with_items(items);
    h.remove_first_item();
    assert_eq!(h.contents(), vec![]);
}

#[test]
fn normalization_retains_local_shell_outputs() {
    let items = vec![
        ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("shell-1".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string(), "hi".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        },
        ResponseItem::FunctionCallOutput {
            call_id: "shell-1".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
    ];

    let mut history = create_history_with_items(items.clone());
    let normalized = history.get_history();
    assert_eq!(normalized, items);
}

#[test]
fn record_items_truncates_function_call_output_content() {
    let mut history = ContextManager::new();
    let long_line = "a very long line to trigger truncation\n";
    let long_output = long_line.repeat(2_500);
    let item = ResponseItem::FunctionCallOutput {
        call_id: "call-100".to_string(),
        output: FunctionCallOutputPayload {
            content: long_output.clone(),
            success: Some(true),
            ..Default::default()
        },
    };

    history.record_items([&item]);

    assert_eq!(history.items.len(), 1);
    match &history.items[0] {
        ResponseItem::FunctionCallOutput { output, .. } => {
            assert_ne!(output.content, long_output);
            assert!(
                output.content.starts_with("Total output lines:"),
                "expected truncated summary, got {}",
                output.content
            );
        }
        other => panic!("unexpected history item: {other:?}"),
    }
}

#[test]
fn record_items_truncates_custom_tool_call_output_content() {
    let mut history = ContextManager::new();
    let line = "custom output that is very long\n";
    let long_output = line.repeat(2_500);
    let item = ResponseItem::CustomToolCallOutput {
        call_id: "tool-200".to_string(),
        output: long_output.clone(),
    };

    history.record_items([&item]);

    assert_eq!(history.items.len(), 1);
    match &history.items[0] {
        ResponseItem::CustomToolCallOutput { output, .. } => {
            assert_ne!(output, &long_output);
            assert!(
                output.starts_with("Total output lines:"),
                "expected truncated summary, got {output}"
            );
        }
        other => panic!("unexpected history item: {other:?}"),
    }
}

fn assert_truncated_message_matches(message: &str, line: &str, total_lines: usize) {
    let pattern = truncated_message_pattern(line, total_lines);
    let regex = Regex::new(&pattern).unwrap_or_else(|err| {
        panic!("failed to compile regex {pattern}: {err}");
    });
    let captures = regex
        .captures(message)
        .unwrap_or_else(|| panic!("message failed to match pattern {pattern}: {message}"));
    let body = captures
        .name("body")
        .expect("missing body capture")
        .as_str();
    assert!(
        body.len() <= truncate::MODEL_FORMAT_MAX_BYTES,
        "body exceeds byte limit: {} bytes",
        body.len()
    );
}

fn truncated_message_pattern(line: &str, total_lines: usize) -> String {
    let head_take = truncate::MODEL_FORMAT_HEAD_LINES.min(total_lines);
    let tail_take = truncate::MODEL_FORMAT_TAIL_LINES.min(total_lines.saturating_sub(head_take));
    let omitted = total_lines.saturating_sub(head_take + tail_take);
    let escaped_line = regex_lite::escape(line);
    if omitted == 0 {
        return format!(
            r"(?s)^Total output lines: {total_lines}\n\n(?P<body>{escaped_line}.*\n\[\.{{3}} output truncated to fit {max_bytes} bytes \.{{3}}]\n\n.*)$",
|
||||
max_bytes = truncate::MODEL_FORMAT_MAX_BYTES,
|
||||
);
|
||||
}
|
||||
format!(
|
||||
r"(?s)^Total output lines: {total_lines}\n\n(?P<body>{escaped_line}.*\n\[\.{{3}} omitted {omitted} of {total_lines} lines \.{{3}}]\n\n.*)$",
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_truncates_large_error() {
|
||||
let line = "very long execution error line that should trigger truncation\n";
|
||||
let large_error = line.repeat(2_500); // way beyond both byte and line limits
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&large_error);
|
||||
|
||||
let total_lines = large_error.lines().count();
|
||||
assert_truncated_message_matches(&truncated, line, total_lines);
|
||||
assert_ne!(truncated, large_error);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_marks_byte_truncation_without_omitted_lines() {
|
||||
let long_line = "a".repeat(truncate::MODEL_FORMAT_MAX_BYTES + 50);
|
||||
let truncated = truncate::format_output_for_model_body(&long_line);
|
||||
|
||||
assert_ne!(truncated, long_line);
|
||||
let marker_line = format!(
|
||||
"[... output truncated to fit {} bytes ...]",
|
||||
truncate::MODEL_FORMAT_MAX_BYTES
|
||||
);
|
||||
assert!(
|
||||
truncated.contains(&marker_line),
|
||||
"missing byte truncation marker: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
!truncated.contains("omitted"),
|
||||
"line omission marker should not appear when no lines were dropped: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_returns_original_when_within_limits() {
|
||||
let content = "example output\n".repeat(10);
|
||||
|
||||
assert_eq!(truncate::format_output_for_model_body(&content), content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_reports_omitted_lines_and_keeps_head_and_tail() {
|
||||
let total_lines = truncate::MODEL_FORMAT_MAX_LINES + 100;
|
||||
let content: String = (0..total_lines)
|
||||
.map(|idx| format!("line-{idx}\n"))
|
||||
.collect();
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&content);
|
||||
let omitted = total_lines - truncate::MODEL_FORMAT_MAX_LINES;
|
||||
let expected_marker = format!("[... omitted {omitted} of {total_lines} lines ...]");
|
||||
|
||||
assert!(
|
||||
truncated.contains(&expected_marker),
|
||||
"missing omitted marker: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
truncated.contains("line-0\n"),
|
||||
"expected head line to remain: {truncated}"
|
||||
);
|
||||
|
||||
let last_line = format!("line-{}\n", total_lines - 1);
|
||||
assert!(
|
||||
truncated.contains(&last_line),
|
||||
"expected tail line to remain: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exec_output_prefers_line_marker_when_both_limits_exceeded() {
|
||||
let total_lines = truncate::MODEL_FORMAT_MAX_LINES + 42;
|
||||
let long_line = "x".repeat(256);
|
||||
let content: String = (0..total_lines)
|
||||
.map(|idx| format!("line-{idx}-{long_line}\n"))
|
||||
.collect();
|
||||
|
||||
let truncated = truncate::format_output_for_model_body(&content);
|
||||
|
||||
assert!(
|
||||
truncated.contains("[... omitted 42 of 298 lines ...]"),
|
||||
"expected omitted marker when line count exceeds limit: {truncated}"
|
||||
);
|
||||
assert!(
|
||||
!truncated.contains("output truncated to fit"),
|
||||
"line omission marker should take precedence over byte marker: {truncated}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn truncates_across_multiple_under_limit_texts_and_reports_omitted() {
|
||||
// Arrange: several text items, none exceeding per-item limit, but total exceeds budget.
|
||||
let budget = truncate::MODEL_FORMAT_MAX_BYTES;
|
||||
let t1_len = (budget / 2).saturating_sub(10);
|
||||
let t2_len = (budget / 2).saturating_sub(10);
|
||||
let remaining_after_t1_t2 = budget.saturating_sub(t1_len + t2_len);
|
||||
let t3_len = 50; // gets truncated to remaining_after_t1_t2
|
||||
let t4_len = 5; // omitted
|
||||
let t5_len = 7; // omitted
|
||||
|
||||
let t1 = "a".repeat(t1_len);
|
||||
let t2 = "b".repeat(t2_len);
|
||||
let t3 = "c".repeat(t3_len);
|
||||
let t4 = "d".repeat(t4_len);
|
||||
let t5 = "e".repeat(t5_len);
|
||||
|
||||
let item = ResponseItem::FunctionCallOutput {
|
||||
call_id: "call-omit".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "irrelevant".to_string(),
|
||||
content_items: Some(vec![
|
||||
FunctionCallOutputContentItem::InputText { text: t1 },
|
||||
FunctionCallOutputContentItem::InputText { text: t2 },
|
||||
FunctionCallOutputContentItem::InputImage {
|
||||
image_url: "img:mid".to_string(),
|
||||
},
|
||||
FunctionCallOutputContentItem::InputText { text: t3 },
|
||||
FunctionCallOutputContentItem::InputText { text: t4 },
|
||||
FunctionCallOutputContentItem::InputText { text: t5 },
|
||||
]),
|
||||
success: Some(true),
|
||||
},
|
||||
};
|
||||
|
||||
let mut history = ContextManager::new();
|
||||
history.record_items([&item]);
|
||||
assert_eq!(history.items.len(), 1);
|
||||
let json = serde_json::to_value(&history.items[0]).expect("serialize to json");
|
||||
|
||||
let output = json
|
||||
.get("output")
|
||||
.expect("output field")
|
||||
.as_array()
|
||||
.expect("array output");
|
||||
|
||||
// Expect: t1 (full), t2 (full), image, t3 (truncated), summary mentioning 2 omitted.
|
||||
assert_eq!(output.len(), 5);
|
||||
|
||||
let first = output[0].as_object().expect("first obj");
|
||||
assert_eq!(first.get("type").unwrap(), "input_text");
|
||||
let first_text = first.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(first_text.len(), t1_len);
|
||||
|
||||
let second = output[1].as_object().expect("second obj");
|
||||
assert_eq!(second.get("type").unwrap(), "input_text");
|
||||
let second_text = second.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(second_text.len(), t2_len);
|
||||
|
||||
assert_eq!(
|
||||
output[2],
|
||||
serde_json::json!({"type": "input_image", "image_url": "img:mid"})
|
||||
);
|
||||
|
||||
let fourth = output[3].as_object().expect("fourth obj");
|
||||
assert_eq!(fourth.get("type").unwrap(), "input_text");
|
||||
let fourth_text = fourth.get("text").unwrap().as_str().unwrap();
|
||||
assert_eq!(fourth_text.len(), remaining_after_t1_t2);
|
||||
|
||||
let summary = output[4].as_object().expect("summary obj");
|
||||
assert_eq!(summary.get("type").unwrap(), "input_text");
|
||||
let summary_text = summary.get("text").unwrap().as_str().unwrap();
|
||||
assert!(summary_text.contains("omitted 2 text items"));
|
||||
}
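
// Worked numbers for the test above (assuming MODEL_FORMAT_MAX_BYTES =
// 10 * 1024 = 10_240, as truncate.rs defines): t1_len = t2_len =
// 10_240 / 2 - 10 = 5_110, so remaining_after_t1_t2 = 10_240 - 2 * 5_110 = 20.
// t3 (50 bytes) is therefore cut to 20 bytes, and t4/t5 never fit, which is
// why the summary item reports "omitted 2 text items".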

// TODO(aibrahim): run CI in release mode.
#[cfg(not(debug_assertions))]
#[test]
fn normalize_adds_missing_output_for_function_call() {
    let items = vec![ResponseItem::FunctionCall {
        id: None,
        name: "do_it".to_string(),
        arguments: "{}".to_string(),
        call_id: "call-x".to_string(),
    }];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(
        h.contents(),
        vec![
            ResponseItem::FunctionCall {
                id: None,
                name: "do_it".to_string(),
                arguments: "{}".to_string(),
                call_id: "call-x".to_string(),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "call-x".to_string(),
                output: FunctionCallOutputPayload {
                    content: "aborted".to_string(),
                    ..Default::default()
                },
            },
        ]
    );
}

#[cfg(not(debug_assertions))]
#[test]
fn normalize_adds_missing_output_for_custom_tool_call() {
    let items = vec![ResponseItem::CustomToolCall {
        id: None,
        status: None,
        call_id: "tool-x".to_string(),
        name: "custom".to_string(),
        input: "{}".to_string(),
    }];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(
        h.contents(),
        vec![
            ResponseItem::CustomToolCall {
                id: None,
                status: None,
                call_id: "tool-x".to_string(),
                name: "custom".to_string(),
                input: "{}".to_string(),
            },
            ResponseItem::CustomToolCallOutput {
                call_id: "tool-x".to_string(),
                output: "aborted".to_string(),
            },
        ]
    );
}

#[cfg(not(debug_assertions))]
#[test]
fn normalize_adds_missing_output_for_local_shell_call_with_id() {
    let items = vec![ResponseItem::LocalShellCall {
        id: None,
        call_id: Some("shell-1".to_string()),
        status: LocalShellStatus::Completed,
        action: LocalShellAction::Exec(LocalShellExecAction {
            command: vec!["echo".to_string(), "hi".to_string()],
            timeout_ms: None,
            working_directory: None,
            env: None,
            user: None,
        }),
    }];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(
        h.contents(),
        vec![
            ResponseItem::LocalShellCall {
                id: None,
                call_id: Some("shell-1".to_string()),
                status: LocalShellStatus::Completed,
                action: LocalShellAction::Exec(LocalShellExecAction {
                    command: vec!["echo".to_string(), "hi".to_string()],
                    timeout_ms: None,
                    working_directory: None,
                    env: None,
                    user: None,
                }),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "shell-1".to_string(),
                output: FunctionCallOutputPayload {
                    content: "aborted".to_string(),
                    ..Default::default()
                },
            },
        ]
    );
}

#[cfg(not(debug_assertions))]
#[test]
fn normalize_removes_orphan_function_call_output() {
    let items = vec![ResponseItem::FunctionCallOutput {
        call_id: "orphan-1".to_string(),
        output: FunctionCallOutputPayload {
            content: "ok".to_string(),
            ..Default::default()
        },
    }];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(h.contents(), vec![]);
}

#[cfg(not(debug_assertions))]
#[test]
fn normalize_removes_orphan_custom_tool_call_output() {
    let items = vec![ResponseItem::CustomToolCallOutput {
        call_id: "orphan-2".to_string(),
        output: "ok".to_string(),
    }];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(h.contents(), vec![]);
}

#[cfg(not(debug_assertions))]
#[test]
fn normalize_mixed_inserts_and_removals() {
    let items = vec![
        // Will get an inserted output
        ResponseItem::FunctionCall {
            id: None,
            name: "f1".to_string(),
            arguments: "{}".to_string(),
            call_id: "c1".to_string(),
        },
        // Orphan output that should be removed
        ResponseItem::FunctionCallOutput {
            call_id: "c2".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
        // Will get an inserted custom tool output
        ResponseItem::CustomToolCall {
            id: None,
            status: None,
            call_id: "t1".to_string(),
            name: "tool".to_string(),
            input: "{}".to_string(),
        },
        // Local shell call also gets an inserted function call output
        ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("s1".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        },
    ];
    let mut h = create_history_with_items(items);

    h.normalize_history();

    assert_eq!(
        h.contents(),
        vec![
            ResponseItem::FunctionCall {
                id: None,
                name: "f1".to_string(),
                arguments: "{}".to_string(),
                call_id: "c1".to_string(),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "c1".to_string(),
                output: FunctionCallOutputPayload {
                    content: "aborted".to_string(),
                    ..Default::default()
                },
            },
            ResponseItem::CustomToolCall {
                id: None,
                status: None,
                call_id: "t1".to_string(),
                name: "tool".to_string(),
                input: "{}".to_string(),
            },
            ResponseItem::CustomToolCallOutput {
                call_id: "t1".to_string(),
                output: "aborted".to_string(),
            },
            ResponseItem::LocalShellCall {
                id: None,
                call_id: Some("s1".to_string()),
                status: LocalShellStatus::Completed,
                action: LocalShellAction::Exec(LocalShellExecAction {
                    command: vec!["echo".to_string()],
                    timeout_ms: None,
                    working_directory: None,
                    env: None,
                    user: None,
                }),
            },
            ResponseItem::FunctionCallOutput {
                call_id: "s1".to_string(),
                output: FunctionCallOutputPayload {
                    content: "aborted".to_string(),
                    ..Default::default()
                },
            },
        ]
    );
}

// In debug builds we panic on normalization errors instead of silently fixing them.
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_adds_missing_output_for_function_call_panics_in_debug() {
    let items = vec![ResponseItem::FunctionCall {
        id: None,
        name: "do_it".to_string(),
        arguments: "{}".to_string(),
        call_id: "call-x".to_string(),
    }];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}

#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_adds_missing_output_for_custom_tool_call_panics_in_debug() {
    let items = vec![ResponseItem::CustomToolCall {
        id: None,
        status: None,
        call_id: "tool-x".to_string(),
        name: "custom".to_string(),
        input: "{}".to_string(),
    }];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}

#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_adds_missing_output_for_local_shell_call_with_id_panics_in_debug() {
    let items = vec![ResponseItem::LocalShellCall {
        id: None,
        call_id: Some("shell-1".to_string()),
        status: LocalShellStatus::Completed,
        action: LocalShellAction::Exec(LocalShellExecAction {
            command: vec!["echo".to_string(), "hi".to_string()],
            timeout_ms: None,
            working_directory: None,
            env: None,
            user: None,
        }),
    }];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}

#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_removes_orphan_function_call_output_panics_in_debug() {
    let items = vec![ResponseItem::FunctionCallOutput {
        call_id: "orphan-1".to_string(),
        output: FunctionCallOutputPayload {
            content: "ok".to_string(),
            ..Default::default()
        },
    }];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}

#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_removes_orphan_custom_tool_call_output_panics_in_debug() {
    let items = vec![ResponseItem::CustomToolCallOutput {
        call_id: "orphan-2".to_string(),
        output: "ok".to_string(),
    }];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}

#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn normalize_mixed_inserts_and_removals_panics_in_debug() {
    let items = vec![
        ResponseItem::FunctionCall {
            id: None,
            name: "f1".to_string(),
            arguments: "{}".to_string(),
            call_id: "c1".to_string(),
        },
        ResponseItem::FunctionCallOutput {
            call_id: "c2".to_string(),
            output: FunctionCallOutputPayload {
                content: "ok".to_string(),
                ..Default::default()
            },
        },
        ResponseItem::CustomToolCall {
            id: None,
            status: None,
            call_id: "t1".to_string(),
            name: "tool".to_string(),
            input: "{}".to_string(),
        },
        ResponseItem::LocalShellCall {
            id: None,
            call_id: Some("s1".to_string()),
            status: LocalShellStatus::Completed,
            action: LocalShellAction::Exec(LocalShellExecAction {
                command: vec!["echo".to_string()],
                timeout_ms: None,
                working_directory: None,
                env: None,
                user: None,
            }),
        },
    ];
    let mut h = create_history_with_items(items);
    h.normalize_history();
}
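
The `*_panics_in_debug` tests above hinge on `crate::util::error_or_panic`, which this diff does not include. A minimal sketch of the assumed contract (panic under `debug_assertions`, log otherwise) is below; the `tracing::error!` call is an assumption, not necessarily the crate's actual implementation:

// Hypothetical sketch of crate::util::error_or_panic (not part of this
// diff): fail loudly in debug builds, log and continue in release builds.
pub(crate) fn error_or_panic(message: String) {
    if cfg!(debug_assertions) {
        panic!("{message}");
    } else {
        tracing::error!("{message}");
    }
}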
6 codex-rs/core/src/context_manager/mod.rs Normal file
@@ -0,0 +1,6 @@
mod history;
mod normalize;
mod truncate;

pub(crate) use history::ContextManager;
pub(crate) use truncate::format_output_for_model_body;
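
For orientation (not part of the diff), crate-internal callers would reach this re-exported surface roughly as sketched below. `ContextManager::new`, `record_items`, and `get_history` are the methods the tests above exercise; the wrapper function is illustrative only, and the assumption that `record_items` accepts any iterator of item references is inferred from the tests' array-of-references calls:

use crate::context_manager::ContextManager;
use crate::context_manager::format_output_for_model_body;
use codex_protocol::models::ResponseItem;

// Illustrative helper: record one turn's items, then read back the
// normalized history that would be sent to the model.
fn record_and_snapshot(items: &[ResponseItem]) -> Vec<ResponseItem> {
    let mut manager = ContextManager::new();
    manager.record_items(items.iter());
    manager.get_history()
}

// The free function re-exported from truncate.rs works on any raw output.
fn cap_for_model(raw: &str) -> String {
    format_output_for_model_body(raw)
}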
213 codex-rs/core/src/context_manager/normalize.rs Normal file
@@ -0,0 +1,213 @@
use std::collections::HashSet;

use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseItem;

use crate::util::error_or_panic;

pub(crate) fn ensure_call_outputs_present(items: &mut Vec<ResponseItem>) {
    // Collect synthetic outputs to insert immediately after their calls.
    // Store the insertion position (index of call) alongside the item so
    // we can insert in reverse order and avoid index shifting.
    let mut missing_outputs_to_insert: Vec<(usize, ResponseItem)> = Vec::new();

    for (idx, item) in items.iter().enumerate() {
        match item {
            ResponseItem::FunctionCall { call_id, .. } => {
                let has_output = items.iter().any(|i| match i {
                    ResponseItem::FunctionCallOutput {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });

                if !has_output {
                    error_or_panic(format!(
                        "Function call output is missing for call id: {call_id}"
                    ));
                    missing_outputs_to_insert.push((
                        idx,
                        ResponseItem::FunctionCallOutput {
                            call_id: call_id.clone(),
                            output: FunctionCallOutputPayload {
                                content: "aborted".to_string(),
                                ..Default::default()
                            },
                        },
                    ));
                }
            }
            ResponseItem::CustomToolCall { call_id, .. } => {
                let has_output = items.iter().any(|i| match i {
                    ResponseItem::CustomToolCallOutput {
                        call_id: existing, ..
                    } => existing == call_id,
                    _ => false,
                });

                if !has_output {
                    error_or_panic(format!(
                        "Custom tool call output is missing for call id: {call_id}"
                    ));
                    missing_outputs_to_insert.push((
                        idx,
                        ResponseItem::CustomToolCallOutput {
                            call_id: call_id.clone(),
                            output: "aborted".to_string(),
                        },
                    ));
                }
            }
            // LocalShellCall is represented in upstream streams by a FunctionCallOutput
            ResponseItem::LocalShellCall { call_id, .. } => {
                if let Some(call_id) = call_id.as_ref() {
                    let has_output = items.iter().any(|i| match i {
                        ResponseItem::FunctionCallOutput {
                            call_id: existing, ..
                        } => existing == call_id,
                        _ => false,
                    });

                    if !has_output {
                        error_or_panic(format!(
                            "Local shell call output is missing for call id: {call_id}"
                        ));
                        missing_outputs_to_insert.push((
                            idx,
                            ResponseItem::FunctionCallOutput {
                                call_id: call_id.clone(),
                                output: FunctionCallOutputPayload {
                                    content: "aborted".to_string(),
                                    ..Default::default()
                                },
                            },
                        ));
                    }
                }
            }
            _ => {}
        }
    }

    // Insert synthetic outputs in reverse index order to avoid re-indexing.
    for (idx, output_item) in missing_outputs_to_insert.into_iter().rev() {
        items.insert(idx + 1, output_item);
    }
}

pub(crate) fn remove_orphan_outputs(items: &mut Vec<ResponseItem>) {
    let function_call_ids: HashSet<String> = items
        .iter()
        .filter_map(|i| match i {
            ResponseItem::FunctionCall { call_id, .. } => Some(call_id.clone()),
            _ => None,
        })
        .collect();

    let local_shell_call_ids: HashSet<String> = items
        .iter()
        .filter_map(|i| match i {
            ResponseItem::LocalShellCall {
                call_id: Some(call_id),
                ..
            } => Some(call_id.clone()),
            _ => None,
        })
        .collect();

    let custom_tool_call_ids: HashSet<String> = items
        .iter()
        .filter_map(|i| match i {
            ResponseItem::CustomToolCall { call_id, .. } => Some(call_id.clone()),
            _ => None,
        })
        .collect();

    items.retain(|item| match item {
        ResponseItem::FunctionCallOutput { call_id, .. } => {
            let has_match =
                function_call_ids.contains(call_id) || local_shell_call_ids.contains(call_id);
            if !has_match {
                error_or_panic(format!(
                    "Orphan function call output for call id: {call_id}"
                ));
            }
            has_match
        }
        ResponseItem::CustomToolCallOutput { call_id, .. } => {
            let has_match = custom_tool_call_ids.contains(call_id);
            if !has_match {
                error_or_panic(format!(
                    "Orphan custom tool call output for call id: {call_id}"
                ));
            }
            has_match
        }
        _ => true,
    });
}

pub(crate) fn remove_corresponding_for(items: &mut Vec<ResponseItem>, item: &ResponseItem) {
    match item {
        ResponseItem::FunctionCall { call_id, .. } => {
            remove_first_matching(items, |i| {
                matches!(
                    i,
                    ResponseItem::FunctionCallOutput {
                        call_id: existing, ..
                    } if existing == call_id
                )
            });
        }
        ResponseItem::FunctionCallOutput { call_id, .. } => {
            if let Some(pos) = items.iter().position(|i| {
                matches!(i, ResponseItem::FunctionCall { call_id: existing, .. } if existing == call_id)
            }) {
                items.remove(pos);
            } else if let Some(pos) = items.iter().position(|i| {
                matches!(i, ResponseItem::LocalShellCall { call_id: Some(existing), .. } if existing == call_id)
            }) {
                items.remove(pos);
            }
        }
        ResponseItem::CustomToolCall { call_id, .. } => {
            remove_first_matching(items, |i| {
                matches!(
                    i,
                    ResponseItem::CustomToolCallOutput {
                        call_id: existing, ..
                    } if existing == call_id
                )
            });
        }
        ResponseItem::CustomToolCallOutput { call_id, .. } => {
            remove_first_matching(
                items,
                |i| matches!(i, ResponseItem::CustomToolCall { call_id: existing, .. } if existing == call_id),
            );
        }
        ResponseItem::LocalShellCall {
            call_id: Some(call_id),
            ..
        } => {
            remove_first_matching(items, |i| {
                matches!(
                    i,
                    ResponseItem::FunctionCallOutput {
                        call_id: existing, ..
                    } if existing == call_id
                )
            });
        }
        _ => {}
    }
}

fn remove_first_matching<F>(items: &mut Vec<ResponseItem>, predicate: F)
where
    F: Fn(&ResponseItem) -> bool,
{
    if let Some(pos) = items.iter().position(predicate) {
        items.remove(pos);
    }
}
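
The one subtle step in `ensure_call_outputs_present` is replaying the pending insertions back-to-front. A self-contained sketch of that pattern, reduced to strings with illustrative names (this is not crate code):

// Each (idx, item) pair means "insert item right after position idx".
// Applying the list from the highest index down means no earlier
// insertion point has shifted by the time it is used.
fn insert_after_positions(items: &mut Vec<&'static str>, inserts: Vec<(usize, &'static str)>) {
    for (idx, item) in inserts.into_iter().rev() {
        items.insert(idx + 1, item);
    }
}

fn main() {
    let mut items = vec!["call-a", "call-b"];
    insert_after_positions(&mut items, vec![(0, "out-a"), (1, "out-b")]);
    assert_eq!(items, vec!["call-a", "out-a", "call-b", "out-b"]);
}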
128 codex-rs/core/src/context_manager/truncate.rs Normal file
@@ -0,0 +1,128 @@
use codex_protocol::models::FunctionCallOutputContentItem;
use codex_utils_string::take_bytes_at_char_boundary;
use codex_utils_string::take_last_bytes_at_char_boundary;

// Model-formatting limits: clients get full streams; only content sent to the model is truncated.
pub(crate) const MODEL_FORMAT_MAX_BYTES: usize = 10 * 1024; // 10 KiB
pub(crate) const MODEL_FORMAT_MAX_LINES: usize = 256; // lines
pub(crate) const MODEL_FORMAT_HEAD_LINES: usize = MODEL_FORMAT_MAX_LINES / 2;
pub(crate) const MODEL_FORMAT_TAIL_LINES: usize = MODEL_FORMAT_MAX_LINES - MODEL_FORMAT_HEAD_LINES; // 128
pub(crate) const MODEL_FORMAT_HEAD_BYTES: usize = MODEL_FORMAT_MAX_BYTES / 2;

pub(crate) fn globally_truncate_function_output_items(
    items: &[FunctionCallOutputContentItem],
) -> Vec<FunctionCallOutputContentItem> {
    let mut out: Vec<FunctionCallOutputContentItem> = Vec::with_capacity(items.len());
    let mut remaining = MODEL_FORMAT_MAX_BYTES;
    let mut omitted_text_items = 0usize;

    for it in items {
        match it {
            FunctionCallOutputContentItem::InputText { text } => {
                if remaining == 0 {
                    omitted_text_items += 1;
                    continue;
                }

                let len = text.len();
                if len <= remaining {
                    out.push(FunctionCallOutputContentItem::InputText { text: text.clone() });
                    remaining -= len;
                } else {
                    let slice = take_bytes_at_char_boundary(text, remaining);
                    if !slice.is_empty() {
                        out.push(FunctionCallOutputContentItem::InputText {
                            text: slice.to_string(),
                        });
                    }
                    remaining = 0;
                }
            }
            // todo(aibrahim): handle input images; resize
            FunctionCallOutputContentItem::InputImage { image_url } => {
                out.push(FunctionCallOutputContentItem::InputImage {
                    image_url: image_url.clone(),
                });
            }
        }
    }

    if omitted_text_items > 0 {
        out.push(FunctionCallOutputContentItem::InputText {
            text: format!("[omitted {omitted_text_items} text items ...]"),
        });
    }

    out
}

pub(crate) fn format_output_for_model_body(content: &str) -> String {
    // Head+tail truncation for the model: show the beginning and end with an elision.
    // Clients still receive full streams; only this formatted summary is capped.
    let total_lines = content.lines().count();
    if content.len() <= MODEL_FORMAT_MAX_BYTES && total_lines <= MODEL_FORMAT_MAX_LINES {
        return content.to_string();
    }
    let output = truncate_formatted_exec_output(content, total_lines);
    format!("Total output lines: {total_lines}\n\n{output}")
}

fn truncate_formatted_exec_output(content: &str, total_lines: usize) -> String {
    let segments: Vec<&str> = content.split_inclusive('\n').collect();
    let head_take = MODEL_FORMAT_HEAD_LINES.min(segments.len());
    let tail_take = MODEL_FORMAT_TAIL_LINES.min(segments.len().saturating_sub(head_take));
    let omitted = segments.len().saturating_sub(head_take + tail_take);

    let head_slice_end: usize = segments
        .iter()
        .take(head_take)
        .map(|segment| segment.len())
        .sum();
    let tail_slice_start: usize = if tail_take == 0 {
        content.len()
    } else {
        content.len()
            - segments
                .iter()
                .rev()
                .take(tail_take)
                .map(|segment| segment.len())
                .sum::<usize>()
    };
    let head_slice = &content[..head_slice_end];
    let tail_slice = &content[tail_slice_start..];
    let truncated_by_bytes = content.len() > MODEL_FORMAT_MAX_BYTES;
    // this is a bit wrong. We are counting metadata lines and not just shell output lines.
    let marker = if omitted > 0 {
        Some(format!(
            "\n[... omitted {omitted} of {total_lines} lines ...]\n\n"
        ))
    } else if truncated_by_bytes {
        Some(format!(
            "\n[... output truncated to fit {MODEL_FORMAT_MAX_BYTES} bytes ...]\n\n"
        ))
    } else {
        None
    };

    let marker_len = marker.as_ref().map_or(0, String::len);
    let base_head_budget = MODEL_FORMAT_HEAD_BYTES.min(MODEL_FORMAT_MAX_BYTES);
    let head_budget = base_head_budget.min(MODEL_FORMAT_MAX_BYTES.saturating_sub(marker_len));
    let head_part = take_bytes_at_char_boundary(head_slice, head_budget);
    let mut result = String::with_capacity(MODEL_FORMAT_MAX_BYTES.min(content.len()));

    result.push_str(head_part);
    if let Some(marker_text) = marker.as_ref() {
        result.push_str(marker_text);
    }

    let remaining = MODEL_FORMAT_MAX_BYTES.saturating_sub(result.len());
    if remaining == 0 {
        return result;
    }

    let tail_part = take_last_bytes_at_char_boundary(tail_slice, remaining);
    result.push_str(tail_part);

    result
}
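
To make the head+tail shape concrete, here is a sketch of an in-crate check (`format_output_for_model_body` is `pub(crate)`, so it would have to live inside the core crate, e.g. next to the tests above); the constants referenced are the ones defined in this file:

#[test]
fn head_tail_shape_sketch() {
    // 300 lines exceeds MODEL_FORMAT_MAX_LINES (256): the formatted body
    // keeps 128 head lines and 128 tail lines and marks the 44 omitted.
    let output: String = (0..300).map(|i| format!("line-{i}\n")).collect();
    let formatted = format_output_for_model_body(&output);
    assert!(formatted.starts_with("Total output lines: 300"));
    assert!(formatted.contains("[... omitted 44 of 300 lines ...]"));
    assert!(formatted.contains("line-0\n"));
    assert!(formatted.contains("line-299\n"));
}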