Mirror of https://github.com/openai/codex.git (synced 2026-02-01 22:47:52 +00:00)

Compare commits: jif/multi-... to jif/mini-c (295 commits)
Commit list: 507e43b69c ... 53eb2e9f27 (SHA1 hashes only; the author, date, and commit message columns are empty in this view).
3 .bazelignore (new file)
@@ -0,0 +1,3 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git
45 .bazelrc (new file)
@@ -0,0 +1,45 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache

common --experimental_platform_in_output_dir

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@toolchains_llvm_bootstrapped//config:experimental_stub_libgcc_s

# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper

# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

common --incompatible_strict_action_env
# Not ideal, but We need to allow dotslash to be found
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
common --bes_backend=grpcs://remote.buildbuddy.io
common --remote_cache=grpcs://remote.buildbuddy.io
common --remote_download_toplevel
common --nobuild_runfile_links
common --remote_timeout=3600
common --noexperimental_throttle_remote_action_building
common --experimental_remote_execution_keepalive
common --grpc_keepalive_time=30s

# This limits both in-flight executions and concurrent downloads. Even with high number
# of jobs execution will still be limited by CPU cores, so this just pays a bit of
# memory in exchange for higher download concurrency.
common --jobs=30

common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800
9 .github/ISSUE_TEMPLATE/2-bug-report.yml (vendored)
@@ -40,11 +40,18 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: input
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
BIN .github/codex-cli-login.png (vendored): binary file not shown (before: 2.9 MiB)
BIN .github/codex-cli-permissions.png (vendored): binary file not shown (before: 408 KiB)
BIN .github/codex-cli-splash.png (vendored): binary file not shown (before: 3.1 MiB, after: 818 KiB)
BIN .github/demo.gif (vendored): binary file not shown (before: 19 MiB)
20 .github/workflows/Dockerfile.bazel (vendored, new file)
@@ -0,0 +1,20 @@
FROM ubuntu:24.04

# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for
# initial debugging, but we should publish to a more proper location.
#
# docker buildx create --use
# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push .

RUN apt-get update && \
apt-get install -y --no-install-recommends \
curl git python3 ca-certificates && \
rm -rf /var/lib/apt/lists/*

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000.
USER ubuntu

WORKDIR /workspace
110 .github/workflows/bazel.yml (vendored, new file)
@@ -0,0 +1,110 @@
name: Bazel (experimental)

# Note this workflow was originally derived from:
# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml

on:
pull_request: {}
push:
branches:
- main
workflow_dispatch:

concurrency:
# Cancel previous actions from the same PR or branch except 'main' branch.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
test:
strategy:
fail-fast: false
matrix:
include:
# macOS
- os: macos-15-xlarge
target: aarch64-apple-darwin
- os: macos-15-xlarge
target: x86_64-apple-darwin

# Linux
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
- os: ubuntu-24.04
target: x86_64-unknown-linux-gnu
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
- os: ubuntu-24.04
target: x86_64-unknown-linux-musl
# TODO: Enable Windows once we fix the toolchain issues there.
#- os: windows-latest
# target: x86_64-pc-windows-gnullvm
runs-on: ${{ matrix.os }}

# Configure a human readable name for each job
name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }}

steps:
- uses: actions/checkout@v6

# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
uses: facebook/install-dotslash@v2

- name: Make DotSlash available in PATH (Unix)
if: runner.os != 'Windows'
run: cp "$(which dotslash)" /usr/local/bin

- name: Make DotSlash available in PATH (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"

# Install Bazel via Bazelisk
- name: Set up Bazel
uses: bazelbuild/setup-bazelisk@v3

# TODO(mbolin): Bring this back once we have caching working. Currently,
# we never seem to get a cache hit but we still end up paying the cost of
# uploading at the end of the build, which takes over a minute!
#
# Cache build and external artifacts so that the next ci build is incremental.
# Because github action caches cannot be updated after a build, we need to
# store the contents of each build in a unique cache key, then fall back to loading
# it on the next ci run. We use hashFiles(...) in the key and restore-keys- with
# the prefix to load the most recent cache for the branch on a cache miss. You
# should customize the contents of hashFiles to capture any bazel input sources,
# although this doesn't need to be perfect. If none of the input sources change
# then a cache hit will load an existing cache and bazel won't have to do any work.
# In the case of a cache miss, you want the fallback cache to contain most of the
# previously built artifacts to minimize build time. The more precise you are with
# hashFiles sources the less work bazel will have to do.
# - name: Mount bazel caches
# uses: actions/cache@v4
# with:
# path: |
# ~/.cache/bazel-repo-cache
# ~/.cache/bazel-repo-contents-cache
# key: bazel-cache-${{ matrix.os }}-${{ hashFiles('**/BUILD.bazel', '**/*.bzl', 'MODULE.bazel') }}
# restore-keys: |
# bazel-cache-${{ matrix.os }}

- name: Configure Bazel startup args (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: |
# Use a very short path to reduce argv/path length issues.
"BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

- name: bazel test //...
env:
BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
shell: bash
run: |
bazel $BAZEL_STARTUP_ARGS --bazelrc=.github/workflows/ci.bazelrc test //... \
--build_metadata=REPO_URL=https://github.com/openai/codex.git \
--build_metadata=COMMIT_SHA=$(git rev-parse HEAD) \
--build_metadata=ROLE=CI \
--build_metadata=VISIBILITY=PUBLIC \
"--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"
20 .github/workflows/ci.bazelrc (vendored, new file)
@@ -0,0 +1,20 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

# On linux, we can do a full remote build/test, by targeting the right (x86/arm) runners, so we have coverage of both.
# Linux crossbuilds don't work until we untangle the libc constraint mess.
common:linux --config=remote
common:linux --strategy=remote
common:linux --platforms=//:rbe

# On mac, we can run all the build actions remotely but test actions locally.
common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local
@@ -12,6 +12,8 @@ permissions:

jobs:
close-stale-contributor-prs:
# Prevent scheduled runs on forks
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- name: Close inactive PRs from contributors
3 .github/workflows/issue-deduplicator.yml (vendored)
@@ -9,7 +9,8 @@ on:
jobs:
gather-duplicates:
name: Identify potential duplicates
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate'))
runs-on: ubuntu-latest
permissions:
contents: read
3 .github/workflows/issue-labeler.yml (vendored)
@@ -9,7 +9,8 @@ on:
jobs:
gather-labels:
name: Generate label suggestions
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label'))
runs-on: ubuntu-latest
permissions:
contents: read
78 .github/workflows/rust-ci.yml (vendored)
@@ -88,7 +88,7 @@ jobs:
# --- CI to validate on different os/targets --------------------------------
lint_build:
name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
runs-on: ${{ matrix.runner }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
timeout-minutes: 30
needs: changed
# Keep job-level if to avoid spinning up runners when not needed
@@ -106,47 +106,74 @@
fail-fast: false
matrix:
include:
- runner: macos-14
- runner: macos-15-xlarge
target: aarch64-apple-darwin
profile: dev
- runner: macos-14
- runner: macos-15-xlarge
target: x86_64-apple-darwin
profile: dev
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
profile: dev
- runner: windows-latest
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: windows-x64
target: x86_64-pc-windows-msvc
profile: dev
- runner: windows-11-arm
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
target: aarch64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-arm64

# Also run representative release builds on Mac and Linux because
# there could be release-only build errors we want to catch.
# Hopefully this also pre-populates the build cache to speed up
# releases.
- runner: macos-14
- runner: macos-15-xlarge
target: aarch64-apple-darwin
profile: release
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
profile: release
- runner: windows-latest
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: windows-x64
target: x86_64-pc-windows-msvc
profile: release
- runner: windows-11-arm
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
target: aarch64-pc-windows-msvc
profile: release
runs_on:
group: codex-runners
labels: codex-windows-arm64

steps:
- uses: actions/checkout@v6
@@ -336,7 +363,7 @@

tests:
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
timeout-minutes: 30
needs: changed
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
@@ -353,40 +380,37 @@
fail-fast: false
matrix:
include:
- runner: macos-14
- runner: macos-15-xlarge
target: aarch64-apple-darwin
profile: dev
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
profile: dev
- runner: windows-latest
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: windows-x64
target: x86_64-pc-windows-msvc
profile: dev
- runner: windows-11-arm
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
target: aarch64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-arm64

steps:
- uses: actions/checkout@v6

# We have been running out of space when running this job on Linux for
# x86_64-unknown-linux-gnu, so remove some unnecessary dependencies.
- name: Remove unnecessary dependencies to save space
if: ${{ startsWith(matrix.runner, 'ubuntu') }}
shell: bash
run: |
set -euo pipefail
sudo rm -rf \
/usr/local/lib/android \
/usr/share/dotnet \
/usr/local/share/boost \
/usr/local/lib/node_modules \
/opt/ghc
sudo apt-get remove -y docker.io docker-compose podman buildah

# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
2 .github/workflows/rust-release-prepare.yml (vendored)
@@ -14,6 +14,8 @@ permissions:

jobs:
prepare:
# Prevent scheduled runs on forks (no secrets, wastes Actions minutes)
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
23 .github/workflows/rust-release.yml (vendored)
@@ -49,7 +49,7 @@ jobs:
needs: tag-check
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
timeout-minutes: 60
permissions:
contents: read
id-token: write
@@ -323,6 +323,26 @@
- name: Checkout repository
uses: actions/checkout@v6

- name: Generate release notes from tag commit message
id: release_notes
shell: bash
run: |
set -euo pipefail

# On tag pushes, GITHUB_SHA may be a tag object for annotated tags;
# peel it to the underlying commit.
commit="$(git rev-parse "${GITHUB_SHA}^{commit}")"
notes_path="${RUNNER_TEMP}/release-notes.md"

# Use the commit message for the commit the tag points at (not the
# annotated tag message).
git log -1 --format=%B "${commit}" > "${notes_path}"
# Ensure trailing newline so GitHub's markdown renderer doesn't
# occasionally run the last line into subsequent content.
echo >> "${notes_path}"

echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"

- uses: actions/download-artifact@v7
with:
path: dist
@@ -395,6 +415,7 @@
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
body_path: ${{ steps.release_notes.outputs.path }}
files: dist/**
# Mark as prerelease only when the version has a suffix after x.y.z
# (e.g. -alpha, -beta). Otherwise publish a normal release.
1 .gitignore (vendored)
@@ -9,6 +9,7 @@ node_modules

# build
dist/
bazel-*
build/
out/
storybook-static/
6 .markdownlint-cli2.yaml (new file)
@@ -0,0 +1,6 @@
config:
MD013:
line_length: 100

globs:
- "docs/tui-chat-composer.md"
@@ -13,6 +13,7 @@ In the codex-rs folder where the rust code lives:
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.

Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:

@@ -77,6 +78,12 @@ If you don’t have the tool:
- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.

### Spawning workspace binaries in tests (Cargo vs Bazel)

- Prefer `codex_utils_cargo_bin::cargo_bin("...")` over `assert_cmd::Command::cargo_bin(...)` or `escargot` when tests need to spawn first-party binaries.
- Under Bazel, binaries and resources may live under runfiles; use `codex_utils_cargo_bin::cargo_bin` to resolve absolute paths that remain stable after `chdir`.
- When locating fixture files or test resources under Bazel, avoid `env!("CARGO_MANIFEST_DIR")`. Prefer `codex_utils_cargo_bin::find_resource!` so paths resolve correctly under both Cargo and Bazel runfiles.

### Integration tests (core)

- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.
19 BUILD.bazel (new file)
@@ -0,0 +1,19 @@
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
# We only enable this on linux though for sanity, and because it breaks remote execution.
platform(
name = "local",
constraint_values = [
"@toolchains_llvm_bootstrapped//constraints/libc:gnu.2.28",
],
parents = [
"@platforms//host",
],
)

alias(
name = "rbe",
actual = "@rbe_platform",
)

exports_files(["AGENTS.md"])
128 MODULE.bazel (new file)
@@ -0,0 +1,128 @@
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.3.1")
archive_override(
module_name = "toolchains_llvm_bootstrapped",
integrity = "sha256-9ks21bgEqbQWmwUIvqeLA64+Jk6o4ZVjC8KxjVa2Vw8=",
strip_prefix = "toolchains_llvm_bootstrapped-e3775e66a7b6d287c705ca0cd24497ef4a77c503",
urls = ["https://github.com/cerisier/toolchains_llvm_bootstrapped/archive/e3775e66a7b6d287c705ca0cd24497ef4a77c503/master.tar.gz"],
patch_strip = 1,
patches = [
"//patches:llvm_toolchain_archive_params.patch",
],
)

osx = use_extension("@toolchains_llvm_bootstrapped//toolchain/extension:osx.bzl", "osx")
osx.framework(name = "ApplicationServices")
osx.framework(name = "AppKit")
osx.framework(name = "ColorSync")
osx.framework(name = "CoreFoundation")
osx.framework(name = "CoreGraphics")
osx.framework(name = "CoreServices")
osx.framework(name = "CoreText")
osx.framework(name = "CFNetwork")
osx.framework(name = "Foundation")
osx.framework(name = "ImageIO")
osx.framework(name = "Kernel")
osx.framework(name = "OSLog")
osx.framework(name = "Security")
osx.framework(name = "SystemConfiguration")

register_toolchains(
"@toolchains_llvm_bootstrapped//toolchain:all",
)

bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rust", version = "0.68.1")
single_version_override(
module_name = "rules_rust",
patch_strip = 1,
patches = [
"//patches:rules_rust.patch",
"//patches:rules_rust_windows_gnu.patch",
"//patches:rules_rust_musl.patch",
],
)

RUST_TRIPLES = [
"aarch64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-gnullvm",
"x86_64-unknown-linux-musl",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnullvm",
]

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
edition = "2024",
extra_target_triples = RUST_TRIPLES,
versions = ["1.90.0"],
)
use_repo(rust, "rust_toolchains")

register_toolchains("@rust_toolchains//:all")

bazel_dep(name = "rules_rs", version = "0.0.23")

crate = use_extension("@rules_rs//rs:extensions.bzl", "crate")
crate.from_cargo(
cargo_lock = "//codex-rs:Cargo.lock",
cargo_toml = "//codex-rs:Cargo.toml",
platform_triples = RUST_TRIPLES,
)

bazel_dep(name = "openssl", version = "3.5.4.bcr.0")

crate.annotation(
build_script_data = [
"@openssl//:gen_dir",
],
build_script_env = {
"OPENSSL_DIR": "$(execpath @openssl//:gen_dir)",
"OPENSSL_NO_VENDOR": "1",
"OPENSSL_STATIC": "1",
},
crate = "openssl-sys",
data = ["@openssl//:gen_dir"],
)

inject_repo(crate, "openssl")

# Fix readme inclusions
crate.annotation(
crate = "windows-link",
patch_args = ["-p1"],
patches = [
"//patches:windows-link.patch"
],
)

WINDOWS_IMPORT_LIB = """
load("@rules_cc//cc:defs.bzl", "cc_import")

cc_import(
name = "windows_import_lib",
static_library = glob(["lib/*.a"])[0],
)
"""

crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_x86_64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_aarch64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
use_repo(crate, "crates")

rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")

rbe_platform_repository(
name = "rbe_platform",
)

1101 MODULE.bazel.lock (generated, new file)
File diff suppressed because one or more lines are too long
77 README.md
@@ -1,13 +1,11 @@
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>

<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
</br>
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>

<p align="center">
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
</p>
</p>
</br>
If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE.</a>
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>

---

@@ -15,25 +13,19 @@

### Installing and running Codex CLI

Install globally with your preferred package manager. If you use npm:
Install globally with your preferred package manager:

```shell
# Install using npm
npm install -g @openai/codex
```

Alternatively, if you use Homebrew:

```shell
# Install using Homebrew
brew install --cask codex
```

Then simply run `codex` to get started:

```shell
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).
Then simply run `codex` to get started.

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -53,60 +45,15 @@ Each archive contains a single entry with the platform baked into the name (e.g.

### Using Codex with your ChatGPT plan

<p align="center">
<img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="80%" />
</p>

Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).

You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).
You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).

### Model Context Protocol (MCP)
## Docs

Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).

### Configuration

Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).

### Execpolicy

See the [Execpolicy quickstart](./docs/execpolicy.md) to set up rules that govern what commands Codex can execute.

### Docs & FAQ

- [**Getting started**](./docs/getting-started.md)
- [CLI usage](./docs/getting-started.md#cli-usage)
- [Slash Commands](./docs/slash_commands.md)
- [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
- [Example prompts](./docs/getting-started.md#example-prompts)
- [Custom prompts](./docs/prompts.md)
- [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [**Configuration**](./docs/config.md)
- [Example config](./docs/example-config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Execpolicy quickstart**](./docs/execpolicy.md)
- [**Authentication**](./docs/authentication.md)
- [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
- [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
- **Automating Codex**
- [GitHub Action](https://github.com/openai/codex-action)
- [TypeScript SDK](./sdk/typescript/README.md)
- [Non-interactive mode (`codex exec`)](./docs/exec.md)
- [**Advanced**](./docs/advanced.md)
- [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
- [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
- [**Zero data retention (ZDR)**](./docs/zdr.md)
- [**Codex Documentation**](https://developers.openai.com/codex)
- [**Contributing**](./docs/contributing.md)
- [**Install & build**](./docs/install.md)
- [System Requirements](./docs/install.md#system-requirements)
- [DotSlash](./docs/install.md#dotslash)
- [Build from source](./docs/install.md#build-from-source)
- [**FAQ**](./docs/faq.md)
- [**Installing & building**](./docs/install.md)
- [**Open source fund**](./docs/open-source-fund.md)

---

## License

This repository is licensed under the [Apache-2.0 License](LICENSE).
17 announcement_tip.toml (new file)
@@ -0,0 +1,17 @@
# Example announcement tips for Codex TUI.
# Each [[announcements]] entry is evaluated in order; the last matching one is shown.
# Dates are UTC, formatted as YYYY-MM-DD. The from_date is inclusive and the to_date is exclusive.
# version_regex matches against the CLI version (env!("CARGO_PKG_VERSION")); omit to apply to all versions.
# target_app specify which app should display the announcement (cli, vsce, ...).

[[announcements]]
content = "Welcome to Codex! Check out the new onboarding flow."
from_date = "2024-10-01"
to_date = "2024-10-15"
target_app = "cli"

# Test announcement only for local build version until 2026-01-10 excluded (past)
[[announcements]]
content = "This is a test announcement"
version_regex = "^0\\.0\\.0$"
to_date = "2026-01-10"
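The comments in this file describe the selection rules: entries are evaluated in order, the last match wins, the UTC date window has an inclusive `from_date` and an exclusive `to_date`, and `version_regex` / `target_app` are optional filters. A minimal TypeScript sketch of that logic follows; the type and function names are assumptions for illustration, not code from this repository.

```ts
// Hypothetical sketch of the announcement-selection rules described above.
// Field names mirror announcement_tip.toml; everything else is assumed.
type Announcement = {
  content: string;
  from_date?: string; // inclusive, UTC, YYYY-MM-DD
  to_date?: string; // exclusive, UTC, YYYY-MM-DD
  version_regex?: string; // matched against the CLI version
  target_app?: string; // e.g. "cli", "vsce"
};

function pickAnnouncement(
  entries: Announcement[],
  now: Date,
  cliVersion: string,
  app: string,
): Announcement | undefined {
  // YYYY-MM-DD strings compare correctly with plain lexicographic comparison.
  const today = now.toISOString().slice(0, 10);
  let selected: Announcement | undefined;
  for (const a of entries) {
    if (a.from_date && today < a.from_date) continue; // from_date is inclusive
    if (a.to_date && today >= a.to_date) continue; // to_date is exclusive
    if (a.version_regex && !new RegExp(a.version_regex).test(cliVersion)) continue;
    if (a.target_app && a.target_app !== app) continue;
    selected = a; // entries are evaluated in order; the last match wins
  }
  return selected;
}
```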
24 app-server-ui/.gitignore (vendored, new file)
@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
30 app-server-ui/README.md (new file)
@@ -0,0 +1,30 @@
# Codex App Server UI

Minimal React + Vite client for the codex app-server v2 JSON-RPC protocol.

## Prerequisites

- `codex` CLI available in your PATH (or set `CODEX_BIN`).
- If you are working from this repo, the bridge will prefer the local
`codex-rs/target/debug/codex-app-server` binary when it exists.
- A configured Codex environment (API key or login) as required by the app-server.

## Quickstart

From the repo root:

```bash
pnpm install
pnpm --filter app-server-ui dev
```

This starts:
- a WebSocket bridge at `ws://localhost:8787` that spawns `codex app-server`
- the Vite dev server at `http://localhost:5173`

## Configuration

- `CODEX_BIN`: path to the `codex` executable (default: `codex`).
- `APP_SERVER_BIN` / `CODEX_APP_SERVER_BIN`: path to a `codex-app-server` binary (overrides `CODEX_BIN`).
- `APP_SERVER_UI_PORT`: port for the bridge server (default: `8787`).
- `VITE_APP_SERVER_WS`: WebSocket URL for the UI (default: `ws://localhost:8787`).
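To make the bridge wiring above concrete, here is a minimal TypeScript sketch of a client talking to the WebSocket bridge. It assumes only what the README and the bridge code describe: the UI sends JSON-RPC objects as JSON text frames, the bridge forwards them to `codex app-server`, and bridge-level events arrive as notifications whose methods start with `ui/` (`ui/connected`, `ui/stderr`, `ui/exit`, ...). The request method shown is a placeholder, not a documented app-server method.

```ts
// Sketch of a bridge client; "example/ping" is a placeholder method name.
const ws = new WebSocket(import.meta.env.VITE_APP_SERVER_WS ?? "ws://localhost:8787");

ws.onopen = () => {
  // One JSON-RPC object per WebSocket text frame.
  ws.send(JSON.stringify({ id: 1, method: "example/ping", params: {} }));
};

ws.onmessage = (event) => {
  const msg = JSON.parse(String(event.data));
  if (typeof msg.method === "string" && msg.method.startsWith("ui/")) {
    // Bridge-level event (ui/connected, ui/stderr, ui/exit, ...), not app-server traffic.
    console.log("bridge event:", msg.method, msg.params);
  } else {
    console.log("app-server message:", msg);
  }
};
```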
12 app-server-ui/index.html (new file)
@@ -0,0 +1,12 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Codex App Server UI</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>
26 app-server-ui/package.json (new file)
@@ -0,0 +1,26 @@
{
"name": "app-server-ui",
"private": true,
"version": "0.1.0",
"type": "module",
"scripts": {
"dev": "concurrently -k \"pnpm:dev:server\" \"pnpm:dev:client\"",
"dev:client": "vite",
"dev:server": "node server/index.mjs",
"build": "tsc && vite build",
"preview": "vite preview"
},
"dependencies": {
"react": "^18.3.1",
"react-dom": "^18.3.1",
"ws": "^8.18.0"
},
"devDependencies": {
"@types/react": "^18.3.18",
"@types/react-dom": "^18.3.5",
"@vitejs/plugin-react": "^4.3.4",
"concurrently": "^8.2.2",
"typescript": "~5.9.3",
"vite": "^7.2.4"
}
}
1 app-server-ui/public/vite.svg (new file)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
181
app-server-ui/server/index.mjs
Normal file
181
app-server-ui/server/index.mjs
Normal file
@@ -0,0 +1,181 @@
|
||||
import { createServer } from "node:http";
|
||||
import { spawn } from "node:child_process";
|
||||
import { createInterface } from "node:readline";
|
||||
import { existsSync } from "node:fs";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { WebSocketServer } from "ws";
|
||||
|
||||
const port = Number(process.env.APP_SERVER_UI_PORT ?? 8787);
|
||||
const codexBin = process.env.CODEX_BIN ?? "codex";
|
||||
const explicitAppServerBin =
|
||||
process.env.APP_SERVER_BIN ?? process.env.CODEX_APP_SERVER_BIN ?? null;
|
||||
const here = dirname(fileURLToPath(import.meta.url));
|
||||
const repoRoot = resolve(here, "..", "..");
|
||||
const localAppServerBin = resolve(repoRoot, "codex-rs/target/debug/codex-app-server");
|
||||
const appServerBin =
|
||||
explicitAppServerBin ?? (existsSync(localAppServerBin) ? localAppServerBin : null);
|
||||
|
||||
const sockets = new Set();
|
||||
let appServer = null;
|
||||
|
||||
const broadcast = (payload) => {
|
||||
const message = JSON.stringify(payload);
|
||||
for (const ws of sockets) {
|
||||
if (ws.readyState === ws.OPEN) {
|
||||
ws.send(message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const startAppServer = () => {
|
||||
if (appServer?.child?.exitCode === null) {
|
||||
return appServer;
|
||||
}
|
||||
|
||||
const command = appServerBin ?? codexBin;
|
||||
const args = appServerBin ? [] : ["app-server"];
|
||||
const child = spawn(command, args, {
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: process.env,
|
||||
});
|
||||
|
||||
const stdout = child.stdout;
|
||||
const stderr = child.stderr;
|
||||
const stdoutRl = stdout ? createInterface({ input: stdout }) : null;
|
||||
const stderrRl = stderr ? createInterface({ input: stderr }) : null;
|
||||
|
||||
stdoutRl?.on("line", (line) => {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
return;
|
||||
}
|
||||
|
||||
let payload = trimmed;
|
||||
try {
|
||||
const parsed = JSON.parse(trimmed);
|
||||
payload = JSON.stringify(parsed);
|
||||
} catch {
|
||||
payload = JSON.stringify({
|
||||
method: "ui/raw",
|
||||
params: { line: trimmed },
|
||||
});
|
||||
}
|
||||
|
||||
for (const ws of sockets) {
|
||||
if (ws.readyState === ws.OPEN) {
|
||||
ws.send(payload);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
stderrRl?.on("line", (line) => {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
return;
|
||||
}
|
||||
console.error(trimmed);
|
||||
broadcast({ method: "ui/stderr", params: { line: trimmed } });
|
||||
});
|
||||
|
||||
child.on("error", (err) => {
|
||||
console.error("codex app-server spawn error:", err);
|
||||
broadcast({ method: "ui/error", params: { message: "Failed to spawn app-server.", details: String(err) } });
|
||||
appServer = null;
|
||||
});
|
||||
|
||||
child.on("exit", (code, signal) => {
|
||||
console.log(`codex app-server exited (code=${code ?? "null"}, signal=${signal ?? "null"})`);
|
||||
broadcast({ method: "ui/exit", params: { code, signal } });
|
||||
appServer = null;
|
||||
});
|
||||
|
||||
appServer = { child, stdoutRl, stderrRl };
|
||||
return appServer;
|
||||
};
|
||||
|
||||
const server = createServer((req, res) => {
|
||||
if (req.url === "/health") {
|
||||
res.writeHead(200, { "content-type": "application/json" });
|
||||
res.end(JSON.stringify({ status: "ok" }));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(404, { "content-type": "text/plain" });
|
||||
res.end("Not found");
|
||||
});
|
||||
|
||||
const wss = new WebSocketServer({ server });
|
||||
|
||||
wss.on("connection", (ws) => {
|
||||
sockets.add(ws);
|
||||
const running = Boolean(appServer?.child?.exitCode === null);
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
method: "ui/connected",
|
||||
params: { pid: appServer?.child?.pid ?? null, running },
|
||||
}),
|
||||
);
|
||||
|
||||
ws.on("close", () => {
|
||||
sockets.delete(ws);
|
||||
});
|
||||
|
||||
ws.on("message", (data) => {
|
||||
const text = typeof data === "string" ? data : data.toString("utf8");
|
||||
if (!text.trim()) {
|
||||
return;
|
||||
}
|
||||
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(text);
|
||||
} catch (err) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
method: "ui/error",
|
||||
params: {
|
||||
message: "Failed to parse JSON from client.",
|
||||
details: String(err),
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!appServer || appServer.child.exitCode !== null || !appServer.child.stdin?.writable) {
|
||||
startAppServer();
|
||||
}
|
||||
|
||||
if (!appServer || !appServer.child.stdin?.writable) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
method: "ui/error",
|
||||
params: {
|
||||
message: "app-server stdin is closed.",
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
appServer.child.stdin.write(`${JSON.stringify(parsed)}\n`);
|
||||
});
|
||||
});
|
||||
|
||||
server.listen(port, () => {
|
||||
console.log(`App server bridge listening on ws://localhost:${port}`);
|
||||
});
|
||||
|
||||
startAppServer();
|
||||
|
||||
const shutdown = () => {
|
||||
appServer?.stdoutRl?.close();
|
||||
appServer?.stderrRl?.close();
|
||||
wss.close();
|
||||
server.close();
|
||||
appServer?.child?.kill("SIGTERM");
|
||||
};
|
||||
|
||||
process.on("SIGINT", shutdown);
|
||||
process.on("SIGTERM", shutdown);
|
||||
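As a quick sanity check against the bridge shown above, its HTTP endpoint can be probed directly. This sketch only uses the `/health` route and default port that appear in server/index.mjs.

```ts
// Sketch: probe the bridge's /health endpoint (default port 8787).
async function checkBridge(): Promise<void> {
  const res = await fetch("http://localhost:8787/health");
  console.log(res.status, await res.json()); // expected: 200 { status: "ok" }
}

checkBridge();
```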
347
app-server-ui/src/App.css
Normal file
347
app-server-ui/src/App.css
Normal file
@@ -0,0 +1,347 @@
|
||||
@import url("https://fonts.googleapis.com/css2?family=Space+Grotesk:wght@400;500;600&family=IBM+Plex+Mono:wght@400;500&display=swap");
|
||||
|
||||
:root {
|
||||
color-scheme: only light;
|
||||
font-family: "Space Grotesk", "Segoe UI", system-ui, sans-serif;
|
||||
color: #1d2327;
|
||||
background-color: #f5f4f2;
|
||||
--panel-bg: #ffffff;
|
||||
--panel-border: #d9d4cd;
|
||||
--accent: #ff6a3d;
|
||||
--accent-2: #227c88;
|
||||
--text-muted: #4f5b62;
|
||||
--shadow: 0 10px 22px rgba(28, 36, 40, 0.12);
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
min-height: 100vh;
|
||||
background-color: #f5f4f2;
|
||||
background-image:
|
||||
linear-gradient(180deg, #f8f7f5 0%, #f0ede9 100%),
|
||||
repeating-linear-gradient(90deg, rgba(29, 35, 39, 0.05) 0 1px, transparent 1px 120px),
|
||||
repeating-linear-gradient(0deg, rgba(29, 35, 39, 0.04) 0 1px, transparent 1px 120px);
|
||||
}
|
||||
|
||||
.app {
|
||||
min-height: 100vh;
|
||||
padding: 28px 36px 40px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 22px;
|
||||
}
|
||||
|
||||
.hero {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.status {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
padding: 10px 14px;
|
||||
background: #ffffff;
|
||||
border: 1px solid #cfc8c0;
|
||||
border-radius: 10px;
|
||||
box-shadow: var(--shadow);
|
||||
min-width: 220px;
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 999px;
|
||||
background: #c2c2c2;
|
||||
}
|
||||
|
||||
.status-dot.on {
|
||||
background: #3cb371;
|
||||
}
|
||||
|
||||
.status-dot.off {
|
||||
background: #c46a5e;
|
||||
}
|
||||
|
||||
.status-label {
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.status-meta {
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.grid {
|
||||
display: grid;
|
||||
  grid-template-columns: minmax(0, 1.1fr) minmax(0, 0.9fr);
  gap: 18px;
}

.panel {
  background: var(--panel-bg);
  border: 1px solid var(--panel-border);
  border-radius: 14px;
  padding: 20px;
  box-shadow: var(--shadow);
}

.panel-title {
  font-weight: 600;
  text-transform: uppercase;
  letter-spacing: 0.08em;
  font-size: 11px;
  color: var(--text-muted);
  margin-bottom: 12px;
}

.control {
  display: flex;
  flex-direction: column;
  gap: 14px;
}

.control-row {
  display: grid;
  grid-template-columns: repeat(2, minmax(0, 1fr));
  gap: 12px;
}

.label {
  font-size: 12px;
  text-transform: uppercase;
  letter-spacing: 0.08em;
  color: var(--text-muted);
}

.value {
  font-size: 14px;
  font-family: "IBM Plex Mono", ui-monospace, "SFMono-Regular", monospace;
  margin-top: 2px;
}

.button-row {
  display: flex;
  gap: 10px;
  flex-wrap: wrap;
}

.btn {
  border: 1px solid var(--panel-border);
  background: #ffffff;
  color: #1f2a2e;
  padding: 9px 14px;
  border-radius: 8px;
  font-weight: 600;
  cursor: pointer;
  transition: box-shadow 0.2s ease, border-color 0.2s ease;
}

.btn:hover:enabled {
  box-shadow: 0 6px 14px rgba(31, 42, 46, 0.12);
  border-color: #c3bbb2;
}

.btn.primary {
  background: linear-gradient(120deg, var(--accent), #ff946b);
  border-color: transparent;
  color: #1b1a19;
}

.btn:disabled {
  cursor: not-allowed;
  opacity: 0.6;
}

.notice {
  padding: 10px 12px;
  border-radius: 10px;
  background: rgba(255, 106, 61, 0.12);
  color: #9a3f1e;
  font-size: 13px;
}

.composer textarea {
  width: 100%;
  margin-top: 8px;
  margin-bottom: 10px;
  border-radius: 10px;
  border: 1px solid var(--panel-border);
  padding: 10px 12px;
  font-family: inherit;
  resize: vertical;
  min-height: 96px;
  background: #ffffff;
}

.thread-list {
  display: flex;
  flex-direction: column;
  gap: 10px;
}

.thread-item {
  text-align: left;
  padding: 9px 10px;
  border-radius: 10px;
  border: 1px solid #e4ded7;
  background: #ffffff;
  font-family: "IBM Plex Mono", ui-monospace, "SFMono-Regular", monospace;
  font-size: 12px;
  cursor: pointer;
}

.thread-item:hover {
  border-color: rgba(34, 124, 136, 0.6);
}

.thread-item.active {
  border-color: rgba(255, 106, 61, 0.7);
  background: rgba(255, 106, 61, 0.08);
}

.approvals {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.approval-card {
  background: #ffffff;
  border: 1px dashed rgba(31, 42, 46, 0.2);
  border-radius: 12px;
  padding: 12px;
  display: flex;
  flex-direction: column;
  gap: 10px;
}

.approval-header {
  display: flex;
  justify-content: space-between;
  font-size: 13px;
  color: var(--text-muted);
}

.approval-time {
  font-family: "IBM Plex Mono", ui-monospace, "SFMono-Regular", monospace;
}

.approval-card pre {
  margin: 0;
  padding: 10px;
  border-radius: 8px;
  background: #f6f4f1;
  font-size: 12px;
  font-family: "IBM Plex Mono", ui-monospace, "SFMono-Regular", monospace;
  overflow-x: auto;
}

.chat {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.chat-scroll {
  display: flex;
  flex-direction: column;
  gap: 12px;
  max-height: 520px;
  overflow-y: auto;
}

.bubble {
  border-radius: 14px;
  padding: 12px 14px;
  border: 1px solid transparent;
  background: #ffffff;
}

.bubble.user {
  align-self: flex-start;
  border-color: rgba(34, 124, 136, 0.35);
  background: rgba(34, 124, 136, 0.06);
}

.bubble.assistant {
  align-self: flex-end;
  border-color: rgba(255, 106, 61, 0.35);
  background: rgba(255, 106, 61, 0.07);
}

.bubble-role {
  font-size: 11px;
  text-transform: uppercase;
  letter-spacing: 0.1em;
  color: var(--text-muted);
  margin-bottom: 4px;
}

.bubble-text {
  white-space: pre-wrap;
  line-height: 1.5;
}

.logs {
  display: flex;
  flex-direction: column;
}

.log-scroll {
  display: flex;
  flex-direction: column;
  gap: 8px;
  max-height: 520px;
  overflow-y: auto;
  font-family: "IBM Plex Mono", ui-monospace, "SFMono-Regular", monospace;
  font-size: 12px;
}

.log-entry {
  padding: 9px 10px;
  border-radius: 10px;
  border: 1px solid rgba(31, 42, 46, 0.12);
  background: #ffffff;
}

.log-entry.out {
  border-color: rgba(34, 124, 136, 0.28);
}

.log-meta {
  display: flex;
  justify-content: space-between;
  margin-bottom: 4px;
  color: var(--text-muted);
}

.log-detail {
  color: #1f2a2e;
  word-break: break-word;
}

.empty {
  color: var(--text-muted);
  font-size: 14px;
}

@media (max-width: 980px) {
  .hero {
    flex-direction: column;
    align-items: stretch;
  }

  .grid {
    grid-template-columns: 1fr;
  }

  .app {
    padding: 20px;
  }
}
app-server-ui/src/App.tsx (new file)
@@ -0,0 +1,705 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
|
||||
type RpcError = {
|
||||
message?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
type RpcMessage = {
|
||||
id?: number | string;
|
||||
method?: string;
|
||||
params?: Record<string, unknown>;
|
||||
result?: Record<string, unknown>;
|
||||
error?: RpcError;
|
||||
};
|
||||
|
||||
type LogEntry = {
|
||||
id: number;
|
||||
direction: "in" | "out";
|
||||
label: string;
|
||||
detail?: string;
|
||||
time: string;
|
||||
};
|
||||
|
||||
type ChatMessage = {
|
||||
id: string;
|
||||
role: "user" | "assistant";
|
||||
text: string;
|
||||
};
|
||||
|
||||
type ApprovalRequest = {
|
||||
id: number | string;
|
||||
method: string;
|
||||
params: Record<string, unknown>;
|
||||
receivedAt: string;
|
||||
};
|
||||
|
||||
type PendingRequest = {
|
||||
method: string;
|
||||
};
|
||||
|
||||
const wsUrl = import.meta.env.VITE_APP_SERVER_WS ?? "ws://localhost:8787";
|
||||
|
||||
const formatTime = () =>
|
||||
new Date().toLocaleTimeString("en-US", {
|
||||
hour12: false,
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit",
|
||||
});
|
||||
|
||||
const summarizeParams = (params: Record<string, unknown> | undefined) => {
|
||||
if (!params) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.stringify(params);
|
||||
} catch {
|
||||
return "[unserializable params]";
|
||||
}
|
||||
};
|
||||
|
||||
const shouldLogMethod = (method: string) => {
|
||||
if (method.includes("/delta")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
export default function App() {
|
||||
const wsRef = useRef<WebSocket | null>(null);
|
||||
const nextIdRef = useRef(1);
|
||||
const pendingRef = useRef<Map<number | string, PendingRequest>>(new Map());
|
||||
const agentIndexRef = useRef<Map<string, Map<string, number>>>(new Map());
|
||||
const selectedThreadIdRef = useRef<string | null>(null);
|
||||
const userItemIdsRef = useRef<Map<string, Set<string>>>(new Map());
|
||||
|
||||
const [connected, setConnected] = useState(false);
|
||||
const [initialized, setInitialized] = useState(false);
|
||||
const [threads, setThreads] = useState<string[]>([]);
|
||||
const [selectedThreadId, setSelectedThreadId] = useState<string | null>(null);
|
||||
const [activeTurnId, setActiveTurnId] = useState<string | null>(null);
|
||||
const [activeTurnThreadId, setActiveTurnThreadId] = useState<string | null>(null);
|
||||
const [input, setInput] = useState("");
|
||||
const [logs, setLogs] = useState<LogEntry[]>([]);
|
||||
const [threadMessages, setThreadMessages] = useState<Record<string, ChatMessage[]>>({});
|
||||
const [approvals, setApprovals] = useState<ApprovalRequest[]>([]);
|
||||
const [connectionError, setConnectionError] = useState<string | null>(null);
|
||||
|
||||
const pushLog = useCallback((entry: Omit<LogEntry, "id" | "time">) => {
|
||||
setLogs((prev) => {
|
||||
const next: LogEntry[] = [
|
||||
{
|
||||
id: prev.length ? prev[0].id + 1 : 1,
|
||||
time: formatTime(),
|
||||
...entry,
|
||||
},
|
||||
...prev,
|
||||
];
|
||||
return next.slice(0, 200);
|
||||
});
|
||||
}, []);
|
||||
|
||||
const sendPayload = useCallback(
|
||||
(payload: RpcMessage, label?: string) => {
|
||||
const socket = wsRef.current;
|
||||
if (!socket || socket.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
const json = JSON.stringify(payload);
|
||||
socket.send(json);
|
||||
pushLog({
|
||||
direction: "out",
|
||||
label: label ?? payload.method ?? "response",
|
||||
detail: summarizeParams(payload.params) ?? summarizeParams(payload.result),
|
||||
});
|
||||
},
|
||||
[pushLog],
|
||||
);
|
||||
|
||||
const sendRequest = useCallback(
|
||||
(method: string, params?: Record<string, unknown>) => {
|
||||
const id = nextIdRef.current++;
|
||||
pendingRef.current.set(id, { method });
|
||||
sendPayload({ id, method, params }, method);
|
||||
return id;
|
||||
},
|
||||
[sendPayload],
|
||||
);
|
||||
|
||||
const sendNotification = useCallback(
|
||||
(method: string, params?: Record<string, unknown>) => {
|
||||
sendPayload({ method, params }, method);
|
||||
},
|
||||
[sendPayload],
|
||||
);
|
||||
|
||||
const selectThread = useCallback((threadId: string | null) => {
|
||||
selectedThreadIdRef.current = threadId;
|
||||
setSelectedThreadId(threadId);
|
||||
}, []);
|
||||
|
||||
const ensureThread = useCallback(
|
||||
(threadId: string) => {
|
||||
setThreads((prev) => (prev.includes(threadId) ? prev : [...prev, threadId]));
|
||||
setThreadMessages((prev) => (prev[threadId] ? prev : { ...prev, [threadId]: [] }));
|
||||
if (!selectedThreadIdRef.current) {
|
||||
selectThread(threadId);
|
||||
}
|
||||
},
|
||||
[selectThread],
|
||||
);
|
||||
|
||||
const getAgentIndexForThread = useCallback((threadId: string) => {
|
||||
const existing = agentIndexRef.current.get(threadId);
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
const next = new Map<string, number>();
|
||||
agentIndexRef.current.set(threadId, next);
|
||||
return next;
|
||||
}, []);
|
||||
|
||||
const handleInitialize = useCallback(() => {
|
||||
sendRequest("initialize", {
|
||||
clientInfo: {
|
||||
name: "codex_app_server_ui",
|
||||
title: "Codex App Server UI",
|
||||
version: "0.1.0",
|
||||
},
|
||||
});
|
||||
}, [sendRequest]);
|
||||
|
||||
const handleStartThread = useCallback(() => {
|
||||
if (!initialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
sendRequest("thread/start", {});
|
||||
}, [initialized, sendRequest]);
|
||||
|
||||
const handleSendMessage = useCallback(() => {
|
||||
if (!initialized || !selectedThreadId || !input.trim()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const text = input.trim();
|
||||
setInput("");
|
||||
|
||||
sendRequest("turn/start", {
|
||||
threadId: selectedThreadId,
|
||||
input: [{ type: "text", text }],
|
||||
});
|
||||
}, [initialized, selectedThreadId, input, sendRequest]);
|
||||
|
||||
const handleApprovalDecision = useCallback(
|
||||
(approvalId: number | string, decision: "accept" | "decline") => {
|
||||
sendPayload({
|
||||
id: approvalId,
|
||||
result: {
|
||||
decision,
|
||||
},
|
||||
});
|
||||
|
||||
setApprovals((prev) => prev.filter((approval) => approval.id !== approvalId));
|
||||
},
|
||||
[sendPayload],
|
||||
);
|
||||
|
||||
const updateAgentMessage = useCallback(
|
||||
(threadId: string, itemId: string, delta: string) => {
|
||||
setThreadMessages((prev) => {
|
||||
const threadLog = prev[threadId] ?? [];
|
||||
const indexMap = getAgentIndexForThread(threadId);
|
||||
const existingIndex = indexMap.get(itemId);
|
||||
if (existingIndex === undefined) {
|
||||
indexMap.set(itemId, threadLog.length);
|
||||
return {
|
||||
...prev,
|
||||
[threadId]: [...threadLog, { id: itemId, role: "assistant", text: delta }],
|
||||
};
|
||||
}
|
||||
|
||||
const nextThreadLog = [...threadLog];
|
||||
nextThreadLog[existingIndex] = {
|
||||
...nextThreadLog[existingIndex],
|
||||
text: nextThreadLog[existingIndex].text + delta,
|
||||
};
|
||||
return { ...prev, [threadId]: nextThreadLog };
|
||||
});
|
||||
},
|
||||
[getAgentIndexForThread],
|
||||
);
|
||||
|
||||
const extractUserText = useCallback((content: unknown) => {
|
||||
if (!Array.isArray(content)) {
|
||||
return null;
|
||||
}
|
||||
const parts = content
|
||||
.map((entry) => {
|
||||
if (entry && typeof entry === "object" && (entry as { type?: string }).type === "text") {
|
||||
return (entry as { text?: string }).text ?? "";
|
||||
}
|
||||
return "";
|
||||
})
|
||||
.filter((text) => text.length > 0);
|
||||
return parts.length ? parts.join("\n") : null;
|
||||
}, []);
|
||||
|
||||
const markUserItemSeen = useCallback((threadId: string, itemId: string) => {
|
||||
const seen = userItemIdsRef.current.get(threadId) ?? new Set<string>();
|
||||
if (!userItemIdsRef.current.has(threadId)) {
|
||||
userItemIdsRef.current.set(threadId, seen);
|
||||
}
|
||||
if (seen.has(itemId)) {
|
||||
return false;
|
||||
}
|
||||
seen.add(itemId);
|
||||
return true;
|
||||
}, []);
|
||||
|
||||
const handleIncomingMessage = useCallback(
|
||||
(message: RpcMessage) => {
|
||||
if (message.id !== undefined && message.method) {
|
||||
const requestId = message.id;
|
||||
if (
|
||||
message.method === "item/commandExecution/requestApproval" ||
|
||||
message.method === "item/fileChange/requestApproval"
|
||||
) {
|
||||
setApprovals((prev) => [
|
||||
...prev,
|
||||
{
|
||||
id: requestId,
|
||||
method: message.method ?? "",
|
||||
params: message.params ?? {},
|
||||
receivedAt: formatTime(),
|
||||
},
|
||||
]);
|
||||
pushLog({
|
||||
direction: "in",
|
||||
label: message.method,
|
||||
detail: summarizeParams(message.params),
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.id !== undefined) {
|
||||
const pending = pendingRef.current.get(message.id);
|
||||
pendingRef.current.delete(message.id);
|
||||
|
||||
if (pending) {
|
||||
if (pending.method === "initialize") {
|
||||
const errorMessage =
|
||||
message.error && typeof message.error.message === "string"
|
||||
? message.error.message
|
||||
: null;
|
||||
const alreadyInitialized = errorMessage === "Already initialized";
|
||||
if (!message.error) {
|
||||
sendNotification("initialized");
|
||||
}
|
||||
if (!message.error || alreadyInitialized) {
|
||||
setInitialized(true);
|
||||
sendRequest("thread/loaded/list");
|
||||
}
|
||||
}
|
||||
|
||||
if (pending.method === "thread/start" || pending.method === "thread/resume") {
|
||||
const thread = message.result?.thread as { id?: string } | undefined;
|
||||
if (thread?.id) {
|
||||
ensureThread(thread.id);
|
||||
}
|
||||
}
|
||||
|
||||
if (pending.method === "thread/loaded/list") {
|
||||
const data = message.result?.data;
|
||||
if (Array.isArray(data)) {
|
||||
const ids = data.filter((entry): entry is string => typeof entry === "string");
|
||||
setThreads(ids);
|
||||
setThreadMessages((prev) => {
|
||||
const next = { ...prev };
|
||||
for (const id of ids) {
|
||||
if (!next[id]) {
|
||||
next[id] = [];
|
||||
}
|
||||
}
|
||||
return next;
|
||||
});
|
||||
if (!selectedThreadIdRef.current && ids.length > 0) {
|
||||
selectThread(ids[0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (pending.method === "turn/start") {
|
||||
const turn = message.result?.turn as { id?: string } | undefined;
|
||||
if (turn?.id) {
|
||||
setActiveTurnId(turn.id);
|
||||
setActiveTurnThreadId(selectedThreadIdRef.current);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pushLog({
|
||||
direction: "in",
|
||||
label: pending?.method ?? "response",
|
||||
detail: summarizeParams(message.result) ?? summarizeParams(message.error),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.method) {
|
||||
const eventThreadId = message.params?.threadId as string | undefined;
|
||||
if (eventThreadId) {
|
||||
ensureThread(eventThreadId);
|
||||
}
|
||||
|
||||
if (shouldLogMethod(message.method)) {
|
||||
pushLog({
|
||||
direction: "in",
|
||||
label: message.method,
|
||||
detail: summarizeParams(message.params),
|
||||
});
|
||||
}
|
||||
|
||||
if (message.method === "thread/started") {
|
||||
const thread = (message.params?.thread as { id?: string } | undefined) ?? undefined;
|
||||
if (thread?.id) {
|
||||
ensureThread(thread.id);
|
||||
}
|
||||
}
|
||||
|
||||
if (message.method === "turn/started") {
|
||||
const turn = (message.params?.turn as { id?: string } | undefined) ?? undefined;
|
||||
const threadId = message.params?.threadId as string | undefined;
|
||||
if (turn?.id) {
|
||||
setActiveTurnId(turn.id);
|
||||
setActiveTurnThreadId(threadId ?? null);
|
||||
}
|
||||
}
|
||||
|
||||
if (message.method === "turn/completed") {
|
||||
setActiveTurnId(null);
|
||||
setActiveTurnThreadId(null);
|
||||
}
|
||||
|
||||
if (message.method === "item/started") {
|
||||
const item = message.params?.item as {
|
||||
id?: string;
|
||||
type?: string;
|
||||
content?: unknown;
|
||||
text?: string;
|
||||
} | undefined;
|
||||
const threadId = message.params?.threadId as string | undefined;
|
||||
if (!threadId) {
|
||||
return;
|
||||
}
|
||||
const itemId = item?.id;
|
||||
if (item?.type === "agentMessage" && itemId) {
|
||||
setThreadMessages((prev) => {
|
||||
const threadLog = prev[threadId] ?? [];
|
||||
const indexMap = getAgentIndexForThread(threadId);
|
||||
indexMap.set(itemId, threadLog.length);
|
||||
return {
|
||||
...prev,
|
||||
[threadId]: [...threadLog, { id: itemId, role: "assistant", text: "" }],
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
if (item?.type === "userMessage") {
|
||||
const userText = extractUserText(item.content);
|
||||
if (userText) {
|
||||
if (itemId && !markUserItemSeen(threadId, itemId)) {
|
||||
return;
|
||||
}
|
||||
setThreadMessages((prev) => {
|
||||
const threadLog = prev[threadId] ?? [];
|
||||
return {
|
||||
...prev,
|
||||
[threadId]: [
|
||||
...threadLog,
|
||||
{ id: itemId ?? `user-${Date.now()}`, role: "user", text: userText },
|
||||
],
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (message.method === "item/agentMessage/delta") {
|
||||
const itemId = message.params?.itemId as string | undefined;
|
||||
const threadId = message.params?.threadId as string | undefined;
|
||||
const delta = message.params?.delta as string | undefined;
|
||||
if (itemId && delta && threadId) {
|
||||
updateAgentMessage(threadId, itemId, delta);
|
||||
}
|
||||
}
|
||||
|
||||
if (message.method === "item/completed") {
|
||||
const item = message.params?.item as {
|
||||
id?: string;
|
||||
type?: string;
|
||||
text?: string;
|
||||
content?: unknown;
|
||||
} | undefined;
|
||||
const threadId = message.params?.threadId as string | undefined;
|
||||
if (!threadId) {
|
||||
return;
|
||||
}
|
||||
const itemId = item?.id;
|
||||
if (item?.type === "agentMessage" && itemId && typeof item.text === "string") {
|
||||
setThreadMessages((prev) => {
|
||||
const threadLog = prev[threadId] ?? [];
|
||||
const index = getAgentIndexForThread(threadId).get(itemId);
|
||||
if (index === undefined) {
|
||||
getAgentIndexForThread(threadId).set(itemId, threadLog.length);
|
||||
return {
|
||||
...prev,
|
||||
[threadId]: [...threadLog, { id: itemId, role: "assistant", text: item.text ?? "" }],
|
||||
};
|
||||
}
|
||||
|
||||
const nextThreadLog = [...threadLog];
|
||||
nextThreadLog[index] = { ...nextThreadLog[index], text: item.text ?? "" };
|
||||
return { ...prev, [threadId]: nextThreadLog };
|
||||
});
|
||||
}
|
||||
|
||||
if (item?.type === "userMessage") {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
ensureThread,
|
||||
extractUserText,
|
||||
getAgentIndexForThread,
|
||||
markUserItemSeen,
|
||||
pushLog,
|
||||
sendNotification,
|
||||
selectThread,
|
||||
sendRequest,
|
||||
updateAgentMessage,
|
||||
],
|
||||
);
|
||||
|
||||
const connect = useCallback(() => {
|
||||
if (wsRef.current) {
|
||||
wsRef.current.close();
|
||||
}
|
||||
|
||||
const socket = new WebSocket(wsUrl);
|
||||
wsRef.current = socket;
|
||||
|
||||
socket.onopen = () => {
|
||||
setConnected(true);
|
||||
setConnectionError(null);
|
||||
handleInitialize();
|
||||
};
|
||||
|
||||
socket.onclose = () => {
|
||||
setConnected(false);
|
||||
setInitialized(false);
|
||||
setThreads([]);
|
||||
selectThread(null);
|
||||
setActiveTurnId(null);
|
||||
setActiveTurnThreadId(null);
|
||||
setApprovals([]);
|
||||
setThreadMessages({});
|
||||
agentIndexRef.current.clear();
|
||||
userItemIdsRef.current.clear();
|
||||
pendingRef.current.clear();
|
||||
};
|
||||
|
||||
socket.onerror = () => {
|
||||
setConnectionError("WebSocket error. Check the bridge server.");
|
||||
};
|
||||
|
||||
socket.onmessage = (event) => {
|
||||
try {
|
||||
const parsed = JSON.parse(event.data as string) as RpcMessage;
|
||||
handleIncomingMessage(parsed);
|
||||
} catch (err) {
|
||||
pushLog({
|
||||
direction: "in",
|
||||
label: "ui/error",
|
||||
detail: `Failed to parse message: ${String(err)}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
}, [handleIncomingMessage, handleInitialize, pushLog, selectThread]);
|
||||
|
||||
useEffect(() => {
|
||||
connect();
|
||||
|
||||
return () => {
|
||||
wsRef.current?.close();
|
||||
wsRef.current = null;
|
||||
};
|
||||
}, [connect]);
|
||||
|
||||
const statusLabel = useMemo(() => {
|
||||
if (!connected) {
|
||||
return "Disconnected";
|
||||
}
|
||||
|
||||
if (!initialized) {
|
||||
return "Connecting";
|
||||
}
|
||||
|
||||
return "Ready";
|
||||
}, [connected, initialized]);
|
||||
|
||||
const activeMessages = selectedThreadId ? threadMessages[selectedThreadId] ?? [] : [];
|
||||
const displayedTurnId =
|
||||
selectedThreadId && activeTurnThreadId === selectedThreadId ? activeTurnId : null;
|
||||
|
||||
return (
|
||||
<div className="app">
|
||||
<header className="hero">
|
||||
<div className="status">
|
||||
<span className={`status-dot ${connected ? "on" : "off"}`} />
|
||||
<div>
|
||||
<div className="status-label">{statusLabel}</div>
|
||||
<div className="status-meta">{wsUrl}</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<main className="grid">
|
||||
<section className="panel control">
|
||||
<div className="panel-title">Session</div>
|
||||
<div className="control-row">
|
||||
<div>
|
||||
<div className="label">Thread</div>
|
||||
<div className="value">{selectedThreadId ?? "none"}</div>
|
||||
</div>
|
||||
<div>
|
||||
<div className="label">Turn</div>
|
||||
<div className="value">{displayedTurnId ?? "idle"}</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="button-row">
|
||||
<button className="btn" onClick={connect} type="button">
|
||||
Reconnect
|
||||
</button>
|
||||
<button className="btn primary" onClick={handleStartThread} type="button" disabled={!initialized}>
|
||||
Start Thread
|
||||
</button>
|
||||
</div>
|
||||
{connectionError ? <div className="notice">{connectionError}</div> : null}
|
||||
|
||||
<div className="composer">
|
||||
<label className="label" htmlFor="message">
|
||||
Message
|
||||
</label>
|
||||
<textarea
|
||||
id="message"
|
||||
value={input}
|
||||
placeholder="Ask Codex for a change or summary..."
|
||||
onChange={(event) => setInput(event.target.value)}
|
||||
rows={4}
|
||||
/>
|
||||
<button
|
||||
className="btn primary"
|
||||
type="button"
|
||||
onClick={handleSendMessage}
|
||||
disabled={!initialized || !selectedThreadId || !input.trim()}
|
||||
>
|
||||
Send Turn
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="thread-list">
|
||||
<div className="panel-title">Subscribed Threads</div>
|
||||
{threads.length === 0 ? (
|
||||
<div className="empty">No threads yet.</div>
|
||||
) : (
|
||||
threads.map((id) => (
|
||||
<button
|
||||
key={id}
|
||||
type="button"
|
||||
className={`thread-item ${selectedThreadId === id ? "active" : ""}`}
|
||||
onClick={() => selectThread(id)}
|
||||
>
|
||||
{id}
|
||||
</button>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
|
||||
{approvals.length ? (
|
||||
<div className="approvals">
|
||||
<div className="panel-title">Approvals</div>
|
||||
{approvals.map((approval) => (
|
||||
<div className="approval-card" key={String(approval.id)}>
|
||||
<div className="approval-header">
|
||||
<span>{approval.method}</span>
|
||||
<span className="approval-time">{approval.receivedAt}</span>
|
||||
</div>
|
||||
<pre>{JSON.stringify(approval.params, null, 2)}</pre>
|
||||
<div className="button-row">
|
||||
<button
|
||||
className="btn"
|
||||
type="button"
|
||||
onClick={() => handleApprovalDecision(approval.id, "decline")}
|
||||
>
|
||||
Decline
|
||||
</button>
|
||||
<button
|
||||
className="btn primary"
|
||||
type="button"
|
||||
onClick={() => handleApprovalDecision(approval.id, "accept")}
|
||||
>
|
||||
Accept
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : null}
|
||||
</section>
|
||||
|
||||
<section className="panel chat">
|
||||
<div className="panel-title">Conversation</div>
|
||||
<div className="chat-scroll">
|
||||
{activeMessages.length === 0 ? (
|
||||
<div className="empty">No messages yet. Start a thread to begin.</div>
|
||||
) : (
|
||||
activeMessages.map((message) => (
|
||||
<div className={`bubble ${message.role}`} key={message.id}>
|
||||
<div className="bubble-role">{message.role === "user" ? "You" : "Codex"}</div>
|
||||
<div className="bubble-text">{message.text}</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section className="panel logs">
|
||||
<div className="panel-title">Event Log</div>
|
||||
<div className="log-scroll">
|
||||
{logs.length === 0 ? (
|
||||
<div className="empty">Events from app-server will appear here.</div>
|
||||
) : (
|
||||
logs.map((entry) => (
|
||||
<div className={`log-entry ${entry.direction}`} key={entry.id}>
|
||||
<div className="log-meta">
|
||||
<span className="log-time">{entry.time}</span>
|
||||
<span className="log-label">{entry.label}</span>
|
||||
</div>
|
||||
{entry.detail ? <div className="log-detail">{entry.detail}</div> : null}
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
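A note on the wire format App.tsx speaks: the component above frames every WebSocket payload as a JSON-RPC-style object with optional id, method, params, result, and error fields, and it only ever issues a handful of calls (the initialize handshake, thread/start, turn/start, and replies to approval requests). The sketch below restates those exchanges in TypeScript so the traffic is easier to picture. It is inferred from App.tsx alone; concrete values such as the thread id are illustrative, and any field not used in the component above should be treated as an assumption rather than a statement about the app-server protocol.

// Minimal sketch of the messages App.tsx sends, reusing the same ad-hoc
// RpcMessage shape the component defines. Values are illustrative only.
type RpcMessageSketch = {
  id?: number | string;
  method?: string;
  params?: Record<string, unknown>;
  result?: Record<string, unknown>;
};

// 1. Handshake: request "initialize", then send the "initialized" notification on success.
const initialize: RpcMessageSketch = {
  id: 1,
  method: "initialize",
  params: {
    clientInfo: { name: "codex_app_server_ui", title: "Codex App Server UI", version: "0.1.0" },
  },
};
const initialized: RpcMessageSketch = { method: "initialized" };

// 2. Create a thread, then start a turn on it with plain text input.
const startThread: RpcMessageSketch = { id: 2, method: "thread/start", params: {} };
const startTurn: RpcMessageSketch = {
  id: 3,
  method: "turn/start",
  params: { threadId: "thread-123", input: [{ type: "text", text: "Summarize the repo" }] }, // thread id is made up
};

// 3. Approval prompts arrive as server-to-client requests; the UI answers by
//    echoing the request id with a decision of "accept" or "decline".
const approvalReply: RpcMessageSketch = { id: 42, result: { decision: "accept" } };

console.log(JSON.stringify([initialize, initialized, startThread, startTurn, approvalReply], null, 2));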
app-server-ui/src/main.tsx (new file)
@@ -0,0 +1,16 @@
import React from "react";
import ReactDOM from "react-dom/client";
import App from "./App";
import "./App.css";

const root = document.getElementById("root");

if (!root) {
  throw new Error("Root element not found");
}

ReactDOM.createRoot(root).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
);
app-server-ui/tsconfig.json (new file)
@@ -0,0 +1,27 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "useDefineForClassFields": true,
    "module": "ESNext",
    "lib": ["ES2022", "DOM", "DOM.Iterable"],
    "jsx": "react-jsx",
    "types": ["vite/client"],
    "skipLibCheck": true,

    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["src"]
}
app-server-ui/vite.config.ts (new file)
@@ -0,0 +1,10 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";

export default defineConfig({
  plugins: [react()],
  server: {
    port: 5173,
    strictPort: true,
  },
});
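One small knob worth calling out here: App.tsx resolves the bridge endpoint from VITE_APP_SERVER_WS and falls back to ws://localhost:8787, and the "vite/client" entry in tsconfig.json is what makes import.meta.env type-check. A minimal sketch of that lookup follows; the variable name and fallback are taken from App.tsx above, everything else is illustrative.

// Sketch only: mirrors the endpoint selection in App.tsx. Set VITE_APP_SERVER_WS
// in the environment (or an env file Vite reads) to point the UI at a different
// bridge; otherwise the default below is used.
const wsUrl: string = import.meta.env.VITE_APP_SERVER_WS ?? "ws://localhost:8787";
console.log(`app-server bridge endpoint: ${wsUrl}`);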
@@ -95,7 +95,6 @@ function detectPackageManager() {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
|
||||
if (
|
||||
__dirname.includes(".bun/install/global") ||
|
||||
__dirname.includes(".bun\\install\\global")
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
|
||||
|
||||
import argparse
|
||||
from contextlib import contextmanager
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
@@ -12,6 +13,7 @@ import zipfile
|
||||
from dataclasses import dataclass
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from pathlib import Path
|
||||
import sys
|
||||
from typing import Iterable, Sequence
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
@@ -77,6 +79,45 @@ RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
|
||||
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
|
||||
|
||||
# urllib.request.urlopen() defaults to no timeout (can hang indefinitely), which is painful in CI.
|
||||
DOWNLOAD_TIMEOUT_SECS = 60
|
||||
|
||||
|
||||
def _gha_enabled() -> bool:
|
||||
# GitHub Actions supports "workflow commands" (e.g. ::group:: / ::error::) that make logs
|
||||
# much easier to scan: groups collapse noisy sections and error annotations surface the
|
||||
# failure in the UI without changing the actual exception/traceback output.
|
||||
return os.environ.get("GITHUB_ACTIONS") == "true"
|
||||
|
||||
|
||||
def _gha_escape(value: str) -> str:
|
||||
# Workflow commands require percent/newline escaping.
|
||||
return value.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
|
||||
|
||||
|
||||
def _gha_error(*, title: str, message: str) -> None:
|
||||
# Emit a GitHub Actions error annotation. This does not replace stdout/stderr logs; it just
|
||||
# adds a prominent summary line to the job UI so the root cause is easier to spot.
|
||||
if not _gha_enabled():
|
||||
return
|
||||
print(
|
||||
f"::error title={_gha_escape(title)}::{_gha_escape(message)}",
|
||||
flush=True,
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _gha_group(title: str):
|
||||
# Wrap a block in a collapsible log group on GitHub Actions. Outside of GHA this is a no-op
|
||||
# so local output remains unchanged.
|
||||
if _gha_enabled():
|
||||
print(f"::group::{_gha_escape(title)}", flush=True)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if _gha_enabled():
|
||||
print("::endgroup::", flush=True)
|
||||
|
||||
|
||||
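The helpers above lean on GitHub Actions "workflow commands": plain ::group::/::endgroup:: and ::error:: lines written to stdout, with %, carriage returns, and newlines percent-escaped so the annotation text cannot break the command. The same trick works from any language; below is a hedged TypeScript sketch of the equivalent helpers. The function names are mine, not part of this script, and outside GitHub Actions the emitted lines are just ordinary output.

// Sketch of the same workflow-command emission in TypeScript (hypothetical helper names).
const onGha = (): boolean => process.env.GITHUB_ACTIONS === "true";

// Workflow commands require percent/newline escaping, matching the Python helper above.
const escapeGha = (value: string): string =>
  value.replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A");

function ghaError(title: string, message: string): void {
  // Adds a prominent annotation in the job UI without replacing normal logs.
  if (!onGha()) return;
  console.log(`::error title=${escapeGha(title)}::${escapeGha(message)}`);
}

async function ghaGroup<T>(title: string, body: () => Promise<T>): Promise<T> {
  // Collapsible log group on GHA; a no-op wrapper elsewhere.
  if (onGha()) console.log(`::group::${escapeGha(title)}`);
  try {
    return await body();
  } finally {
    if (onGha()) console.log("::endgroup::");
  }
}

// Example usage (values are illustrative):
void ghaGroup("Download native artifacts", async () => {
  ghaError("ripgrep install failed", "target=example-target error=timeout");
});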
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
|
||||
@@ -131,18 +172,20 @@ def main() -> int:
|
||||
workflow_id = workflow_url.rstrip("/").split("/")[-1]
|
||||
print(f"Downloading native artifacts from workflow {workflow_id}...")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
with _gha_group(f"Download native artifacts from workflow {workflow_id}"):
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
|
||||
if "rg" in components:
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
with _gha_group("Fetch ripgrep binaries"):
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
|
||||
print(f"Installed native dependencies into {vendor_dir}")
|
||||
return 0
|
||||
@@ -203,7 +246,14 @@ def fetch_rg(
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
results[target] = future.result()
|
||||
try:
|
||||
results[target] = future.result()
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep install failed",
|
||||
message=f"target={target} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(f"Failed to install ripgrep for target {target}.") from exc
|
||||
print(f" installed ripgrep for {target}")
|
||||
|
||||
return [results[target] for target in targets]
|
||||
@@ -301,6 +351,8 @@ def _fetch_single_rg(
|
||||
url = providers[0]["url"]
|
||||
archive_format = platform_info.get("format", "zst")
|
||||
archive_member = platform_info.get("path")
|
||||
digest = platform_info.get("digest")
|
||||
expected_size = platform_info.get("size")
|
||||
|
||||
dest_dir = vendor_dir / target / "path"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -313,10 +365,32 @@ def _fetch_single_rg(
|
||||
tmp_dir = Path(tmp_dir_str)
|
||||
archive_filename = os.path.basename(urlparse(url).path)
|
||||
download_path = tmp_dir / archive_filename
|
||||
_download_file(url, download_path)
|
||||
print(
|
||||
f" downloading ripgrep for {target} ({platform_key}) from {url}",
|
||||
flush=True,
|
||||
)
|
||||
try:
|
||||
_download_file(url, download_path)
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep download failed",
|
||||
message=f"target={target} platform={platform_key} url={url} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(
|
||||
"Failed to download ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"expected_size={expected_size!r}, digest={digest!r}, url={url}, dest={download_path})."
|
||||
) from exc
|
||||
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
try:
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
except Exception as exc:
|
||||
raise RuntimeError(
|
||||
"Failed to extract ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"member={archive_member!r}, url={url}, archive={download_path})."
|
||||
) from exc
|
||||
|
||||
if not is_windows:
|
||||
dest.chmod(0o755)
|
||||
@@ -326,7 +400,9 @@ def _fetch_single_rg(
|
||||
|
||||
def _download_file(url: str, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
with urlopen(url) as response, open(dest, "wb") as out:
|
||||
dest.unlink(missing_ok=True)
|
||||
|
||||
with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(response, out)
|
||||
|
||||
|
||||
|
||||
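Two small hardening changes in _download_file above are worth a gloss: the destination is unlinked before writing, and urlopen now gets an explicit timeout because its default is to wait indefinitely, which is painful in CI. The same idea expressed in TypeScript terms would be a fetch bounded by AbortSignal.timeout; this is a sketch of the pattern, not code from this repository.

// Sketch: bound a download the way the Python change bounds urlopen.
// DOWNLOAD_TIMEOUT_SECS mirrors the 60-second constant added above.
const DOWNLOAD_TIMEOUT_SECS = 60;

async function downloadFile(url: string): Promise<Uint8Array> {
  // Without a signal, a stalled connection could hang forever; the timeout
  // surfaces the failure quickly instead.
  const response = await fetch(url, { signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_SECS * 1000) });
  if (!response.ok) {
    throw new Error(`download failed: ${url} -> HTTP ${response.status}`);
  }
  return new Uint8Array(await response.arrayBuffer());
}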
codex-rs/BUILD.bazel (new file)
@@ -0,0 +1 @@

codex-rs/Cargo.lock (generated, 504 lines changed)
File diff suppressed because it is too large
@@ -6,6 +6,7 @@ members = [
|
||||
"app-server",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
"debug-client",
|
||||
"apply-patch",
|
||||
"arg0",
|
||||
"feedback",
|
||||
@@ -36,6 +37,7 @@ members = [
|
||||
"tui",
|
||||
"tui2",
|
||||
"utils/absolute-path",
|
||||
"utils/cargo-bin",
|
||||
"utils/git",
|
||||
"utils/cache",
|
||||
"utils/image",
|
||||
@@ -69,6 +71,7 @@ codex-arg0 = { path = "arg0" }
|
||||
codex-async-utils = { path = "async-utils" }
|
||||
codex-backend-client = { path = "backend-client" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-cli = { path = "cli"}
|
||||
codex-client = { path = "codex-client" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
@@ -93,6 +96,7 @@ codex-tui = { path = "tui" }
|
||||
codex-tui2 = { path = "tui2" }
|
||||
codex-utils-absolute-path = { path = "utils/absolute-path" }
|
||||
codex-utils-cache = { path = "utils/cache" }
|
||||
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
|
||||
codex-utils-image = { path = "utils/image" }
|
||||
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-pty = { path = "utils/pty" }
|
||||
@@ -132,7 +136,6 @@ dunce = "1.0.4"
|
||||
encoding_rs = "0.8.35"
|
||||
env-flags = "0.1.1"
|
||||
env_logger = "0.11.5"
|
||||
escargot = "0.5"
|
||||
eventsource-stream = "0.2.3"
|
||||
futures = { version = "0.3", default-features = false }
|
||||
http = "1.3.1"
|
||||
@@ -140,17 +143,18 @@ icu_decimal = "2.1"
|
||||
icu_locale_core = "2.1"
|
||||
icu_provider = { version = "2.1", features = ["sync"] }
|
||||
ignore = "0.4.23"
|
||||
indoc = "2.0"
|
||||
image = { version = "^0.25.9", default-features = false }
|
||||
include_dir = "0.7.4"
|
||||
indexmap = "2.12.0"
|
||||
insta = "1.44.3"
|
||||
insta = "1.46.0"
|
||||
itertools = "0.14.0"
|
||||
keyring = { version = "3.6", default-features = false }
|
||||
landlock = "0.4.4"
|
||||
lazy_static = "1"
|
||||
libc = "0.2.177"
|
||||
log = "0.4"
|
||||
lru = "0.16.2"
|
||||
lru = "0.16.3"
|
||||
maplit = "1.0.2"
|
||||
mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
@@ -158,12 +162,12 @@ notify = "8.2.0"
|
||||
nucleo-matcher = "0.3.1"
|
||||
once_cell = "1.20.2"
|
||||
openssl-sys = "*"
|
||||
opentelemetry = "0.30.0"
|
||||
opentelemetry-appender-tracing = "0.30.0"
|
||||
opentelemetry-otlp = "0.30.0"
|
||||
opentelemetry-semantic-conventions = "0.30.0"
|
||||
opentelemetry_sdk = "0.30.0"
|
||||
tracing-opentelemetry = "0.31.0"
|
||||
opentelemetry = "0.31.0"
|
||||
opentelemetry-appender-tracing = "0.31.0"
|
||||
opentelemetry-otlp = "0.31.0"
|
||||
opentelemetry-semantic-conventions = "0.31.0"
|
||||
opentelemetry_sdk = "0.31.0"
|
||||
tracing-opentelemetry = "0.32.0"
|
||||
os_info = "3.12.0"
|
||||
owo-colors = "4.2.0"
|
||||
path-absolutize = "3.1.1"
|
||||
@@ -174,9 +178,10 @@ pretty_assertions = "1.4.1"
|
||||
pulldown-cmark = "0.10"
|
||||
rand = "0.9"
|
||||
ratatui = "0.29.0"
|
||||
ratatui-core = "0.1.0"
|
||||
ratatui-macros = "0.6.0"
|
||||
regex = "1.12.2"
|
||||
regex-lite = "0.1.7"
|
||||
regex-lite = "0.1.8"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.12.0", default-features = false }
|
||||
schemars = "0.8.22"
|
||||
@@ -189,6 +194,7 @@ serde_yaml = "0.9"
|
||||
serial_test = "3.2.0"
|
||||
sha1 = "0.10.6"
|
||||
sha2 = "0.10"
|
||||
semver = "1.0"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.7.0"
|
||||
socket2 = "0.6.1"
|
||||
@@ -204,20 +210,22 @@ thiserror = "2.0.17"
|
||||
time = "0.3"
|
||||
tiny_http = "0.12"
|
||||
tokio = "1"
|
||||
tokio-stream = "0.1.17"
|
||||
tokio-stream = "0.1.18"
|
||||
tokio-test = "0.4"
|
||||
tokio-util = "0.7.16"
|
||||
tokio-tungstenite = "0.21.0"
|
||||
tokio-util = "0.7.18"
|
||||
toml = "0.9.5"
|
||||
toml_edit = "0.23.5"
|
||||
tonic = "0.13.1"
|
||||
toml_edit = "0.24.0"
|
||||
tracing = "0.1.43"
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = "0.3.20"
|
||||
tracing-subscriber = "0.3.22"
|
||||
tracing-test = "0.2.5"
|
||||
tree-sitter = "0.25.10"
|
||||
tree-sitter-bash = "0.25"
|
||||
zstd = "0.13"
|
||||
tree-sitter-highlight = "0.25.10"
|
||||
ts-rs = "11"
|
||||
tui-scrollbar = "0.2.2"
|
||||
uds_windows = "1.1.0"
|
||||
unicode-segmentation = "1.12.0"
|
||||
unicode-width = "0.2"
|
||||
@@ -227,7 +235,7 @@ uuid = "1"
|
||||
vt100 = "0.16.2"
|
||||
walkdir = "2.5.0"
|
||||
webbrowser = "1.0"
|
||||
which = "6"
|
||||
which = "8"
|
||||
wildmatch = "2.6.1"
|
||||
|
||||
wiremock = "0.6"
|
||||
|
||||
@@ -15,8 +15,8 @@ You can also install via Homebrew (`brew install --cask codex`) or download a pl

## Documentation quickstart

- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).
- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).

## What's new in the Rust CLI

@@ -30,7 +30,7 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

#### MCP client

Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.

#### MCP server (experimental)

codex-rs/ansi-escape/BUILD.bazel (new file)
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "ansi-escape",
    crate_name = "codex_ansi_escape",
)

codex-rs/app-server-protocol/BUILD.bazel (new file)
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server-protocol",
    crate_name = "codex_app_server_protocol",
)
@@ -109,14 +109,26 @@ client_request_definitions! {
|
||||
params: v2::ThreadResumeParams,
|
||||
response: v2::ThreadResumeResponse,
|
||||
},
|
||||
ThreadFork => "thread/fork" {
|
||||
params: v2::ThreadForkParams,
|
||||
response: v2::ThreadForkResponse,
|
||||
},
|
||||
ThreadArchive => "thread/archive" {
|
||||
params: v2::ThreadArchiveParams,
|
||||
response: v2::ThreadArchiveResponse,
|
||||
},
|
||||
ThreadRollback => "thread/rollback" {
|
||||
params: v2::ThreadRollbackParams,
|
||||
response: v2::ThreadRollbackResponse,
|
||||
},
|
||||
ThreadList => "thread/list" {
|
||||
params: v2::ThreadListParams,
|
||||
response: v2::ThreadListResponse,
|
||||
},
|
||||
ThreadLoadedList => "thread/loaded/list" {
|
||||
params: v2::ThreadLoadedListParams,
|
||||
response: v2::ThreadLoadedListResponse,
|
||||
},
|
||||
SkillsList => "skills/list" {
|
||||
params: v2::SkillsListParams,
|
||||
response: v2::SkillsListResponse,
|
||||
@@ -144,6 +156,11 @@ client_request_definitions! {
|
||||
response: v2::McpServerOauthLoginResponse,
|
||||
},
|
||||
|
||||
McpServerRefresh => "config/mcpServer/reload" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::McpServerRefreshResponse,
|
||||
},
|
||||
|
||||
McpServerStatusList => "mcpServerStatus/list" {
|
||||
params: v2::ListMcpServerStatusParams,
|
||||
response: v2::ListMcpServerStatusResponse,
|
||||
@@ -193,6 +210,11 @@ client_request_definitions! {
|
||||
response: v2::ConfigWriteResponse,
|
||||
},
|
||||
|
||||
ConfigRequirementsRead => "configRequirements/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::ConfigRequirementsReadResponse,
|
||||
},
|
||||
|
||||
GetAccount => "account/read" {
|
||||
params: v2::GetAccountParams,
|
||||
response: v2::GetAccountResponse,
|
||||
@@ -217,6 +239,11 @@ client_request_definitions! {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
/// Fork a recorded Codex conversation into a new session.
|
||||
ForkConversation {
|
||||
params: v1::ForkConversationParams,
|
||||
response: v1::ForkConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
@@ -540,6 +567,7 @@ server_notification_definitions! {
|
||||
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
|
||||
ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
|
||||
DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
|
||||
ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
|
||||
|
||||
/// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
|
||||
WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
|
||||
@@ -565,7 +593,7 @@ client_notification_definitions! {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
@@ -614,7 +642,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
@@ -625,11 +653,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ConversationId =
|
||||
serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
let id: ThreadId = serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
@@ -650,7 +677,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let conversation_id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = v1::ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
@@ -708,6 +735,22 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_config_requirements_read() -> Result<()> {
|
||||
let request = ClientRequest::ConfigRequirementsRead {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "configRequirements/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_api_key() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
|
||||
@@ -6,6 +6,7 @@ use crate::protocol::v2::UserInput;
|
||||
use codex_protocol::protocol::AgentReasoningEvent;
|
||||
use codex_protocol::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::ThreadRolledBackEvent;
|
||||
use codex_protocol::protocol::TurnAbortedEvent;
|
||||
use codex_protocol::protocol::UserMessageEvent;
|
||||
|
||||
@@ -57,6 +58,7 @@ impl ThreadHistoryBuilder {
|
||||
EventMsg::TokenCount(_) => {}
|
||||
EventMsg::EnteredReviewMode(_) => {}
|
||||
EventMsg::ExitedReviewMode(_) => {}
|
||||
EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
|
||||
EventMsg::UndoCompleted(_) => {}
|
||||
EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
|
||||
_ => {}
|
||||
@@ -130,6 +132,23 @@ impl ThreadHistoryBuilder {
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
}
|
||||
|
||||
fn handle_thread_rollback(&mut self, payload: &ThreadRolledBackEvent) {
|
||||
self.finish_current_turn();
|
||||
|
||||
let n = usize::try_from(payload.num_turns).unwrap_or(usize::MAX);
|
||||
if n >= self.turns.len() {
|
||||
self.turns.clear();
|
||||
} else {
|
||||
self.turns.truncate(self.turns.len().saturating_sub(n));
|
||||
}
|
||||
|
||||
// Re-number subsequent synthetic ids so the pruned history is consistent.
|
||||
self.next_turn_index =
|
||||
i64::try_from(self.turns.len().saturating_add(1)).unwrap_or(i64::MAX);
|
||||
let item_count: usize = self.turns.iter().map(|t| t.items.len()).sum();
|
||||
self.next_item_index = i64::try_from(item_count.saturating_add(1)).unwrap_or(i64::MAX);
|
||||
}
|
||||
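To make the rollback arithmetic above concrete: handle_thread_rollback drops the last num_turns turns (clearing everything when the count meets or exceeds the history) and then resets the synthetic counters so the next turn id is len + 1 and the next item id is one past the total surviving item count. Below is a small TypeScript restatement of that rule with a worked example; the numbers are illustrative and the helper is a sketch, not part of this crate.

// Sketch of the pruning rule used by handle_thread_rollback above.
type TurnSketch = { items: unknown[] };

function rollback(
  turns: TurnSketch[],
  numTurns: number,
): { turns: TurnSketch[]; nextTurnIndex: number; nextItemIndex: number } {
  const kept = numTurns >= turns.length ? [] : turns.slice(0, turns.length - numTurns);
  const itemCount = kept.reduce((sum, turn) => sum + turn.items.length, 0);
  return { turns: kept, nextTurnIndex: kept.length + 1, nextItemIndex: itemCount + 1 };
}

// Example: three recorded turns with 2, 2, and 1 items; roll back one turn.
// Two turns remain, so nextTurnIndex = 3 and nextItemIndex = 4 items + 1 = 5.
console.log(rollback([{ items: [1, 2] }, { items: [3, 4] }, { items: [5] }], 1));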
|
||||
fn finish_current_turn(&mut self) {
|
||||
if let Some(turn) = self.current_turn.take() {
|
||||
if turn.items.is_empty() {
|
||||
@@ -178,6 +197,12 @@ impl ThreadHistoryBuilder {
|
||||
if !payload.message.trim().is_empty() {
|
||||
content.push(UserInput::Text {
|
||||
text: payload.message.clone(),
|
||||
text_elements: payload
|
||||
.text_elements
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
if let Some(images) = &payload.images {
|
||||
@@ -185,6 +210,9 @@ impl ThreadHistoryBuilder {
|
||||
content.push(UserInput::Image { url: image.clone() });
|
||||
}
|
||||
}
|
||||
for path in &payload.local_images {
|
||||
content.push(UserInput::LocalImage { path: path.clone() });
|
||||
}
|
||||
content
|
||||
}
|
||||
}
|
||||
@@ -213,6 +241,7 @@ mod tests {
|
||||
use codex_protocol::protocol::AgentMessageEvent;
|
||||
use codex_protocol::protocol::AgentReasoningEvent;
|
||||
use codex_protocol::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_protocol::protocol::ThreadRolledBackEvent;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use codex_protocol::protocol::TurnAbortedEvent;
|
||||
use codex_protocol::protocol::UserMessageEvent;
|
||||
@@ -224,6 +253,8 @@ mod tests {
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "First turn".into(),
|
||||
images: Some(vec!["https://example.com/one.png".into()]),
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Hi there".into(),
|
||||
@@ -237,6 +268,8 @@ mod tests {
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Second turn".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Reply two".into(),
|
||||
@@ -257,6 +290,7 @@ mod tests {
|
||||
content: vec![
|
||||
UserInput::Text {
|
||||
text: "First turn".into(),
|
||||
text_elements: Vec::new(),
|
||||
},
|
||||
UserInput::Image {
|
||||
url: "https://example.com/one.png".into(),
|
||||
@@ -288,7 +322,8 @@ mod tests {
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-4".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Second turn".into()
|
||||
text: "Second turn".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
@@ -307,6 +342,8 @@ mod tests {
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Turn start".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent {
|
||||
text: "first summary".into(),
|
||||
@@ -351,6 +388,8 @@ mod tests {
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Please do the thing".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Working...".into(),
|
||||
@@ -361,6 +400,8 @@ mod tests {
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Let's try again".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Second attempt complete.".into(),
|
||||
@@ -378,7 +419,8 @@ mod tests {
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Please do the thing".into()
|
||||
text: "Please do the thing".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
@@ -398,7 +440,8 @@ mod tests {
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-3".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Let's try again".into()
|
||||
text: "Let's try again".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
@@ -410,4 +453,107 @@ mod tests {
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn drops_last_turns_on_thread_rollback() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "First".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A1".into(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Second".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A2".into(),
|
||||
}),
|
||||
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Third".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A3".into(),
|
||||
}),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
let expected = vec![
|
||||
Turn {
|
||||
id: "turn-1".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
items: vec![
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "First".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-2".into(),
|
||||
text: "A1".into(),
|
||||
},
|
||||
],
|
||||
},
|
||||
Turn {
|
||||
id: "turn-2".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
items: vec![
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-3".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Third".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-4".into(),
|
||||
text: "A3".into(),
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
assert_eq!(turns, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn thread_rollback_clears_all_turns_when_num_turns_exceeds_history() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "One".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A1".into(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Two".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A2".into(),
|
||||
}),
|
||||
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
assert_eq!(turns, Vec::<Turn>::new());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
@@ -16,6 +16,8 @@ use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use codex_protocol::user_input::ByteRange as CoreByteRange;
|
||||
use codex_protocol::user_input::TextElement as CoreTextElement;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
@@ -68,7 +70,7 @@ pub struct NewConversationParams {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub rollout_path: PathBuf,
|
||||
@@ -77,7 +79,16 @@ pub struct NewConversationResponse {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationResponse {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ForkConversationResponse {
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
@@ -90,9 +101,9 @@ pub enum GetConversationSummaryParams {
|
||||
#[serde(rename = "rolloutPath")]
|
||||
rollout_path: PathBuf,
|
||||
},
|
||||
ConversationId {
|
||||
ThreadId {
|
||||
#[serde(rename = "conversationId")]
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -113,7 +124,7 @@ pub struct ListConversationsParams {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ConversationSummary {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub path: PathBuf,
|
||||
pub preview: String,
|
||||
pub timestamp: Option<String>,
|
||||
@@ -143,11 +154,19 @@ pub struct ListConversationsResponse {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ConversationId>,
|
||||
pub conversation_id: Option<ThreadId>,
|
||||
pub history: Option<Vec<ResponseItem>>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ForkConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ThreadId>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationSubscriptionResponse {
|
||||
@@ -158,7 +177,7 @@ pub struct AddConversationSubscriptionResponse {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
@@ -198,7 +217,7 @@ pub struct GitDiffToRemoteResponse {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
|
||||
/// and [codex_core::protocol::PatchApplyEndEvent].
|
||||
pub call_id: String,
|
||||
@@ -219,7 +238,7 @@ pub struct ApplyPatchApprovalResponse {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecCommandApprovalParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
|
||||
/// and [codex_core::protocol::ExecCommandEndEvent].
|
||||
pub call_id: String,
|
||||
@@ -369,14 +388,14 @@ pub struct SandboxSettings {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub items: Vec<InputItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
pub items: Vec<InputItem>,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
@@ -384,6 +403,8 @@ pub struct SendUserTurnParams {
|
||||
pub model: String,
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
pub summary: ReasoningSummary,
|
||||
/// Optional JSON Schema used to constrain the final assistant message for this turn.
|
||||
pub output_schema: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -393,7 +414,7 @@ pub struct SendUserTurnResponse {}
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
@@ -409,7 +430,7 @@ pub struct SendUserMessageResponse {}
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationListenerParams {
|
||||
pub conversation_id: ConversationId,
|
||||
pub conversation_id: ThreadId,
|
||||
#[serde(default)]
|
||||
pub experimental_raw_events: bool,
|
||||
}
|
||||
@@ -425,9 +446,74 @@ pub struct RemoveConversationListenerParams {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
pub enum InputItem {
|
||||
Text { text: String },
|
||||
Image { image_url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
Text {
|
||||
text: String,
|
||||
/// UI-defined spans within `text` used to render or persist special elements.
|
||||
#[serde(default)]
|
||||
text_elements: Vec<V1TextElement>,
|
||||
},
|
||||
Image {
|
||||
image_url: String,
|
||||
},
|
||||
LocalImage {
|
||||
path: PathBuf,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename = "ByteRange")]
|
||||
pub struct V1ByteRange {
|
||||
/// Start byte offset (inclusive) within the UTF-8 text buffer.
|
||||
pub start: usize,
|
||||
/// End byte offset (exclusive) within the UTF-8 text buffer.
|
||||
pub end: usize,
|
||||
}
|
||||
|
||||
impl From<CoreByteRange> for V1ByteRange {
|
||||
fn from(value: CoreByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<V1ByteRange> for CoreByteRange {
|
||||
fn from(value: V1ByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename = "TextElement")]
|
||||
pub struct V1TextElement {
|
||||
/// Byte range in the parent `text` buffer that this element occupies.
|
||||
pub byte_range: V1ByteRange,
|
||||
/// Optional human-readable placeholder for the element, displayed in the UI.
|
||||
pub placeholder: Option<String>,
|
||||
}
|
||||
|
||||
impl From<CoreTextElement> for V1TextElement {
|
||||
fn from(value: CoreTextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<V1TextElement> for CoreTextElement {
|
||||
fn from(value: V1TextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -443,7 +529,7 @@ pub struct LoginChatGptCompleteNotification {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SessionConfiguredNotification {
|
||||
pub session_id: ConversationId,
|
||||
pub session_id: ThreadId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub history_log_id: u64,
|
||||
|
||||
@@ -8,6 +8,7 @@ use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::config_types::WebSearchMode;
|
||||
use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
|
||||
use codex_protocol::items::TurnItem as CoreTurnItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
@@ -15,6 +16,7 @@ use codex_protocol::openai_models::ReasoningEffort;
|
||||
use codex_protocol::parse_command::ParsedCommand as CoreParsedCommand;
|
||||
use codex_protocol::plan_tool::PlanItemArg as CorePlanItemArg;
|
||||
use codex_protocol::plan_tool::StepStatus as CorePlanStepStatus;
|
||||
use codex_protocol::protocol::AgentStatus as CoreAgentStatus;
|
||||
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
|
||||
use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
|
||||
use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
|
||||
@@ -23,10 +25,13 @@ use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
|
||||
use codex_protocol::protocol::SessionSource as CoreSessionSource;
|
||||
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
|
||||
use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
|
||||
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
|
||||
use codex_protocol::protocol::SkillScope as CoreSkillScope;
|
||||
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
|
||||
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
|
||||
use codex_protocol::user_input::ByteRange as CoreByteRange;
|
||||
use codex_protocol::user_input::TextElement as CoreTextElement;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use mcp_types::ContentBlock as McpContentBlock;
|
||||
@@ -89,6 +94,7 @@ pub enum CodexErrorInfo {
|
||||
InternalServerError,
|
||||
Unauthorized,
|
||||
BadRequest,
|
||||
ThreadRollbackFailed,
|
||||
SandboxError,
|
||||
/// The response SSE stream disconnected in the middle of a turn before completion.
|
||||
ResponseStreamDisconnected {
|
||||
@@ -119,6 +125,7 @@ impl From<CoreCodexErrorInfo> for CodexErrorInfo {
|
||||
CoreCodexErrorInfo::InternalServerError => CodexErrorInfo::InternalServerError,
|
||||
CoreCodexErrorInfo::Unauthorized => CodexErrorInfo::Unauthorized,
|
||||
CoreCodexErrorInfo::BadRequest => CodexErrorInfo::BadRequest,
|
||||
CoreCodexErrorInfo::ThreadRollbackFailed => CodexErrorInfo::ThreadRollbackFailed,
|
||||
CoreCodexErrorInfo::SandboxError => CodexErrorInfo::SandboxError,
|
||||
CoreCodexErrorInfo::ResponseStreamDisconnected { http_status_code } => {
|
||||
CodexErrorInfo::ResponseStreamDisconnected { http_status_code }
|
||||
@@ -325,11 +332,21 @@ pub struct ProfileV2 {
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub web_search: Option<WebSearchMode>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
#[serde(default, flatten)]
|
||||
pub additional: HashMap<String, JsonValue>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AnalyticsConfig {
|
||||
pub enabled: Option<bool>,
|
||||
#[serde(default, flatten)]
|
||||
pub additional: HashMap<String, JsonValue>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -344,6 +361,7 @@ pub struct Config {
|
||||
pub sandbox_workspace_write: Option<SandboxWorkspaceWrite>,
|
||||
pub forced_chatgpt_workspace_id: Option<String>,
|
||||
pub forced_login_method: Option<ForcedLoginMethod>,
|
||||
pub web_search: Option<WebSearchMode>,
|
||||
pub tools: Option<ToolsV2>,
|
||||
pub profile: Option<String>,
|
||||
#[serde(default)]
|
||||
@@ -354,6 +372,7 @@ pub struct Config {
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub analytics: Option<AnalyticsConfig>,
|
||||
#[serde(default, flatten)]
|
||||
pub additional: HashMap<String, JsonValue>,
|
||||
}
|
||||
@@ -441,6 +460,22 @@ pub struct ConfigReadResponse {
|
||||
pub layers: Option<Vec<ConfigLayer>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ConfigRequirements {
|
||||
pub allowed_approval_policies: Option<Vec<AskForApproval>>,
|
||||
pub allowed_sandbox_modes: Option<Vec<SandboxMode>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ConfigRequirementsReadResponse {
|
||||
/// Null if no requirements are configured (e.g. no requirements.toml/MDM entries).
|
||||
pub requirements: Option<ConfigRequirements>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -475,14 +510,33 @@ pub struct ConfigEdit {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum ApprovalDecision {
|
||||
pub enum CommandExecutionApprovalDecision {
|
||||
/// User approved the command.
|
||||
Accept,
|
||||
/// Approve and remember the approval for the session.
|
||||
/// User approved the command and future identical commands should run without prompting.
|
||||
AcceptForSession,
|
||||
/// User approved the command, and wants to apply the proposed execpolicy amendment so future
|
||||
/// matching commands can run without prompting.
|
||||
AcceptWithExecpolicyAmendment {
|
||||
execpolicy_amendment: ExecPolicyAmendment,
|
||||
},
|
||||
/// User denied the command. The agent will continue the turn.
|
||||
Decline,
|
||||
/// User denied the command. The turn will also be immediately interrupted.
|
||||
Cancel,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum FileChangeApprovalDecision {
|
||||
/// User approved the file changes.
|
||||
Accept,
|
||||
/// User approved the file changes and future changes to the same files should run without prompting.
|
||||
AcceptForSession,
|
||||
/// User denied the file changes. The agent will continue the turn.
|
||||
Decline,
|
||||
/// User denied the file changes. The turn will also be immediately interrupted.
|
||||
Cancel,
|
||||
}
|
||||
|
||||
@@ -893,6 +947,16 @@ pub struct ListMcpServerStatusResponse {
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpServerRefreshParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpServerRefreshResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1033,6 +1097,47 @@ pub struct ThreadResumeResponse {
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
/// There are two ways to fork a thread:
|
||||
/// 1. By thread_id: load the thread from disk by thread_id and fork it into a new thread.
|
||||
/// 2. By path: load the thread from disk by path and fork it into a new thread.
|
||||
///
|
||||
/// If using path, the thread_id param will be ignored.
|
||||
///
|
||||
/// Prefer using thread_id whenever possible.
|
||||
pub struct ThreadForkParams {
|
||||
pub thread_id: String,
|
||||
|
||||
/// [UNSTABLE] Specify the rollout path to fork from.
|
||||
/// If specified, the thread_id param will be ignored.
|
||||
pub path: Option<PathBuf>,
|
||||
|
||||
/// Configuration overrides for the forked thread, if any.
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
pub developer_instructions: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadForkResponse {
|
||||
pub thread: Thread,
|
||||
pub model: String,
|
||||
pub model_provider: String,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
pub sandbox: SandboxPolicy,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1045,6 +1150,30 @@ pub struct ThreadArchiveParams {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadArchiveResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadRollbackParams {
|
||||
pub thread_id: String,
|
||||
/// The number of turns to drop from the end of the thread. Must be >= 1.
|
||||
///
|
||||
/// This only modifies the thread's history and does not revert local file changes
|
||||
/// that have been made by the agent. Clients are responsible for reverting these changes.
|
||||
pub num_turns: u32,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadRollbackResponse {
|
||||
/// The updated thread after applying the rollback, with `turns` populated.
|
||||
///
|
||||
/// The ThreadItems stored in each Turn are lossy since we explicitly do not
|
||||
/// persist all agent interactions, such as command executions. This is the same
|
||||
/// behavior as `thread/resume`.
|
||||
pub thread: Thread,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1068,6 +1197,27 @@ pub struct ThreadListResponse {
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadLoadedListParams {
|
||||
/// Opaque pagination cursor returned by a previous call.
|
||||
pub cursor: Option<String>,
|
||||
/// Optional page size; defaults to no limit.
|
||||
pub limit: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadLoadedListResponse {
|
||||
/// Thread ids for sessions currently loaded in memory.
|
||||
pub data: Vec<String>,
|
||||
/// Opaque cursor to pass to the next call to continue after the last item.
|
||||
/// If None, there are no more items to return.
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1105,13 +1255,35 @@ pub enum SkillScope {
|
||||
pub struct SkillMetadata {
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
#[ts(optional)]
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
/// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
|
||||
pub short_description: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub interface: Option<SkillInterface>,
|
||||
pub path: PathBuf,
|
||||
pub scope: SkillScope,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct SkillInterface {
|
||||
#[ts(optional)]
|
||||
pub display_name: Option<String>,
|
||||
#[ts(optional)]
|
||||
pub short_description: Option<String>,
|
||||
#[ts(optional)]
|
||||
pub icon_small: Option<PathBuf>,
|
||||
#[ts(optional)]
|
||||
pub icon_large: Option<PathBuf>,
|
||||
#[ts(optional)]
|
||||
pub brand_color: Option<String>,
|
||||
#[ts(optional)]
|
||||
pub default_prompt: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1135,12 +1307,26 @@ impl From<CoreSkillMetadata> for SkillMetadata {
|
||||
name: value.name,
|
||||
description: value.description,
|
||||
short_description: value.short_description,
|
||||
interface: value.interface.map(SkillInterface::from),
|
||||
path: value.path,
|
||||
scope: value.scope.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreSkillInterface> for SkillInterface {
|
||||
fn from(value: CoreSkillInterface) -> Self {
|
||||
Self {
|
||||
display_name: value.display_name,
|
||||
short_description: value.short_description,
|
||||
brand_color: value.brand_color,
|
||||
default_prompt: value.default_prompt,
|
||||
icon_small: value.icon_small,
|
||||
icon_large: value.icon_large,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreSkillScope> for SkillScope {
|
||||
fn from(value: CoreSkillScope) -> Self {
|
||||
match value {
|
||||
@@ -1183,7 +1369,7 @@ pub struct Thread {
|
||||
pub source: SessionSource,
|
||||
/// Optional Git metadata captured when the thread was created.
|
||||
pub git_info: Option<GitInfo>,
|
||||
/// Only populated on a `thread/resume` response.
|
||||
/// Only populated on `thread/resume`, `thread/rollback`, `thread/fork` responses.
|
||||
/// For all other responses and notifications returning a Thread,
|
||||
/// the turns field will be an empty list.
|
||||
pub turns: Vec<Turn>,
|
||||
@@ -1211,6 +1397,7 @@ pub struct ThreadTokenUsageUpdatedNotification {
|
||||
pub struct ThreadTokenUsage {
|
||||
pub total: TokenUsageBreakdown,
|
||||
pub last: TokenUsageBreakdown,
|
||||
// TODO(aibrahim): make this not optional
|
||||
#[ts(type = "number | null")]
|
||||
pub model_context_window: Option<i64>,
|
||||
}
|
||||
@@ -1258,7 +1445,7 @@ impl From<CoreTokenUsage> for TokenUsageBreakdown {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct Turn {
|
||||
pub id: String,
|
||||
/// Only populated on a `thread/resume` response.
|
||||
/// Only populated on a `thread/resume` or `thread/fork` response.
|
||||
/// For all other responses and notifications returning a Turn,
|
||||
/// the items field will be an empty list.
|
||||
pub items: Vec<ThreadItem>,
|
||||
@@ -1274,6 +1461,8 @@ pub struct Turn {
|
||||
pub struct TurnError {
|
||||
pub message: String,
|
||||
pub codex_error_info: Option<CodexErrorInfo>,
|
||||
#[serde(default)]
|
||||
pub additional_details: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -1317,6 +1506,8 @@ pub struct TurnStartParams {
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
/// Override the reasoning summary for this turn and subsequent turns.
|
||||
pub summary: Option<ReasoningSummary>,
|
||||
/// Optional JSON Schema used to constrain the final assistant message for this turn.
|
||||
pub output_schema: Option<JsonValue>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -1392,22 +1583,96 @@ pub struct TurnInterruptParams {
|
||||
pub struct TurnInterruptResponse {}
|
||||
|
||||
// User input types
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ByteRange {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
}
|
||||
|
||||
impl From<CoreByteRange> for ByteRange {
|
||||
fn from(value: CoreByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ByteRange> for CoreByteRange {
|
||||
fn from(value: ByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct TextElement {
|
||||
/// Byte range in the parent `text` buffer that this element occupies.
|
||||
pub byte_range: ByteRange,
|
||||
/// Optional human-readable placeholder for the element, displayed in the UI.
|
||||
pub placeholder: Option<String>,
|
||||
}
|
||||
|
||||
impl From<CoreTextElement> for TextElement {
|
||||
fn from(value: CoreTextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TextElement> for CoreTextElement {
|
||||
fn from(value: TextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(tag = "type")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum UserInput {
|
||||
Text { text: String },
|
||||
Image { url: String },
|
||||
LocalImage { path: PathBuf },
|
||||
Text {
|
||||
text: String,
|
||||
/// UI-defined spans within `text` used to render or persist special elements.
|
||||
#[serde(default)]
|
||||
text_elements: Vec<TextElement>,
|
||||
},
|
||||
Image {
|
||||
url: String,
|
||||
},
|
||||
LocalImage {
|
||||
path: PathBuf,
|
||||
},
|
||||
Skill {
|
||||
name: String,
|
||||
path: PathBuf,
|
||||
},
|
||||
}
|
||||
|
||||
impl UserInput {
|
||||
pub fn into_core(self) -> CoreUserInput {
|
||||
match self {
|
||||
UserInput::Text { text } => CoreUserInput::Text { text },
|
||||
UserInput::Text {
|
||||
text,
|
||||
text_elements,
|
||||
} => CoreUserInput::Text {
|
||||
text,
|
||||
text_elements: text_elements.into_iter().map(Into::into).collect(),
|
||||
},
|
||||
UserInput::Image { url } => CoreUserInput::Image { image_url: url },
|
||||
UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
|
||||
UserInput::Skill { name, path } => CoreUserInput::Skill { name, path },
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1415,9 +1680,16 @@ impl UserInput {
|
||||
impl From<CoreUserInput> for UserInput {
|
||||
fn from(value: CoreUserInput) -> Self {
|
||||
match value {
|
||||
CoreUserInput::Text { text } => UserInput::Text { text },
|
||||
CoreUserInput::Text {
|
||||
text,
|
||||
text_elements,
|
||||
} => UserInput::Text {
|
||||
text,
|
||||
text_elements: text_elements.into_iter().map(Into::into).collect(),
|
||||
},
|
||||
CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
|
||||
CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
|
||||
CoreUserInput::Skill { name, path } => UserInput::Skill { name, path },
|
||||
_ => unreachable!("unsupported user input variant"),
|
||||
}
|
||||
}
|
||||
@@ -1489,6 +1761,25 @@ pub enum ThreadItem {
|
||||
},
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
CollabAgentToolCall {
|
||||
/// Unique identifier for this collab tool call.
|
||||
id: String,
|
||||
/// Name of the collab tool that was invoked.
|
||||
tool: CollabAgentTool,
|
||||
/// Current status of the collab tool call.
|
||||
status: CollabAgentToolCallStatus,
|
||||
/// Thread ID of the agent issuing the collab request.
|
||||
sender_thread_id: String,
|
||||
/// Thread ID of the receiving agent, when applicable. In case of spawn operation,
|
||||
/// this corresponds to the newly spawned agent.
|
||||
receiver_thread_ids: Vec<String>,
|
||||
/// Prompt text sent as part of the collab tool call, when available.
|
||||
prompt: Option<String>,
|
||||
/// Last known status of the target agents, when available.
|
||||
agents_states: HashMap<String, CollabAgentState>,
|
||||
},
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
WebSearch { id: String, query: String },
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
@@ -1541,6 +1832,16 @@ pub enum CommandExecutionStatus {
|
||||
Declined,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum CollabAgentTool {
|
||||
SpawnAgent,
|
||||
SendInput,
|
||||
Wait,
|
||||
CloseAgent,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1579,6 +1880,66 @@ pub enum McpToolCallStatus {
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum CollabAgentToolCallStatus {
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum CollabAgentStatus {
|
||||
PendingInit,
|
||||
Running,
|
||||
Completed,
|
||||
Errored,
|
||||
Shutdown,
|
||||
NotFound,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CollabAgentState {
|
||||
pub status: CollabAgentStatus,
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
impl From<CoreAgentStatus> for CollabAgentState {
|
||||
fn from(value: CoreAgentStatus) -> Self {
|
||||
match value {
|
||||
CoreAgentStatus::PendingInit => Self {
|
||||
status: CollabAgentStatus::PendingInit,
|
||||
message: None,
|
||||
},
|
||||
CoreAgentStatus::Running => Self {
|
||||
status: CollabAgentStatus::Running,
|
||||
message: None,
|
||||
},
|
||||
CoreAgentStatus::Completed(message) => Self {
|
||||
status: CollabAgentStatus::Completed,
|
||||
message,
|
||||
},
|
||||
CoreAgentStatus::Errored(message) => Self {
|
||||
status: CollabAgentStatus::Errored,
|
||||
message: Some(message),
|
||||
},
|
||||
CoreAgentStatus::Shutdown => Self {
|
||||
status: CollabAgentStatus::Shutdown,
|
||||
message: None,
|
||||
},
|
||||
CoreAgentStatus::NotFound => Self {
|
||||
status: CollabAgentStatus::NotFound,
|
||||
message: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1844,7 +2205,7 @@ pub struct CommandExecutionRequestApprovalParams {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct CommandExecutionRequestApprovalResponse {
|
||||
pub decision: ApprovalDecision,
|
||||
pub decision: CommandExecutionApprovalDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -1864,7 +2225,7 @@ pub struct FileChangeRequestApprovalParams {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FileChangeRequestApprovalResponse {
|
||||
pub decision: ApprovalDecision,
|
||||
pub decision: FileChangeApprovalDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -1956,6 +2317,16 @@ pub struct DeprecationNoticeNotification {
|
||||
pub details: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ConfigWarningNotification {
|
||||
/// Concise summary of the warning.
|
||||
pub summary: String,
|
||||
/// Optional extra guidance or error details.
|
||||
pub details: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -1996,6 +2367,7 @@ mod tests {
|
||||
content: vec![
|
||||
CoreUserInput::Text {
|
||||
text: "hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
},
|
||||
CoreUserInput::Image {
|
||||
image_url: "https://example.com/image.png".to_string(),
|
||||
@@ -2003,6 +2375,10 @@ mod tests {
|
||||
CoreUserInput::LocalImage {
|
||||
path: PathBuf::from("local/image.png"),
|
||||
},
|
||||
CoreUserInput::Skill {
|
||||
name: "skill-creator".to_string(),
|
||||
path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
@@ -2013,6 +2389,7 @@ mod tests {
|
||||
content: vec![
|
||||
UserInput::Text {
|
||||
text: "hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
},
|
||||
UserInput::Image {
|
||||
url: "https://example.com/image.png".to_string(),
|
||||
@@ -2020,6 +2397,10 @@ mod tests {
|
||||
UserInput::LocalImage {
|
||||
path: PathBuf::from("local/image.png"),
|
||||
},
|
||||
UserInput::Skill {
|
||||
name: "skill-creator".to_string(),
|
||||
path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
|
||||
6
codex-rs/app-server-test-client/BUILD.bazel
Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-app-server-test-client",
    crate_name = "codex_app_server_test_client",
)
@@ -13,16 +13,18 @@ use std::time::Duration;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use clap::ArgAction;
|
||||
use clap::Parser;
|
||||
use clap::Subcommand;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::ApprovalDecision;
|
||||
use codex_app_server_protocol::AskForApproval;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::CommandExecutionApprovalDecision;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
|
||||
use codex_app_server_protocol::FileChangeApprovalDecision;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalParams;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
|
||||
use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
@@ -35,6 +37,8 @@ use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::LoginChatGptResponse;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::ModelListResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
@@ -49,7 +53,7 @@ use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use serde::Serialize;
|
||||
@@ -65,6 +69,19 @@ struct Cli {
|
||||
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
|
||||
codex_bin: String,
|
||||
|
||||
/// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
|
||||
///
|
||||
/// Example:
|
||||
/// `--config 'model_providers.mock.base_url="http://localhost:4010/v2"'`
|
||||
#[arg(
|
||||
short = 'c',
|
||||
long = "config",
|
||||
value_name = "key=value",
|
||||
action = ArgAction::Append,
|
||||
global = true
|
||||
)]
|
||||
config_overrides: Vec<String>,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: CliCommand,
|
||||
}
|
||||
@@ -113,37 +130,54 @@ enum CliCommand {
|
||||
TestLogin,
|
||||
/// Fetch the current account rate limits from the Codex app-server.
|
||||
GetAccountRateLimits,
|
||||
/// List the available models from the Codex app-server.
|
||||
#[command(name = "model-list")]
|
||||
ModelList,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let Cli { codex_bin, command } = Cli::parse();
|
||||
let Cli {
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
command,
|
||||
} = Cli::parse();
|
||||
|
||||
match command {
|
||||
CliCommand::SendMessage { user_message } => send_message(codex_bin, user_message),
|
||||
CliCommand::SendMessageV2 { user_message } => send_message_v2(codex_bin, user_message),
|
||||
CliCommand::SendMessage { user_message } => {
|
||||
send_message(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::SendMessageV2 { user_message } => {
|
||||
send_message_v2(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::TriggerCmdApproval { user_message } => {
|
||||
trigger_cmd_approval(codex_bin, user_message)
|
||||
trigger_cmd_approval(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::TriggerPatchApproval { user_message } => {
|
||||
trigger_patch_approval(codex_bin, user_message)
|
||||
trigger_patch_approval(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(codex_bin),
|
||||
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(&codex_bin, &config_overrides),
|
||||
CliCommand::SendFollowUpV2 {
|
||||
first_message,
|
||||
follow_up_message,
|
||||
} => send_follow_up_v2(codex_bin, first_message, follow_up_message),
|
||||
CliCommand::TestLogin => test_login(codex_bin),
|
||||
CliCommand::GetAccountRateLimits => get_account_rate_limits(codex_bin),
|
||||
} => send_follow_up_v2(
|
||||
&codex_bin,
|
||||
&config_overrides,
|
||||
first_message,
|
||||
follow_up_message,
|
||||
),
|
||||
CliCommand::TestLogin => test_login(&codex_bin, &config_overrides),
|
||||
CliCommand::GetAccountRateLimits => get_account_rate_limits(&codex_bin, &config_overrides),
|
||||
CliCommand::ModelList => model_list(&codex_bin, &config_overrides),
|
||||
}
|
||||
}
|
||||
|
||||
fn send_message(codex_bin: String, user_message: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
fn send_message(codex_bin: &str, config_overrides: &[String], user_message: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let conversation = client.new_conversation()?;
|
||||
let conversation = client.start_thread()?;
|
||||
println!("< newConversation response: {conversation:?}");
|
||||
|
||||
let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
|
||||
@@ -154,51 +188,66 @@ fn send_message(codex_bin: String, user_message: String) -> Result<()> {
|
||||
|
||||
client.stream_conversation(&conversation.conversation_id)?;
|
||||
|
||||
client.remove_conversation_listener(subscription.subscription_id)?;
|
||||
client.remove_thread_listener(subscription.subscription_id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_message_v2(codex_bin: String, user_message: String) -> Result<()> {
|
||||
send_message_v2_with_policies(codex_bin, user_message, None, None)
|
||||
fn send_message_v2(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: String,
|
||||
) -> Result<()> {
|
||||
send_message_v2_with_policies(codex_bin, config_overrides, user_message, None, None)
|
||||
}
|
||||
|
||||
fn trigger_cmd_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
|
||||
fn trigger_cmd_approval(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: Option<String>,
|
||||
) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn trigger_patch_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
|
||||
fn trigger_patch_approval(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: Option<String>,
|
||||
) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn no_trigger_cmd_approval(codex_bin: String) -> Result<()> {
|
||||
fn no_trigger_cmd_approval(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let prompt = "Run `touch should_not_trigger_approval.txt`";
|
||||
send_message_v2_with_policies(codex_bin, prompt.to_string(), None, None)
|
||||
send_message_v2_with_policies(codex_bin, config_overrides, prompt.to_string(), None, None)
|
||||
}
|
||||
|
||||
fn send_message_v2_with_policies(
|
||||
codex_bin: String,
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: String,
|
||||
approval_policy: Option<AskForApproval>,
|
||||
sandbox_policy: Option<SandboxPolicy>,
|
||||
) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
@@ -207,7 +256,11 @@ fn send_message_v2_with_policies(
|
||||
println!("< thread/start response: {thread_response:?}");
|
||||
let mut turn_params = TurnStartParams {
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text { text: user_message }],
|
||||
input: vec![V2UserInput::Text {
|
||||
text: user_message,
|
||||
// Plain text conversion has no UI element ranges.
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
turn_params.approval_policy = approval_policy;
|
||||
@@ -222,11 +275,12 @@ fn send_message_v2_with_policies(
|
||||
}
|
||||
|
||||
fn send_follow_up_v2(
|
||||
codex_bin: String,
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
first_message: String,
|
||||
follow_up_message: String,
|
||||
) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
@@ -238,6 +292,7 @@ fn send_follow_up_v2(
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: first_message,
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
@@ -249,6 +304,7 @@ fn send_follow_up_v2(
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: follow_up_message,
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
@@ -259,8 +315,8 @@ fn send_follow_up_v2(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn test_login(codex_bin: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
@@ -289,8 +345,8 @@ fn test_login(codex_bin: String) -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_account_rate_limits(codex_bin: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
fn get_account_rate_limits(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
@@ -301,6 +357,18 @@ fn get_account_rate_limits(codex_bin: String) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn model_list(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let response = client.model_list(ModelListParams::default())?;
|
||||
println!("< model/list response: {response:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct CodexClient {
|
||||
child: Child,
|
||||
stdin: Option<ChildStdin>,
|
||||
@@ -309,8 +377,12 @@ struct CodexClient {
|
||||
}
|
||||
|
||||
impl CodexClient {
|
||||
fn spawn(codex_bin: String) -> Result<Self> {
|
||||
let mut codex_app_server = Command::new(&codex_bin)
|
||||
fn spawn(codex_bin: &str, config_overrides: &[String]) -> Result<Self> {
|
||||
let mut cmd = Command::new(codex_bin);
|
||||
for override_kv in config_overrides {
|
||||
cmd.arg("--config").arg(override_kv);
|
||||
}
|
||||
let mut codex_app_server = cmd
|
||||
.arg("app-server")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
@@ -351,7 +423,7 @@ impl CodexClient {
|
||||
self.send_request(request, request_id, "initialize")
|
||||
}
|
||||
|
||||
fn new_conversation(&mut self) -> Result<NewConversationResponse> {
|
||||
fn start_thread(&mut self) -> Result<NewConversationResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: request_id.clone(),
|
||||
@@ -363,7 +435,7 @@ impl CodexClient {
|
||||
|
||||
fn add_conversation_listener(
|
||||
&mut self,
|
||||
conversation_id: &ConversationId,
|
||||
conversation_id: &ThreadId,
|
||||
) -> Result<AddConversationSubscriptionResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::AddConversationListener {
|
||||
@@ -377,7 +449,7 @@ impl CodexClient {
|
||||
self.send_request(request, request_id, "addConversationListener")
|
||||
}
|
||||
|
||||
fn remove_conversation_listener(&mut self, subscription_id: Uuid) -> Result<()> {
|
||||
fn remove_thread_listener(&mut self, subscription_id: Uuid) -> Result<()> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::RemoveConversationListener {
|
||||
request_id: request_id.clone(),
|
||||
@@ -395,7 +467,7 @@ impl CodexClient {
|
||||
|
||||
fn send_user_message(
|
||||
&mut self,
|
||||
conversation_id: &ConversationId,
|
||||
conversation_id: &ThreadId,
|
||||
message: &str,
|
||||
) -> Result<SendUserMessageResponse> {
|
||||
let request_id = self.request_id();
|
||||
@@ -405,6 +477,7 @@ impl CodexClient {
|
||||
conversation_id: *conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: message.to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
@@ -452,7 +525,17 @@ impl CodexClient {
|
||||
self.send_request(request, request_id, "account/rateLimits/read")
|
||||
}
|
||||
|
||||
fn stream_conversation(&mut self, conversation_id: &ConversationId) -> Result<()> {
|
||||
fn model_list(&mut self, params: ModelListParams) -> Result<ModelListResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "model/list")
|
||||
}
|
||||
|
||||
fn stream_conversation(&mut self, conversation_id: &ThreadId) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
@@ -469,7 +552,7 @@ impl CodexClient {
|
||||
print!("{}", event.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
EventMsg::TaskComplete(event) => {
|
||||
EventMsg::TurnComplete(event) => {
|
||||
println!("\n[task complete: {event:?}]");
|
||||
break;
|
||||
}
|
||||
@@ -589,7 +672,7 @@ impl CodexClient {
|
||||
fn extract_event(
|
||||
&self,
|
||||
notification: JSONRPCNotification,
|
||||
conversation_id: &ConversationId,
|
||||
conversation_id: &ThreadId,
|
||||
) -> Result<Option<Event>> {
|
||||
let params = notification
|
||||
.params
|
||||
@@ -603,7 +686,7 @@ impl CodexClient {
|
||||
let conversation_value = map
|
||||
.remove("conversationId")
|
||||
.context("event missing conversationId")?;
|
||||
let notification_conversation: ConversationId = serde_json::from_value(conversation_value)
|
||||
let notification_conversation: ThreadId = serde_json::from_value(conversation_value)
|
||||
.context("conversationId was not a valid UUID")?;
|
||||
|
||||
if ¬ification_conversation != conversation_id {
|
||||
@@ -770,7 +853,7 @@ impl CodexClient {
|
||||
}
|
||||
|
||||
let response = CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
decision: CommandExecutionApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved commandExecution request for item {item_id}");
|
||||
@@ -801,7 +884,7 @@ impl CodexClient {
|
||||
}
|
||||
|
||||
let response = FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
decision: FileChangeApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved fileChange request for item {item_id}");
|
||||
|
||||
8
codex-rs/app-server/BUILD.bazel
Normal file
@@ -0,0 +1,8 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server",
    crate_name = "codex_app_server",
    integration_deps_extra = ["//codex-rs/app-server/tests/common:common"],
    test_tags = ["no-sandbox"],
)
@@ -48,7 +48,6 @@ uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
app_test_support = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }

@@ -11,6 +11,8 @@
- [Initialization](#initialization)
- [API Overview](#api-overview)
- [Events](#events)
- [Approvals](#approvals)
- [Skills](#skills)
- [Auth endpoints](#auth-endpoints)

## Protocol
@@ -39,7 +41,7 @@ Use the thread APIs to create, list, or archive conversations. Drive a conversat
## Lifecycle Overview

- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request before this handshake gets rejected.
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead.
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead. If you want to branch from an existing conversation, call `thread/fork` to create a new thread id with copied history.
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes).
- Finish the turn: When the model is done (or the turn is interrupted via making the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage.
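
Taken together, the bullets above describe a short message exchange over stdin/stdout. The sketch below is illustrative rather than captured output: the request ids and user text are arbitrary, and the `initialized` notification shape, the empty `thread/start` params, and every elided field are assumptions; only the method names, the notification names, and the `thr_`/`turn_` id style follow the examples elsewhere in this document.

```json
{ "method": "initialize", "id": 0, "params": { "clientInfo": { "name": "my_client", "title": "My Client", "version": "0.1.0" } } }
{ "id": 0, "result": { … } }
{ "method": "initialized" }
{ "method": "thread/start", "id": 1, "params": {} }
{ "id": 1, "result": { "thread": { "id": "thr_123", … } } }
{ "method": "thread/started", "params": { "thread": { "id": "thr_123", … } } }
{ "method": "turn/start", "id": 2, "params": { "threadId": "thr_123", "input": [ { "type": "text", "text": "Hello" } ] } }
{ "id": 2, "result": { "turn": { "id": "turn_456", "status": "inProgress", … } } }
{ "method": "turn/completed", "params": { "turn": { "id": "turn_456", "status": "completed", … } } }
```
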
@@ -50,6 +52,10 @@ Clients must send a single `initialize` request before invoking any other method

Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.

**Important**: `clientInfo.name` is used to identify the client for the OpenAI Compliance Logs Platform. If
you are developing a new Codex integration that is intended for enterprise use, please contact us to get it
added to a known clients list. For more context: https://chatgpt.com/admin/api-reference#tag/Logs:-Codex

Example (from OpenAI's official VSCode extension):

```json
@@ -58,7 +64,7 @@ Example (from OpenAI's official VSCode extension):
  "id": 0,
  "params": {
    "clientInfo": {
      "name": "codex-vscode",
      "name": "codex_vscode",
      "title": "Codex VS Code Extension",
      "version": "0.1.0"
    }
@@ -70,8 +76,11 @@ Example (from OpenAI's official VSCode extension):

- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
@@ -79,12 +88,14 @@ Example (from OpenAI's official VSCode extension):
- `model/list` — list available models (with reasoning effort options).
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
- `config/mcpServer/reload` — reload MCP server config from disk and queue a refresh for loaded threads (applied on each thread's next active turn); returns `{}`. Use this after editing `config.toml` without restarting the server.
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
- `feedback/upload` — submit a feedback report (classification + optional reason/logs and conversation_id); returns the tracking thread id.
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).

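No `model/list` example appears in this excerpt. A minimal sketch of the request, assuming the empty-params form used by the test client elsewhere in this change; the response (available models plus reasoning effort options) is elided because its exact shape is not shown here:

```json
{ "method": "model/list", "id": 13, "params": {} }
{ "id": 13, "result": { … } }
```
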
### Example: Start or resume a thread
|
||||
|
||||
@@ -117,6 +128,14 @@ To continue a stored session, call `thread/resume` with the `thread.id` you prev
|
||||
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
|
||||
```
|
||||
|
||||
To branch from a stored session, call `thread/fork` with the `thread.id`. This creates a new thread id and emits a `thread/started` notification for it:

```json
{ "method": "thread/fork", "id": 12, "params": { "threadId": "thr_123" } }
{ "id": 12, "result": { "thread": { "id": "thr_456", … } } }
{ "method": "thread/started", "params": { "thread": { … } } }
```

### Example: List threads (with pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:

@@ -143,6 +162,17 @@ Example:

When `nextCursor` is `null`, you’ve reached the final page.

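A sketch of a paginated call, assuming `limit`/`cursor` request parameters and a `data` + `nextCursor` response shape modeled on the other list endpoints (the exact field names are not shown in this excerpt, and the cursor value is invented):

```json
{ "method": "thread/list", "id": 20, "params": { "limit": 25, "modelProviders": ["openai"] } }
{ "id": 20, "result": {
  "data": [ { "id": "thr_123", … }, { "id": "thr_456", … } ],
  "nextCursor": "cursor_abc123"
} }
```

Pass the returned `nextCursor` back as the cursor on the next call to fetch the following page.
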
### Example: List loaded threads

`thread/loaded/list` returns thread ids currently loaded in memory. This is useful when you want to check which sessions are active without scanning rollouts on disk.

```json
{ "method": "thread/loaded/list", "id": 21 }
{ "id": 21, "result": {
  "data": ["thr_123", "thr_456"]
} }
```

### Example: Archive a thread

Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.

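A minimal exchange looks like the following; the `threadId` parameter name is assumed to match the other thread methods, and the empty `{}` result is the documented success response:

```json
{ "method": "thread/archive", "id": 22, "params": { "threadId": "thr_123" } }
{ "id": 22, "result": {} }
```
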
@@ -162,7 +192,7 @@ Turns attach user input (text or images) to a thread and trigger Codex generatio

- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`

You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread. `outputSchema` applies only to the current turn.

```json
{ "method": "turn/start", "id": 30, "params": {
@@ -178,7 +208,14 @@ You can optionally specify config overrides on the new turn. If specified, these
  },
  "model": "gpt-5.1-codex",
  "effort": "medium",
  "summary": "concise",
  // Optional JSON Schema to constrain the final assistant message for this turn.
  "outputSchema": {
    "type": "object",
    "properties": { "answer": { "type": "string" } },
    "required": ["answer"],
    "additionalProperties": false
  }
} }
{ "id": 30, "result": { "turn": {
  "id": "turn_456",
@@ -188,6 +225,26 @@ You can optionally specify config overrides on the new turn. If specified, these
} } }
```

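When `outputSchema` is set, the final assistant message for that turn is constrained to match the schema; for the schema above, the closing `agentMessage` text would look roughly like this (the content is invented):

```json
{ "answer": "The flaky test is caused by a race in the cache warm-up step." }
```
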
### Example: Start a turn (invoke a skill)

Invoke a skill explicitly by including `$<skill-name>` in the text input and adding a `skill` input item alongside it.

```json
{ "method": "turn/start", "id": 33, "params": {
  "threadId": "thr_123",
  "input": [
    { "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage." },
    { "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
  ]
} }
{ "id": 33, "result": { "turn": {
  "id": "turn_457",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

### Example: Interrupt an active turn

You can cancel a running turn with `turn/interrupt`.

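A minimal sketch, assuming the request takes the same camelCase `threadId`/`turnId` identifiers used elsewhere in these examples:

```json
{ "method": "turn/interrupt", "id": 40, "params": { "threadId": "thr_123", "turnId": "turn_456" } }
{ "id": 40, "result": {} }
```

The interrupted turn then finishes with a `turn/completed` notification whose `turn.status` is `"interrupted"`.
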
@@ -302,7 +359,7 @@ Event notifications are the server-initiated event stream for thread lifecycles,

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.

- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo?, additionalDetails? } }`.
- `turn/diff/updated` — `{ threadId, turnId, diff }` is the up-to-date snapshot of the turn-level unified diff, emitted after every `fileChange` item. `diff` is the latest aggregated unified diff across every file change in the turn. UIs can render this to show the full "what changed" view without stitching individual `fileChange` items.
- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.

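To make the per-item lifecycle concrete, here is an illustrative `agentMessage` sequence; the payload fields beyond `threadId`, `turnId`, and `item.id` are invented, and the streaming delta notifications between the two events are elided because their exact method names are not shown in this excerpt:

```json
{ "method": "item/started", "params": { "threadId": "thr_123", "turnId": "turn_456", "item": { "type": "agentMessage", "id": "item_1", "text": "" } } }
{ "method": "item/completed", "params": { "threadId": "thr_123", "turnId": "turn_456", "item": { "type": "agentMessage", "id": "item_1", "text": "I updated the README and fixed the broken link." } } }
```
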
@@ -318,6 +375,7 @@ Today both notifications carry an empty `items` array even when item events were

- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
- `collabToolCall` — `{id, tool, status, senderThreadId, receiverThreadId?, newThreadId?, prompt?, agentStatus?}` describing collab tool calls (`spawn_agent`, `send_input`, `wait`, `close_agent`); `status` is `inProgress`, `completed`, or `failed`.
- `webSearch` — `{id, query}` for a web search request issued by the agent.
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.

@@ -352,7 +410,7 @@ There are additional item-specific events:

### Errors

The `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo?, additionalDetails? } }` payload as `turn.status: "failed"` and may precede that terminal notification.

`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:

@@ -397,6 +455,46 @@ Order of messages:

UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request. The terminal `item/completed` notification will be sent with the appropriate status.

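As a rough illustration of the shape of that exchange (the method name, parameter names, and decision string below are assumptions for illustration, not an exact transcript): the server sends a JSON-RPC request with its own `id`, and the client answers it with a decision.

```json
{ "method": "item/commandExecution/requestApproval", "id": 7, "params": { "threadId": "thr_123", "turnId": "turn_456", "itemId": "item_2", "command": "cargo test" } }
{ "id": 7, "result": { "decision": "accept" } }
```
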
## Skills

Invoke a skill by including `$<skill-name>` in the text input. Add a `skill` input item (recommended) so the backend injects full skill instructions instead of relying on the model to resolve the name.

```json
{
  "method": "turn/start",
  "id": 101,
  "params": {
    "threadId": "thread-1",
    "input": [
      { "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI." },
      { "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
    ]
  }
}
```

If you omit the `skill` item, the model will still parse the `$<skill-name>` marker and try to locate the skill, which can add latency.

Example:

```
$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage.
```

Use `skills/list` to fetch the available skills (optionally scoped by `cwd` and/or with `forceReload`).

```json
{ "method": "skills/list", "id": 25, "params": {
  "cwd": "/Users/me/project",
  "forceReload": false
} }
{ "id": 25, "result": {
  "skills": [
    { "name": "skill-creator", "description": "Create or update a Codex skill" }
  ]
} }
```

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

@@ -1,15 +1,24 @@
|
||||
use crate::codex_message_processor::ApiVersion;
|
||||
use crate::codex_message_processor::PendingInterrupts;
|
||||
use crate::codex_message_processor::PendingRollbacks;
|
||||
use crate::codex_message_processor::TurnSummary;
|
||||
use crate::codex_message_processor::TurnSummaryStore;
|
||||
use crate::codex_message_processor::read_event_msgs_from_rollout;
|
||||
use crate::codex_message_processor::read_summary_from_rollout;
|
||||
use crate::codex_message_processor::summary_to_thread;
|
||||
use crate::error_code::INTERNAL_ERROR_CODE;
|
||||
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
|
||||
use codex_app_server_protocol::AgentMessageDeltaNotification;
|
||||
use codex_app_server_protocol::ApplyPatchApprovalParams;
|
||||
use codex_app_server_protocol::ApplyPatchApprovalResponse;
|
||||
use codex_app_server_protocol::ApprovalDecision;
|
||||
use codex_app_server_protocol::CodexErrorInfo as V2CodexErrorInfo;
|
||||
use codex_app_server_protocol::CollabAgentState as V2CollabAgentStatus;
|
||||
use codex_app_server_protocol::CollabAgentTool;
|
||||
use codex_app_server_protocol::CollabAgentToolCallStatus as V2CollabToolCallStatus;
|
||||
use codex_app_server_protocol::CommandAction as V2ParsedCommand;
|
||||
use codex_app_server_protocol::CommandExecutionApprovalDecision;
|
||||
use codex_app_server_protocol::CommandExecutionOutputDeltaNotification;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
|
||||
@@ -20,6 +29,7 @@ use codex_app_server_protocol::ErrorNotification;
|
||||
use codex_app_server_protocol::ExecCommandApprovalParams;
|
||||
use codex_app_server_protocol::ExecCommandApprovalResponse;
|
||||
use codex_app_server_protocol::ExecPolicyAmendment as V2ExecPolicyAmendment;
|
||||
use codex_app_server_protocol::FileChangeApprovalDecision;
|
||||
use codex_app_server_protocol::FileChangeOutputDeltaNotification;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalParams;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
|
||||
@@ -27,6 +37,7 @@ use codex_app_server_protocol::FileUpdateChange;
|
||||
use codex_app_server_protocol::InterruptConversationResponse;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::McpToolCallError;
|
||||
use codex_app_server_protocol::McpToolCallResult;
|
||||
use codex_app_server_protocol::McpToolCallStatus;
|
||||
@@ -40,6 +51,7 @@ use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use codex_app_server_protocol::TerminalInteractionNotification;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadRollbackResponse;
|
||||
use codex_app_server_protocol::ThreadTokenUsage;
|
||||
use codex_app_server_protocol::ThreadTokenUsageUpdatedNotification;
|
||||
use codex_app_server_protocol::Turn;
|
||||
@@ -50,9 +62,11 @@ use codex_app_server_protocol::TurnInterruptResponse;
|
||||
use codex_app_server_protocol::TurnPlanStep;
|
||||
use codex_app_server_protocol::TurnPlanUpdatedNotification;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_core::CodexConversation;
|
||||
use codex_app_server_protocol::build_turns_from_event_msgs;
|
||||
use codex_core::CodexThread;
|
||||
use codex_core::parse_command::shlex_join;
|
||||
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_core::protocol::CodexErrorInfo as CoreCodexErrorInfo;
|
||||
use codex_core::protocol::Event;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::ExecApprovalRequestEvent;
|
||||
@@ -66,7 +80,7 @@ use codex_core::protocol::TokenCountEvent;
|
||||
use codex_core::protocol::TurnDiffEvent;
|
||||
use codex_core::review_format::format_review_findings_block;
|
||||
use codex_core::review_prompts;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::plan_tool::UpdatePlanArgs;
|
||||
use codex_protocol::protocol::ReviewOutputEvent;
|
||||
use std::collections::HashMap;
|
||||
@@ -78,21 +92,24 @@ use tracing::error;
|
||||
|
||||
type JsonValue = serde_json::Value;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) async fn apply_bespoke_event_handling(
|
||||
event: Event,
|
||||
conversation_id: ConversationId,
|
||||
conversation: Arc<CodexConversation>,
|
||||
conversation_id: ThreadId,
|
||||
conversation: Arc<CodexThread>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
pending_interrupts: PendingInterrupts,
|
||||
pending_rollbacks: PendingRollbacks,
|
||||
turn_summary_store: TurnSummaryStore,
|
||||
api_version: ApiVersion,
|
||||
fallback_model_provider: String,
|
||||
) {
|
||||
let Event {
|
||||
id: event_turn_id,
|
||||
msg,
|
||||
} = event;
|
||||
match msg {
|
||||
EventMsg::TaskComplete(_ev) => {
|
||||
EventMsg::TurnComplete(_ev) => {
|
||||
handle_turn_complete(
|
||||
conversation_id,
|
||||
event_turn_id,
|
||||
@@ -264,6 +281,218 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabAgentSpawnBegin(begin_event) => {
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: begin_event.call_id,
|
||||
tool: CollabAgentTool::SpawnAgent,
|
||||
status: V2CollabToolCallStatus::InProgress,
|
||||
sender_thread_id: begin_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids: Vec::new(),
|
||||
prompt: Some(begin_event.prompt),
|
||||
agents_states: HashMap::new(),
|
||||
};
|
||||
let notification = ItemStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemStarted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabAgentSpawnEnd(end_event) => {
|
||||
let has_receiver = end_event.new_thread_id.is_some();
|
||||
let status = match &end_event.status {
|
||||
codex_protocol::protocol::AgentStatus::Errored(_)
|
||||
| codex_protocol::protocol::AgentStatus::NotFound => V2CollabToolCallStatus::Failed,
|
||||
_ if has_receiver => V2CollabToolCallStatus::Completed,
|
||||
_ => V2CollabToolCallStatus::Failed,
|
||||
};
|
||||
let (receiver_thread_ids, agents_states) = match end_event.new_thread_id {
|
||||
Some(id) => {
|
||||
let receiver_id = id.to_string();
|
||||
let received_status = V2CollabAgentStatus::from(end_event.status.clone());
|
||||
(
|
||||
vec![receiver_id.clone()],
|
||||
[(receiver_id, received_status)].into_iter().collect(),
|
||||
)
|
||||
}
|
||||
None => (Vec::new(), HashMap::new()),
|
||||
};
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: end_event.call_id,
|
||||
tool: CollabAgentTool::SpawnAgent,
|
||||
status,
|
||||
sender_thread_id: end_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids,
|
||||
prompt: Some(end_event.prompt),
|
||||
agents_states,
|
||||
};
|
||||
let notification = ItemCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabAgentInteractionBegin(begin_event) => {
|
||||
let receiver_thread_ids = vec![begin_event.receiver_thread_id.to_string()];
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: begin_event.call_id,
|
||||
tool: CollabAgentTool::SendInput,
|
||||
status: V2CollabToolCallStatus::InProgress,
|
||||
sender_thread_id: begin_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids,
|
||||
prompt: Some(begin_event.prompt),
|
||||
agents_states: HashMap::new(),
|
||||
};
|
||||
let notification = ItemStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemStarted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabAgentInteractionEnd(end_event) => {
|
||||
let status = match &end_event.status {
|
||||
codex_protocol::protocol::AgentStatus::Errored(_)
|
||||
| codex_protocol::protocol::AgentStatus::NotFound => V2CollabToolCallStatus::Failed,
|
||||
_ => V2CollabToolCallStatus::Completed,
|
||||
};
|
||||
let receiver_id = end_event.receiver_thread_id.to_string();
|
||||
let received_status = V2CollabAgentStatus::from(end_event.status);
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: end_event.call_id,
|
||||
tool: CollabAgentTool::SendInput,
|
||||
status,
|
||||
sender_thread_id: end_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids: vec![receiver_id.clone()],
|
||||
prompt: Some(end_event.prompt),
|
||||
agents_states: [(receiver_id, received_status)].into_iter().collect(),
|
||||
};
|
||||
let notification = ItemCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabWaitingBegin(begin_event) => {
|
||||
let receiver_thread_ids = begin_event
|
||||
.receiver_thread_ids
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect();
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: begin_event.call_id,
|
||||
tool: CollabAgentTool::Wait,
|
||||
status: V2CollabToolCallStatus::InProgress,
|
||||
sender_thread_id: begin_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids,
|
||||
prompt: None,
|
||||
agents_states: HashMap::new(),
|
||||
};
|
||||
let notification = ItemStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemStarted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabWaitingEnd(end_event) => {
|
||||
let status = if end_event.statuses.values().any(|status| {
|
||||
matches!(
|
||||
status,
|
||||
codex_protocol::protocol::AgentStatus::Errored(_)
|
||||
| codex_protocol::protocol::AgentStatus::NotFound
|
||||
)
|
||||
}) {
|
||||
V2CollabToolCallStatus::Failed
|
||||
} else {
|
||||
V2CollabToolCallStatus::Completed
|
||||
};
|
||||
let receiver_thread_ids = end_event.statuses.keys().map(ToString::to_string).collect();
|
||||
let agents_states = end_event
|
||||
.statuses
|
||||
.iter()
|
||||
.map(|(id, status)| (id.to_string(), V2CollabAgentStatus::from(status.clone())))
|
||||
.collect();
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: end_event.call_id,
|
||||
tool: CollabAgentTool::Wait,
|
||||
status,
|
||||
sender_thread_id: end_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids,
|
||||
prompt: None,
|
||||
agents_states,
|
||||
};
|
||||
let notification = ItemCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabCloseBegin(begin_event) => {
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: begin_event.call_id,
|
||||
tool: CollabAgentTool::CloseAgent,
|
||||
status: V2CollabToolCallStatus::InProgress,
|
||||
sender_thread_id: begin_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids: vec![begin_event.receiver_thread_id.to_string()],
|
||||
prompt: None,
|
||||
agents_states: HashMap::new(),
|
||||
};
|
||||
let notification = ItemStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemStarted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::CollabCloseEnd(end_event) => {
|
||||
let status = match &end_event.status {
|
||||
codex_protocol::protocol::AgentStatus::Errored(_)
|
||||
| codex_protocol::protocol::AgentStatus::NotFound => V2CollabToolCallStatus::Failed,
|
||||
_ => V2CollabToolCallStatus::Completed,
|
||||
};
|
||||
let receiver_id = end_event.receiver_thread_id.to_string();
|
||||
let agents_states = [(
|
||||
receiver_id.clone(),
|
||||
V2CollabAgentStatus::from(end_event.status),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let item = ThreadItem::CollabAgentToolCall {
|
||||
id: end_event.call_id,
|
||||
tool: CollabAgentTool::CloseAgent,
|
||||
status,
|
||||
sender_thread_id: end_event.sender_thread_id.to_string(),
|
||||
receiver_thread_ids: vec![receiver_id],
|
||||
prompt: None,
|
||||
agents_states,
|
||||
};
|
||||
let notification = ItemCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::AgentMessageContentDelta(event) => {
|
||||
let notification = AgentMessageDeltaNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
@@ -337,14 +566,35 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
.await;
|
||||
}
|
||||
EventMsg::Error(ev) => {
|
||||
let message = ev.message.clone();
|
||||
let codex_error_info = ev.codex_error_info.clone();
|
||||
|
||||
// If this error belongs to an in-flight `thread/rollback` request, fail that request
|
||||
// (and clear pending state) so subsequent rollbacks are unblocked.
|
||||
//
|
||||
// Don't send a notification for this error.
|
||||
if matches!(
|
||||
codex_error_info,
|
||||
Some(CoreCodexErrorInfo::ThreadRollbackFailed)
|
||||
) {
|
||||
return handle_thread_rollback_failed(
|
||||
conversation_id,
|
||||
message,
|
||||
&pending_rollbacks,
|
||||
&outgoing,
|
||||
)
|
||||
.await;
|
||||
};
|
||||
|
||||
let turn_error = TurnError {
|
||||
message: ev.message,
|
||||
codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
|
||||
additional_details: None,
|
||||
};
|
||||
handle_error(conversation_id, turn_error.clone(), &turn_summary_store).await;
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::Error(ErrorNotification {
|
||||
error: turn_error,
|
||||
error: turn_error.clone(),
|
||||
will_retry: false,
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
@@ -357,6 +607,7 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
let turn_error = TurnError {
|
||||
message: ev.message,
|
||||
codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
|
||||
additional_details: ev.additional_details,
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::Error(ErrorNotification {
|
||||
@@ -688,6 +939,58 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
)
|
||||
.await;
|
||||
}
|
||||
EventMsg::ThreadRolledBack(_rollback_event) => {
|
||||
let pending = {
|
||||
let mut map = pending_rollbacks.lock().await;
|
||||
map.remove(&conversation_id)
|
||||
};
|
||||
|
||||
if let Some(request_id) = pending {
|
||||
let rollout_path = conversation.rollout_path();
|
||||
let response = match read_summary_from_rollout(
|
||||
rollout_path.as_path(),
|
||||
fallback_model_provider.as_str(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(summary) => {
|
||||
let mut thread = summary_to_thread(summary);
|
||||
match read_event_msgs_from_rollout(rollout_path.as_path()).await {
|
||||
Ok(events) => {
|
||||
thread.turns = build_turns_from_event_msgs(&events);
|
||||
ThreadRollbackResponse { thread }
|
||||
}
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!(
|
||||
"failed to load rollout `{}`: {err}",
|
||||
rollout_path.display()
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!(
|
||||
"failed to load rollout `{}`: {err}",
|
||||
rollout_path.display()
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
}
|
||||
EventMsg::TurnDiff(turn_diff_event) => {
|
||||
handle_turn_diff(
|
||||
conversation_id,
|
||||
@@ -714,7 +1017,7 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
}
|
||||
|
||||
async fn handle_turn_diff(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
event_turn_id: &str,
|
||||
turn_diff_event: TurnDiffEvent,
|
||||
api_version: ApiVersion,
|
||||
@@ -733,7 +1036,7 @@ async fn handle_turn_diff(
|
||||
}
|
||||
|
||||
async fn handle_turn_plan_update(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
event_turn_id: &str,
|
||||
plan_update_event: UpdatePlanArgs,
|
||||
api_version: ApiVersion,
|
||||
@@ -757,7 +1060,7 @@ async fn handle_turn_plan_update(
|
||||
}
|
||||
|
||||
async fn emit_turn_completed_with_status(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
event_turn_id: String,
|
||||
status: TurnStatus,
|
||||
error: Option<TurnError>,
|
||||
@@ -778,7 +1081,7 @@ async fn emit_turn_completed_with_status(
|
||||
}
|
||||
|
||||
async fn complete_file_change_item(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
item_id: String,
|
||||
changes: Vec<FileUpdateChange>,
|
||||
status: PatchApplyStatus,
|
||||
@@ -810,7 +1113,7 @@ async fn complete_file_change_item(
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn complete_command_execution_item(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
turn_id: String,
|
||||
item_id: String,
|
||||
command: String,
|
||||
@@ -843,7 +1146,7 @@ async fn complete_command_execution_item(
|
||||
|
||||
async fn maybe_emit_raw_response_item_completed(
|
||||
api_version: ApiVersion,
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
turn_id: &str,
|
||||
item: codex_protocol::models::ResponseItem,
|
||||
outgoing: &OutgoingMessageSender,
|
||||
@@ -863,7 +1166,7 @@ async fn maybe_emit_raw_response_item_completed(
|
||||
}
|
||||
|
||||
async fn find_and_remove_turn_summary(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
) -> TurnSummary {
|
||||
let mut map = turn_summary_store.lock().await;
|
||||
@@ -871,7 +1174,7 @@ async fn find_and_remove_turn_summary(
|
||||
}
|
||||
|
||||
async fn handle_turn_complete(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
event_turn_id: String,
|
||||
outgoing: &OutgoingMessageSender,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
@@ -887,7 +1190,7 @@ async fn handle_turn_complete(
|
||||
}
|
||||
|
||||
async fn handle_turn_interrupted(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
event_turn_id: String,
|
||||
outgoing: &OutgoingMessageSender,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
@@ -904,8 +1207,33 @@ async fn handle_turn_interrupted(
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn handle_thread_rollback_failed(
|
||||
conversation_id: ThreadId,
|
||||
message: String,
|
||||
pending_rollbacks: &PendingRollbacks,
|
||||
outgoing: &OutgoingMessageSender,
|
||||
) {
|
||||
let pending_rollback = {
|
||||
let mut map = pending_rollbacks.lock().await;
|
||||
map.remove(&conversation_id)
|
||||
};
|
||||
|
||||
if let Some(request_id) = pending_rollback {
|
||||
outgoing
|
||||
.send_error(
|
||||
request_id,
|
||||
JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: message.clone(),
|
||||
data: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_token_count_event(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
turn_id: String,
|
||||
token_count_event: TokenCountEvent,
|
||||
outgoing: &OutgoingMessageSender,
|
||||
@@ -933,7 +1261,7 @@ async fn handle_token_count_event(
|
||||
}
|
||||
|
||||
async fn handle_error(
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
error: TurnError,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
) {
|
||||
@@ -944,7 +1272,7 @@ async fn handle_error(
|
||||
async fn on_patch_approval_response(
|
||||
event_turn_id: String,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
codex: Arc<CodexConversation>,
|
||||
codex: Arc<CodexThread>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
let value = match response {
|
||||
@@ -986,7 +1314,7 @@ async fn on_patch_approval_response(
|
||||
async fn on_exec_approval_response(
|
||||
event_turn_id: String,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
conversation: Arc<CodexConversation>,
|
||||
conversation: Arc<CodexThread>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
let value = match response {
|
||||
@@ -1081,14 +1409,29 @@ fn format_file_change_diff(change: &CoreFileChange) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn map_file_change_approval_decision(
|
||||
decision: FileChangeApprovalDecision,
|
||||
) -> (ReviewDecision, Option<PatchApplyStatus>) {
|
||||
match decision {
|
||||
FileChangeApprovalDecision::Accept => (ReviewDecision::Approved, None),
|
||||
FileChangeApprovalDecision::AcceptForSession => (ReviewDecision::ApprovedForSession, None),
|
||||
FileChangeApprovalDecision::Decline => {
|
||||
(ReviewDecision::Denied, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
FileChangeApprovalDecision::Cancel => {
|
||||
(ReviewDecision::Abort, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn on_file_change_request_approval_response(
|
||||
event_turn_id: String,
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
item_id: String,
|
||||
changes: Vec<FileUpdateChange>,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
codex: Arc<CodexConversation>,
|
||||
codex: Arc<CodexThread>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
turn_summary_store: TurnSummaryStore,
|
||||
) {
|
||||
@@ -1099,23 +1442,12 @@ async fn on_file_change_request_approval_response(
|
||||
.unwrap_or_else(|err| {
|
||||
error!("failed to deserialize FileChangeRequestApprovalResponse: {err}");
|
||||
FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
decision: FileChangeApprovalDecision::Decline,
|
||||
}
|
||||
});
|
||||
|
||||
let (decision, completion_status) = match response.decision {
|
||||
ApprovalDecision::Accept
|
||||
| ApprovalDecision::AcceptForSession
|
||||
| ApprovalDecision::AcceptWithExecpolicyAmendment { .. } => {
|
||||
(ReviewDecision::Approved, None)
|
||||
}
|
||||
ApprovalDecision::Decline => {
|
||||
(ReviewDecision::Denied, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
ApprovalDecision::Cancel => {
|
||||
(ReviewDecision::Abort, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
};
|
||||
let (decision, completion_status) =
|
||||
map_file_change_approval_decision(response.decision);
|
||||
// Allow EventMsg::PatchApplyEnd to emit ItemCompleted for accepted patches.
|
||||
// Only short-circuit on declines/cancels/failures.
|
||||
(decision, completion_status)
|
||||
@@ -1153,13 +1485,13 @@ async fn on_file_change_request_approval_response(
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn on_command_execution_request_approval_response(
|
||||
event_turn_id: String,
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
item_id: String,
|
||||
command: String,
|
||||
cwd: PathBuf,
|
||||
command_actions: Vec<V2ParsedCommand>,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
conversation: Arc<CodexConversation>,
|
||||
conversation: Arc<CodexThread>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
@@ -1169,16 +1501,18 @@ async fn on_command_execution_request_approval_response(
|
||||
.unwrap_or_else(|err| {
|
||||
error!("failed to deserialize CommandExecutionRequestApprovalResponse: {err}");
|
||||
CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
decision: CommandExecutionApprovalDecision::Decline,
|
||||
}
|
||||
});
|
||||
|
||||
let decision = response.decision;
|
||||
|
||||
let (decision, completion_status) = match decision {
|
||||
ApprovalDecision::Accept => (ReviewDecision::Approved, None),
|
||||
ApprovalDecision::AcceptForSession => (ReviewDecision::ApprovedForSession, None),
|
||||
ApprovalDecision::AcceptWithExecpolicyAmendment {
|
||||
CommandExecutionApprovalDecision::Accept => (ReviewDecision::Approved, None),
|
||||
CommandExecutionApprovalDecision::AcceptForSession => {
|
||||
(ReviewDecision::ApprovedForSession, None)
|
||||
}
|
||||
CommandExecutionApprovalDecision::AcceptWithExecpolicyAmendment {
|
||||
execpolicy_amendment,
|
||||
} => (
|
||||
ReviewDecision::ApprovedExecpolicyAmendment {
|
||||
@@ -1186,11 +1520,11 @@ async fn on_command_execution_request_approval_response(
|
||||
},
|
||||
None,
|
||||
),
|
||||
ApprovalDecision::Decline => (
|
||||
CommandExecutionApprovalDecision::Decline => (
|
||||
ReviewDecision::Denied,
|
||||
Some(CommandExecutionStatus::Declined),
|
||||
),
|
||||
ApprovalDecision::Cancel => (
|
||||
CommandExecutionApprovalDecision::Cancel => (
|
||||
ReviewDecision::Abort,
|
||||
Some(CommandExecutionStatus::Declined),
|
||||
),
|
||||
@@ -1330,9 +1664,17 @@ mod tests {
|
||||
Arc::new(Mutex::new(HashMap::new()))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn file_change_accept_for_session_maps_to_approved_for_session() {
|
||||
let (decision, completion_status) =
|
||||
map_file_change_approval_decision(FileChangeApprovalDecision::AcceptForSession);
|
||||
assert_eq!(decision, ReviewDecision::ApprovedForSession);
|
||||
assert_eq!(completion_status, None);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_error_records_message() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let turn_summary_store = new_turn_summary_store();
|
||||
|
||||
handle_error(
|
||||
@@ -1340,6 +1682,7 @@ mod tests {
|
||||
TurnError {
|
||||
message: "boom".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::InternalServerError),
|
||||
additional_details: None,
|
||||
},
|
||||
&turn_summary_store,
|
||||
)
|
||||
@@ -1351,6 +1694,7 @@ mod tests {
|
||||
Some(TurnError {
|
||||
message: "boom".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::InternalServerError),
|
||||
additional_details: None,
|
||||
})
|
||||
);
|
||||
Ok(())
|
||||
@@ -1358,7 +1702,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_turn_complete_emits_completed_without_error() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let event_turn_id = "complete1".to_string();
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let outgoing = Arc::new(OutgoingMessageSender::new(tx));
|
||||
@@ -1390,7 +1734,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_turn_interrupted_emits_interrupted_with_error() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let event_turn_id = "interrupt1".to_string();
|
||||
let turn_summary_store = new_turn_summary_store();
|
||||
handle_error(
|
||||
@@ -1398,6 +1742,7 @@ mod tests {
|
||||
TurnError {
|
||||
message: "oops".to_string(),
|
||||
codex_error_info: None,
|
||||
additional_details: None,
|
||||
},
|
||||
&turn_summary_store,
|
||||
)
|
||||
@@ -1431,7 +1776,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_turn_complete_emits_failed_with_error() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let event_turn_id = "complete_err1".to_string();
|
||||
let turn_summary_store = new_turn_summary_store();
|
||||
handle_error(
|
||||
@@ -1439,6 +1784,7 @@ mod tests {
|
||||
TurnError {
|
||||
message: "bad".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::Other),
|
||||
additional_details: None,
|
||||
},
|
||||
&turn_summary_store,
|
||||
)
|
||||
@@ -1467,6 +1813,7 @@ mod tests {
|
||||
Some(TurnError {
|
||||
message: "bad".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::Other),
|
||||
additional_details: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -1494,7 +1841,7 @@ mod tests {
|
||||
],
|
||||
};
|
||||
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
|
||||
handle_turn_plan_update(
|
||||
conversation_id,
|
||||
@@ -1528,7 +1875,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_token_count_event_emits_usage_and_rate_limits() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let turn_id = "turn-123".to_string();
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let outgoing = Arc::new(OutgoingMessageSender::new(tx));
|
||||
@@ -1613,7 +1960,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handle_token_count_event_without_usage_info() -> Result<()> {
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
let turn_id = "turn-456".to_string();
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let outgoing = Arc::new(OutgoingMessageSender::new(tx));
|
||||
@@ -1647,7 +1994,7 @@ mod tests {
|
||||
},
|
||||
};
|
||||
|
||||
let thread_id = ConversationId::new().to_string();
|
||||
let thread_id = ThreadId::new().to_string();
|
||||
let turn_id = "turn_1".to_string();
|
||||
let notification = construct_mcp_tool_call_notification(
|
||||
begin_event.clone(),
|
||||
@@ -1677,8 +2024,8 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn test_handle_turn_complete_emits_error_multiple_turns() -> Result<()> {
|
||||
// Conversation A will have two turns; Conversation B will have one turn.
|
||||
let conversation_a = ConversationId::new();
|
||||
let conversation_b = ConversationId::new();
|
||||
let conversation_a = ThreadId::new();
|
||||
let conversation_b = ThreadId::new();
|
||||
let turn_summary_store = new_turn_summary_store();
|
||||
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
@@ -1691,6 +2038,7 @@ mod tests {
|
||||
TurnError {
|
||||
message: "a1".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::BadRequest),
|
||||
additional_details: None,
|
||||
},
|
||||
&turn_summary_store,
|
||||
)
|
||||
@@ -1710,6 +2058,7 @@ mod tests {
|
||||
TurnError {
|
||||
message: "b1".to_string(),
|
||||
codex_error_info: None,
|
||||
additional_details: None,
|
||||
},
|
||||
&turn_summary_store,
|
||||
)
|
||||
@@ -1746,6 +2095,7 @@ mod tests {
|
||||
Some(TurnError {
|
||||
message: "a1".to_string(),
|
||||
codex_error_info: Some(V2CodexErrorInfo::BadRequest),
|
||||
additional_details: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -1766,6 +2116,7 @@ mod tests {
|
||||
Some(TurnError {
|
||||
message: "b1".to_string(),
|
||||
codex_error_info: None,
|
||||
additional_details: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -1801,7 +2152,7 @@ mod tests {
|
||||
},
|
||||
};
|
||||
|
||||
let thread_id = ConversationId::new().to_string();
|
||||
let thread_id = ThreadId::new().to_string();
|
||||
let turn_id = "turn_2".to_string();
|
||||
let notification = construct_mcp_tool_call_notification(
|
||||
begin_event.clone(),
|
||||
@@ -1852,7 +2203,7 @@ mod tests {
|
||||
result: Ok(result),
|
||||
};
|
||||
|
||||
let thread_id = ConversationId::new().to_string();
|
||||
let thread_id = ThreadId::new().to_string();
|
||||
let turn_id = "turn_3".to_string();
|
||||
let notification = construct_mcp_tool_call_end_notification(
|
||||
end_event.clone(),
|
||||
@@ -1895,7 +2246,7 @@ mod tests {
|
||||
result: Err("boom".to_string()),
|
||||
};
|
||||
|
||||
let thread_id = ConversationId::new().to_string();
|
||||
let thread_id = ThreadId::new().to_string();
|
||||
let turn_id = "turn_4".to_string();
|
||||
let notification = construct_mcp_tool_call_end_notification(
|
||||
end_event.clone(),
|
||||
@@ -1929,7 +2280,7 @@ mod tests {
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let outgoing = OutgoingMessageSender::new(tx);
|
||||
let unified_diff = "--- a\n+++ b\n".to_string();
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
|
||||
handle_turn_diff(
|
||||
conversation_id,
|
||||
@@ -1964,7 +2315,7 @@ mod tests {
|
||||
async fn test_handle_turn_diff_is_noop_for_v1() -> Result<()> {
|
||||
let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let outgoing = OutgoingMessageSender::new(tx);
|
||||
let conversation_id = ConversationId::new();
|
||||
let conversation_id = ThreadId::new();
|
||||
|
||||
handle_turn_diff(
|
||||
conversation_id,
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,12 +3,18 @@ use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigReadResponse;
|
||||
use codex_app_server_protocol::ConfigRequirements;
|
||||
use codex_app_server_protocol::ConfigRequirementsReadResponse;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::ConfigWriteErrorCode;
|
||||
use codex_app_server_protocol::ConfigWriteResponse;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::SandboxMode;
|
||||
use codex_core::config::ConfigService;
|
||||
use codex_core::config::ConfigServiceError;
|
||||
use codex_core::config_loader::ConfigRequirementsToml;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
|
||||
use serde_json::json;
|
||||
use std::path::PathBuf;
|
||||
use toml::Value as TomlValue;
|
||||
@@ -19,9 +25,13 @@ pub(crate) struct ConfigApi {
|
||||
}
|
||||
|
||||
impl ConfigApi {
|
||||
pub(crate) fn new(codex_home: PathBuf, cli_overrides: Vec<(String, TomlValue)>) -> Self {
|
||||
pub(crate) fn new(
|
||||
codex_home: PathBuf,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
) -> Self {
|
||||
Self {
|
||||
service: ConfigService::new(codex_home, cli_overrides),
|
||||
service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +42,19 @@ impl ConfigApi {
|
||||
self.service.read(params).await.map_err(map_error)
|
||||
}
|
||||
|
||||
pub(crate) async fn config_requirements_read(
|
||||
&self,
|
||||
) -> Result<ConfigRequirementsReadResponse, JSONRPCErrorError> {
|
||||
let requirements = self
|
||||
.service
|
||||
.read_requirements()
|
||||
.await
|
||||
.map_err(map_error)?
|
||||
.map(map_requirements_toml_to_api);
|
||||
|
||||
Ok(ConfigRequirementsReadResponse { requirements })
|
||||
}
|
||||
|
||||
pub(crate) async fn write_value(
|
||||
&self,
|
||||
params: ConfigValueWriteParams,
|
||||
@@ -47,6 +70,32 @@ impl ConfigApi {
|
||||
}
|
||||
}
|
||||
|
||||
fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigRequirements {
|
||||
ConfigRequirements {
|
||||
allowed_approval_policies: requirements.allowed_approval_policies.map(|policies| {
|
||||
policies
|
||||
.into_iter()
|
||||
.map(codex_app_server_protocol::AskForApproval::from)
|
||||
.collect()
|
||||
}),
|
||||
allowed_sandbox_modes: requirements.allowed_sandbox_modes.map(|modes| {
|
||||
modes
|
||||
.into_iter()
|
||||
.filter_map(map_sandbox_mode_requirement_to_api)
|
||||
.collect()
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Option<SandboxMode> {
|
||||
match mode {
|
||||
CoreSandboxModeRequirement::ReadOnly => Some(SandboxMode::ReadOnly),
|
||||
CoreSandboxModeRequirement::WorkspaceWrite => Some(SandboxMode::WorkspaceWrite),
|
||||
CoreSandboxModeRequirement::DangerFullAccess => Some(SandboxMode::DangerFullAccess),
|
||||
CoreSandboxModeRequirement::ExternalSandbox => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
|
||||
if let Some(code) = err.write_error_code() {
|
||||
return config_write_error(code, err.to_string());
|
||||
@@ -68,3 +117,39 @@ fn config_write_error(code: ConfigWriteErrorCode, message: impl Into<String>) ->
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn map_requirements_toml_to_api_converts_core_enums() {
|
||||
let requirements = ConfigRequirementsToml {
|
||||
allowed_approval_policies: Some(vec![
|
||||
CoreAskForApproval::Never,
|
||||
CoreAskForApproval::OnRequest,
|
||||
]),
|
||||
allowed_sandbox_modes: Some(vec![
|
||||
CoreSandboxModeRequirement::ReadOnly,
|
||||
CoreSandboxModeRequirement::ExternalSandbox,
|
||||
]),
|
||||
mcp_servers: None,
|
||||
};
|
||||
|
||||
let mapped = map_requirements_toml_to_api(requirements);
|
||||
|
||||
assert_eq!(
|
||||
mapped.allowed_approval_policies,
|
||||
Some(vec![
|
||||
codex_app_server_protocol::AskForApproval::Never,
|
||||
codex_app_server_protocol::AskForApproval::OnRequest,
|
||||
])
|
||||
);
|
||||
assert_eq!(
|
||||
mapped.allowed_sandbox_modes,
|
||||
Some(vec![SandboxMode::ReadOnly]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigBuilder;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Result as IoResult;
|
||||
use std::path::PathBuf;
|
||||
@@ -9,7 +11,9 @@ use std::path::PathBuf;
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_core::check_execpolicy_for_warnings;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
@@ -17,13 +21,12 @@ use tokio::io::BufReader;
|
||||
use tokio::io::{self};
|
||||
use tokio::sync::mpsc;
|
||||
use toml::Value as TomlValue;
|
||||
use tracing::Level;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
use tracing_subscriber::Layer;
|
||||
use tracing_subscriber::filter::Targets;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
@@ -44,6 +47,8 @@ const CHANNEL_CAPACITY: usize = 128;
|
||||
pub async fn run_main(
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
loader_overrides: LoaderOverrides,
|
||||
default_analytics_enabled: bool,
|
||||
) -> IoResult<()> {
|
||||
// Set up channels.
|
||||
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
|
||||
@@ -80,21 +85,54 @@ pub async fn run_main(
|
||||
format!("error parsing -c overrides: {e}"),
|
||||
)
|
||||
})?;
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides.clone())
|
||||
let loader_overrides_for_config_api = loader_overrides.clone();
|
||||
let mut config_warnings = Vec::new();
|
||||
let config = match ConfigBuilder::default()
|
||||
.cli_overrides(cli_kv_overrides.clone())
|
||||
.loader_overrides(loader_overrides)
|
||||
.build()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
|
||||
})?;
|
||||
{
|
||||
Ok(config) => config,
|
||||
Err(err) => {
|
||||
let message = ConfigWarningNotification {
|
||||
summary: "Invalid configuration; using defaults.".to_string(),
|
||||
details: Some(err.to_string()),
|
||||
};
|
||||
config_warnings.push(message);
|
||||
Config::load_default_with_cli_overrides(cli_kv_overrides.clone()).map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading default config after config error: {e}"),
|
||||
)
|
||||
})?
|
||||
}
|
||||
};
|
||||
|
||||
if let Ok(Some(err)) =
|
||||
check_execpolicy_for_warnings(&config.features, &config.config_layer_stack).await
|
||||
{
|
||||
let message = ConfigWarningNotification {
|
||||
summary: "Error parsing rules; custom rules not applied.".to_string(),
|
||||
details: Some(err.to_string()),
|
||||
};
|
||||
config_warnings.push(message);
|
||||
}
|
||||
|
||||
let feedback = CodexFeedback::new();
|
||||
|
||||
let otel =
|
||||
codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading otel config: {e}"),
|
||||
)
|
||||
})?;
|
||||
let otel = codex_core::otel_init::build_provider(
|
||||
&config,
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
Some("codex_app_server"),
|
||||
default_analytics_enabled,
|
||||
)
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading otel config: {e}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
@@ -103,11 +141,8 @@ pub async fn run_main(
|
||||
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::FULL)
|
||||
.with_filter(EnvFilter::from_default_env());
|
||||
|
||||
let feedback_layer = tracing_subscriber::fmt::layer()
|
||||
.with_writer(feedback.make_writer())
|
||||
.with_ansi(false)
|
||||
.with_target(false)
|
||||
.with_filter(Targets::new().with_default(Level::TRACE));
|
||||
let feedback_layer = feedback.logger_layer();
|
||||
let feedback_metadata_layer = feedback.metadata_layer();
|
||||
|
||||
let otel_logger_layer = otel.as_ref().and_then(|o| o.logger_layer());
|
||||
|
||||
@@ -116,28 +151,64 @@ pub async fn run_main(
|
||||
let _ = tracing_subscriber::registry()
|
||||
.with(stderr_fmt)
|
||||
.with(feedback_layer)
|
||||
.with(feedback_metadata_layer)
|
||||
.with(otel_logger_layer)
|
||||
.with(otel_tracing_layer)
|
||||
.try_init();
|
||||
for warning in &config_warnings {
|
||||
match &warning.details {
|
||||
Some(details) => error!("{} {}", warning.summary, details),
|
||||
None => error!("{}", warning.summary),
|
||||
}
|
||||
}
|
||||
|
||||
// Task: process incoming messages.
|
||||
let processor_handle = tokio::spawn({
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
|
||||
let loader_overrides = loader_overrides_for_config_api;
|
||||
let mut processor = MessageProcessor::new(
|
||||
outgoing_message_sender,
|
||||
codex_linux_sandbox_exe,
|
||||
std::sync::Arc::new(config),
|
||||
cli_overrides,
|
||||
loader_overrides,
|
||||
feedback.clone(),
|
||||
config_warnings,
|
||||
);
|
||||
let mut thread_created_rx = processor.thread_created_receiver();
|
||||
async move {
|
||||
while let Some(msg) = incoming_rx.recv().await {
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
let mut listen_for_threads = true;
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = incoming_rx.recv() => {
|
||||
let Some(msg) = msg else {
|
||||
break;
|
||||
};
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
}
|
||||
}
|
||||
created = thread_created_rx.recv(), if listen_for_threads => {
|
||||
match created {
|
||||
Ok(thread_id) => {
|
||||
processor.try_attach_thread_listener(thread_id).await;
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
|
||||
// TODO(jif) handle lag.
|
||||
// Assumes thread creation volume is low enough that lag never happens.
|
||||
// If it does, we log and continue without resyncing to avoid attaching
|
||||
// listeners for threads that should remain unsubscribed.
|
||||
warn!("thread_created receiver lagged; skipping resync");
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Closed) => {
|
||||
listen_for_threads = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,43 @@
|
||||
use codex_app_server::run_main;
|
||||
use codex_arg0::arg0_dispatch_or_else;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use std::path::PathBuf;
|
||||
|
||||
// Debug-only test hook: lets integration tests point the server at a temporary
|
||||
// managed config file without writing to /etc.
|
||||
const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH";
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
|
||||
let managed_config_path = managed_config_path_from_debug_env();
|
||||
let loader_overrides = LoaderOverrides {
|
||||
managed_config_path,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
run_main(
|
||||
codex_linux_sandbox_exe,
|
||||
CliConfigOverrides::default(),
|
||||
loader_overrides,
|
||||
false,
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn managed_config_path_from_debug_env() -> Option<PathBuf> {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
if let Ok(value) = std::env::var(MANAGED_CONFIG_PATH_ENV_VAR) {
|
||||
return if value.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(PathBuf::from(value))
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
@@ -17,13 +18,19 @@ use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ConversationManager;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::default_client::SetOriginatorError;
|
||||
use codex_core::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_core::default_client::set_default_originator;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use tokio::sync::broadcast;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
pub(crate) struct MessageProcessor {
|
||||
@@ -31,6 +38,7 @@ pub(crate) struct MessageProcessor {
|
||||
codex_message_processor: CodexMessageProcessor,
|
||||
config_api: ConfigApi,
|
||||
initialized: bool,
|
||||
config_warnings: Vec<ConfigWarningNotification>,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
@@ -41,7 +49,9 @@ impl MessageProcessor {
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
config: Arc<Config>,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
feedback: CodexFeedback,
|
||||
config_warnings: Vec<ConfigWarningNotification>,
|
||||
) -> Self {
|
||||
let outgoing = Arc::new(outgoing);
|
||||
let auth_manager = AuthManager::shared(
|
||||
@@ -49,26 +59,28 @@ impl MessageProcessor {
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
let conversation_manager = Arc::new(ConversationManager::new(
|
||||
let thread_manager = Arc::new(ThreadManager::new(
|
||||
config.codex_home.clone(),
|
||||
auth_manager.clone(),
|
||||
SessionSource::VSCode,
|
||||
));
|
||||
let codex_message_processor = CodexMessageProcessor::new(
|
||||
auth_manager,
|
||||
conversation_manager,
|
||||
thread_manager,
|
||||
outgoing.clone(),
|
||||
codex_linux_sandbox_exe,
|
||||
Arc::clone(&config),
|
||||
cli_overrides.clone(),
|
||||
feedback,
|
||||
);
|
||||
let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides);
|
||||
let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);
|
||||
|
||||
Self {
|
||||
outgoing,
|
||||
codex_message_processor,
|
||||
config_api,
|
||||
initialized: false,
|
||||
config_warnings,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,6 +130,27 @@ impl MessageProcessor {
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
if let Err(error) = set_default_originator(name.clone()) {
|
||||
match error {
|
||||
SetOriginatorError::InvalidHeaderValue => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"Invalid clientInfo.name: '{name}'. Must be a valid HTTP header value."
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
SetOriginatorError::AlreadyInitialized => {
|
||||
// No-op. This is expected to happen if the originator is already set via env var.
|
||||
// TODO(owen): Once we remove support for CODEX_INTERNAL_ORIGINATOR_OVERRIDE,
|
||||
// this will be an unexpected state and we can return a JSON-RPC error indicating
|
||||
// internal server error.
|
||||
}
|
||||
}
|
||||
}
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
@@ -128,6 +161,15 @@ impl MessageProcessor {
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
if !self.config_warnings.is_empty() {
|
||||
for notification in self.config_warnings.drain(..) {
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::ConfigWarning(
|
||||
notification,
|
||||
))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
@@ -155,6 +197,12 @@ impl MessageProcessor {
|
||||
ClientRequest::ConfigBatchWrite { request_id, params } => {
|
||||
self.handle_config_batch_write(request_id, params).await;
|
||||
}
|
||||
ClientRequest::ConfigRequirementsRead {
|
||||
request_id,
|
||||
params: _,
|
||||
} => {
|
||||
self.handle_config_requirements_read(request_id).await;
|
||||
}
|
||||
other => {
|
||||
self.codex_message_processor.process_request(other).await;
|
||||
}
|
||||
@@ -167,6 +215,19 @@ impl MessageProcessor {
|
||||
tracing::info!("<- notification: {:?}", notification);
|
||||
}
|
||||
|
||||
pub(crate) fn thread_created_receiver(&self) -> broadcast::Receiver<ThreadId> {
|
||||
self.codex_message_processor.thread_created_receiver()
|
||||
}
|
||||
|
||||
pub(crate) async fn try_attach_thread_listener(&mut self, thread_id: ThreadId) {
|
||||
if !self.initialized {
|
||||
return;
|
||||
}
|
||||
self.codex_message_processor
|
||||
.try_attach_thread_listener(thread_id)
|
||||
.await;
|
||||
}
|
||||
|
||||
/// Handle a standalone JSON-RPC response originating from the peer.
|
||||
pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
|
||||
tracing::info!("<- response: {:?}", response);
|
||||
@@ -207,4 +268,11 @@ impl MessageProcessor {
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_requirements_read(&self, request_id: RequestId) {
|
||||
match self.config_api.config_requirements_read().await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,19 +2,18 @@ use std::sync::Arc;

use codex_app_server_protocol::Model;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_core::ConversationManager;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffortPreset;

pub async fn supported_models(
conversation_manager: Arc<ConversationManager>,
config: &Config,
) -> Vec<Model> {
conversation_manager
.list_models(config)
pub async fn supported_models(thread_manager: Arc<ThreadManager>, config: &Config) -> Vec<Model> {
thread_manager
.list_models(config, RefreshStrategy::OnlineIfUncached)
.await
.into_iter()
.filter(|preset| preset.show_in_picker)
.map(model_from_preset)
.collect()
}

@@ -162,6 +162,7 @@ mod tests {
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
use codex_app_server_protocol::AccountUpdatedNotification;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::ConfigWarningNotification;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::RateLimitSnapshot;
use codex_app_server_protocol::RateLimitWindow;
@@ -279,4 +280,26 @@ mod tests {
"ensure the notification serializes correctly"
);
}

#[test]
fn verify_config_warning_notification_serialization() {
let notification = ServerNotification::ConfigWarning(ConfigWarningNotification {
summary: "Config error: using defaults".to_string(),
details: Some("error loading config: bad config".to_string()),
});

let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
assert_eq!(
json!( {
"method": "configWarning",
"params": {
"summary": "Config error: using defaults",
"details": "error loading config: bad config",
},
}),
serde_json::to_value(jsonrpc_notification)
.expect("ensure the notification serializes correctly"),
"ensure the notification serializes correctly"
);
}
}

codex-rs/app-server/tests/common/BUILD.bazel (new file, 7 lines)
@@ -0,0 +1,7 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
name = "common",
crate_name = "app_test_support",
crate_srcs = glob(["*.rs"]),
)
@@ -9,12 +9,12 @@ path = "lib.rs"

[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-protocol = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [

@@ -17,9 +17,11 @@ pub use core_test_support::format_with_current_shell_non_login;
pub use core_test_support::test_path_buf_with_windows;
pub use core_test_support::test_tmp_path;
pub use core_test_support::test_tmp_path_buf;
pub use mcp_process::DEFAULT_CLIENT_NAME;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_chat_completions_server;
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
pub use mock_model_server::create_mock_responses_server_repeating_assistant;
pub use mock_model_server::create_mock_responses_server_sequence;
pub use mock_model_server::create_mock_responses_server_sequence_unchecked;
pub use models_cache::write_models_cache;
pub use models_cache::write_models_cache_with_models;
pub use responses::create_apply_patch_sse_response;
@@ -27,6 +29,7 @@ pub use responses::create_exec_command_sse_response;
pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_shell_command_sse_response;
pub use rollout::create_fake_rollout;
pub use rollout::create_fake_rollout_with_text_elements;
use serde::de::DeserializeOwned;

pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {

@@ -11,7 +11,6 @@ use tokio::process::ChildStdin;
|
||||
use tokio::process::ChildStdout;
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
@@ -22,6 +21,7 @@ use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::FeedbackUploadParams;
|
||||
use codex_app_server_protocol::ForkConversationParams;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
@@ -44,12 +44,14 @@ use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadForkParams;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadLoadedListParams;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadRollbackParams;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct McpProcess {
|
||||
@@ -61,9 +63,11 @@ pub struct McpProcess {
|
||||
process: Child,
|
||||
stdin: ChildStdin,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_user_messages: VecDeque<JSONRPCNotification>,
|
||||
pending_messages: VecDeque<JSONRPCMessage>,
|
||||
}
|
||||
|
||||
pub const DEFAULT_CLIENT_NAME: &str = "codex-app-server-tests";
|
||||
|
||||
impl McpProcess {
|
||||
pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
|
||||
Self::new_with_env(codex_home, &[]).await
|
||||
@@ -78,12 +82,8 @@ impl McpProcess {
|
||||
codex_home: &Path,
|
||||
env_overrides: &[(&str, Option<&str>)],
|
||||
) -> anyhow::Result<Self> {
|
||||
// Use assert_cmd to locate the binary path and then switch to tokio::process::Command
|
||||
let std_cmd = StdCommand::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-mcp-server")?;
|
||||
|
||||
let program = std_cmd.get_program().to_owned();
|
||||
|
||||
let program = codex_utils_cargo_bin::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-app-server")?;
|
||||
let mut cmd = Command::new(program);
|
||||
|
||||
cmd.stdin(Stdio::piped());
|
||||
@@ -132,39 +132,68 @@ impl McpProcess {
|
||||
process,
|
||||
stdin,
|
||||
stdout,
|
||||
pending_user_messages: VecDeque::new(),
|
||||
pending_messages: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Performs the initialization handshake with the MCP server.
|
||||
pub async fn initialize(&mut self) -> anyhow::Result<()> {
|
||||
let params = Some(serde_json::to_value(InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-app-server-tests".to_string(),
|
||||
let initialized = self
|
||||
.initialize_with_client_info(ClientInfo {
|
||||
name: DEFAULT_CLIENT_NAME.to_string(),
|
||||
title: None,
|
||||
version: "0.1.0".to_string(),
|
||||
},
|
||||
})?);
|
||||
let req_id = self.send_request("initialize", params).await?;
|
||||
let initialized = self.read_jsonrpc_message().await?;
|
||||
let JSONRPCMessage::Response(response) = initialized else {
|
||||
})
|
||||
.await?;
|
||||
let JSONRPCMessage::Response(_) = initialized else {
|
||||
unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
|
||||
};
|
||||
if response.id != RequestId::Integer(req_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
req_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sends initialize with the provided client info and returns the response/error message.
|
||||
pub async fn initialize_with_client_info(
|
||||
&mut self,
|
||||
client_info: ClientInfo,
|
||||
) -> anyhow::Result<JSONRPCMessage> {
|
||||
let params = Some(serde_json::to_value(InitializeParams { client_info })?);
|
||||
let request_id = self.send_request("initialize", params).await?;
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Response(response) => {
|
||||
if response.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(JSONRPCMessage::Response(response))
|
||||
}
|
||||
JSONRPCMessage::Error(error) => {
|
||||
if error.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize error id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
error.id
|
||||
);
|
||||
}
|
||||
Ok(JSONRPCMessage::Error(error))
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Notification: {notification:?}");
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {request:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Send a `newConversation` JSON-RPC request.
|
||||
pub async fn send_new_conversation_request(
|
||||
&mut self,
|
||||
@@ -203,7 +232,7 @@ impl McpProcess {
|
||||
}
|
||||
|
||||
/// Send a `removeConversationListener` JSON-RPC request.
|
||||
pub async fn send_remove_conversation_listener_request(
|
||||
pub async fn send_remove_thread_listener_request(
|
||||
&mut self,
|
||||
params: RemoveConversationListenerParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
@@ -313,6 +342,15 @@ impl McpProcess {
|
||||
self.send_request("thread/resume", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/fork` JSON-RPC request.
|
||||
pub async fn send_thread_fork_request(
|
||||
&mut self,
|
||||
params: ThreadForkParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/fork", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/archive` JSON-RPC request.
|
||||
pub async fn send_thread_archive_request(
|
||||
&mut self,
|
||||
@@ -322,6 +360,15 @@ impl McpProcess {
|
||||
self.send_request("thread/archive", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/rollback` JSON-RPC request.
|
||||
pub async fn send_thread_rollback_request(
|
||||
&mut self,
|
||||
params: ThreadRollbackParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/rollback", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/list` JSON-RPC request.
|
||||
pub async fn send_thread_list_request(
|
||||
&mut self,
|
||||
@@ -331,6 +378,15 @@ impl McpProcess {
|
||||
self.send_request("thread/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/loaded/list` JSON-RPC request.
|
||||
pub async fn send_thread_loaded_list_request(
|
||||
&mut self,
|
||||
params: ThreadLoadedListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/loaded/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `model/list` JSON-RPC request.
|
||||
pub async fn send_list_models_request(
|
||||
&mut self,
|
||||
@@ -349,6 +405,15 @@ impl McpProcess {
|
||||
self.send_request("resumeConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `forkConversation` JSON-RPC request.
|
||||
pub async fn send_fork_conversation_request(
|
||||
&mut self,
|
||||
params: ForkConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("forkConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `loginApiKey` JSON-RPC request.
|
||||
pub async fn send_login_api_key_request(
|
||||
&mut self,
|
||||
@@ -540,27 +605,16 @@ impl McpProcess {
|
||||
pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<ServerRequest> {
|
||||
eprintln!("in read_stream_until_request_message()");
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
let message = self
|
||||
.read_stream_until_message(|message| matches!(message, JSONRPCMessage::Request(_)))
|
||||
.await?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(jsonrpc_request) => {
|
||||
return jsonrpc_request.try_into().with_context(
|
||||
|| "failed to deserialize ServerRequest from JSONRPCRequest",
|
||||
);
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
let JSONRPCMessage::Request(jsonrpc_request) = message else {
|
||||
unreachable!("expected JSONRPCMessage::Request, got {message:?}");
|
||||
};
|
||||
jsonrpc_request
|
||||
.try_into()
|
||||
.with_context(|| "failed to deserialize ServerRequest from JSONRPCRequest")
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_response_message(
|
||||
@@ -569,52 +623,32 @@ impl McpProcess {
|
||||
) -> anyhow::Result<JSONRPCResponse> {
|
||||
eprintln!("in read_stream_until_response_message({request_id:?})");
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(jsonrpc_response) => {
|
||||
if jsonrpc_response.id == request_id {
|
||||
return Ok(jsonrpc_response);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let message = self
|
||||
.read_stream_until_message(|message| {
|
||||
Self::message_request_id(message) == Some(&request_id)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let JSONRPCMessage::Response(response) = message else {
|
||||
unreachable!("expected JSONRPCMessage::Response, got {message:?}");
|
||||
};
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_error_message(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
) -> anyhow::Result<JSONRPCError> {
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
// Keep scanning; we're waiting for an error with matching id.
|
||||
}
|
||||
JSONRPCMessage::Error(err) => {
|
||||
if err.id == request_id {
|
||||
return Ok(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let message = self
|
||||
.read_stream_until_message(|message| {
|
||||
Self::message_request_id(message) == Some(&request_id)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let JSONRPCMessage::Error(err) = message else {
|
||||
unreachable!("expected JSONRPCMessage::Error, got {message:?}");
|
||||
};
|
||||
Ok(err)
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_notification_message(
|
||||
@@ -623,46 +657,64 @@ impl McpProcess {
|
||||
) -> anyhow::Result<JSONRPCNotification> {
|
||||
eprintln!("in read_stream_until_notification_message({method})");
|
||||
|
||||
if let Some(notification) = self.take_pending_notification_by_method(method) {
|
||||
return Ok(notification);
|
||||
let message = self
|
||||
.read_stream_until_message(|message| {
|
||||
matches!(
|
||||
message,
|
||||
JSONRPCMessage::Notification(notification) if notification.method == method
|
||||
)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let JSONRPCMessage::Notification(notification) = message else {
|
||||
unreachable!("expected JSONRPCMessage::Notification, got {message:?}");
|
||||
};
|
||||
Ok(notification)
|
||||
}
|
||||
|
||||
/// Clears any buffered messages so future reads only consider new stream items.
|
||||
///
|
||||
/// We call this when e.g. we want to validate against the next turn and no longer care about
|
||||
/// messages buffered from the prior turn.
|
||||
pub fn clear_message_buffer(&mut self) {
|
||||
self.pending_messages.clear();
|
||||
}
|
||||
|
||||
/// Reads the stream until a message matches `predicate`, buffering any non-matching messages
|
||||
/// for later reads.
|
||||
async fn read_stream_until_message<F>(&mut self, predicate: F) -> anyhow::Result<JSONRPCMessage>
|
||||
where
|
||||
F: Fn(&JSONRPCMessage) -> bool,
|
||||
{
|
||||
if let Some(message) = self.take_pending_message(&predicate) {
|
||||
return Ok(message);
|
||||
}
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
if notification.method == method {
|
||||
return Ok(notification);
|
||||
}
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
|
||||
}
|
||||
if predicate(&message) {
|
||||
return Ok(message);
|
||||
}
|
||||
self.pending_messages.push_back(message);
|
||||
}
|
||||
}
|
||||
|
||||
fn take_pending_notification_by_method(&mut self, method: &str) -> Option<JSONRPCNotification> {
|
||||
if let Some(pos) = self
|
||||
.pending_user_messages
|
||||
.iter()
|
||||
.position(|notification| notification.method == method)
|
||||
{
|
||||
return self.pending_user_messages.remove(pos);
|
||||
fn take_pending_message<F>(&mut self, predicate: &F) -> Option<JSONRPCMessage>
|
||||
where
|
||||
F: Fn(&JSONRPCMessage) -> bool,
|
||||
{
|
||||
if let Some(pos) = self.pending_messages.iter().position(predicate) {
|
||||
return self.pending_messages.remove(pos);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn enqueue_user_message(&mut self, notification: JSONRPCNotification) {
|
||||
if notification.method == "codex/event/user_message" {
|
||||
self.pending_user_messages.push_back(notification);
|
||||
fn message_request_id(message: &JSONRPCMessage) -> Option<&RequestId> {
|
||||
match message {
|
||||
JSONRPCMessage::Request(request) => Some(&request.id),
|
||||
JSONRPCMessage::Response(response) => Some(&response.id),
|
||||
JSONRPCMessage::Error(err) => Some(&err.id),
|
||||
JSONRPCMessage::Notification(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
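A rough usage sketch of the buffering behaviour introduced above: a message read past while waiting for something else is kept in pending_messages and returned by a later read instead of being dropped. The request used here, the notification method name, and the omitted config.toml/mock-server setup are illustrative, mirroring the tests later in this diff.

use anyhow::Result;
use app_test_support::McpProcess;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RequestId;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn buffered_reads_keep_unmatched_notifications() -> Result<()> {
    // Setup elided: the real tests also write a config.toml and start a mock model server.
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_new_conversation_request(NewConversationParams::default())
        .await?;
    // Notifications that arrive before this response are buffered by
    // read_stream_until_message rather than discarded...
    let _response = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;
    // ...so a later read by method name can still return them.
    let _notification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("sessionConfigured"),
    )
    .await??;
    Ok(())
}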
@@ -1,17 +1,18 @@
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use core_test_support::responses;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::Respond;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
use wiremock::matchers::path_regex;
|
||||
|
||||
/// Create a mock server that will provide the responses, in order, for
|
||||
/// requests to the `/v1/chat/completions` endpoint.
|
||||
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
|
||||
let server = MockServer::start().await;
|
||||
/// requests to the `/v1/responses` endpoint.
|
||||
pub async fn create_mock_responses_server_sequence(responses: Vec<String>) -> MockServer {
|
||||
let server = responses::start_mock_server().await;
|
||||
|
||||
let num_calls = responses.len();
|
||||
let seq_responder = SeqResponder {
|
||||
@@ -20,7 +21,7 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
|
||||
};
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/chat/completions"))
|
||||
.and(path_regex(".*/responses$"))
|
||||
.respond_with(seq_responder)
|
||||
.expect(num_calls as u64)
|
||||
.mount(&server)
|
||||
@@ -29,10 +30,10 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
|
||||
server
|
||||
}
|
||||
|
||||
/// Same as `create_mock_chat_completions_server` but does not enforce an
|
||||
/// Same as `create_mock_responses_server_sequence` but does not enforce an
|
||||
/// expectation on the number of calls.
|
||||
pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
|
||||
let server = MockServer::start().await;
|
||||
pub async fn create_mock_responses_server_sequence_unchecked(responses: Vec<String>) -> MockServer {
|
||||
let server = responses::start_mock_server().await;
|
||||
|
||||
let seq_responder = SeqResponder {
|
||||
num_calls: AtomicUsize::new(0),
|
||||
@@ -40,7 +41,7 @@ pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String
|
||||
};
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/chat/completions"))
|
||||
.and(path_regex(".*/responses$"))
|
||||
.respond_with(seq_responder)
|
||||
.mount(&server)
|
||||
.await;
|
||||
@@ -57,10 +58,24 @@ impl Respond for SeqResponder {
|
||||
fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
|
||||
let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
|
||||
match self.responses.get(call_num) {
|
||||
Some(response) => ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(response.clone(), "text/event-stream"),
|
||||
Some(response) => responses::sse_response(response.clone()),
|
||||
None => panic!("no response for {call_num}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a mock responses API server that returns the same assistant message for every request.
|
||||
pub async fn create_mock_responses_server_repeating_assistant(message: &str) -> MockServer {
|
||||
let server = responses::start_mock_server().await;
|
||||
let body = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", message),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
Mock::given(method("POST"))
|
||||
.and(path_regex(".*/responses$"))
|
||||
.respond_with(responses::sse_response(body))
|
||||
.mount(&server)
|
||||
.await;
|
||||
server
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ use codex_protocol::openai_models::ConfigShellToolType;
|
||||
use codex_protocol::openai_models::ModelInfo;
|
||||
use codex_protocol::openai_models::ModelPreset;
|
||||
use codex_protocol::openai_models::ModelVisibility;
|
||||
use codex_protocol::openai_models::ReasoningSummaryFormat;
|
||||
use codex_protocol::openai_models::TruncationPolicyConfig;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
@@ -16,7 +15,7 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
|
||||
slug: preset.id.clone(),
|
||||
display_name: preset.display_name.clone(),
|
||||
description: Some(preset.description.clone()),
|
||||
default_reasoning_level: preset.default_reasoning_effort,
|
||||
default_reasoning_level: Some(preset.default_reasoning_effort),
|
||||
supported_reasoning_levels: preset.supported_reasoning_efforts.clone(),
|
||||
shell_type: ConfigShellToolType::ShellCommand,
|
||||
visibility: if preset.show_in_picker {
|
||||
@@ -26,21 +25,21 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
|
||||
},
|
||||
supported_in_api: true,
|
||||
priority,
|
||||
upgrade: preset.upgrade.as_ref().map(|u| u.id.clone()),
|
||||
base_instructions: None,
|
||||
upgrade: preset.upgrade.as_ref().map(|u| u.into()),
|
||||
base_instructions: "base instructions".to_string(),
|
||||
supports_reasoning_summaries: false,
|
||||
support_verbosity: false,
|
||||
default_verbosity: None,
|
||||
apply_patch_tool_type: None,
|
||||
truncation_policy: TruncationPolicyConfig::bytes(10_000),
|
||||
supports_parallel_tool_calls: false,
|
||||
context_window: None,
|
||||
reasoning_summary_format: ReasoningSummaryFormat::None,
|
||||
context_window: Some(272_000),
|
||||
auto_compact_token_limit: None,
|
||||
effective_context_window_percent: 95,
|
||||
experimental_supported_tools: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
// todo(aibrahim): fix the priorities to be the opposite here.
|
||||
/// Write a models_cache.json file to the codex home directory.
|
||||
/// This prevents ModelsManager from making network requests to refresh models.
|
||||
/// The cache will be treated as fresh (within TTL) and used instead of fetching from the network.
|
||||
@@ -51,14 +50,14 @@ pub fn write_models_cache(codex_home: &Path) -> std::io::Result<()> {
|
||||
.iter()
|
||||
.filter(|preset| preset.show_in_picker)
|
||||
.collect();
|
||||
// Convert presets to ModelInfo, assigning priorities (higher = earlier in list)
|
||||
// Priority is used for sorting, so first model gets highest priority
|
||||
// Convert presets to ModelInfo, assigning priorities (lower = earlier in list).
|
||||
// Priority is used for sorting, so the first model gets the lowest priority.
|
||||
let models: Vec<ModelInfo> = presets
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, preset)| {
|
||||
// Higher priority = earlier in list, so reverse the index
|
||||
let priority = (presets.len() - idx) as i32;
|
||||
// Lower priority = earlier in list.
|
||||
let priority = idx as i32;
|
||||
preset_to_info(preset, priority)
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use core_test_support::responses;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
|
||||
@@ -14,85 +15,30 @@ pub fn create_shell_command_sse_response(
|
||||
"workdir": workdir.map(|w| w.to_string_lossy()),
|
||||
"timeout_ms": timeout_ms
|
||||
}))?;
|
||||
let tool_call = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": call_id,
|
||||
"function": {
|
||||
"name": "shell_command",
|
||||
"arguments": tool_call_arguments
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&tool_call)?
|
||||
);
|
||||
Ok(sse)
|
||||
Ok(responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_function_call(call_id, "shell_command", &tool_call_arguments),
|
||||
responses::ev_completed("resp-1"),
|
||||
]))
|
||||
}
|
||||
|
||||
pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
|
||||
let assistant_message = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": message
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&assistant_message)?
|
||||
);
|
||||
Ok(sse)
|
||||
Ok(responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", message),
|
||||
responses::ev_completed("resp-1"),
|
||||
]))
|
||||
}
|
||||
|
||||
pub fn create_apply_patch_sse_response(
|
||||
patch_content: &str,
|
||||
call_id: &str,
|
||||
) -> anyhow::Result<String> {
|
||||
// Use shell_command to call apply_patch with heredoc format
|
||||
let command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
|
||||
let tool_call_arguments = serde_json::to_string(&json!({
|
||||
"command": command
|
||||
}))?;
|
||||
|
||||
let tool_call = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": call_id,
|
||||
"function": {
|
||||
"name": "shell_command",
|
||||
"arguments": tool_call_arguments
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&tool_call)?
|
||||
);
|
||||
Ok(sse)
|
||||
Ok(responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_apply_patch_shell_command_call_via_heredoc(call_id, patch_content),
|
||||
responses::ev_completed("resp-1"),
|
||||
]))
|
||||
}
|
||||
|
||||
pub fn create_exec_command_sse_response(call_id: &str) -> anyhow::Result<String> {
|
||||
@@ -108,28 +54,9 @@ pub fn create_exec_command_sse_response(call_id: &str) -> anyhow::Result<String>
|
||||
"cmd": command.join(" "),
|
||||
"yield_time_ms": 500
|
||||
}))?;
|
||||
let tool_call = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": call_id,
|
||||
"function": {
|
||||
"name": "exec_command",
|
||||
"arguments": tool_call_arguments
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&tool_call)?
|
||||
);
|
||||
Ok(sse)
|
||||
Ok(responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_function_call(call_id, "exec_command", &tool_call_arguments),
|
||||
responses::ev_completed("resp-1"),
|
||||
]))
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::GitInfo;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
@@ -28,7 +28,7 @@ pub fn create_fake_rollout(
|
||||
) -> Result<String> {
|
||||
let uuid = Uuid::new_v4();
|
||||
let uuid_str = uuid.to_string();
|
||||
let conversation_id = ConversationId::from_string(&uuid_str)?;
|
||||
let conversation_id = ThreadId::from_string(&uuid_str)?;
|
||||
|
||||
// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
@@ -87,3 +87,75 @@ pub fn create_fake_rollout(
|
||||
fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
Ok(uuid_str)
|
||||
}
|
||||
|
||||
pub fn create_fake_rollout_with_text_elements(
|
||||
codex_home: &Path,
|
||||
filename_ts: &str,
|
||||
meta_rfc3339: &str,
|
||||
preview: &str,
|
||||
text_elements: Vec<serde_json::Value>,
|
||||
model_provider: Option<&str>,
|
||||
git_info: Option<GitInfo>,
|
||||
) -> Result<String> {
|
||||
let uuid = Uuid::new_v4();
|
||||
let uuid_str = uuid.to_string();
|
||||
let conversation_id = ThreadId::from_string(&uuid_str)?;
|
||||
|
||||
// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
let month = &filename_ts[5..7];
|
||||
let day = &filename_ts[8..10];
|
||||
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
||||
fs::create_dir_all(&dir)?;
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
|
||||
// Build JSONL lines
|
||||
let meta = SessionMeta {
|
||||
id: conversation_id,
|
||||
timestamp: meta_rfc3339.to_string(),
|
||||
cwd: PathBuf::from("/"),
|
||||
originator: "codex".to_string(),
|
||||
cli_version: "0.0.0".to_string(),
|
||||
instructions: None,
|
||||
source: SessionSource::Cli,
|
||||
model_provider: model_provider.map(str::to_string),
|
||||
};
|
||||
let payload = serde_json::to_value(SessionMetaLine {
|
||||
meta,
|
||||
git: git_info,
|
||||
})?;
|
||||
|
||||
let lines = [
|
||||
json!( {
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": payload
|
||||
})
|
||||
.to_string(),
|
||||
json!( {
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
json!( {
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"text_elements": text_elements,
|
||||
"local_images": []
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
];
|
||||
|
||||
fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
Ok(uuid_str)
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "http://127.0.0.1:0/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_responses_server_sequence;
|
||||
use app_test_support::create_shell_command_sse_response;
|
||||
use app_test_support::format_with_current_shell;
|
||||
use app_test_support::to_response;
|
||||
@@ -65,7 +65,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("Enjoy your new git repo!")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let server = create_mock_responses_server_sequence(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
// Start MCP server and initialize.
|
||||
@@ -114,6 +114,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
|
||||
conversation_id,
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "text".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -145,9 +146,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
|
||||
|
||||
// 4) removeConversationListener
|
||||
let remove_listener_id = mcp
|
||||
.send_remove_conversation_listener_request(RemoveConversationListenerParams {
|
||||
subscription_id,
|
||||
})
|
||||
.send_remove_thread_listener_request(RemoveConversationListenerParams { subscription_id })
|
||||
.await?;
|
||||
let remove_listener_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
@@ -199,7 +198,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let server = create_mock_responses_server_sequence(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
// Start MCP server and initialize.
|
||||
@@ -243,6 +242,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
conversation_id,
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run python".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -285,7 +285,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Wait for first TaskComplete
|
||||
// Wait for first TurnComplete
|
||||
let _ = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
@@ -298,6 +298,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
conversation_id,
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run python again".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: working_directory.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
@@ -305,6 +306,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
model: "mock-model".to_string(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: None,
|
||||
})
|
||||
.await?;
|
||||
// Acknowledge sendUserTurn
|
||||
@@ -364,7 +366,7 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done second")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let server = create_mock_responses_server_sequence(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
@@ -406,6 +408,7 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "first turn".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: first_cwd.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
@@ -418,6 +421,7 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
|
||||
model: model.clone(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: None,
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
@@ -430,12 +434,14 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
mcp.clear_message_buffer();
|
||||
|
||||
let second_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "second turn".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: second_cwd.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
@@ -443,6 +449,7 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
|
||||
model: model.clone(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: None,
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
@@ -498,7 +505,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
@@ -12,6 +11,7 @@ use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use core_test_support::responses;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
@@ -23,8 +23,9 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_conversation_create_and_send_message_ok() -> Result<()> {
|
||||
// Mock server – we won't strictly rely on it, but provide one to satisfy any model wiring.
|
||||
let responses = vec![create_final_assistant_message_sse_response("Done")?];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let response_body = create_final_assistant_message_sse_response("Done")?;
|
||||
let server = responses::start_mock_server().await;
|
||||
let response_mock = responses::mount_sse_sequence(&server, vec![response_body]).await;
|
||||
|
||||
// Temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -76,6 +77,7 @@ async fn test_conversation_create_and_send_message_ok() -> Result<()> {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -86,32 +88,30 @@ async fn test_conversation_create_and_send_message_ok() -> Result<()> {
|
||||
.await??;
|
||||
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)?;
|
||||
|
||||
// avoid race condition by waiting for the mock server to receive the chat.completions request
|
||||
// Avoid race condition by waiting for the mock server to receive the responses request.
|
||||
let deadline = std::time::Instant::now() + DEFAULT_READ_TIMEOUT;
|
||||
let requests = loop {
|
||||
let requests = server.received_requests().await.unwrap_or_default();
|
||||
let requests = response_mock.requests();
|
||||
if !requests.is_empty() {
|
||||
break requests;
|
||||
}
|
||||
if std::time::Instant::now() >= deadline {
|
||||
panic!("mock server did not receive the chat.completions request in time");
|
||||
panic!("mock server did not receive the responses request in time");
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
|
||||
};
|
||||
|
||||
// Verify the outbound request body matches expectations for Chat Completions.
|
||||
// Verify the outbound request body matches expectations for Responses.
|
||||
let request = requests
|
||||
.first()
|
||||
.expect("mock server should have received at least one request");
|
||||
let body = request.body_json::<serde_json::Value>()?;
|
||||
let body = request.body_json();
|
||||
assert_eq!(body["model"], json!("o3"));
|
||||
assert!(body["stream"].as_bool().unwrap_or(false));
|
||||
let messages = body["messages"]
|
||||
.as_array()
|
||||
.expect("messages should be array");
|
||||
let last = messages.last().expect("at least one message");
|
||||
assert_eq!(last["role"], json!("user"));
|
||||
assert_eq!(last["content"], json!("Hello"));
|
||||
let user_texts = request.message_input_texts("user");
|
||||
assert!(
|
||||
user_texts.iter().any(|text| text == "Hello"),
|
||||
"expected user input to include Hello, got {user_texts:?}"
|
||||
);
|
||||
|
||||
drop(server);
|
||||
Ok(())
|
||||
@@ -133,7 +133,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
codex-rs/app-server/tests/suite/fork_thread.rs (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ForkConversationParams;
|
||||
use codex_app_server_protocol::ForkConversationResponse;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::SessionConfiguredNotification;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn fork_conversation_creates_new_rollout() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let preview = "Hello A";
|
||||
let conversation_id = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T12-00-00",
|
||||
"2025-01-02T12:00:00Z",
|
||||
preview,
|
||||
Some("openai"),
|
||||
None,
|
||||
)?;
|
||||
|
||||
let original_path = codex_home
|
||||
.path()
|
||||
.join("sessions")
|
||||
.join("2025")
|
||||
.join("01")
|
||||
.join("02")
|
||||
.join(format!(
|
||||
"rollout-2025-01-02T12-00-00-{conversation_id}.jsonl"
|
||||
));
|
||||
assert!(
|
||||
original_path.exists(),
|
||||
"expected original rollout to exist at {}",
|
||||
original_path.display()
|
||||
);
|
||||
let original_contents = std::fs::read_to_string(&original_path)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let fork_req_id = mcp
|
||||
.send_fork_conversation_request(ForkConversationParams {
|
||||
path: Some(original_path.clone()),
|
||||
conversation_id: None,
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Expect a sessionConfigured notification for the forked session.
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
session_id,
|
||||
rollout_path,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
|
||||
assert_eq!(model, "o3");
|
||||
assert_ne!(
|
||||
session_id.to_string(),
|
||||
conversation_id,
|
||||
"expected a new conversation id when forking"
|
||||
);
|
||||
assert_ne!(
|
||||
rollout_path, original_path,
|
||||
"expected a new rollout path when forking"
|
||||
);
|
||||
assert!(
|
||||
rollout_path.exists(),
|
||||
"expected forked rollout to exist at {}",
|
||||
rollout_path.display()
|
||||
);
|
||||
|
||||
let session_initial_messages =
|
||||
session_initial_messages.expect("expected initial messages when forking from rollout");
|
||||
match session_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, preview);
|
||||
}
|
||||
other => panic!("unexpected initial messages from rollout fork: {other:#?}"),
|
||||
}
|
||||
|
||||
// Then the response for forkConversation.
|
||||
let fork_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(fork_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ForkConversationResponse {
|
||||
conversation_id: forked_id,
|
||||
model: forked_model,
|
||||
initial_messages: response_initial_messages,
|
||||
rollout_path: response_rollout_path,
|
||||
} = to_response::<ForkConversationResponse>(fork_resp)?;
|
||||
|
||||
assert_eq!(forked_model, "o3");
|
||||
assert_eq!(response_rollout_path, rollout_path);
|
||||
assert_ne!(forked_id.to_string(), conversation_id);
|
||||
|
||||
let response_initial_messages =
|
||||
response_initial_messages.expect("expected initial messages in fork response");
|
||||
match response_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, preview);
|
||||
}
|
||||
other => panic!("unexpected initial messages in fork response: {other:#?}"),
|
||||
}
|
||||
|
||||
let after_contents = std::fs::read_to_string(&original_path)?;
|
||||
assert_eq!(
|
||||
after_contents, original_contents,
|
||||
"fork should not mutate the original rollout file"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -18,7 +18,7 @@ use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_responses_server_sequence;
|
||||
use app_test_support::create_shell_command_sse_response;
|
||||
use app_test_support::to_response;
|
||||
|
||||
@@ -56,7 +56,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
std::fs::create_dir(&working_directory)?;
|
||||
|
||||
// Create mock server with a single SSE response: the long sleep command
|
||||
let server = create_mock_chat_completions_server(vec![create_shell_command_sse_response(
|
||||
let server = create_mock_responses_server_sequence(vec![create_shell_command_sse_response(
|
||||
shell_command.clone(),
|
||||
Some(&working_directory),
|
||||
Some(10_000), // 10 seconds timeout in ms
|
||||
@@ -105,6 +105,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
conversation_id,
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run first sleep command".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -153,7 +154,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
|
||||
@@ -6,7 +6,7 @@ use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListConversationsResponse;
use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::ResumeConversationResponse;

@@ -32,7 +32,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "http://127.0.0.1:0/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#,

@@ -1,12 +1,14 @@
mod archive_conversation;
mod archive_thread;
mod auth;
mod codex_message_processor_flow;
mod config;
mod create_conversation;
mod create_thread;
mod fork_thread;
mod fuzzy_file_search;
mod interrupt;
mod list_resume;
mod login;
mod output_schema;
mod send_message;
mod set_default_model;
mod user_agent;

285
codex-rs/app-server/tests/suite/output_schema.rs
Normal file
@@ -0,0 +1,285 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::SendUserTurnResponse;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn send_user_turn_accepts_output_schema_v1() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let body = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let response_mock = responses::mount_sse_once(&server, body).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
|
||||
let listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(listener_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let output_schema = serde_json::json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"answer": { "type": "string" }
|
||||
},
|
||||
"required": ["answer"],
|
||||
"additionalProperties": false
|
||||
});
|
||||
|
||||
let send_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: codex_home.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::new_read_only_policy(),
|
||||
model: "mock-model".to_string(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: Some(output_schema.clone()),
|
||||
})
|
||||
.await?;
|
||||
let _send_turn_resp: SendUserTurnResponse = to_response::<SendUserTurnResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_turn_id)),
|
||||
)
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let request = response_mock.single_request();
|
||||
let payload = request.body_json();
|
||||
let text = payload.get("text").expect("request missing text field");
|
||||
let format = text
|
||||
.get("format")
|
||||
.expect("request missing text.format field");
|
||||
assert_eq!(
|
||||
format,
|
||||
&serde_json::json!({
|
||||
"name": "codex_output_schema",
|
||||
"type": "json_schema",
|
||||
"strict": true,
|
||||
"schema": output_schema,
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn send_user_turn_output_schema_is_per_turn_v1() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let body1 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let response_mock1 = responses::mount_sse_once(&server, body1).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await??;
|
||||
let NewConversationResponse {
|
||||
conversation_id, ..
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)?;
|
||||
|
||||
let listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams {
|
||||
conversation_id,
|
||||
experimental_raw_events: false,
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(listener_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let output_schema = serde_json::json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"answer": { "type": "string" }
|
||||
},
|
||||
"required": ["answer"],
|
||||
"additionalProperties": false
|
||||
});
|
||||
|
||||
let send_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: codex_home.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::new_read_only_policy(),
|
||||
model: "mock-model".to_string(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: Some(output_schema.clone()),
|
||||
})
|
||||
.await?;
|
||||
let _send_turn_resp: SendUserTurnResponse = to_response::<SendUserTurnResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_turn_id)),
|
||||
)
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let payload1 = response_mock1.single_request().body_json();
|
||||
assert_eq!(
|
||||
payload1.pointer("/text/format"),
|
||||
Some(&serde_json::json!({
|
||||
"name": "codex_output_schema",
|
||||
"type": "json_schema",
|
||||
"strict": true,
|
||||
"schema": output_schema,
|
||||
}))
|
||||
);
|
||||
|
||||
let body2 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-2"),
|
||||
responses::ev_assistant_message("msg-2", "Done"),
|
||||
responses::ev_completed("resp-2"),
|
||||
]);
|
||||
let response_mock2 = responses::mount_sse_once(&server, body2).await;
|
||||
|
||||
let send_turn_id_2 = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello again".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
cwd: codex_home.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::new_read_only_policy(),
|
||||
model: "mock-model".to_string(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
output_schema: None,
|
||||
})
|
||||
.await?;
|
||||
let _send_turn_resp_2: SendUserTurnResponse = to_response::<SendUserTurnResponse>(
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(send_turn_id_2)),
|
||||
)
|
||||
.await??,
|
||||
)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let payload2 = response_mock2.single_request().body_json();
|
||||
assert_eq!(payload2.pointer("/text/format"), None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
@@ -13,12 +11,17 @@ use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::DeveloperInstructions;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::RawResponseItemEvent;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use core_test_support::responses;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -26,13 +29,21 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_message_success() -> Result<()> {
|
||||
// Spin up a mock completions server that immediately ends the Codex turn.
|
||||
// Spin up a mock responses server that immediately ends the Codex turn.
|
||||
// Two Codex turns hit the mock model (session start + send-user-message). Provide two SSE responses.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let server = responses::start_mock_server().await;
|
||||
let body1 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let body2 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-2"),
|
||||
responses::ev_assistant_message("msg-2", "Done"),
|
||||
responses::ev_completed("resp-2"),
|
||||
]);
|
||||
let _response_mock1 = responses::mount_sse_once(&server, body1).await;
|
||||
let _response_mock2 = responses::mount_sse_once(&server, body2).await;
|
||||
|
||||
// Create a temporary Codex home with config pointing at the mock server.
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -81,7 +92,7 @@ async fn test_send_message_success() -> Result<()> {
|
||||
#[expect(clippy::expect_used)]
|
||||
async fn send_message(
|
||||
message: &str,
|
||||
conversation_id: ConversationId,
|
||||
conversation_id: ThreadId,
|
||||
mcp: &mut McpProcess,
|
||||
) -> Result<()> {
|
||||
// Now exercise sendUserMessage.
|
||||
@@ -90,6 +101,7 @@ async fn send_message(
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: message.to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -135,8 +147,13 @@ async fn send_message(
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
|
||||
let responses = vec![create_final_assistant_message_sse_response("Done")?];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
let server = responses::start_mock_server().await;
|
||||
let body = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let _response_mock = responses::mount_sse_once(&server, body).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
@@ -178,10 +195,14 @@ async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
|
||||
let permissions = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_permissions_message(&permissions);
|
||||
|
||||
let developer = read_raw_response_item(&mut mcp, conversation_id).await;
|
||||
assert_developer_message(&developer, "Use the test harness tools.");
|
||||
|
||||
@@ -220,12 +241,13 @@ async fn test_send_message_session_not_found() -> Result<()> {
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let unknown = ConversationId::new();
|
||||
let unknown = ThreadId::new();
|
||||
let req_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id: unknown,
|
||||
items: vec![InputItem::Text {
|
||||
text: "ping".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
@@ -259,7 +281,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
@@ -268,10 +290,8 @@ stream_max_retries = 0
|
||||
}
|
||||
|
||||
#[expect(clippy::expect_used)]
|
||||
async fn read_raw_response_item(
|
||||
mcp: &mut McpProcess,
|
||||
conversation_id: ConversationId,
|
||||
) -> ResponseItem {
|
||||
async fn read_raw_response_item(mcp: &mut McpProcess, conversation_id: ThreadId) -> ResponseItem {
|
||||
// TODO: Switch to rawResponseItem/completed once we migrate to app server v2 in codex web.
|
||||
loop {
|
||||
let raw_notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
@@ -330,6 +350,27 @@ fn assert_instructions_message(item: &ResponseItem) {
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_permissions_message(item: &ResponseItem) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
assert_eq!(role, "developer");
|
||||
let texts = content_texts(content);
|
||||
let expected = DeveloperInstructions::from_policy(
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
AskForApproval::Never,
|
||||
&PathBuf::from("/tmp"),
|
||||
)
|
||||
.into_text();
|
||||
assert_eq!(
|
||||
texts,
|
||||
vec![expected.as_str()],
|
||||
"expected permissions developer message, got {texts:?}"
|
||||
);
|
||||
}
|
||||
other => panic!("expected permissions message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_developer_message(item: &ResponseItem, expected_text: &str) {
|
||||
match item {
|
||||
ResponseItem::Message { role, content, .. } => {
|
||||
@@ -387,7 +428,7 @@ fn content_texts(content: &[ContentItem]) -> Vec<&str> {
|
||||
content
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
ContentItem::InputText { text } | ContentItem::OutputText { text } => {
|
||||
ContentItem::InputText { text, .. } | ContentItem::OutputText { text } => {
|
||||
Some(text.as_str())
|
||||
}
|
||||
_ => None,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::DEFAULT_CLIENT_NAME;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::GetUserAgentResponse;
|
||||
@@ -25,13 +26,13 @@ async fn get_user_agent_returns_current_codex_user_agent() -> Result<()> {
|
||||
.await??;
|
||||
|
||||
let os_info = os_info::get();
|
||||
let originator = codex_core::default_client::originator().value.as_str();
|
||||
let originator = DEFAULT_CLIENT_NAME;
|
||||
let os_type = os_info.os_type();
|
||||
let os_version = os_info.version();
|
||||
let architecture = os_info.architecture().unwrap_or("unknown");
|
||||
let terminal_ua = codex_core::terminal::user_agent();
|
||||
let user_agent = format!(
|
||||
"{originator}/0.0.0 ({os_type} {os_version}; {architecture}) {terminal_ua} (codex-app-server-tests; 0.1.0)"
|
||||
"{originator}/0.0.0 ({os_type} {os_version}; {architecture}) {terminal_ua} ({DEFAULT_CLIENT_NAME}; 0.1.0)"
|
||||
);
|
||||
|
||||
let received: GetUserAgentResponse = to_response(response)?;
|
||||
|
||||
@@ -67,7 +67,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "http://127.0.0.1:0/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
{requires_line}

66
codex-rs/app-server/tests/suite/v2/analytics.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use anyhow::Result;
|
||||
use codex_core::config::ConfigBuilder;
|
||||
use codex_core::config::types::OtelExporterKind;
|
||||
use codex_core::config::types::OtelHttpProtocol;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::HashMap;
|
||||
use tempfile::TempDir;
|
||||
|
||||
const SERVICE_VERSION: &str = "0.0.0-test";
|
||||
|
||||
fn set_metrics_exporter(config: &mut codex_core::config::Config) {
|
||||
config.otel.metrics_exporter = OtelExporterKind::OtlpHttp {
|
||||
endpoint: "http://localhost:4318".to_string(),
|
||||
headers: HashMap::new(),
|
||||
protocol: OtelHttpProtocol::Json,
|
||||
tls: None,
|
||||
};
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn app_server_default_analytics_disabled_without_flag() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut config = ConfigBuilder::default()
|
||||
.codex_home(codex_home.path().to_path_buf())
|
||||
.build()
|
||||
.await?;
|
||||
set_metrics_exporter(&mut config);
|
||||
config.analytics_enabled = None;
|
||||
|
||||
let provider = codex_core::otel_init::build_provider(
|
||||
&config,
|
||||
SERVICE_VERSION,
|
||||
Some("codex_app_server"),
|
||||
false,
|
||||
)
|
||||
.map_err(|err| anyhow::anyhow!(err.to_string()))?;
|
||||
|
||||
// With analytics unset in the config and the default flag set to false, metrics are disabled.
// No provider is built.
|
||||
assert_eq!(provider.is_none(), true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn app_server_default_analytics_enabled_with_flag() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut config = ConfigBuilder::default()
|
||||
.codex_home(codex_home.path().to_path_buf())
|
||||
.build()
|
||||
.await?;
|
||||
set_metrics_exporter(&mut config);
|
||||
config.analytics_enabled = None;
|
||||
|
||||
let provider = codex_core::otel_init::build_provider(
|
||||
&config,
|
||||
SERVICE_VERSION,
|
||||
Some("codex_app_server"),
|
||||
true,
|
||||
)
|
||||
.map_err(|err| anyhow::anyhow!(err.to_string()))?;
|
||||
|
||||
// With analytics unset in the config and the default flag set to true, metrics are enabled.
|
||||
let has_metrics = provider.as_ref().and_then(|otel| otel.metrics()).is_some();
|
||||
assert_eq!(has_metrics, true);
|
||||
Ok(())
|
||||
}
|
||||
@@ -184,7 +184,10 @@ writable_roots = [{}]
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(
|
||||
codex_home.path(),
|
||||
&[("CODEX_MANAGED_CONFIG_PATH", Some(&managed_path_str))],
|
||||
&[(
|
||||
"CODEX_APP_SERVER_MANAGED_CONFIG_PATH",
|
||||
Some(&managed_path_str),
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
137
codex-rs/app-server/tests/suite/v2/initialize.rs
Normal file
@@ -0,0 +1,137 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_responses_server_sequence_unchecked;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn initialize_uses_client_info_name_as_originator() -> Result<()> {
|
||||
let responses = Vec::new();
|
||||
let server = create_mock_responses_server_sequence_unchecked(responses).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
|
||||
let message = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.initialize_with_client_info(ClientInfo {
|
||||
name: "codex_vscode".to_string(),
|
||||
title: Some("Codex VS Code Extension".to_string()),
|
||||
version: "0.1.0".to_string(),
|
||||
}),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let JSONRPCMessage::Response(response) = message else {
|
||||
anyhow::bail!("expected initialize response, got {message:?}");
|
||||
};
|
||||
let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;
|
||||
|
||||
assert!(user_agent.starts_with("codex_vscode/"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn initialize_respects_originator_override_env_var() -> Result<()> {
|
||||
let responses = Vec::new();
|
||||
let server = create_mock_responses_server_sequence_unchecked(responses).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
let mut mcp = McpProcess::new_with_env(
|
||||
codex_home.path(),
|
||||
&[(
|
||||
"CODEX_INTERNAL_ORIGINATOR_OVERRIDE",
|
||||
Some("codex_originator_via_env_var"),
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
|
||||
let message = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.initialize_with_client_info(ClientInfo {
|
||||
name: "codex_vscode".to_string(),
|
||||
title: Some("Codex VS Code Extension".to_string()),
|
||||
version: "0.1.0".to_string(),
|
||||
}),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let JSONRPCMessage::Response(response) = message else {
|
||||
anyhow::bail!("expected initialize response, got {message:?}");
|
||||
};
|
||||
let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;
|
||||
|
||||
assert!(user_agent.starts_with("codex_originator_via_env_var/"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn initialize_rejects_invalid_client_name() -> Result<()> {
|
||||
let responses = Vec::new();
|
||||
let server = create_mock_responses_server_sequence_unchecked(responses).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
let mut mcp = McpProcess::new_with_env(
|
||||
codex_home.path(),
|
||||
&[("CODEX_INTERNAL_ORIGINATOR_OVERRIDE", None)],
|
||||
)
|
||||
.await?;
|
||||
|
||||
let message = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.initialize_with_client_info(ClientInfo {
|
||||
name: "bad\rname".to_string(),
|
||||
title: Some("Bad Client".to_string()),
|
||||
version: "0.1.0".to_string(),
|
||||
}),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let JSONRPCMessage::Error(error) = message else {
|
||||
anyhow::bail!("expected initialize error, got {message:?}");
|
||||
};
|
||||
|
||||
assert_eq!(error.error.code, -32600);
|
||||
assert_eq!(
|
||||
error.error.message,
|
||||
"Invalid clientInfo.name: 'bad\rname'. Must be a valid HTTP header value."
|
||||
);
|
||||
assert_eq!(error.error.data, None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(
|
||||
codex_home: &Path,
|
||||
server_uri: &str,
|
||||
approval_policy: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "{approval_policy}"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -1,11 +1,17 @@
mod account;
mod analytics;
mod config_rpc;
mod initialize;
mod model_list;
mod output_schema;
mod rate_limits;
mod review;
mod thread_archive;
mod thread_fork;
mod thread_list;
mod thread_loaded_list;
mod thread_resume;
mod thread_rollback;
mod thread_start;
mod turn_interrupt;
mod turn_start;

@@ -48,57 +48,32 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
|
||||
let expected_models = vec![
|
||||
Model {
|
||||
id: "gpt-5.2".to_string(),
|
||||
model: "gpt-5.2".to_string(),
|
||||
display_name: "gpt-5.2".to_string(),
|
||||
description:
|
||||
"Latest frontier model with improvements across knowledge, reasoning and coding"
|
||||
.to_string(),
|
||||
id: "gpt-5.2-codex".to_string(),
|
||||
model: "gpt-5.2-codex".to_string(),
|
||||
display_name: "gpt-5.2-codex".to_string(),
|
||||
description: "Latest frontier agentic coding model.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Balances speed with some reasoning; useful for straightforward \
|
||||
queries and short explanations"
|
||||
.to_string(),
|
||||
description: "Fast responses with lighter reasoning".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Provides a solid balance of reasoning depth and latency for \
|
||||
general-purpose tasks"
|
||||
description: "Balances speed and reasoning depth for everyday tasks"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
description: "Greater reasoning depth for complex problems".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::XHigh,
|
||||
description: "Extra high reasoning for complex problems".to_string(),
|
||||
description: "Extra high reasoning depth for complex problems".to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: true,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.1-codex-mini".to_string(),
|
||||
model: "gpt-5.1-codex-mini".to_string(),
|
||||
display_name: "gpt-5.1-codex-mini".to_string(),
|
||||
description: "Optimized for codex. Cheaper, faster, but less capable.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Dynamically adjusts reasoning based on the task".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: false,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.1-codex-max".to_string(),
|
||||
model: "gpt-5.1-codex-max".to_string(),
|
||||
@@ -127,23 +102,48 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
is_default: false,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.2-codex".to_string(),
|
||||
model: "gpt-5.2-codex".to_string(),
|
||||
display_name: "gpt-5.2-codex".to_string(),
|
||||
description: "Latest frontier agentic coding model.".to_string(),
|
||||
id: "gpt-5.1-codex-mini".to_string(),
|
||||
model: "gpt-5.1-codex-mini".to_string(),
|
||||
display_name: "gpt-5.1-codex-mini".to_string(),
|
||||
description: "Optimized for codex. Cheaper, faster, but less capable.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Dynamically adjusts reasoning based on the task".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: false,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.2".to_string(),
|
||||
model: "gpt-5.2".to_string(),
|
||||
display_name: "gpt-5.2".to_string(),
|
||||
description:
|
||||
"Latest frontier model with improvements across knowledge, reasoning and coding"
|
||||
.to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Fast responses with lighter reasoning".to_string(),
|
||||
description: "Balances speed with some reasoning; useful for straightforward \
|
||||
queries and short explanations"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Balances speed and reasoning depth for everyday tasks"
|
||||
description: "Provides a solid balance of reasoning depth and latency for \
|
||||
general-purpose tasks"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Greater reasoning depth for complex problems".to_string(),
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::XHigh,
|
||||
@@ -187,7 +187,7 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
} = to_response::<ModelListResponse>(first_response)?;
|
||||
|
||||
assert_eq!(first_items.len(), 1);
|
||||
assert_eq!(first_items[0].id, "gpt-5.2");
|
||||
assert_eq!(first_items[0].id, "gpt-5.2-codex");
|
||||
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;
|
||||
|
||||
let second_request = mcp
|
||||
@@ -209,7 +209,7 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
} = to_response::<ModelListResponse>(second_response)?;
|
||||
|
||||
assert_eq!(second_items.len(), 1);
|
||||
assert_eq!(second_items[0].id, "gpt-5.1-codex-mini");
|
||||
assert_eq!(second_items[0].id, "gpt-5.1-codex-max");
|
||||
let third_cursor = second_cursor.ok_or_else(|| anyhow!("cursor for third page"))?;
|
||||
|
||||
let third_request = mcp
|
||||
@@ -231,7 +231,7 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
} = to_response::<ModelListResponse>(third_response)?;
|
||||
|
||||
assert_eq!(third_items.len(), 1);
|
||||
assert_eq!(third_items[0].id, "gpt-5.1-codex-max");
|
||||
assert_eq!(third_items[0].id, "gpt-5.1-codex-mini");
|
||||
let fourth_cursor = third_cursor.ok_or_else(|| anyhow!("cursor for fourth page"))?;
|
||||
|
||||
let fourth_request = mcp
|
||||
@@ -253,7 +253,7 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
} = to_response::<ModelListResponse>(fourth_response)?;
|
||||
|
||||
assert_eq!(fourth_items.len(), 1);
|
||||
assert_eq!(fourth_items[0].id, "gpt-5.2-codex");
|
||||
assert_eq!(fourth_items[0].id, "gpt-5.2");
|
||||
assert!(fourth_cursor.is_none());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
234
codex-rs/app-server/tests/suite/v2/output_schema.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_accepts_output_schema_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let body = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let response_mock = responses::mount_sse_once(&server, body).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let output_schema = serde_json::json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"answer": { "type": "string" }
|
||||
},
|
||||
"required": ["answer"],
|
||||
"additionalProperties": false
|
||||
});
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
output_schema: Some(output_schema.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let request = response_mock.single_request();
|
||||
let payload = request.body_json();
|
||||
let text = payload.get("text").expect("request missing text field");
|
||||
let format = text
|
||||
.get("format")
|
||||
.expect("request missing text.format field");
|
||||
assert_eq!(
|
||||
format,
|
||||
&serde_json::json!({
|
||||
"name": "codex_output_schema",
|
||||
"type": "json_schema",
|
||||
"strict": true,
|
||||
"schema": output_schema,
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_output_schema_is_per_turn_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let body1 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_assistant_message("msg-1", "Done"),
|
||||
responses::ev_completed("resp-1"),
|
||||
]);
|
||||
let response_mock1 = responses::mount_sse_once(&server, body1).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let output_schema = serde_json::json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"answer": { "type": "string" }
|
||||
},
|
||||
"required": ["answer"],
|
||||
"additionalProperties": false
|
||||
});
|
||||
|
||||
let turn_req_1 = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
output_schema: Some(output_schema.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp_1: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req_1)),
|
||||
)
|
||||
.await??;
|
||||
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp_1)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let payload1 = response_mock1.single_request().body_json();
|
||||
assert_eq!(
|
||||
payload1.pointer("/text/format"),
|
||||
Some(&serde_json::json!({
|
||||
"name": "codex_output_schema",
|
||||
"type": "json_schema",
|
||||
"strict": true,
|
||||
"schema": output_schema,
|
||||
}))
|
||||
);
|
||||
|
||||
let body2 = responses::sse(vec![
|
||||
responses::ev_response_created("resp-2"),
|
||||
responses::ev_assistant_message("msg-2", "Done"),
|
||||
responses::ev_completed("resp-2"),
|
||||
]);
|
||||
let response_mock2 = responses::mount_sse_once(&server, body2).await;
|
||||
|
||||
let turn_req_2 = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello again".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
output_schema: None,
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp_2: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req_2)),
|
||||
)
|
||||
.await??;
|
||||
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp_2)?;
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let payload2 = response_mock2.single_request().body_json();
|
||||
assert_eq!(payload2.pointer("/text/format"), None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::create_mock_responses_server_repeating_assistant;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
@@ -44,10 +43,7 @@ async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()
|
||||
"overall_confidence_score": 0.75
|
||||
})
|
||||
.to_string();
|
||||
let responses = vec![create_final_assistant_message_sse_response(
|
||||
&review_payload,
|
||||
)?];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
let server = create_mock_responses_server_repeating_assistant(&review_payload).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
@@ -135,7 +131,7 @@ async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_base_branch() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
@@ -176,10 +172,7 @@ async fn review_start_with_detached_delivery_returns_new_thread_id() -> Result<(
|
||||
"overall_confidence_score": 0.5
|
||||
})
|
||||
.to_string();
|
||||
let responses = vec![create_final_assistant_message_sse_response(
|
||||
&review_payload,
|
||||
)?];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
let server = create_mock_responses_server_repeating_assistant(&review_payload).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
@@ -219,7 +212,7 @@ async fn review_start_with_detached_delivery_returns_new_thread_id() -> Result<(
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_commit_sha() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
@@ -254,7 +247,7 @@ async fn review_start_rejects_empty_commit_sha() -> Result<()> {
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_custom_instructions() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
@@ -320,7 +313,7 @@ model_provider = "mock_provider"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
|
||||
@@ -8,7 +8,7 @@ use codex_app_server_protocol::ThreadArchiveResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_core::find_conversation_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -39,7 +39,7 @@ async fn thread_archive_moves_rollout_into_archived_directory() -> Result<()> {
assert!(!thread.id.is_empty());

// Locate the rollout path recorded for this thread id.
let rollout_path = find_conversation_path_by_id_str(codex_home.path(), &thread.id)
let rollout_path = find_thread_path_by_id_str(codex_home.path(), &thread.id)
.await?
.expect("expected rollout path for thread id to exist");
assert!(

141
codex-rs/app-server/tests/suite/v2/thread_fork.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::create_mock_responses_server_repeating_assistant;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SessionSource;
|
||||
use codex_app_server_protocol::ThreadForkParams;
|
||||
use codex_app_server_protocol::ThreadForkResponse;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadStartedNotification;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_fork_creates_new_thread_and_emits_started() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let preview = "Saved user message";
|
||||
let conversation_id = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-05T12-00-00",
|
||||
"2025-01-05T12:00:00Z",
|
||||
preview,
|
||||
Some("mock_provider"),
|
||||
None,
|
||||
)?;
|
||||
|
||||
let original_path = codex_home
|
||||
.path()
|
||||
.join("sessions")
|
||||
.join("2025")
|
||||
.join("01")
|
||||
.join("05")
|
||||
.join(format!(
|
||||
"rollout-2025-01-05T12-00-00-{conversation_id}.jsonl"
|
||||
));
|
||||
assert!(
|
||||
original_path.exists(),
|
||||
"expected original rollout to exist at {}",
|
||||
original_path.display()
|
||||
);
|
||||
let original_contents = std::fs::read_to_string(&original_path)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let fork_id = mcp
|
||||
.send_thread_fork_request(ThreadForkParams {
|
||||
thread_id: conversation_id.clone(),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let fork_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(fork_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadForkResponse { thread, .. } = to_response::<ThreadForkResponse>(fork_resp)?;
|
||||
|
||||
let after_contents = std::fs::read_to_string(&original_path)?;
|
||||
assert_eq!(
|
||||
after_contents, original_contents,
|
||||
"fork should not mutate the original rollout file"
|
||||
);
|
||||
|
||||
assert_ne!(thread.id, conversation_id);
|
||||
assert_eq!(thread.preview, preview);
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.path.is_absolute());
|
||||
assert_ne!(thread.path, original_path);
|
||||
assert!(thread.cwd.is_absolute());
|
||||
assert_eq!(thread.source, SessionSource::VsCode);
|
||||
|
||||
assert_eq!(
|
||||
thread.turns.len(),
|
||||
1,
|
||||
"expected forked thread to include one turn"
|
||||
);
|
||||
let turn = &thread.turns[0];
|
||||
assert_eq!(turn.status, TurnStatus::Completed);
|
||||
assert_eq!(turn.items.len(), 1, "expected user message item");
|
||||
match &turn.items[0] {
|
||||
ThreadItem::UserMessage { content, .. } => {
|
||||
assert_eq!(
|
||||
content,
|
||||
&vec![UserInput::Text {
|
||||
text: preview.to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
other => panic!("expected user message item, got {other:?}"),
|
||||
}
|
||||
|
||||
// A corresponding thread/started notification should arrive.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("thread/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: ThreadStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(started.thread, thread);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
139
codex-rs/app-server/tests/suite/v2/thread_loaded_list.rs
Normal file
@@ -0,0 +1,139 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_responses_server_repeating_assistant;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadLoadedListParams;
|
||||
use codex_app_server_protocol::ThreadLoadedListResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_loaded_list_returns_loaded_thread_ids() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_id = start_thread(&mut mcp).await?;
|
||||
|
||||
let list_id = mcp
|
||||
.send_thread_loaded_list_request(ThreadLoadedListParams::default())
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadLoadedListResponse {
|
||||
mut data,
|
||||
next_cursor,
|
||||
} = to_response::<ThreadLoadedListResponse>(resp)?;
|
||||
data.sort();
|
||||
assert_eq!(data, vec![thread_id]);
|
||||
assert_eq!(next_cursor, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_loaded_list_paginates() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let first = start_thread(&mut mcp).await?;
|
||||
let second = start_thread(&mut mcp).await?;
|
||||
|
||||
let mut expected = [first, second];
|
||||
expected.sort();
|
||||
|
||||
let list_id = mcp
|
||||
.send_thread_loaded_list_request(ThreadLoadedListParams {
|
||||
cursor: None,
|
||||
limit: Some(1),
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadLoadedListResponse {
|
||||
data: first_page,
|
||||
next_cursor,
|
||||
} = to_response::<ThreadLoadedListResponse>(resp)?;
|
||||
assert_eq!(first_page, vec![expected[0].clone()]);
|
||||
assert_eq!(next_cursor, Some(expected[0].clone()));
|
||||
|
||||
let list_id = mcp
|
||||
.send_thread_loaded_list_request(ThreadLoadedListParams {
|
||||
cursor: next_cursor,
|
||||
limit: Some(1),
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadLoadedListResponse {
|
||||
data: second_page,
|
||||
next_cursor,
|
||||
} = to_response::<ThreadLoadedListResponse>(resp)?;
|
||||
assert_eq!(second_page, vec![expected[1].clone()]);
|
||||
assert_eq!(next_cursor, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
async fn start_thread(mcp: &mut McpProcess) -> Result<String> {
|
||||
let req_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("gpt-5.1".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(resp)?;
|
||||
Ok(thread.id)
|
||||
}
|
||||
@@ -1,7 +1,7 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_fake_rollout_with_text_elements;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
@@ -15,6 +15,9 @@ use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::user_input::ByteRange;
use codex_protocol::user_input::TextElement;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -23,7 +26,7 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs

#[tokio::test]
async fn thread_resume_returns_original_thread() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

@@ -66,16 +69,24 @@ async fn thread_resume_returns_original_thread() -> Result<()> {

#[tokio::test]
async fn thread_resume_returns_rollout_history() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let preview = "Saved user message";
    let conversation_id = create_fake_rollout(
    let text_elements = vec![TextElement {
        byte_range: ByteRange { start: 0, end: 5 },
        placeholder: Some("<note>".into()),
    }];
    let conversation_id = create_fake_rollout_with_text_elements(
        codex_home.path(),
        "2025-01-05T12-00-00",
        "2025-01-05T12:00:00Z",
        preview,
        text_elements
            .iter()
            .map(|elem| serde_json::to_value(elem).expect("serialize text element"))
            .collect(),
        Some("mock_provider"),
        None,
    )?;
@@ -118,7 +129,8 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {
            assert_eq!(
                content,
                &vec![UserInput::Text {
                    text: preview.to_string()
                    text: preview.to_string(),
                    text_elements: text_elements.clone().into_iter().map(Into::into).collect(),
                }]
            );
        }
@@ -130,7 +142,7 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {

#[tokio::test]
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

@@ -174,7 +186,7 @@ async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {

#[tokio::test]
async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

@@ -247,7 +259,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#

codex-rs/app-server/tests/suite/v2/thread_rollback.rs (new file, 181 lines)
@@ -0,0 +1,181 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadRollbackResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::UserInput as V2UserInput;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_rollback_drops_last_turns_and_persists_to_rollout() -> Result<()> {
    // Three Codex turns hit the mock model (session start + two turn/start calls).
    let responses = vec![
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
    ];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    // Two turns.
    let first_text = "First";
    let turn1_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: first_text.to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let _turn1_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn1_id)),
    )
    .await??;
    let _completed1 = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let turn2_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Second".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let _turn2_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn2_id)),
    )
    .await??;
    let _completed2 = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    // Roll back the last turn.
    let rollback_id = mcp
        .send_thread_rollback_request(ThreadRollbackParams {
            thread_id: thread.id.clone(),
            num_turns: 1,
        })
        .await?;
    let rollback_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(rollback_id)),
    )
    .await??;
    let ThreadRollbackResponse {
        thread: rolled_back_thread,
    } = to_response::<ThreadRollbackResponse>(rollback_resp)?;

    assert_eq!(rolled_back_thread.turns.len(), 1);
    assert_eq!(rolled_back_thread.turns[0].items.len(), 2);
    match &rolled_back_thread.turns[0].items[0] {
        ThreadItem::UserMessage { content, .. } => {
            assert_eq!(
                content,
                &vec![V2UserInput::Text {
                    text: first_text.to_string(),
                    text_elements: Vec::new(),
                }]
            );
        }
        other => panic!("expected user message item, got {other:?}"),
    }

    // Resume and confirm the history is pruned.
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.turns.len(), 1);
    assert_eq!(thread.turns[0].items.len(), 2);
    match &thread.turns[0].items[0] {
        ThreadItem::UserMessage { content, .. } => {
            assert_eq!(
                content,
                &vec![V2UserInput::Text {
                    text: first_text.to_string(),
                    text_elements: Vec::new(),
                }]
            );
        }
        other => panic!("expected user message item, got {other:?}"),
    }

    Ok(())
}

fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}

@@ -1,6 +1,6 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
@@ -17,7 +17,7 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
#[tokio::test]
async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
    // Provide a mock server and config so model wiring is valid.
    let server = create_mock_chat_completions_server(vec![]).await;
    let server = create_mock_responses_server_repeating_assistant("Done").await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;
@@ -85,7 +85,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#

@@ -2,7 +2,7 @@

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_shell_command_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
@@ -41,7 +41,7 @@ async fn turn_interrupt_aborts_running_turn() -> Result<()> {
    std::fs::create_dir(&working_directory)?;

    // Mock server: long-running shell command then (after abort) nothing else needed.
    let server = create_mock_chat_completions_server(vec![create_shell_command_sse_response(
    let server = create_mock_responses_server_sequence(vec![create_shell_command_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),
@@ -73,6 +73,7 @@ async fn turn_interrupt_aborts_running_turn() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run sleep".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(working_directory.clone()),
            ..Default::default()
@@ -135,7 +136,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#

@@ -3,14 +3,17 @@ use app_test_support::McpProcess;
use app_test_support::create_apply_patch_sse_response;
use app_test_support::create_exec_command_sse_response;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_mock_chat_completions_server_unchecked;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell_display;
use app_test_support::to_response;
use codex_app_server_protocol::ApprovalDecision;
use codex_app_server_protocol::ByteRange;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::FileChangeApprovalDecision;
use codex_app_server_protocol::FileChangeOutputDeltaNotification;
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::ItemCompletedNotification;
@@ -21,6 +24,7 @@ use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::TextElement;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
@@ -39,6 +43,158 @@ use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const TEST_ORIGINATOR: &str = "codex_vscode";

#[tokio::test]
async fn turn_start_sends_originator_header() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.initialize_with_client_info(ClientInfo {
            name: TEST_ORIGINATOR.to_string(),
            title: Some("Codex VS Code Extension".to_string()),
            version: "0.1.0".to_string(),
        }),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let requests = server
        .received_requests()
        .await
        .expect("failed to fetch received requests");
    assert!(!requests.is_empty());
    for request in requests {
        let originator = request
            .headers
            .get("originator")
            .expect("originator header missing");
        assert_eq!(originator.to_str()?, TEST_ORIGINATOR);
    }

    Ok(())
}

#[tokio::test]
async fn turn_start_emits_user_message_item_with_text_elements() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let text_elements = vec![TextElement {
        byte_range: ByteRange { start: 0, end: 5 },
        placeholder: Some("<note>".to_string()),
    }];
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: text_elements.clone(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;

    let user_message_item = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let params = notification.params.expect("item/started params");
            let item_started: ItemStartedNotification =
                serde_json::from_value(params).expect("deserialize item/started notification");
            if let ThreadItem::UserMessage { .. } = item_started.item {
                return Ok::<ThreadItem, anyhow::Error>(item_started.item);
            }
        }
    })
    .await??;

    match user_message_item {
        ThreadItem::UserMessage { content, .. } => {
            assert_eq!(
                content,
                vec![V2UserInput::Text {
                    text: "Hello".to_string(),
                    text_elements,
                }]
            );
        }
        other => panic!("expected user message item, got {other:?}"),
    }

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {
@@ -49,7 +205,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
        create_final_assistant_message_sse_response("Done")?,
        create_final_assistant_message_sse_response("Done")?,
    ];
    let server = create_mock_chat_completions_server_unchecked(responses).await;
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
@@ -77,6 +233,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
@@ -109,6 +266,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Second".to_string(),
                text_elements: Vec::new(),
            }],
            model: Some("mock-model-override".to_string()),
            ..Default::default()
@@ -156,7 +314,7 @@ async fn turn_start_accepts_local_image_input() -> Result<()> {
    ];
    // Use the unchecked variant because the request payload includes a LocalImage
    // which the strict matcher does not currently cover.
    let server = create_mock_chat_completions_server_unchecked(responses).await;
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
@@ -232,7 +390,7 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
        )?,
        create_final_assistant_message_sse_response("done 2")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    // Default approval is untrusted to force elicitation on first turn.
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;

@@ -259,6 +417,7 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
@@ -304,6 +463,7 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python again".to_string(),
                text_elements: Vec::new(),
            }],
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
            sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
@@ -356,7 +516,7 @@ async fn turn_start_exec_approval_decline_v2() -> Result<()> {
        )?,
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(codex_home.as_path()).await?;
@@ -380,6 +540,7 @@ async fn turn_start_exec_approval_decline_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
@@ -426,7 +587,7 @@ async fn turn_start_exec_approval_decline_v2() -> Result<()> {
    mcp.send_response(
        request_id,
        serde_json::to_value(CommandExecutionRequestApprovalResponse {
            decision: ApprovalDecision::Decline,
            decision: CommandExecutionApprovalDecision::Decline,
        })?,
    )
    .await?;
@@ -502,7 +663,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
        )?,
        create_final_assistant_message_sse_response("done second")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
@@ -528,6 +689,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "first turn".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(first_cwd.clone()),
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
@@ -540,6 +702,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
            output_schema: None,
        })
        .await?;
    timeout(
@@ -552,6 +715,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;
    mcp.clear_message_buffer();

    // second turn with workspace-write and second_cwd, ensure exec begins in second_cwd
    let second_turn = mcp
@@ -559,6 +723,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "second turn".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(second_cwd.clone()),
            approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
@@ -566,6 +731,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
            output_schema: None,
        })
        .await?;
    timeout(
@@ -633,7 +799,7 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
        create_apply_patch_sse_response(patch, "patch-call")?,
        create_final_assistant_message_sse_response("patch applied")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
@@ -658,6 +824,7 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch".into(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
@@ -720,7 +887,7 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
    mcp.send_response(
        request_id,
        serde_json::to_value(FileChangeRequestApprovalResponse {
            decision: ApprovalDecision::Accept,
            decision: FileChangeApprovalDecision::Accept,
        })?,
    )
    .await?;
@@ -780,6 +947,192 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_file_change_approval_accept_for_session_persists_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let workspace = tmp.path().join("workspace");
    std::fs::create_dir(&workspace)?;

    let patch_1 = r#"*** Begin Patch
*** Add File: README.md
+new line
*** End Patch
"#;
    let patch_2 = r#"*** Begin Patch
*** Update File: README.md
@@
-new line
+updated line
*** End Patch
"#;

    let responses = vec![
        create_apply_patch_sse_response(patch_1, "patch-call-1")?,
        create_final_assistant_message_sse_response("patch 1 applied")?,
        create_apply_patch_sse_response(patch_2, "patch-call-2")?,
        create_final_assistant_message_sse_response("patch 2 applied")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            cwd: Some(workspace.to_string_lossy().into_owned()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    // First turn: expect FileChangeRequestApproval, respond with AcceptForSession, and verify the file exists.
    let turn_1_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch 1".into(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    let turn_1_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_1_req)),
    )
    .await??;
    let TurnStartResponse { turn: turn_1 } = to_response::<TurnStartResponse>(turn_1_resp)?;

    let started_file_change_1 = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let started_notif = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
            if let ThreadItem::FileChange { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange { id, status, .. } = started_file_change_1 else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call-1");
    assert_eq!(status, PatchApplyStatus::InProgress);

    let server_req = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::FileChangeRequestApproval { request_id, params } = server_req else {
        panic!("expected FileChangeRequestApproval request")
    };
    assert_eq!(params.item_id, "patch-call-1");
    assert_eq!(params.thread_id, thread.id);
    assert_eq!(params.turn_id, turn_1.id);

    mcp.send_response(
        request_id,
        serde_json::to_value(FileChangeRequestApprovalResponse {
            decision: FileChangeApprovalDecision::AcceptForSession,
        })?,
    )
    .await?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("item/fileChange/outputDelta"),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("item/completed"),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    let readme_path = workspace.join("README.md");
    assert_eq!(std::fs::read_to_string(&readme_path)?, "new line\n");

    // Second turn: apply a patch to the same file. Approval should be skipped due to AcceptForSession.
    let turn_2_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch 2".into(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_2_req)),
    )
    .await??;

    let started_file_change_2 = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let started_notif = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
            if let ThreadItem::FileChange { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange { id, status, .. } = started_file_change_2 else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call-2");
    assert_eq!(status, PatchApplyStatus::InProgress);

    // If the server incorrectly emits FileChangeRequestApproval, the helper below will error
    // (it bails on unexpected JSONRPCMessage::Request), causing the test to fail.
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("item/fileChange/outputDelta"),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("item/completed"),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    assert_eq!(std::fs::read_to_string(readme_path)?, "updated line\n");

    Ok(())
}

#[tokio::test]
async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -799,7 +1152,7 @@ async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
        create_apply_patch_sse_response(patch, "patch-call")?,
        create_final_assistant_message_sse_response("patch declined")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
@@ -824,6 +1177,7 @@ async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch".into(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
@@ -886,7 +1240,7 @@ async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
    mcp.send_response(
        request_id,
        serde_json::to_value(FileChangeRequestApprovalResponse {
            decision: ApprovalDecision::Decline,
            decision: FileChangeApprovalDecision::Decline,
        })?,
    )
    .await?;
@@ -937,7 +1291,7 @@ async fn command_execution_notifications_include_process_id() -> Result<()> {
        create_exec_command_sse_response("uexec-1")?,
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    let server = create_mock_responses_server_sequence(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    let config_toml = codex_home.path().join("config.toml");
@@ -971,6 +1325,7 @@ unified_exec = true
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run a command".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
@@ -1076,7 +1431,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#

codex-rs/apply-patch/BUILD.bazel (new file, 11 lines)
@@ -0,0 +1,11 @@
load("//:defs.bzl", "codex_rust_crate")

exports_files(["apply_patch_tool_instructions.md"])

codex_rust_crate(
    name = "apply-patch",
    crate_name = "codex_apply_patch",
    compile_data = [
        "apply_patch_tool_instructions.md",
    ],
)

@@ -25,5 +25,6 @@ tree-sitter-bash = { workspace = true }
[dev-dependencies]
assert_cmd = { workspace = true }
assert_matches = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

@@ -227,11 +227,14 @@ fn check_start_and_end_lines_strict(
    first_line: Option<&&str>,
    last_line: Option<&&str>,
) -> Result<(), ParseError> {
    let first_line = first_line.map(|line| line.trim());
    let last_line = last_line.map(|line| line.trim());

    match (first_line, last_line) {
        (Some(&first), Some(&last)) if first == BEGIN_PATCH_MARKER && last == END_PATCH_MARKER => {
        (Some(first), Some(last)) if first == BEGIN_PATCH_MARKER && last == END_PATCH_MARKER => {
            Ok(())
        }
        (Some(&first), _) if first != BEGIN_PATCH_MARKER => Err(InvalidPatchError(String::from(
        (Some(first), _) if first != BEGIN_PATCH_MARKER => Err(InvalidPatchError(String::from(
            "The first line of the patch must be '*** Begin Patch'",
        ))),
        _ => Err(InvalidPatchError(String::from(
@@ -444,6 +447,25 @@ fn test_parse_patch() {
            "The last line of the patch must be '*** End Patch'".to_string()
        ))
    );

    assert_eq!(
        parse_patch_text(
            concat!(
                "*** Begin Patch",
                " ",
                "\n*** Add File: foo\n+hi\n",
                " ",
                "*** End Patch"
            ),
            ParseMode::Strict
        )
        .unwrap()
        .hunks,
        vec![AddFile {
            path: PathBuf::from("foo"),
            contents: "hi\n".to_string()
        }]
    );
    assert_eq!(
        parse_patch_text(
            "*** Begin Patch\n\

codex-rs/apply-patch/tests/fixtures/scenarios/020_delete_file_success/expected/keep.txt (new file, vendored, 1 line)
@@ -0,0 +1 @@
keep
Some files were not shown because too many files have changed in this diff.