mirror of https://github.com/openai/codex.git
synced 2026-02-05 16:33:42 +00:00
Compare commits
122 Commits
dev/by/cod...compaction
| SHA1 |
|---|
| d8c11f15a8 |
| 3ae966edd8 |
| c7c2b3cf8d |
| 337643b00a |
| 28051d18c6 |
| 2f8a44baea |
| 30eb655ad1 |
| 700a29e157 |
| c40ad65bd8 |
| 894923ed5d |
| fc0fd85349 |
| 877b76bb9d |
| 538e1059a3 |
| 067922a734 |
| dd24ac6b26 |
| ddc704d4c6 |
| 3b726d9550 |
| 74ffbbe7c1 |
| 742f086ee6 |
| ab99df0694 |
| 509ff1c643 |
| cabb2085cc |
| 4db6da32a3 |
| 0adcd8aa86 |
| 28bd7db14a |
| 0c72d8fd6e |
| 7c96f2e84c |
| f45a8733bf |
| b655a092ba |
| b7bba3614e |
| 86adf53235 |
| 998e88b12a |
| c900de271a |
| a641a6427c |
| 5d13427ef4 |
| 394b967432 |
| 6a279f6d77 |
| 47aa1f3b6a |
| 73bd84dee0 |
| 32b062d0e1 |
| f29a0defa2 |
| 2e5aa809f4 |
| 6418e65356 |
| 764712c116 |
| 5ace350186 |
| a8f195828b |
| 313ee3003b |
| 159ff06281 |
| bdc4742bfc |
| 247fb2de64 |
| 6a02fdde76 |
| b77bf4d36d |
| 62266b13f8 |
| 09251387e0 |
| e471ebc5d2 |
| 375a5ef051 |
| fdc69df454 |
| 01d7f8095b |
| 3ba702c5b6 |
| 6316e57497 |
| 70d5959398 |
| 3f338e4a6a |
| 48aeb67f7a |
| 65c7119fb7 |
| c66662c61b |
| d594693d1a |
| 25fccc3d4d |
| 031bafd1fb |
| d27f2533a9 |
| 0f798173d7 |
| cb2bbe5cba |
| dd2d68e69e |
| 8fea8f73d6 |
| 73b5274443 |
| a748600c42 |
| b332482eb1 |
| 58450ba2a1 |
| 24230c066b |
| 18acec09df |
| 182000999c |
| 652f08e98f |
| 279c9534a1 |
| e2bd9311c9 |
| 2efcdf4062 |
| 3651608365 |
| 83775f4df1 |
| 515ac2cd19 |
| eb7558ba85 |
| 713ae22c04 |
| b3127e2eeb |
| 77222492f9 |
| 69cfc73dc6 |
| 1167465bf6 |
| d9232403aa |
| b9deb57689 |
| c6ded0afd8 |
| e04851816d |
| e0ae219f36 |
| 45fe58159e |
| 7938c170d9 |
| eca365cf8c |
| ae7d3e1b49 |
| f353d3d695 |
| 935d88b455 |
| f30f39b28b |
| afa08570f2 |
| 86a1e41f2e |
| f815fa14ea |
| 0fa45fbca4 |
| 02fced28a4 |
| d86bd20411 |
| 2b1ee24e11 |
| a2c829a808 |
| d9e041e0a6 |
| 0e4adcd760 |
| 0e79d239ed |
| e117a3ff33 |
| afd63e8bae |
| 5d963ee5d9 |
| 733cb68496 |
| 80240b3b67 |
| 8b3521ee77 |
.codespellrc

```diff
@@ -1,6 +1,6 @@
 [codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
 ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
```
.github/scripts/install-musl-build-tools.sh (vendored; new file, 163 lines)

```bash
#!/usr/bin/env bash
set -euo pipefail

: "${TARGET:?TARGET environment variable is required}"
: "${GITHUB_ENV:?GITHUB_ENV environment variable is required}"

apt_update_args=()
if [[ -n "${APT_UPDATE_ARGS:-}" ]]; then
  # shellcheck disable=SC2206
  apt_update_args=(${APT_UPDATE_ARGS})
fi

apt_install_args=()
if [[ -n "${APT_INSTALL_ARGS:-}" ]]; then
  # shellcheck disable=SC2206
  apt_install_args=(${APT_INSTALL_ARGS})
fi

sudo apt-get update "${apt_update_args[@]}"
sudo apt-get install -y "${apt_install_args[@]}" musl-tools pkg-config g++ clang libc++-dev libc++abi-dev lld

case "${TARGET}" in
  x86_64-unknown-linux-musl)
    arch="x86_64"
    ;;
  aarch64-unknown-linux-musl)
    arch="aarch64"
    ;;
  *)
    echo "Unexpected musl target: ${TARGET}" >&2
    exit 1
    ;;
esac

# Use the musl toolchain as the Rust linker to avoid Zig injecting its own CRT.
if command -v "${arch}-linux-musl-gcc" >/dev/null; then
  musl_linker="$(command -v "${arch}-linux-musl-gcc")"
elif command -v musl-gcc >/dev/null; then
  musl_linker="$(command -v musl-gcc)"
else
  echo "musl gcc not found after install; arch=${arch}" >&2
  exit 1
fi

zig_target="${TARGET/-unknown-linux-musl/-linux-musl}"
runner_temp="${RUNNER_TEMP:-/tmp}"
tool_root="${runner_temp}/codex-musl-tools-${TARGET}"
mkdir -p "${tool_root}"

sysroot=""
if command -v zig >/dev/null; then
  zig_bin="$(command -v zig)"
  cc="${tool_root}/zigcc"
  cxx="${tool_root}/zigcxx"

  cat >"${cc}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
  if [[ "\${skip_next}" -eq 1 ]]; then
    skip_next=0
    continue
  fi
  case "\${arg}" in
    --target)
      skip_next=1
      continue
      ;;
    --target=*|-target=*|-target)
      # Drop any explicit --target/-target flags. Zig expects -target and
      # rejects Rust triples like *-unknown-linux-musl.
      if [[ "\${arg}" == "-target" ]]; then
        skip_next=1
      fi
      continue
      ;;
  esac
  args+=("\${arg}")
done

exec "${zig_bin}" cc -target "${zig_target}" "\${args[@]}"
EOF
  cat >"${cxx}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
  if [[ "\${skip_next}" -eq 1 ]]; then
    skip_next=0
    continue
  fi
  case "\${arg}" in
    --target)
      skip_next=1
      continue
      ;;
    --target=*|-target=*|-target)
      if [[ "\${arg}" == "-target" ]]; then
        skip_next=1
      fi
      continue
      ;;
  esac
  args+=("\${arg}")
done

exec "${zig_bin}" c++ -target "${zig_target}" "\${args[@]}"
EOF
  chmod +x "${cc}" "${cxx}"

  sysroot="$("${zig_bin}" cc -target "${zig_target}" -print-sysroot 2>/dev/null || true)"
else
  cc="${musl_linker}"

  if command -v "${arch}-linux-musl-g++" >/dev/null; then
    cxx="$(command -v "${arch}-linux-musl-g++")"
  elif command -v musl-g++ >/dev/null; then
    cxx="$(command -v musl-g++)"
  else
    cxx="${cc}"
  fi
fi

if [[ -n "${sysroot}" && "${sysroot}" != "/" ]]; then
  echo "BORING_BSSL_SYSROOT=${sysroot}" >> "$GITHUB_ENV"
  boring_sysroot_var="BORING_BSSL_SYSROOT_${TARGET}"
  boring_sysroot_var="${boring_sysroot_var//-/_}"
  echo "${boring_sysroot_var}=${sysroot}" >> "$GITHUB_ENV"
fi

cflags="-pthread"
cxxflags="-pthread"
if [[ "${TARGET}" == "aarch64-unknown-linux-musl" ]]; then
  # BoringSSL enables -Wframe-larger-than=25344 under clang and treats warnings as errors.
  cflags="${cflags} -Wno-error=frame-larger-than"
  cxxflags="${cxxflags} -Wno-error=frame-larger-than"
fi

echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
echo "CC=${cc}" >> "$GITHUB_ENV"
echo "TARGET_CC=${cc}" >> "$GITHUB_ENV"
target_cc_var="CC_${TARGET}"
target_cc_var="${target_cc_var//-/_}"
echo "${target_cc_var}=${cc}" >> "$GITHUB_ENV"
echo "CXX=${cxx}" >> "$GITHUB_ENV"
echo "TARGET_CXX=${cxx}" >> "$GITHUB_ENV"
target_cxx_var="CXX_${TARGET}"
target_cxx_var="${target_cxx_var//-/_}"
echo "${target_cxx_var}=${cxx}" >> "$GITHUB_ENV"

cargo_linker_var="CARGO_TARGET_${TARGET^^}_LINKER"
cargo_linker_var="${cargo_linker_var//-/_}"
echo "${cargo_linker_var}=${musl_linker}" >> "$GITHUB_ENV"

echo "CMAKE_C_COMPILER=${cc}" >> "$GITHUB_ENV"
echo "CMAKE_CXX_COMPILER=${cxx}" >> "$GITHUB_ENV"
echo "CMAKE_ARGS=-DCMAKE_HAVE_THREADS_LIBRARY=1 -DCMAKE_USE_PTHREADS_INIT=1 -DCMAKE_THREAD_LIBS_INIT=-pthread -DTHREADS_PREFER_PTHREAD_FLAG=ON" >> "$GITHUB_ENV"
```
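The `CARGO_TARGET_*_LINKER` variable name above is derived mechanically from the target triple: uppercase it, then replace dashes with underscores (dashes are not valid in environment variable names). A standalone sketch of the same derivation, using a sample triple for illustration:

```bash
#!/usr/bin/env bash
# Standalone illustration of the linker-variable naming used in the script above.
TARGET="x86_64-unknown-linux-musl"                    # sample triple, not fixed by CI
cargo_linker_var="CARGO_TARGET_${TARGET^^}_LINKER"    # uppercase the triple
cargo_linker_var="${cargo_linker_var//-/_}"           # dashes -> underscores
echo "${cargo_linker_var}"
# prints: CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER
```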
.github/workflows/bazel.yml (vendored; 2 lines changed)

```diff
@@ -81,7 +81,7 @@ jobs:
       # previously built artifacts to minimize build time. The more precise you are with
       # hashFiles sources the less work bazel will have to do.
       # - name: Mount bazel caches
-      #   uses: actions/cache@v4
+      #   uses: actions/cache@v5
       #   with:
       #     path: |
       #       ~/.cache/bazel-repo-cache
```
.github/workflows/rust-ci.yml (vendored; 14 lines changed)

```diff
@@ -261,15 +261,21 @@ jobs:
             /var/cache/apt
           key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1

+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Install Zig
+        uses: mlugg/setup-zig@v2
+        with:
+          version: 0.14.0
+
       - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
         name: Install musl build tools
         env:
           DEBIAN_FRONTEND: noninteractive
+          TARGET: ${{ matrix.target }}
+          APT_UPDATE_ARGS: -o Acquire::Retries=3
+          APT_INSTALL_ARGS: --no-install-recommends
         shell: bash
-        run: |
-          set -euo pipefail
-          sudo apt-get -y update -o Acquire::Retries=3
-          sudo apt-get -y install --no-install-recommends musl-tools pkg-config
+        run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

       - name: Install cargo-chef
         if: ${{ matrix.profile == 'release' }}
```
.github/workflows/rust-release.yml (vendored; 38 lines changed)

```diff
@@ -104,11 +104,17 @@ jobs:
             ${{ github.workspace }}/codex-rs/target/
           key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Install Zig
+        uses: mlugg/setup-zig@v2
+        with:
+          version: 0.14.0
+
       - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
         name: Install musl build tools
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y musl-tools pkg-config
+        env:
+          TARGET: ${{ matrix.target }}
+        run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

       - name: Cargo build
         shell: bash
@@ -246,6 +252,7 @@
           # Path that contains the uncompressed binaries for the current
           # ${{ matrix.target }}
           dest="dist/${{ matrix.target }}"
+          repo_root=$PWD

           # We want to ship the raw Windows executables in the GitHub Release
           # in addition to the compressed archives. Keep the originals for
@@ -285,7 +292,30 @@
           # Must run from inside the dest dir so 7z won't
           # embed the directory path inside the zip.
           if [[ "${{ matrix.runner }}" == windows* ]]; then
-            (cd "$dest" && 7z a "${base}.zip" "$base")
+            if [[ "$base" == "codex-${{ matrix.target }}.exe" ]]; then
+              # Bundle the sandbox helper binaries into the main codex zip so
+              # WinGet installs include the required helpers next to codex.exe.
+              # Fall back to the single-binary zip if the helpers are missing
+              # to avoid breaking releases.
+              bundle_dir="$(mktemp -d)"
+              runner_src="$dest/codex-command-runner-${{ matrix.target }}.exe"
+              setup_src="$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
+              if [[ -f "$runner_src" && -f "$setup_src" ]]; then
+                cp "$dest/$base" "$bundle_dir/$base"
+                cp "$runner_src" "$bundle_dir/codex-command-runner.exe"
+                cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe"
+                # Use an absolute path so bundle zips land in the real dist
+                # dir even when 7z runs from a temp directory.
+                (cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .)
+              else
+                echo "warning: missing sandbox binaries; falling back to single-binary zip"
+                echo "warning: expected $runner_src and $setup_src"
+                (cd "$dest" && 7z a "${base}.zip" "$base")
+              fi
+              rm -rf "$bundle_dir"
+            else
+              (cd "$dest" && 7z a "${base}.zip" "$base")
+            fi
           fi

           # Also create .zst (existing behaviour) *and* remove the original
```
.github/workflows/shell-tool-mcp.yml (vendored; 12 lines changed)

```diff
@@ -97,11 +97,17 @@ jobs:
         with:
           targets: ${{ matrix.target }}

+      - if: ${{ matrix.install_musl }}
+        name: Install Zig
+        uses: mlugg/setup-zig@v2
+        with:
+          version: 0.14.0
+
       - if: ${{ matrix.install_musl }}
         name: Install musl build dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y musl-tools pkg-config
+        env:
+          TARGET: ${{ matrix.target }}
+        run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

       - name: Build exec server binaries
         run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
```
AGENTS.md

```diff
@@ -11,6 +11,7 @@ In the codex-rs folder where the rust code lives:
 - Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
 - Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
 - Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
 - When possible, make `match` statements exhaustive and avoid wildcard arms.
 - When writing tests, prefer comparing the equality of entire objects over fields one by one.
 - When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
+- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
```
PNPM.md (6 lines changed)

````diff
@@ -15,7 +15,7 @@ This project has been migrated from npm to pnpm to improve dependency management

 ```bash
 # Global installation of pnpm
-npm install -g pnpm@10.8.1
+npm install -g pnpm@10.28.2

 # Or with corepack (available with Node.js 22+)
 corepack enable
@@ -59,12 +59,12 @@ codex/

 ## CI/CD

-CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.8.1 or higher.
+CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.28.2 or higher.

 ## Known issues

 If you encounter issues with pnpm, try the following solutions:

 1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
-2. Make sure you're using pnpm 10.8.1 or higher
+2. Make sure you're using pnpm 10.28.2 or higher
 3. Verify that Node.js 22 or higher is installed
````
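If you prefer corepack over a global npm install, the same version can be pinned explicitly; a minimal sketch (the pinned version simply mirrors the one documented above):

```bash
# Enable corepack once (ships with Node.js 22+), then pin and activate pnpm.
corepack enable
corepack prepare pnpm@10.28.2 --activate
pnpm --version   # expected to print 10.28.2
```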
codex-rs/Cargo.lock (generated; 954 lines changed): file diff suppressed because it is too large.

codex-rs/Cargo.toml

```diff
@@ -27,6 +27,7 @@ members = [
     "login",
     "mcp-server",
     "mcp-types",
+    "network-proxy",
     "ollama",
     "process-hardening",
     "protocol",
@@ -136,6 +137,7 @@ env-flags = "0.1.1"
 env_logger = "0.11.5"
 eventsource-stream = "0.2.3"
 futures = { version = "0.3", default-features = false }
+globset = "0.4"
 http = "1.3.1"
 icu_decimal = "2.1"
 icu_locale_core = "2.1"
@@ -186,6 +188,7 @@ seccompiler = "0.5.0"
 sentry = "0.46.0"
 serde = "1"
 serde_json = "1"
+serde_path_to_error = "0.1.20"
 serde_with = "3.16"
 serde_yaml = "0.9"
 serial_test = "3.2.0"
@@ -209,11 +212,11 @@ tiny_http = "0.12"
 tokio = "1"
 tokio-stream = "0.1.18"
 tokio-test = "0.4"
-tokio-tungstenite = "0.21.0"
+tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-native-roots"] }
 tokio-util = "0.7.18"
 toml = "0.9.5"
 toml_edit = "0.24.0"
-tracing = "0.1.43"
+tracing = "0.1.44"
 tracing-appender = "0.2.3"
 tracing-subscriber = "0.3.22"
 tracing-test = "0.2.5"
@@ -299,6 +302,10 @@ opt-level = 0
 # ratatui = { path = "../../ratatui" }
 crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
 ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
+tokio-tungstenite = { git = "https://github.com/JakkuSakura/tokio-tungstenite", rev = "2ae536b0de793f3ddf31fc2f22d445bf1ef2023d" }

 # Uncomment to debug local changes.
 # rmcp = { path = "../../rust-sdk/crates/rmcp" }
+
+[patch."ssh://git@github.com/JakkuSakura/tungstenite-rs.git"]
+tungstenite = { git = "https://github.com/JakkuSakura/tungstenite-rs", rev = "f514de8644821113e5d18a027d6d28a5c8cc0a6e" }
```
```diff
@@ -117,6 +117,10 @@ client_request_definitions! {
         params: v2::ThreadArchiveParams,
         response: v2::ThreadArchiveResponse,
     },
+    ThreadUnarchive => "thread/unarchive" {
+        params: v2::ThreadUnarchiveParams,
+        response: v2::ThreadUnarchiveResponse,
+    },
     ThreadRollback => "thread/rollback" {
         params: v2::ThreadRollbackParams,
         response: v2::ThreadRollbackResponse,
@@ -129,10 +133,18 @@ client_request_definitions! {
         params: v2::ThreadLoadedListParams,
         response: v2::ThreadLoadedListResponse,
     },
+    ThreadRead => "thread/read" {
+        params: v2::ThreadReadParams,
+        response: v2::ThreadReadResponse,
+    },
     SkillsList => "skills/list" {
         params: v2::SkillsListParams,
         response: v2::SkillsListResponse,
     },
+    AppsList => "app/list" {
+        params: v2::AppsListParams,
+        response: v2::AppsListResponse,
+    },
     SkillsConfigWrite => "skills/config/write" {
         params: v2::SkillsConfigWriteParams,
         response: v2::SkillsConfigWriteResponse,
@@ -516,6 +528,12 @@ server_request_definitions! {
         response: v2::ToolRequestUserInputResponse,
     },

+    /// Execute a dynamic tool call on the client.
+    DynamicToolCall => "item/tool/call" {
+        params: v2::DynamicToolCallParams,
+        response: v2::DynamicToolCallResponse,
+    },
+
     /// DEPRECATED APIs below
     /// Request to approve a patch.
     /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
@@ -580,6 +598,7 @@ server_notification_definitions! {
     ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
     ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
     ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
+    ContextCompactionStarted => "thread/compaction/started" (v2::ContextCompactionStartedNotification),
     ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
     DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
     ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
```
```diff
@@ -56,6 +56,8 @@ impl ThreadHistoryBuilder {
                 self.handle_agent_reasoning_raw_content(payload)
             }
             EventMsg::TokenCount(_) => {}
+            EventMsg::ContextCompactionStarted(_) => {}
+            EventMsg::ContextCompactionEnded(_) => {}
             EventMsg::EnteredReviewMode(_) => {}
             EventMsg::ExitedReviewMode(_) => {}
             EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
```
```diff
@@ -5,7 +5,9 @@ use crate::protocol::common::AuthMode;
 use codex_protocol::account::PlanType;
 use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
 use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::CollaborationModeMask;
 use codex_protocol::config_types::ForcedLoginMethod;
+use codex_protocol::config_types::Personality;
 use codex_protocol::config_types::ReasoningSummary;
 use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
 use codex_protocol::config_types::Verbosity;
@@ -29,6 +31,7 @@ use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
 use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
 use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
 use codex_protocol::protocol::SkillScope as CoreSkillScope;
+use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
 use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
 use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
 use codex_protocol::user_input::ByteRange as CoreByteRange;
@@ -322,6 +325,15 @@ pub struct ToolsV2 {
     pub view_image: Option<bool>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct DynamicToolSpec {
+    pub name: String,
+    pub description: String,
+    pub input_schema: JsonValue,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "snake_case")]
 #[ts(export_to = "v2/")]
@@ -392,6 +404,8 @@ pub struct ConfigLayer {
     pub name: ConfigLayerSource,
     pub version: String,
     pub config: JsonValue,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub disabled_reason: Option<String>,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
@@ -697,6 +711,7 @@ pub enum SessionSource {
     VsCode,
     Exec,
     AppServer,
+    SubAgent(CoreSubAgentSource),
     #[serde(other)]
     Unknown,
 }
@@ -708,7 +723,7 @@ impl From<CoreSessionSource> for SessionSource {
             CoreSessionSource::VSCode => SessionSource::VsCode,
             CoreSessionSource::Exec => SessionSource::Exec,
             CoreSessionSource::Mcp => SessionSource::AppServer,
-            CoreSessionSource::SubAgent(_) => SessionSource::Unknown,
+            CoreSessionSource::SubAgent(sub) => SessionSource::SubAgent(sub),
             CoreSessionSource::Unknown => SessionSource::Unknown,
         }
     }
@@ -721,6 +736,7 @@ impl From<SessionSource> for CoreSessionSource {
             SessionSource::VsCode => CoreSessionSource::VSCode,
             SessionSource::Exec => CoreSessionSource::Exec,
             SessionSource::AppServer => CoreSessionSource::Mcp,
+            SessionSource::SubAgent(sub) => CoreSessionSource::SubAgent(sub),
             SessionSource::Unknown => CoreSessionSource::Unknown,
         }
     }
@@ -898,6 +914,8 @@ pub struct Model {
     pub description: String,
     pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
     pub default_reasoning_effort: ReasoningEffort,
+    #[serde(default)]
+    pub supports_personality: bool,
     // Only one model should be marked as default.
     pub is_default: bool,
 }
@@ -931,7 +949,7 @@ pub struct CollaborationModeListParams {}
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct CollaborationModeListResponse {
-    pub data: Vec<CollaborationMode>,
+    pub data: Vec<CollaborationModeMask>,
 }
@@ -965,6 +983,39 @@ pub struct ListMcpServerStatusResponse {
     pub next_cursor: Option<String>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct AppsListParams {
+    /// Opaque pagination cursor returned by a previous call.
+    pub cursor: Option<String>,
+    /// Optional page size; defaults to a reasonable server-side value.
+    pub limit: Option<u32>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct AppInfo {
+    pub id: String,
+    pub name: String,
+    pub description: Option<String>,
+    pub logo_url: Option<String>,
+    pub install_url: Option<String>,
+    #[serde(default)]
+    pub is_accessible: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct AppsListResponse {
+    pub data: Vec<AppInfo>,
+    /// Opaque cursor to pass to the next call to continue after the last item.
+    /// If None, there are no more items to return.
+    pub next_cursor: Option<String>,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -1046,6 +1097,9 @@ pub struct ThreadStartParams {
     pub config: Option<HashMap<String, JsonValue>>,
     pub base_instructions: Option<String>,
     pub developer_instructions: Option<String>,
+    pub personality: Option<Personality>,
+    pub ephemeral: Option<bool>,
+    pub dynamic_tools: Option<Vec<DynamicToolSpec>>,
     /// If true, opt into emitting raw response items on the event stream.
     ///
     /// This is for internal use only (e.g. Codex Cloud).
@@ -1100,6 +1154,7 @@ pub struct ThreadResumeParams {
     pub config: Option<HashMap<String, serde_json::Value>>,
     pub base_instructions: Option<String>,
     pub developer_instructions: Option<String>,
+    pub personality: Option<Personality>,
 }
@@ -1168,6 +1223,20 @@ pub struct ThreadArchiveParams {
 #[ts(export_to = "v2/")]
 pub struct ThreadArchiveResponse {}

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadUnarchiveParams {
+    pub thread_id: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadUnarchiveResponse {
+    pub thread: Thread,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -1205,6 +1274,30 @@ pub struct ThreadListParams {
     /// Optional provider filter; when set, only sessions recorded under these
     /// providers are returned. When present but empty, includes all providers.
     pub model_providers: Option<Vec<String>>,
+    /// Optional source filter; when set, only sessions from these source kinds
+    /// are returned. When omitted or empty, defaults to interactive sources.
+    pub source_kinds: Option<Vec<ThreadSourceKind>>,
+    /// Optional archived filter; when set to true, only archived threads are returned.
+    /// If false or null, only non-archived threads are returned.
+    pub archived: Option<bool>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(rename_all = "camelCase", export_to = "v2/")]
+pub enum ThreadSourceKind {
+    Cli,
+    #[serde(rename = "vscode")]
+    #[ts(rename = "vscode")]
+    VsCode,
+    Exec,
+    AppServer,
+    SubAgent,
+    SubAgentReview,
+    SubAgentCompact,
+    SubAgentThreadSpawn,
+    SubAgentOther,
+    Unknown,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, JsonSchema, TS)]
@@ -1246,6 +1339,23 @@ pub struct ThreadLoadedListResponse {
     pub next_cursor: Option<String>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadReadParams {
+    pub thread_id: String,
+    /// When true, include turns and their items from rollout history.
+    #[serde(default)]
+    pub include_turns: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadReadResponse {
+    pub thread: Thread,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -1408,7 +1518,7 @@ pub struct Thread {
     #[ts(type = "number")]
     pub updated_at: i64,
     /// [UNSTABLE] Path to the thread on disk.
-    pub path: PathBuf,
+    pub path: Option<PathBuf>,
     /// Working directory captured for the thread.
     pub cwd: PathBuf,
     /// Version of the CLI that created the thread.
@@ -1417,7 +1527,8 @@ pub struct Thread {
     pub source: SessionSource,
     /// Optional Git metadata captured when the thread was created.
     pub git_info: Option<GitInfo>,
-    /// Only populated on `thread/resume`, `thread/rollback`, `thread/fork` responses.
+    /// Only populated on `thread/resume`, `thread/rollback`, `thread/fork`, and `thread/read`
+    /// (when `includeTurns` is true) responses.
     /// For all other responses and notifications returning a Thread,
     /// the turns field will be an empty list.
     pub turns: Vec<Turn>,
@@ -1554,6 +1665,8 @@ pub struct TurnStartParams {
     pub effort: Option<ReasoningEffort>,
     /// Override the reasoning summary for this turn and subsequent turns.
     pub summary: Option<ReasoningSummary>,
+    /// Override the personality for this turn and subsequent turns.
+    pub personality: Option<Personality>,
     /// Optional JSON Schema used to constrain the final assistant message for this turn.
     pub output_schema: Option<JsonValue>,
@@ -1856,6 +1969,9 @@ pub enum ThreadItem {
     #[serde(rename_all = "camelCase")]
     #[ts(rename_all = "camelCase")]
     ExitedReviewMode { id: String, review: String },
+    #[serde(rename_all = "camelCase")]
+    #[ts(rename_all = "camelCase")]
+    ContextCompaction { id: String },
 }

 impl From<CoreTurnItem> for ThreadItem {
@@ -1884,6 +2000,9 @@ impl From<CoreTurnItem> for ThreadItem {
                 id: search.id,
                 query: search.query,
             },
+            CoreTurnItem::ContextCompaction(compaction) => {
+                ThreadItem::ContextCompaction { id: compaction.id }
+            }
         }
     }
 }
@@ -2254,6 +2373,14 @@ pub struct ContextCompactedNotification {
     pub turn_id: String,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ContextCompactionStartedNotification {
+    pub thread_id: String,
+    pub turn_id: String,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -2306,6 +2433,25 @@ pub struct FileChangeRequestApprovalResponse {
     pub decision: FileChangeApprovalDecision,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct DynamicToolCallParams {
+    pub thread_id: String,
+    pub turn_id: String,
+    pub call_id: String,
+    pub tool: String,
+    pub arguments: JsonValue,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct DynamicToolCallResponse {
+    pub output: String,
+    pub success: bool,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -2323,6 +2469,8 @@ pub struct ToolRequestUserInputQuestion {
     pub id: String,
     pub header: String,
     pub question: String,
+    #[serde(default)]
+    pub is_other: bool,
     pub options: Option<Vec<ToolRequestUserInputOption>>,
 }
@@ -2442,6 +2590,24 @@ pub struct DeprecationNoticeNotification {
     pub details: Option<String>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TextPosition {
+    /// 1-based line number.
+    pub line: usize,
+    /// 1-based column number (in Unicode scalar values).
+    pub column: usize,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TextRange {
+    pub start: TextPosition,
+    pub end: TextPosition,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -2450,6 +2616,14 @@ pub struct ConfigWarningNotification {
     pub summary: String,
     /// Optional extra guidance or error details.
     pub details: Option<String>,
+    /// Optional path to the config file that triggered the warning.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    #[ts(optional)]
+    pub path: Option<String>,
+    /// Optional range for the error location inside the config file.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    #[ts(optional)]
+    pub range: Option<TextRange>,
 }

 #[cfg(test)]
@@ -2457,10 +2631,12 @@ mod tests {
     use super::*;
     use codex_protocol::items::AgentMessageContent;
     use codex_protocol::items::AgentMessageItem;
+    use codex_protocol::items::ContextCompactionItem;
     use codex_protocol::items::ReasoningItem;
     use codex_protocol::items::TurnItem;
     use codex_protocol::items::UserMessageItem;
     use codex_protocol::items::WebSearchItem;
+    use codex_protocol::models::WebSearchAction;
     use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
     use codex_protocol::user_input::UserInput as CoreUserInput;
     use pretty_assertions::assert_eq;
@@ -2568,6 +2744,9 @@ mod tests {
         let search_item = TurnItem::WebSearch(WebSearchItem {
             id: "search-1".to_string(),
             query: "docs".to_string(),
+            action: WebSearchAction::Search {
+                query: Some("docs".to_string()),
+            },
         });

         assert_eq!(
@@ -2577,6 +2756,17 @@ mod tests {
                 query: "docs".to_string(),
             }
         );

+        let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
+            id: "compact-1".to_string(),
+        });
+
+        assert_eq!(
+            ThreadItem::from(compaction_item),
+            ThreadItem::ContextCompaction {
+                id: "compact-1".to_string(),
+            }
+        );
     }

     #[test]
```
codex-rs/app-server/Cargo.toml

```diff
@@ -22,6 +22,7 @@ codex-common = { workspace = true, features = ["cli"] }
 codex-core = { workspace = true }
+codex-backend-client = { workspace = true }
 codex-file-search = { workspace = true }
 codex-chatgpt = { workspace = true }
 codex-login = { workspace = true }
 codex-protocol = { workspace = true }
 codex-app-server-protocol = { workspace = true }
@@ -34,6 +35,7 @@ serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
 mcp-types = { workspace = true }
 tempfile = { workspace = true }
+time = { workspace = true }
 toml = { workspace = true }
 tokio = { workspace = true, features = [
     "io-std",
@@ -48,11 +50,20 @@ uuid = { workspace = true, features = ["serde", "v7"] }

 [dev-dependencies]
 app_test_support = { workspace = true }
+axum = { workspace = true, default-features = false, features = [
+    "http1",
+    "json",
+    "tokio",
+] }
 base64 = { workspace = true }
 core_test_support = { workspace = true }
 mcp-types = { workspace = true }
 os_info = { workspace = true }
 pretty_assertions = { workspace = true }
+rmcp = { workspace = true, default-features = false, features = [
+    "server",
+    "transport-streamable-http-server",
+] }
 serial_test = { workspace = true }
 wiremock = { workspace = true }
 shlex = { workspace = true }
```
````diff
@@ -79,7 +79,9 @@ Example (from OpenAI's official VSCode extension):
 - `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
 - `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
 - `thread/loaded/list` — list the thread ids currently loaded in memory.
+- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`.
 - `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
+- `thread/unarchive` — move an archived rollout file back into the sessions directory; returns the restored `thread` on success.
 - `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
 - `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
 - `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
@@ -88,6 +90,7 @@ Example (from OpenAI's official VSCode extension):
 - `model/list` — list available models (with reasoning effort options).
 - `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination).
 - `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
+- `app/list` — list available apps.
 - `skills/config/write` — write user-level skill config by path.
 - `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
 - `tool/requestUserInput` — prompt the user with 1–3 short questions for a tool call and return their answers (experimental).
@@ -112,6 +115,20 @@ Start a fresh thread when you need a new Codex conversation.
   "cwd": "/Users/me/project",
   "approvalPolicy": "never",
   "sandbox": "workspaceWrite",
+  "personality": "friendly",
+  "dynamicTools": [
+    {
+      "name": "lookup_ticket",
+      "description": "Fetch a ticket by id",
+      "inputSchema": {
+        "type": "object",
+        "properties": {
+          "id": { "type": "string" }
+        },
+        "required": ["id"]
+      }
+    }
+  ],
 } }
 { "id": 10, "result": {
   "thread": {
@@ -124,10 +141,13 @@ Start a fresh thread when you need a new Codex conversation.
 { "method": "thread/started", "params": { "thread": { … } } }
 ```

-To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:
+To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted. You can also pass the same configuration overrides supported by `thread/start`, such as `personality`:

 ```json
-{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
+{ "method": "thread/resume", "id": 11, "params": {
+  "threadId": "thr_123",
+  "personality": "friendly"
+} }
 { "id": 11, "result": { "thread": { "id": "thr_123", … } } }
 ```
@@ -147,6 +167,8 @@ To branch from a stored session, call `thread/fork` with the `thread.id`. This c
 - `limit` — server defaults to a reasonable page size if unset.
 - `sortKey` — `created_at` (default) or `updated_at`.
 - `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
+- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
+- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).

 Example:
@@ -178,6 +200,20 @@ When `nextCursor` is `null`, you’ve reached the final page.
 } }
 ```

+### Example: Read a thread
+
+Use `thread/read` to fetch a stored thread by id without resuming it. Pass `includeTurns` when you want the rollout history loaded into `thread.turns`.
+
+```json
+{ "method": "thread/read", "id": 22, "params": { "threadId": "thr_123" } }
+{ "id": 22, "result": { "thread": { "id": "thr_123", "turns": [] } } }
+```
+
+```json
+{ "method": "thread/read", "id": 23, "params": { "threadId": "thr_123", "includeTurns": true } }
+{ "id": 23, "result": { "thread": { "id": "thr_123", "turns": [ ... ] } } }
+```
+
 ### Example: Archive a thread

 Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.
@@ -187,7 +223,16 @@ Use `thread/archive` to move the persisted rollout (stored as a JSONL file on di
 { "id": 21, "result": {} }
 ```

-An archived thread will not appear in future calls to `thread/list`.
+An archived thread will not appear in `thread/list` unless `archived` is set to `true`.
+
+### Example: Unarchive a thread
+
+Use `thread/unarchive` to move an archived rollout back into the sessions directory.
+
+```json
+{ "method": "thread/unarchive", "id": 24, "params": { "threadId": "thr_b" } }
+{ "id": 24, "result": { "thread": { "id": "thr_b" } } }
+```

 ### Example: Start a turn (send user input)
@@ -214,6 +259,7 @@ You can optionally specify config overrides on the new turn. If specified, these
   "model": "gpt-5.1-codex",
   "effort": "medium",
   "summary": "concise",
+  "personality": "friendly",
   // Optional JSON Schema to constrain the final assistant message for this turn.
   "outputSchema": {
     "type": "object",
````
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
|
||||
use codex_app_server_protocol::CommandExecutionStatus;
|
||||
use codex_app_server_protocol::ContextCompactedNotification;
|
||||
use codex_app_server_protocol::ContextCompactionStartedNotification;
|
||||
use codex_app_server_protocol::DeprecationNoticeNotification;
|
||||
use codex_app_server_protocol::DynamicToolCallParams;
|
||||
use codex_app_server_protocol::ErrorNotification;
|
||||
use codex_app_server_protocol::ExecCommandApprovalParams;
|
||||
use codex_app_server_protocol::ExecCommandApprovalResponse;
|
||||
@@ -85,6 +87,7 @@ use codex_core::protocol::TurnDiffEvent;
|
||||
use codex_core::review_format::format_review_findings_block;
|
||||
use codex_core::review_prompts;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::dynamic_tools::DynamicToolResponse as CoreDynamicToolResponse;
|
||||
use codex_protocol::plan_tool::UpdatePlanArgs;
|
||||
use codex_protocol::protocol::ReviewOutputEvent;
|
||||
use codex_protocol::request_user_input::RequestUserInputAnswer as CoreRequestUserInputAnswer;
|
||||
@@ -276,6 +279,7 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
id: question.id,
|
||||
header: question.header,
|
||||
question: question.question,
|
||||
is_other: question.is_other,
|
||||
options: question.options.map(|options| {
|
||||
options
|
||||
.into_iter()
|
||||
@@ -318,6 +322,40 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
}
|
||||
}
|
||||
}
|
||||
EventMsg::DynamicToolCallRequest(request) => {
|
||||
if matches!(api_version, ApiVersion::V2) {
|
||||
let call_id = request.call_id;
|
||||
let params = DynamicToolCallParams {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: request.turn_id,
|
||||
call_id: call_id.clone(),
|
||||
tool: request.tool,
|
||||
arguments: request.arguments,
|
||||
};
|
||||
let rx = outgoing
|
||||
.send_request(ServerRequestPayload::DynamicToolCall(params))
|
||||
.await;
|
||||
tokio::spawn(async move {
|
||||
crate::dynamic_tools::on_call_response(call_id, rx, conversation).await;
|
||||
});
|
||||
} else {
|
||||
error!(
|
||||
"dynamic tool calls are only supported on api v2 (call_id: {})",
|
||||
request.call_id
|
||||
);
|
||||
let call_id = request.call_id;
|
||||
let _ = conversation
|
||||
.submit(Op::DynamicToolResponse {
|
||||
id: call_id.clone(),
|
||||
response: CoreDynamicToolResponse {
|
||||
call_id,
|
||||
output: "dynamic tool calls require api v2".to_string(),
|
||||
success: false,
|
||||
},
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
// TODO(celia): properly construct McpToolCall TurnItem in core.
|
||||
EventMsg::McpToolCallBegin(begin_event) => {
|
||||
let notification = construct_mcp_tool_call_notification(
|
||||
@@ -564,7 +602,18 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
.send_server_notification(ServerNotification::AgentMessageDelta(notification))
|
||||
.await;
|
||||
}
|
||||
EventMsg::ContextCompacted(..) => {
|
||||
EventMsg::ContextCompactionStarted(..) => {
|
||||
let notification = ContextCompactionStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ContextCompactionStarted(
|
||||
notification,
|
||||
))
|
||||
.await;
|
||||
}
|
||||
EventMsg::ContextCompactionEnded(..) => {
|
||||
let notification = ContextCompactedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
@@ -1006,7 +1055,15 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
};
|
||||
|
||||
if let Some(request_id) = pending {
|
||||
let rollout_path = conversation.rollout_path();
|
||||
let Some(rollout_path) = conversation.rollout_path() else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "thread has no persisted rollout".to_string(),
|
||||
data: None,
|
||||
};
|
||||
outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
};
|
||||
let response = match read_summary_from_rollout(
|
||||
rollout_path.as_path(),
|
||||
fallback_model_provider.as_str(),
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
codex-rs/app-server/src/dynamic_tools.rs (new file, 58 lines)

```rust
use codex_app_server_protocol::DynamicToolCallResponse;
use codex_core::CodexThread;
use codex_protocol::dynamic_tools::DynamicToolResponse as CoreDynamicToolResponse;
use codex_protocol::protocol::Op;
use std::sync::Arc;
use tokio::sync::oneshot;
use tracing::error;

pub(crate) async fn on_call_response(
    call_id: String,
    receiver: oneshot::Receiver<serde_json::Value>,
    conversation: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Err(err) => {
            error!("request failed: {err:?}");
            let fallback = CoreDynamicToolResponse {
                call_id: call_id.clone(),
                output: "dynamic tool request failed".to_string(),
                success: false,
            };
            if let Err(err) = conversation
                .submit(Op::DynamicToolResponse {
                    id: call_id.clone(),
                    response: fallback,
                })
                .await
            {
                error!("failed to submit DynamicToolResponse: {err}");
            }
            return;
        }
    };

    let response = serde_json::from_value::<DynamicToolCallResponse>(value).unwrap_or_else(|err| {
        error!("failed to deserialize DynamicToolCallResponse: {err}");
        DynamicToolCallResponse {
            output: "dynamic tool response was invalid".to_string(),
            success: false,
        }
    });
    let response = CoreDynamicToolResponse {
        call_id: call_id.clone(),
        output: response.output,
        success: response.success,
    };
    if let Err(err) = conversation
        .submit(Op::DynamicToolResponse {
            id: call_id,
            response,
        })
        .await
    {
        error!("failed to submit DynamicToolResponse: {err}");
    }
}
```
codex-rs/app-server/src/filters.rs (new file, 155 lines)

```rust
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;

pub(crate) fn compute_source_filters(
    source_kinds: Option<Vec<ThreadSourceKind>>,
) -> (Vec<CoreSessionSource>, Option<Vec<ThreadSourceKind>>) {
    let Some(source_kinds) = source_kinds else {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    };

    if source_kinds.is_empty() {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    }

    let requires_post_filter = source_kinds.iter().any(|kind| {
        matches!(
            kind,
            ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown
        )
    });

    if requires_post_filter {
        (Vec::new(), Some(source_kinds))
    } else {
        let interactive_sources = source_kinds
            .iter()
            .filter_map(|kind| match kind {
                ThreadSourceKind::Cli => Some(CoreSessionSource::Cli),
                ThreadSourceKind::VsCode => Some(CoreSessionSource::VSCode),
                ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown => None,
            })
            .collect::<Vec<_>>();
        (interactive_sources, Some(source_kinds))
    }
}

pub(crate) fn source_kind_matches(source: &CoreSessionSource, filter: &[ThreadSourceKind]) -> bool {
    filter.iter().any(|kind| match kind {
        ThreadSourceKind::Cli => matches!(source, CoreSessionSource::Cli),
        ThreadSourceKind::VsCode => matches!(source, CoreSessionSource::VSCode),
        ThreadSourceKind::Exec => matches!(source, CoreSessionSource::Exec),
        ThreadSourceKind::AppServer => matches!(source, CoreSessionSource::Mcp),
        ThreadSourceKind::SubAgent => matches!(source, CoreSessionSource::SubAgent(_)),
        ThreadSourceKind::SubAgentReview => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Review)
            )
        }
        ThreadSourceKind::SubAgentCompact => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Compact)
            )
        }
        ThreadSourceKind::SubAgentThreadSpawn => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn { .. })
        ),
        ThreadSourceKind::SubAgentOther => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::Other(_))
        ),
        ThreadSourceKind::Unknown => matches!(source, CoreSessionSource::Unknown),
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::ThreadId;
    use pretty_assertions::assert_eq;
    use uuid::Uuid;

    #[test]
    fn compute_source_filters_defaults_to_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(None);

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_empty_means_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(Some(Vec::new()));

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_interactive_only_skips_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::Cli, ThreadSourceKind::VsCode];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(
            allowed_sources,
            vec![CoreSessionSource::Cli, CoreSessionSource::VSCode]
        );
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn compute_source_filters_subagent_variant_requires_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::SubAgentReview];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(allowed_sources, Vec::new());
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn source_kind_matches_distinguishes_subagent_variants() {
        let parent_thread_id =
            ThreadId::from_string(&Uuid::new_v4().to_string()).expect("valid thread id");
        let review = CoreSessionSource::SubAgent(CoreSubAgentSource::Review);
        let spawn = CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        });

        assert!(source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentReview]
        ));
        assert!(!source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(!source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentReview]
        ));
    }
}
```
@@ -3,6 +3,7 @@
use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
use std::io::ErrorKind;
use std::io::Result as IoResult;
@@ -11,9 +12,15 @@ use std::path::PathBuf;
use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ConfigLayerSource;
use codex_app_server_protocol::ConfigWarningNotification;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::TextPosition as AppTextPosition;
use codex_app_server_protocol::TextRange as AppTextRange;
use codex_core::ExecPolicyError;
use codex_core::check_execpolicy_for_warnings;
use codex_core::config_loader::ConfigLoadError;
use codex_core::config_loader::TextRange as CoreTextRange;
use codex_feedback::CodexFeedback;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
@@ -33,7 +40,9 @@ use tracing_subscriber::util::SubscriberInitExt;
mod bespoke_event_handling;
mod codex_message_processor;
mod config_api;
mod dynamic_tools;
mod error_code;
mod filters;
mod fuzzy_file_search;
mod message_processor;
mod models;
@@ -44,6 +53,116 @@ mod outgoing_message;
/// plenty for an interactive CLI.
const CHANNEL_CAPACITY: usize = 128;

fn config_warning_from_error(
    summary: impl Into<String>,
    err: &std::io::Error,
) -> ConfigWarningNotification {
    let (path, range) = match config_error_location(err) {
        Some((path, range)) => (Some(path), Some(range)),
        None => (None, None),
    };
    ConfigWarningNotification {
        summary: summary.into(),
        details: Some(err.to_string()),
        path,
        range,
    }
}

fn config_error_location(err: &std::io::Error) -> Option<(String, AppTextRange)> {
    err.get_ref()
        .and_then(|err| err.downcast_ref::<ConfigLoadError>())
        .map(|err| {
            let config_error = err.config_error();
            (
                config_error.path.to_string_lossy().to_string(),
                app_text_range(&config_error.range),
            )
        })
}

fn exec_policy_warning_location(err: &ExecPolicyError) -> (Option<String>, Option<AppTextRange>) {
    match err {
        ExecPolicyError::ParsePolicy { path, source } => {
            if let Some(location) = source.location() {
                let range = AppTextRange {
                    start: AppTextPosition {
                        line: location.range.start.line,
                        column: location.range.start.column,
                    },
                    end: AppTextPosition {
                        line: location.range.end.line,
                        column: location.range.end.column,
                    },
                };
                return (Some(location.path), Some(range));
            }
            (Some(path.clone()), None)
        }
        _ => (None, None),
    }
}

fn app_text_range(range: &CoreTextRange) -> AppTextRange {
    AppTextRange {
        start: AppTextPosition {
            line: range.start.line,
            column: range.start.column,
        },
        end: AppTextPosition {
            line: range.end.line,
            column: range.end.column,
        },
    }
}

fn project_config_warning(config: &Config) -> Option<ConfigWarningNotification> {
    let mut disabled_folders = Vec::new();

    for layer in config
        .config_layer_stack
        .get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, true)
    {
        if !matches!(layer.name, ConfigLayerSource::Project { .. })
            || layer.disabled_reason.is_none()
        {
            continue;
        }
        if let ConfigLayerSource::Project { dot_codex_folder } = &layer.name {
            disabled_folders.push((
                dot_codex_folder.as_path().display().to_string(),
                layer
                    .disabled_reason
                    .as_ref()
                    .map(ToString::to_string)
                    .unwrap_or_else(|| "config.toml is disabled.".to_string()),
            ));
        }
    }

    if disabled_folders.is_empty() {
        return None;
    }

    let mut message = concat!(
        "Project config.toml files are disabled in the following folders. ",
        "Settings in those files are ignored, but skills and exec policies still load.\n",
    )
    .to_string();
    for (index, (folder, reason)) in disabled_folders.iter().enumerate() {
        let display_index = index + 1;
        message.push_str(&format!(" {display_index}. {folder}\n"));
        message.push_str(&format!(" {reason}\n"));
    }

    Some(ConfigWarningNotification {
        summary: message,
        details: None,
        path: None,
        range: None,
    })
}

pub async fn run_main(
    codex_linux_sandbox_exe: Option<PathBuf>,
    cli_config_overrides: CliConfigOverrides,
@@ -95,10 +214,7 @@ pub async fn run_main(
    {
        Ok(config) => config,
        Err(err) => {
            let message = ConfigWarningNotification {
                summary: "Invalid configuration; using defaults.".to_string(),
                details: Some(err.to_string()),
            };
            let message = config_warning_from_error("Invalid configuration; using defaults.", &err);
            config_warnings.push(message);
            Config::load_default_with_cli_overrides(cli_kv_overrides.clone()).map_err(|e| {
                std::io::Error::new(
@@ -112,13 +228,20 @@ pub async fn run_main(
    if let Ok(Some(err)) =
        check_execpolicy_for_warnings(&config.features, &config.config_layer_stack).await
    {
        let (path, range) = exec_policy_warning_location(&err);
        let message = ConfigWarningNotification {
            summary: "Error parsing rules; custom rules not applied.".to_string(),
            details: Some(err.to_string()),
            path,
            range,
        };
        config_warnings.push(message);
    }

    if let Some(warning) = project_config_warning(&config) {
        config_warnings.push(warning);
    }

    let feedback = CodexFeedback::new();

    let otel = codex_core::otel_init::build_provider(
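For reference, the summary built by `project_config_warning` is plain text with a numbered folder list; with a single disabled folder, the loop above would render roughly the following (path and exact spacing illustrative):

Project config.toml files are disabled in the following folders. Settings in those files are ignored, but skills and exec policies still load.
 1. /home/user/project/.codex
 config.toml is disabled.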
@@ -28,6 +28,7 @@ fn model_from_preset(preset: ModelPreset) -> Model {
            preset.supported_reasoning_efforts,
        ),
        default_reasoning_effort: preset.default_reasoning_effort,
        supports_personality: preset.supports_personality,
        is_default: preset.is_default,
    }
}
@@ -286,6 +286,8 @@ mod tests {
        let notification = ServerNotification::ConfigWarning(ConfigWarningNotification {
            summary: "Config error: using defaults".to_string(),
            details: Some("error loading config: bad config".to_string()),
            path: None,
            range: None,
        });

        let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
@@ -49,6 +49,16 @@ impl ChatGptAuthFixture {
        self
    }

    pub fn chatgpt_user_id(mut self, chatgpt_user_id: impl Into<String>) -> Self {
        self.claims.chatgpt_user_id = Some(chatgpt_user_id.into());
        self
    }

    pub fn chatgpt_account_id(mut self, chatgpt_account_id: impl Into<String>) -> Self {
        self.claims.chatgpt_account_id = Some(chatgpt_account_id.into());
        self
    }

    pub fn email(mut self, email: impl Into<String>) -> Self {
        self.claims.email = Some(email.into());
        self
@@ -69,6 +79,8 @@ impl ChatGptAuthFixture {
pub struct ChatGptIdTokenClaims {
    pub email: Option<String>,
    pub plan_type: Option<String>,
    pub chatgpt_user_id: Option<String>,
    pub chatgpt_account_id: Option<String>,
}

impl ChatGptIdTokenClaims {
@@ -85,6 +97,16 @@ impl ChatGptIdTokenClaims {
        self.plan_type = Some(plan_type.into());
        self
    }

    pub fn chatgpt_user_id(mut self, chatgpt_user_id: impl Into<String>) -> Self {
        self.chatgpt_user_id = Some(chatgpt_user_id.into());
        self
    }

    pub fn chatgpt_account_id(mut self, chatgpt_account_id: impl Into<String>) -> Self {
        self.chatgpt_account_id = Some(chatgpt_account_id.into());
        self
    }
}

pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
@@ -93,10 +115,20 @@ pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
    if let Some(email) = &claims.email {
        payload.insert("email".to_string(), json!(email));
    }
    let mut auth_payload = serde_json::Map::new();
    if let Some(plan_type) = &claims.plan_type {
        auth_payload.insert("chatgpt_plan_type".to_string(), json!(plan_type));
    }
    if let Some(chatgpt_user_id) = &claims.chatgpt_user_id {
        auth_payload.insert("chatgpt_user_id".to_string(), json!(chatgpt_user_id));
    }
    if let Some(chatgpt_account_id) = &claims.chatgpt_account_id {
        auth_payload.insert("chatgpt_account_id".to_string(), json!(chatgpt_account_id));
    }
    if !auth_payload.is_empty() {
        payload.insert(
            "https://api.openai.com/auth".to_string(),
            json!({ "chatgpt_plan_type": plan_type }),
            serde_json::Value::Object(auth_payload),
        );
    }
    let payload = serde_json::Value::Object(payload);
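With the change above, every populated auth claim lands in a single auth object instead of the old plan-type-only payload; the encoded token's claims come out roughly like this (illustrative values, sketched with serde_json):

// Approximate claims object produced by encode_id_token (values illustrative).
let claims = serde_json::json!({
    "email": "user@example.com",
    "https://api.openai.com/auth": {
        "chatgpt_plan_type": "pro",
        "chatgpt_user_id": "user-123",
        "chatgpt_account_id": "account-123"
    }
});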
@@ -30,6 +30,7 @@ pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_request_user_input_sse_response;
pub use responses::create_shell_command_sse_response;
pub use rollout::create_fake_rollout;
pub use rollout::create_fake_rollout_with_source;
pub use rollout::create_fake_rollout_with_text_elements;
pub use rollout::rollout_path;
use serde::de::DeserializeOwned;
@@ -12,6 +12,7 @@ use tokio::process::ChildStdout;

use anyhow::Context;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::ArchiveConversationParams;
use codex_app_server_protocol::CancelLoginAccountParams;
use codex_app_server_protocol::CancelLoginChatGptParams;
@@ -48,9 +49,11 @@ use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadForkParams;
use codex_app_server_protocol::ThreadListParams;
use codex_app_server_protocol::ThreadLoadedListParams;
use codex_app_server_protocol::ThreadReadParams;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnStartParams;
use codex_core::default_client::CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR;
@@ -363,6 +366,15 @@ impl McpProcess {
        self.send_request("thread/archive", params).await
    }

    /// Send a `thread/unarchive` JSON-RPC request.
    pub async fn send_thread_unarchive_request(
        &mut self,
        params: ThreadUnarchiveParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/unarchive", params).await
    }

    /// Send a `thread/rollback` JSON-RPC request.
    pub async fn send_thread_rollback_request(
        &mut self,
@@ -390,6 +402,15 @@ impl McpProcess {
        self.send_request("thread/loaded/list", params).await
    }

    /// Send a `thread/read` JSON-RPC request.
    pub async fn send_thread_read_request(
        &mut self,
        params: ThreadReadParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("thread/read", params).await
    }

    /// Send a `model/list` JSON-RPC request.
    pub async fn send_list_models_request(
        &mut self,
@@ -399,6 +420,12 @@ impl McpProcess {
        self.send_request("model/list", params).await
    }

    /// Send an `app/list` JSON-RPC request.
    pub async fn send_apps_list_request(&mut self, params: AppsListParams) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("app/list", params).await
    }

    /// Send a `collaborationMode/list` JSON-RPC request.
    pub async fn send_list_collaboration_modes_request(
        &mut self,
@@ -67,6 +67,7 @@ pub fn create_request_user_input_sse_response(call_id: &str) -> anyhow::Result<S
                "id": "confirm_path",
                "header": "Confirm",
                "question": "Proceed with the plan?",
                "isOther": false,
                "options": [{
                    "label": "Yes (Recommended)",
                    "description": "Continue the current plan."
@@ -38,6 +38,27 @@ pub fn create_fake_rollout(
    preview: &str,
    model_provider: Option<&str>,
    git_info: Option<GitInfo>,
) -> Result<String> {
    create_fake_rollout_with_source(
        codex_home,
        filename_ts,
        meta_rfc3339,
        preview,
        model_provider,
        git_info,
        SessionSource::Cli,
    )
}

/// Create a minimal rollout file with an explicit session source.
pub fn create_fake_rollout_with_source(
    codex_home: &Path,
    filename_ts: &str,
    meta_rfc3339: &str,
    preview: &str,
    model_provider: Option<&str>,
    git_info: Option<GitInfo>,
    source: SessionSource,
) -> Result<String> {
    let uuid = Uuid::new_v4();
    let uuid_str = uuid.to_string();
@@ -57,7 +78,7 @@
        cwd: PathBuf::from("/"),
        originator: "codex".to_string(),
        cli_version: "0.0.0".to_string(),
        source: SessionSource::Cli,
        source,
        model_provider: model_provider.map(str::to_string),
        base_instructions: None,
    };
@@ -108,6 +108,10 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
    let AddConversationSubscriptionResponse { subscription_id } =
        to_response::<AddConversationSubscriptionResponse>(add_listener_resp)?;

    // Drop any buffered events from conversation setup to avoid
    // matching an earlier task_complete.
    mcp.clear_message_buffer();

    // 3) sendUserMessage (should trigger notifications; we only validate an OK response)
    let send_user_id = mcp
        .send_send_user_message_request(SendUserMessageParams {
@@ -125,13 +129,38 @@
    .await??;
    let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)?;

    // Verify the task_finished notification is received.
    // Note this also ensures that the final request to the server was made.
    let task_finished_notification: JSONRPCNotification = timeout(
    let task_started_notification: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
        mcp.read_stream_until_notification_message("codex/event/task_started"),
    )
    .await??;
    let task_started_event: Event = serde_json::from_value(
        task_started_notification
            .params
            .clone()
            .expect("task_started should have params"),
    )
    .expect("task_started should deserialize to Event");

    // Verify the task_finished notification for this turn is received.
    // Note this also ensures that the final request to the server was made.
    let task_finished_notification: JSONRPCNotification = loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("codex/event/task_complete"),
        )
        .await??;
        let event: Event = serde_json::from_value(
            notification
                .params
                .clone()
                .expect("task_complete should have params"),
        )
        .expect("task_complete should deserialize to Event");
        if event.id == task_started_event.id {
            break notification;
        }
    };
    let serde_json::Value::Object(map) = task_finished_notification
        .params
        .expect("notification should have params")
381 codex-rs/app-server/tests/suite/v2/app_list.rs Normal file
@@ -0,0 +1,381 @@
use std::borrow::Cow;
use std::sync::Arc;
use std::time::Duration;

use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use axum::Json;
use axum::Router;
use axum::extract::State;
use axum::http::HeaderMap;
use axum::http::StatusCode;
use axum::http::header::AUTHORIZATION;
use axum::routing::post;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::connectors::ConnectorInfo;
use pretty_assertions::assert_eq;
use rmcp::handler::server::ServerHandler;
use rmcp::model::JsonObject;
use rmcp::model::ListToolsResult;
use rmcp::model::Meta;
use rmcp::model::ServerCapabilities;
use rmcp::model::ServerInfo;
use rmcp::model::Tool;
use rmcp::model::ToolAnnotations;
use rmcp::transport::StreamableHttpServerConfig;
use rmcp::transport::StreamableHttpService;
use rmcp::transport::streamable_http_server::session::local::LocalSessionManager;
use serde_json::json;
use tempfile::TempDir;
use tokio::net::TcpListener;
use tokio::task::JoinHandle;
use tokio::time::timeout;

const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);

#[tokio::test]
async fn list_apps_returns_empty_when_connectors_disabled() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_apps_list_request(AppsListParams {
            limit: Some(50),
            cursor: None,
        })
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let AppsListResponse { data, next_cursor } = to_response(response)?;

    assert!(data.is_empty());
    assert!(next_cursor.is_none());
    Ok(())
}

#[tokio::test]
async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
            logo_url: None,
            install_url: None,
            is_accessible: false,
        },
    ];

    let tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle) = start_apps_server(connectors.clone(), tools).await?;

    let codex_home = TempDir::new()?;
    write_connectors_config(codex_home.path(), &server_url)?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .chatgpt_user_id("user-123")
            .chatgpt_account_id("account-123"),
        AuthCredentialsStoreMode::File,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_apps_list_request(AppsListParams {
            limit: None,
            cursor: None,
        })
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let AppsListResponse { data, next_cursor } = to_response(response)?;

    let expected = vec![
        AppInfo {
            id: "beta".to_string(),
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
        },
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
        },
    ];

    assert_eq!(data, expected);
    assert!(next_cursor.is_none());

    server_handle.abort();
    Ok(())
}

#[tokio::test]
async fn list_apps_paginates_results() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
            logo_url: None,
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
            logo_url: None,
            install_url: None,
            is_accessible: false,
        },
    ];

    let tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle) = start_apps_server(connectors.clone(), tools).await?;

    let codex_home = TempDir::new()?;
    write_connectors_config(codex_home.path(), &server_url)?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .chatgpt_user_id("user-123")
            .chatgpt_account_id("account-123"),
        AuthCredentialsStoreMode::File,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let first_request = mcp
        .send_apps_list_request(AppsListParams {
            limit: Some(1),
            cursor: None,
        })
        .await?;
    let first_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(first_request)),
    )
    .await??;
    let AppsListResponse {
        data: first_page,
        next_cursor: first_cursor,
    } = to_response(first_response)?;

    let expected_first = vec![AppInfo {
        id: "beta".to_string(),
        name: "Beta App".to_string(),
        description: None,
        logo_url: None,
        install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
        is_accessible: true,
    }];

    assert_eq!(first_page, expected_first);
    let next_cursor = first_cursor.ok_or_else(|| anyhow::anyhow!("missing cursor"))?;

    let second_request = mcp
        .send_apps_list_request(AppsListParams {
            limit: Some(1),
            cursor: Some(next_cursor),
        })
        .await?;
    let second_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(second_request)),
    )
    .await??;
    let AppsListResponse {
        data: second_page,
        next_cursor: second_cursor,
    } = to_response(second_response)?;

    let expected_second = vec![AppInfo {
        id: "alpha".to_string(),
        name: "Alpha".to_string(),
        description: Some("Alpha connector".to_string()),
        logo_url: None,
        install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
        is_accessible: false,
    }];

    assert_eq!(second_page, expected_second);
    assert!(second_cursor.is_none());

    server_handle.abort();
    Ok(())
}

#[derive(Clone)]
struct AppsServerState {
    expected_bearer: String,
    expected_account_id: String,
    response: serde_json::Value,
}

#[derive(Clone)]
struct AppListMcpServer {
    tools: Arc<Vec<Tool>>,
}

impl AppListMcpServer {
    fn new(tools: Arc<Vec<Tool>>) -> Self {
        Self { tools }
    }
}

impl ServerHandler for AppListMcpServer {
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
            capabilities: ServerCapabilities::builder().enable_tools().build(),
            ..ServerInfo::default()
        }
    }

    fn list_tools(
        &self,
        _request: Option<rmcp::model::PaginatedRequestParam>,
        _context: rmcp::service::RequestContext<rmcp::service::RoleServer>,
    ) -> impl std::future::Future<Output = Result<ListToolsResult, rmcp::ErrorData>> + Send + '_
    {
        let tools = self.tools.clone();
        async move {
            Ok(ListToolsResult {
                tools: (*tools).clone(),
                next_cursor: None,
                meta: None,
            })
        }
    }
}

async fn start_apps_server(
    connectors: Vec<ConnectorInfo>,
    tools: Vec<Tool>,
) -> Result<(String, JoinHandle<()>)> {
    let state = AppsServerState {
        expected_bearer: "Bearer chatgpt-token".to_string(),
        expected_account_id: "account-123".to_string(),
        response: json!({ "connectors": connectors }),
    };
    let state = Arc::new(state);
    let tools = Arc::new(tools);

    let listener = TcpListener::bind("127.0.0.1:0").await?;
    let addr = listener.local_addr()?;

    let mcp_service = StreamableHttpService::new(
        {
            let tools = tools.clone();
            move || Ok(AppListMcpServer::new(tools.clone()))
        },
        Arc::new(LocalSessionManager::default()),
        StreamableHttpServerConfig::default(),
    );

    let router = Router::new()
        .route("/aip/connectors/list_accessible", post(list_connectors))
        .with_state(state)
        .nest_service("/api/codex/apps", mcp_service);

    let handle = tokio::spawn(async move {
        let _ = axum::serve(listener, router).await;
    });

    Ok((format!("http://{addr}"), handle))
}

async fn list_connectors(
    State(state): State<Arc<AppsServerState>>,
    headers: HeaderMap,
) -> Result<impl axum::response::IntoResponse, StatusCode> {
    let bearer_ok = headers
        .get(AUTHORIZATION)
        .and_then(|value| value.to_str().ok())
        .is_some_and(|value| value == state.expected_bearer);
    let account_ok = headers
        .get("chatgpt-account-id")
        .and_then(|value| value.to_str().ok())
        .is_some_and(|value| value == state.expected_account_id);

    if bearer_ok && account_ok {
        Ok(Json(state.response.clone()))
    } else {
        Err(StatusCode::UNAUTHORIZED)
    }
}

fn connector_tool(connector_id: &str, connector_name: &str) -> Result<Tool> {
    let schema: JsonObject = serde_json::from_value(json!({
        "type": "object",
        "additionalProperties": false
    }))?;
    let mut tool = Tool::new(
        Cow::Owned(format!("connector_{connector_id}")),
        Cow::Borrowed("Connector test tool"),
        Arc::new(schema),
    );
    tool.annotations = Some(ToolAnnotations::new().read_only(true));

    let mut meta = Meta::new();
    meta.0
        .insert("connector_id".to_string(), json!(connector_id));
    meta.0
        .insert("connector_name".to_string(), json!(connector_name));
    tool.meta = Some(meta);
    Ok(tool)
}

fn write_connectors_config(codex_home: &std::path::Path, base_url: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
chatgpt_base_url = "{base_url}"

[features]
connectors = true
"#
        ),
    )
}
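As a side note, the mock's header check can be probed directly; a hypothetical standalone client call against the server returned by start_apps_server might look like this (assumes the `reqwest` crate, which this suite does not itself use):

// Hypothetical probe of the mock connectors endpoint; not part of the suite.
async fn probe_connectors(server_url: &str) -> anyhow::Result<()> {
    let response = reqwest::Client::new()
        .post(format!("{server_url}/aip/connectors/list_accessible"))
        // list_connectors returns 401 unless both headers match the fixture.
        .header("authorization", "Bearer chatgpt-token")
        .header("chatgpt-account-id", "account-123")
        .send()
        .await?;
    assert_eq!(response.status(), reqwest::StatusCode::OK);
    Ok(())
}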
@@ -1,7 +1,7 @@
//! Validates that the collaboration mode list endpoint returns the expected default presets.
//!
//! The test drives the app server through the MCP harness and asserts that the list response
//! includes the plan, pair programming, and execute modes with their default model and reasoning
//! includes the plan, coding, pair programming, and execute modes with their default model and reasoning
//! effort settings, which keeps the API contract visible in one place.

#![allow(clippy::unwrap_used)]
@@ -16,7 +16,8 @@ use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::models_manager::test_builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -44,8 +45,23 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
    let CollaborationModeListResponse { data: items } =
        to_response::<CollaborationModeListResponse>(response)?;

    let expected = vec![plan_preset(), pair_programming_preset(), execute_preset()];
    assert_eq!(expected, items);
    let expected = [
        plan_preset(),
        code_preset(),
        pair_programming_preset(),
        execute_preset(),
    ];
    assert_eq!(expected.len(), items.len());
    for (expected_mask, actual_mask) in expected.iter().zip(items.iter()) {
        assert_eq!(expected_mask.name, actual_mask.name);
        assert_eq!(expected_mask.mode, actual_mask.mode);
        assert_eq!(expected_mask.model, actual_mask.model);
        assert_eq!(expected_mask.reasoning_effort, actual_mask.reasoning_effort);
        assert_eq!(
            expected_mask.developer_instructions,
            actual_mask.developer_instructions
        );
    }
    Ok(())
}

@@ -53,11 +69,11 @@
///
/// If the defaults change in the app server, this helper should be updated alongside the
/// contract, or the test will fail in ways that imply a regression in the API.
fn plan_preset() -> CollaborationMode {
fn plan_preset() -> CollaborationModeMask {
    let presets = test_builtin_collaboration_mode_presets();
    presets
        .into_iter()
        .find(|p| matches!(p, CollaborationMode::Plan(_)))
        .find(|p| p.mode == Some(ModeKind::Plan))
        .unwrap()
}

@@ -65,11 +81,20 @@ fn plan_preset() -> CollaborationMode {
///
/// The helper keeps the expected model and reasoning defaults co-located with the test
/// so that mismatches point directly at the API contract being exercised.
fn pair_programming_preset() -> CollaborationMode {
fn pair_programming_preset() -> CollaborationModeMask {
    let presets = test_builtin_collaboration_mode_presets();
    presets
        .into_iter()
        .find(|p| matches!(p, CollaborationMode::PairProgramming(_)))
        .find(|p| p.mode == Some(ModeKind::PairProgramming))
        .unwrap()
}

/// Builds the code preset that the list response is expected to return.
fn code_preset() -> CollaborationModeMask {
    let presets = test_builtin_collaboration_mode_presets();
    presets
        .into_iter()
        .find(|p| p.mode == Some(ModeKind::Code))
        .unwrap()
}

@@ -77,10 +102,10 @@ fn pair_programming_preset() -> CollaborationMode {
///
/// The execute preset uses a different reasoning effort to capture the higher-effort
/// execution contract the server currently exposes.
fn execute_preset() -> CollaborationMode {
fn execute_preset() -> CollaborationModeMask {
    let presets = test_builtin_collaboration_mode_presets();
    presets
        .into_iter()
        .find(|p| matches!(p, CollaborationMode::Execute(_)))
        .find(|p| p.mode == Some(ModeKind::Execute))
        .unwrap()
}
286 codex-rs/app-server/tests/suite/v2/dynamic_tools.rs Normal file
@@ -0,0 +1,286 @@
use anyhow::Context;
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::to_response;
use codex_app_server_protocol::DynamicToolCallParams;
use codex_app_server_protocol::DynamicToolCallResponse;
use codex_app_server_protocol::DynamicToolSpec;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use core_test_support::responses;
use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::MockServer;

const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(10);

/// Ensures dynamic tool specs are serialized into the model request payload.
#[tokio::test]
async fn thread_start_injects_dynamic_tools_into_model_requests() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Use a minimal JSON schema so we can assert the tool payload round-trips.
    let input_schema = json!({
        "type": "object",
        "properties": {
            "city": { "type": "string" }
        },
        "required": ["city"],
        "additionalProperties": false,
    });
    let dynamic_tool = DynamicToolSpec {
        name: "demo_tool".to_string(),
        description: "Demo dynamic tool".to_string(),
        input_schema: input_schema.clone(),
    };

    // Thread start injects dynamic tools into the thread's tool registry.
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            dynamic_tools: Some(vec![dynamic_tool.clone()]),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn so a model request is issued.
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    // Inspect the captured model request to assert the tool spec made it through.
    let bodies = responses_bodies(&server).await?;
    let body = bodies
        .first()
        .context("expected at least one responses request")?;
    let tool = find_tool(body, &dynamic_tool.name)
        .context("expected dynamic tool to be injected into request")?;

    assert_eq!(
        tool.get("description"),
        Some(&Value::String(dynamic_tool.description.clone()))
    );
    assert_eq!(tool.get("parameters"), Some(&input_schema));

    Ok(())
}

/// Exercises the full dynamic tool call path (server request, client response, model output).
#[tokio::test]
async fn dynamic_tool_call_round_trip_sends_output_to_model() -> Result<()> {
    let call_id = "dyn-call-1";
    let tool_name = "demo_tool";
    let tool_args = json!({ "city": "Paris" });
    let tool_call_arguments = serde_json::to_string(&tool_args)?;

    // First response triggers a dynamic tool call, second closes the turn.
    let responses = vec![
        responses::sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_function_call(call_id, tool_name, &tool_call_arguments),
            responses::ev_completed("resp-1"),
        ]),
        create_final_assistant_message_sse_response("Done")?,
    ];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let dynamic_tool = DynamicToolSpec {
        name: tool_name.to_string(),
        description: "Demo dynamic tool".to_string(),
        input_schema: json!({
            "type": "object",
            "properties": {
                "city": { "type": "string" }
            },
            "required": ["city"],
            "additionalProperties": false,
        }),
    };

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            dynamic_tools: Some(vec![dynamic_tool]),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn so the tool call is emitted.
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Run the tool".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    // Read the tool call request from the app server.
    let request = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let (request_id, params) = match request {
        ServerRequest::DynamicToolCall { request_id, params } => (request_id, params),
        other => panic!("expected DynamicToolCall request, got {other:?}"),
    };

    let expected = DynamicToolCallParams {
        thread_id: thread.id,
        turn_id: turn.id,
        call_id: call_id.to_string(),
        tool: tool_name.to_string(),
        arguments: tool_args.clone(),
    };
    assert_eq!(params, expected);

    // Respond to the tool call so the model receives a function_call_output.
    let response = DynamicToolCallResponse {
        output: "dynamic-ok".to_string(),
        success: true,
    };
    mcp.send_response(request_id, serde_json::to_value(response)?)
        .await?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let bodies = responses_bodies(&server).await?;
    let output = bodies
        .iter()
        .find_map(|body| function_call_output_text(body, call_id))
        .context("expected function_call_output in follow-up request")?;
    assert_eq!(output, "dynamic-ok");

    Ok(())
}

async fn responses_bodies(server: &MockServer) -> Result<Vec<Value>> {
    let requests = server
        .received_requests()
        .await
        .context("failed to fetch received requests")?;

    requests
        .into_iter()
        .filter(|req| req.url.path().ends_with("/responses"))
        .map(|req| {
            req.body_json::<Value>()
                .context("request body should be JSON")
        })
        .collect()
}

fn find_tool<'a>(body: &'a Value, name: &str) -> Option<&'a Value> {
    body.get("tools")
        .and_then(Value::as_array)
        .and_then(|tools| {
            tools
                .iter()
                .find(|tool| tool.get("name").and_then(Value::as_str) == Some(name))
        })
}

fn function_call_output_text(body: &Value, call_id: &str) -> Option<String> {
    body.get("input")
        .and_then(Value::as_array)
        .and_then(|items| {
            items.iter().find(|item| {
                item.get("type").and_then(Value::as_str) == Some("function_call_output")
                    && item.get("call_id").and_then(Value::as_str) == Some(call_id)
            })
        })
        .and_then(|item| item.get("output"))
        .and_then(Value::as_str)
        .map(str::to_string)
}

fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
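For orientation, the find_tool and function_call_output_text helpers above match request-body items of roughly this shape (field names taken from the accessors; values illustrative):

// Sketch of the items the helpers scan for in captured request bodies.
let tool_entry = serde_json::json!({
    "name": "demo_tool",
    "description": "Demo dynamic tool",
    "parameters": { "type": "object" }
});
let output_item = serde_json::json!({
    "type": "function_call_output",
    "call_id": "dyn-call-1",
    "output": "dynamic-ok"
});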
@@ -1,7 +1,9 @@
mod account;
mod analytics;
mod app_list;
mod collaboration_mode_list;
mod config_rpc;
mod dynamic_tools;
mod initialize;
mod model_list;
mod output_schema;
@@ -12,8 +14,10 @@ mod thread_archive;
mod thread_fork;
mod thread_list;
mod thread_loaded_list;
mod thread_read;
mod thread_resume;
mod thread_rollback;
mod thread_start;
mod thread_unarchive;
mod turn_interrupt;
mod turn_start;
@@ -72,6 +72,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            supports_personality: false,
            is_default: true,
        },
        Model {
@@ -99,6 +100,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            supports_personality: false,
            is_default: false,
        },
        Model {
@@ -118,6 +120,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            supports_personality: false,
            is_default: false,
        },
        Model {
@@ -151,6 +154,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
                },
            ],
            default_reasoning_effort: ReasoningEffort::Medium,
            supports_personality: false,
            is_default: false,
        },
    ];
@@ -13,6 +13,7 @@ use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;
use tokio::time::timeout;
@@ -54,11 +55,14 @@ async fn request_user_input_round_trip() -> Result<()> {
        }],
        model: Some("mock-model".to_string()),
        effort: Some(ReasoningEffort::Medium),
        collaboration_mode: Some(CollaborationMode::Plan(Settings {
            model: "mock-model".to_string(),
            reasoning_effort: Some(ReasoningEffort::Medium),
            developer_instructions: None,
        })),
        collaboration_mode: Some(CollaborationMode {
            mode: ModeKind::Plan,
            settings: Settings {
                model: "mock-model".to_string(),
                reasoning_effort: Some(ReasoningEffort::Medium),
                developer_instructions: None,
            },
        }),
        ..Default::default()
    })
    .await?;
@@ -77,8 +77,9 @@ async fn thread_fork_creates_new_thread_and_emits_started() -> Result<()> {
    assert_ne!(thread.id, conversation_id);
    assert_eq!(thread.preview, preview);
    assert_eq!(thread.model_provider, "mock_provider");
    assert!(thread.path.is_absolute());
    assert_ne!(thread.path, original_path);
    let thread_path = thread.path.clone().expect("thread path");
    assert!(thread_path.is_absolute());
    assert_ne!(thread_path, original_path);
    assert!(thread.cwd.is_absolute());
    assert_eq!(thread.source, SessionSource::VsCode);
@@ -1,6 +1,7 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::create_fake_rollout_with_source;
use app_test_support::rollout_path;
use app_test_support::to_response;
use chrono::DateTime;
@@ -12,9 +13,15 @@ use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SessionSource;
use codex_app_server_protocol::ThreadListResponse;
use codex_app_server_protocol::ThreadSortKey;
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_protocol::ThreadId;
use codex_protocol::protocol::GitInfo as CoreGitInfo;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource;
use pretty_assertions::assert_eq;
use std::cmp::Reverse;
use std::fs;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::path::Path;
@@ -36,8 +43,10 @@ async fn list_threads(
    cursor: Option<String>,
    limit: Option<u32>,
    providers: Option<Vec<String>>,
    source_kinds: Option<Vec<ThreadSourceKind>>,
    archived: Option<bool>,
) -> Result<ThreadListResponse> {
    list_threads_with_sort(mcp, cursor, limit, providers, None).await
    list_threads_with_sort(mcp, cursor, limit, providers, source_kinds, None, archived).await
}

async fn list_threads_with_sort(
@@ -45,7 +54,9 @@
    cursor: Option<String>,
    limit: Option<u32>,
    providers: Option<Vec<String>>,
    source_kinds: Option<Vec<ThreadSourceKind>>,
    sort_key: Option<ThreadSortKey>,
    archived: Option<bool>,
) -> Result<ThreadListResponse> {
    let request_id = mcp
        .send_thread_list_request(codex_app_server_protocol::ThreadListParams {
@@ -53,6 +64,8 @@
            limit,
            sort_key,
            model_providers: providers,
            source_kinds,
            archived,
        })
        .await?;
    let resp: JSONRPCResponse = timeout(
@@ -125,6 +138,8 @@ async fn thread_list_basic_empty() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert!(data.is_empty());
@@ -187,6 +202,8 @@ async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
        None,
        Some(2),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(data1.len(), 2);
@@ -211,6 +228,8 @@
        Some(cursor1),
        Some(2),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert!(data2.len() <= 2);
@@ -260,6 +279,8 @@ async fn thread_list_respects_provider_filter() -> Result<()> {
        None,
        Some(10),
        Some(vec!["other_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(data.len(), 1);
@@ -278,6 +299,207 @@
    Ok(())
}

#[tokio::test]
async fn thread_list_empty_source_kinds_defaults_to_interactive_only() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let cli_id = create_fake_rollout(
        codex_home.path(),
        "2025-02-01T10-00-00",
        "2025-02-01T10:00:00Z",
        "CLI",
        Some("mock_provider"),
        None,
    )?;
    let exec_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-01T11-00-00",
        "2025-02-01T11:00:00Z",
        "Exec",
        Some("mock_provider"),
        None,
        CoreSessionSource::Exec,
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let ThreadListResponse { data, next_cursor } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(Vec::new()),
        None,
    )
    .await?;

    assert_eq!(next_cursor, None);
    let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(ids, vec![cli_id.as_str()]);
    assert_ne!(cli_id, exec_id);
    assert_eq!(data[0].source, SessionSource::Cli);

    Ok(())
}

#[tokio::test]
async fn thread_list_filters_by_source_kind_subagent_thread_spawn() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let cli_id = create_fake_rollout(
        codex_home.path(),
        "2025-02-01T10-00-00",
        "2025-02-01T10:00:00Z",
        "CLI",
        Some("mock_provider"),
        None,
    )?;

    let parent_thread_id = ThreadId::from_string(&Uuid::new_v4().to_string())?;
    let subagent_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-01T11-00-00",
        "2025-02-01T11:00:00Z",
        "SubAgent",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        }),
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let ThreadListResponse { data, next_cursor } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentThreadSpawn]),
        None,
    )
    .await?;

    assert_eq!(next_cursor, None);
    let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(ids, vec![subagent_id.as_str()]);
    assert_ne!(cli_id, subagent_id);
    assert!(matches!(data[0].source, SessionSource::SubAgent(_)));

    Ok(())
}

#[tokio::test]
async fn thread_list_filters_by_subagent_variant() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let parent_thread_id = ThreadId::from_string(&Uuid::new_v4().to_string())?;

    let review_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T09-00-00",
        "2025-02-02T09:00:00Z",
        "Review",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Review),
    )?;
    let compact_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T10-00-00",
        "2025-02-02T10:00:00Z",
        "Compact",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Compact),
    )?;
    let spawn_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T11-00-00",
        "2025-02-02T11:00:00Z",
        "Spawn",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        }),
    )?;
    let other_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T12-00-00",
        "2025-02-02T12:00:00Z",
        "Other",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Other("custom".to_string())),
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let review = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentReview]),
        None,
    )
    .await?;
    let review_ids: Vec<_> = review
        .data
        .iter()
        .map(|thread| thread.id.as_str())
        .collect();
    assert_eq!(review_ids, vec![review_id.as_str()]);

    let compact = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentCompact]),
        None,
    )
    .await?;
    let compact_ids: Vec<_> = compact
        .data
        .iter()
        .map(|thread| thread.id.as_str())
        .collect();
    assert_eq!(compact_ids, vec![compact_id.as_str()]);

    let spawn = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentThreadSpawn]),
        None,
    )
    .await?;
    let spawn_ids: Vec<_> = spawn.data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(spawn_ids, vec![spawn_id.as_str()]);

    let other = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentOther]),
        None,
    )
    .await?;
    let other_ids: Vec<_> = other.data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(other_ids, vec![other_id.as_str()]);

    Ok(())
}
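The variant tests above exercise each filter in isolation; a client would combine them in a single thread/list request, roughly like this (field set mirrors the list_threads helper above):

// Sketch: requesting only review sub-agent threads from one provider.
let params = codex_app_server_protocol::ThreadListParams {
    cursor: None,
    limit: Some(10),
    sort_key: None,
    model_providers: Some(vec!["mock_provider".to_string()]),
    source_kinds: Some(vec![ThreadSourceKind::SubAgentReview]),
    archived: Some(false),
};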
|
#[tokio::test]
async fn thread_list_fetches_until_limit_or_exhausted() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -309,6 +531,8 @@ async fn thread_list_fetches_until_limit_or_exhausted() -> Result<()> {
        None,
        Some(8),
        Some(vec!["target_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(
@@ -353,6 +577,8 @@ async fn thread_list_enforces_max_limit() -> Result<()> {
        None,
        Some(200),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(
@@ -398,6 +624,8 @@ async fn thread_list_stops_when_not_enough_filtered_results_exist() -> Result<()
        None,
        Some(10),
        Some(vec!["target_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(
@@ -444,6 +672,8 @@ async fn thread_list_includes_git_info() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    let thread = data
@@ -502,6 +732,8 @@ async fn thread_list_default_sorts_by_created_at() -> Result<()> {
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
        None,
    )
    .await?;

@@ -561,7 +793,9 @@ async fn thread_list_sort_updated_at_orders_by_mtime() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(ThreadSortKey::UpdatedAt),
        None,
    )
    .await?;

@@ -624,7 +858,9 @@ async fn thread_list_updated_at_paginates_with_cursor() -> Result<()> {
        None,
        Some(2),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(ThreadSortKey::UpdatedAt),
        None,
    )
    .await?;
    let ids_page1: Vec<_> = page1.iter().map(|thread| thread.id.as_str()).collect();
@@ -639,7 +875,9 @@ async fn thread_list_updated_at_paginates_with_cursor() -> Result<()> {
        Some(cursor1),
        Some(2),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(ThreadSortKey::UpdatedAt),
        None,
    )
    .await?;
    let ids_page2: Vec<_> = page2.iter().map(|thread| thread.id.as_str()).collect();
@@ -678,6 +916,8 @@ async fn thread_list_created_at_tie_breaks_by_uuid() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;

@@ -729,7 +969,9 @@ async fn thread_list_updated_at_tie_breaks_by_uuid() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(ThreadSortKey::UpdatedAt),
        None,
    )
    .await?;

@@ -768,7 +1010,9 @@ async fn thread_list_updated_at_uses_mtime() -> Result<()> {
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(ThreadSortKey::UpdatedAt),
        None,
    )
    .await?;

@@ -786,6 +1030,67 @@ async fn thread_list_updated_at_uses_mtime() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn thread_list_archived_filter() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let active_id = create_fake_rollout(
        codex_home.path(),
        "2025-03-01T10-00-00",
        "2025-03-01T10:00:00Z",
        "Active",
        Some("mock_provider"),
        None,
    )?;
    let archived_id = create_fake_rollout(
        codex_home.path(),
        "2025-03-01T09-00-00",
        "2025-03-01T09:00:00Z",
        "Archived",
        Some("mock_provider"),
        None,
    )?;

    let archived_dir = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
    fs::create_dir_all(&archived_dir)?;
    let archived_source = rollout_path(codex_home.path(), "2025-03-01T09-00-00", &archived_id);
    let archived_dest = archived_dir.join(
        archived_source
            .file_name()
            .expect("archived rollout should have a file name"),
    );
    fs::rename(&archived_source, &archived_dest)?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let ThreadListResponse { data, .. } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        None,
    )
    .await?;
    assert_eq!(data.len(), 1);
    assert_eq!(data[0].id, active_id);

    let ThreadListResponse { data, .. } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        None,
        Some(true),
    )
    .await?;
    assert_eq!(data.len(), 1);
    assert_eq!(data[0].id, archived_id);

    Ok(())
}
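// NOTE (illustrative): the archived filter above is driven purely by file
// location — a rollout counts as archived once it lives under
// ARCHIVED_SESSIONS_SUBDIR. A sketch of that move; `archive_rollout` is a
// hypothetical helper, not part of the test support crate.
fn archive_rollout(codex_home: &Path, source: &Path) -> std::io::Result<std::path::PathBuf> {
    let archived_dir = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
    std::fs::create_dir_all(&archived_dir)?;
    let dest = archived_dir.join(source.file_name().expect("rollout file name"));
    std::fs::rename(source, &dest)?;
    Ok(dest)
}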
#[tokio::test]
async fn thread_list_invalid_cursor_returns_error() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -799,6 +1104,8 @@ async fn thread_list_invalid_cursor_returns_error() -> Result<()> {
        limit: Some(2),
        sort_key: None,
        model_providers: Some(vec!["mock_provider".to_string()]),
        source_kinds: None,
        archived: None,
    })
    .await?;
    let error: JSONRPCError = timeout(

159 codex-rs/app-server/tests/suite/v2/thread_read.rs Normal file
@@ -0,0 +1,159 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout_with_text_elements;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SessionSource;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadReadParams;
use codex_app_server_protocol::ThreadReadResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput;
use codex_protocol::user_input::ByteRange;
use codex_protocol::user_input::TextElement;
use pretty_assertions::assert_eq;
use std::path::Path;
use std::path::PathBuf;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_read_returns_summary_without_turns() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let preview = "Saved user message";
    let text_elements = [TextElement::new(
        ByteRange { start: 0, end: 5 },
        Some("<note>".into()),
    )];
    let conversation_id = create_fake_rollout_with_text_elements(
        codex_home.path(),
        "2025-01-05T12-00-00",
        "2025-01-05T12:00:00Z",
        preview,
        text_elements
            .iter()
            .map(|elem| serde_json::to_value(elem).expect("serialize text element"))
            .collect(),
        Some("mock_provider"),
        None,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let read_id = mcp
        .send_thread_read_request(ThreadReadParams {
            thread_id: conversation_id.clone(),
            include_turns: false,
        })
        .await?;
    let read_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(read_id)),
    )
    .await??;
    let ThreadReadResponse { thread } = to_response::<ThreadReadResponse>(read_resp)?;

    assert_eq!(thread.id, conversation_id);
    assert_eq!(thread.preview, preview);
    assert_eq!(thread.model_provider, "mock_provider");
    assert!(thread.path.as_ref().expect("thread path").is_absolute());
    assert_eq!(thread.cwd, PathBuf::from("/"));
    assert_eq!(thread.cli_version, "0.0.0");
    assert_eq!(thread.source, SessionSource::Cli);
    assert_eq!(thread.git_info, None);
    assert_eq!(thread.turns.len(), 0);

    Ok(())
}

#[tokio::test]
async fn thread_read_can_include_turns() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let preview = "Saved user message";
    let text_elements = vec![TextElement::new(
        ByteRange { start: 0, end: 5 },
        Some("<note>".into()),
    )];
    let conversation_id = create_fake_rollout_with_text_elements(
        codex_home.path(),
        "2025-01-05T12-00-00",
        "2025-01-05T12:00:00Z",
        preview,
        text_elements
            .iter()
            .map(|elem| serde_json::to_value(elem).expect("serialize text element"))
            .collect(),
        Some("mock_provider"),
        None,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let read_id = mcp
        .send_thread_read_request(ThreadReadParams {
            thread_id: conversation_id.clone(),
            include_turns: true,
        })
        .await?;
    let read_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(read_id)),
    )
    .await??;
    let ThreadReadResponse { thread } = to_response::<ThreadReadResponse>(read_resp)?;

    assert_eq!(thread.turns.len(), 1);
    let turn = &thread.turns[0];
    assert_eq!(turn.status, TurnStatus::Completed);
    assert_eq!(turn.items.len(), 1, "expected user message item");
    match &turn.items[0] {
        ThreadItem::UserMessage { content, .. } => {
            assert_eq!(
                content,
                &vec![UserInput::Text {
                    text: preview.to_string(),
                    text_elements: text_elements.clone().into_iter().map(Into::into).collect(),
                }]
            );
        }
        other => panic!("expected user message item, got {other:?}"),
    }

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
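// NOTE (illustrative): the two tests above differ only in `include_turns`.
// A condensed sketch of the shared request/response round trip;
// `read_thread` is a hypothetical helper built from items already imported
// in this file, not part of the suite.
async fn read_thread(
    mcp: &mut McpProcess,
    thread_id: &str,
    include_turns: bool,
) -> Result<ThreadReadResponse> {
    let id = mcp
        .send_thread_read_request(ThreadReadParams {
            thread_id: thread_id.to_string(),
            include_turns,
        })
        .await?;
    let resp = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(id)),
    )
    .await??;
    to_response::<ThreadReadResponse>(resp)
}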
@@ -2,7 +2,9 @@ use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout_with_text_elements;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::rollout_path;
use app_test_support::to_response;
use chrono::Utc;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SessionSource;
@@ -11,18 +13,25 @@ use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput;
use codex_protocol::config_types::Personality;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::user_input::ByteRange;
use codex_protocol::user_input::TextElement;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::fs::FileTimes;
use std::path::Path;
use std::path::PathBuf;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const DEFAULT_BASE_INSTRUCTIONS: &str = "You are Codex, based on GPT-5. You are running as a coding agent in the Codex CLI on a user's computer.";

#[tokio::test]
async fn thread_resume_returns_original_thread() -> Result<()> {
@@ -112,7 +121,7 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {
    assert_eq!(thread.id, conversation_id);
    assert_eq!(thread.preview, preview);
    assert_eq!(thread.model_provider, "mock_provider");
    assert!(thread.path.is_absolute());
    assert!(thread.path.as_ref().expect("thread path").is_absolute());
    assert_eq!(thread.cwd, PathBuf::from("/"));
    assert_eq!(thread.cli_version, "0.0.0");
    assert_eq!(thread.source, SessionSource::Cli);
@@ -142,6 +151,116 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn thread_resume_without_overrides_does_not_change_updated_at_or_mtime() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;
    let thread_id = rollout.conversation_id.clone();

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread_id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.updated_at, rollout.expected_updated_at);

    let after_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert_eq!(after_modified, rollout.before_modified);

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert!(after_turn_modified > rollout.before_modified);

    Ok(())
}

#[tokio::test]
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: rollout.conversation_id.clone(),
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.updated_at, rollout.expected_updated_at);

    let after_resume_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert_eq!(after_resume_modified, rollout.before_modified);

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: rollout.conversation_id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert!(after_turn_modified > rollout.before_modified);

    Ok(())
}

#[tokio::test]
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -164,7 +283,7 @@ async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let thread_path = thread.path.clone();
    let thread_path = thread.path.clone().expect("thread path");
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: "not-a-valid-thread-id".to_string(),
@@ -248,6 +367,91 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn thread_resume_accepts_personality_override_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let response_mock = responses::mount_sse_once(&server, body).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.2-codex".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
            model: Some("gpt-5.2-codex".to_string()),
            personality: Some(Personality::Friendly),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let _resume: ThreadResumeResponse = to_response::<ThreadResumeResponse>(resume_resp)?;

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let request = response_mock.single_request();
    let developer_texts = request.message_input_texts("developer");
    assert!(
        !developer_texts
            .iter()
            .any(|text| text.contains("<personality_spec>")),
        "did not expect a personality update message in developer input, got {developer_texts:?}"
    );
    let instructions_text = request.instructions_text();
    assert!(
        instructions_text.contains(DEFAULT_BASE_INSTRUCTIONS),
        "expected default base instructions from history, got {instructions_text:?}"
    );

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
@@ -261,6 +465,9 @@ sandbox_mode = "read-only"

model_provider = "mock_provider"

[features]
remote_models = false

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
@@ -271,3 +478,51 @@ stream_max_retries = 0
        ),
    )
}

fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
    let parsed = chrono::DateTime::parse_from_rfc3339(updated_at_rfc3339)?.with_timezone(&Utc);
    let times = FileTimes::new().set_modified(parsed.into());
    std::fs::OpenOptions::new()
        .append(true)
        .open(path)?
        .set_times(times)?;
    Ok(())
}

struct RolloutFixture {
    conversation_id: String,
    rollout_file_path: PathBuf,
    before_modified: std::time::SystemTime,
    expected_updated_at: i64,
}

fn setup_rollout_fixture(codex_home: &Path, server_uri: &str) -> Result<RolloutFixture> {
    create_config_toml(codex_home, server_uri)?;

    let preview = "Saved user message";
    let filename_ts = "2025-01-05T12-00-00";
    let meta_rfc3339 = "2025-01-05T12:00:00Z";
    let expected_updated_at_rfc3339 = "2025-01-07T00:00:00Z";
    let conversation_id = create_fake_rollout_with_text_elements(
        codex_home,
        filename_ts,
        meta_rfc3339,
        preview,
        Vec::new(),
        Some("mock_provider"),
        None,
    )?;
    let rollout_file_path = rollout_path(codex_home, filename_ts, &conversation_id);
    set_rollout_mtime(rollout_file_path.as_path(), expected_updated_at_rfc3339)?;
    let before_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
    let expected_updated_at = chrono::DateTime::parse_from_rfc3339(expected_updated_at_rfc3339)?
        .with_timezone(&Utc)
        .timestamp();

    Ok(RolloutFixture {
        conversation_id,
        rollout_file_path,
        before_modified,
        expected_updated_at,
    })
}
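// NOTE (illustrative): the fixture pins the rollout file's mtime so that
// `thread.updated_at` can be asserted exactly. The inverse read — deriving
// the expected Unix-seconds value from a file's mtime — would look roughly
// like this; the i64-seconds representation is an assumption taken from
// `expected_updated_at` above.
fn mtime_as_updated_at(path: &Path) -> Result<i64> {
    let modified = std::fs::metadata(path)?.modified()?;
    let secs = modified.duration_since(std::time::UNIX_EPOCH)?.as_secs();
    Ok(secs as i64)
}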
101 codex-rs/app-server/tests/suite/v2/thread_unarchive.rs Normal file
@@ -0,0 +1,101 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadArchiveResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::ThreadUnarchiveResponse;
use codex_core::find_archived_thread_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);

#[tokio::test]
async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let rollout_path = find_thread_path_by_id_str(codex_home.path(), &thread.id)
        .await?
        .expect("expected rollout path for thread id to exist");

    let archive_id = mcp
        .send_thread_archive_request(ThreadArchiveParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let archive_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
    )
    .await??;
    let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;

    let archived_path = find_archived_thread_path_by_id_str(codex_home.path(), &thread.id)
        .await?
        .expect("expected archived rollout path for thread id to exist");
    let archived_path_display = archived_path.display();
    assert!(
        archived_path.exists(),
        "expected {archived_path_display} to exist"
    );

    let unarchive_id = mcp
        .send_thread_unarchive_request(ThreadUnarchiveParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let unarchive_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(unarchive_id)),
    )
    .await??;
    let _: ThreadUnarchiveResponse = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;

    let rollout_path_display = rollout_path.display();
    assert!(
        rollout_path.exists(),
        "expected rollout path {rollout_path_display} to be restored"
    );
    assert!(
        !archived_path.exists(),
        "expected archived rollout path {archived_path_display} to be moved"
    );

    Ok(())
}

fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(config_toml, config_contents())
}

fn config_contents() -> &'static str {
    r#"model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
"#
}
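// NOTE (illustrative): the archive/unarchive round trip above reduces to a
// file move in each direction; this restates the final invariant the test
// asserts. Hypothetical helper, not part of the suite.
fn assert_unarchived(rollout_path: &Path, archived_path: &Path) {
    assert!(rollout_path.exists(), "rollout should be restored");
    assert!(!archived_path.exists(), "archived copy should be gone");
}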
@@ -34,13 +34,18 @@ use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStartedNotification;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::features::FEATURES;
use codex_core::features::Feature;
use codex_core::protocol_config_types::ReasoningSummary;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::collections::BTreeMap;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -54,7 +59,12 @@ async fn turn_start_sends_originator_header() -> Result<()> {
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(
@@ -124,7 +134,12 @@ async fn turn_start_emits_user_message_item_with_text_elements() -> Result<()> {
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -211,7 +226,12 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -321,13 +341,19 @@ async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
    let response_mock = responses::mount_sse_once(&server, body).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.2-codex".to_string()),
            ..Default::default()
        })
        .await?;
@@ -338,11 +364,14 @@ async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let collaboration_mode = CollaborationMode::Custom(Settings {
        model: "mock-model-collab".to_string(),
        reasoning_effort: Some(ReasoningEffort::High),
        developer_instructions: None,
    });
    let collaboration_mode = CollaborationMode {
        mode: ModeKind::Custom,
        settings: Settings {
            model: "mock-model-collab".to_string(),
            reasoning_effort: Some(ReasoningEffort::High),
            developer_instructions: None,
        },
    };

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
@@ -379,6 +408,81 @@ async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_accepts_personality_override_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let response_mock = responses::mount_sse_once(&server, body).await;

    let codex_home = TempDir::new()?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("exp-codex-personality".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            personality: Some(Personality::Friendly),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let request = response_mock.single_request();
    let developer_texts = request.message_input_texts("developer");
    if developer_texts.is_empty() {
        eprintln!("request body: {}", request.body_json());
    }
    assert!(
        developer_texts
            .iter()
            .any(|text| text.contains("<personality_spec>")),
        "expected personality update message in developer input, got {developer_texts:?}"
    );

    Ok(())
}

#[tokio::test]
async fn turn_start_accepts_local_image_input() -> Result<()> {
    // Two Codex turns hit the mock model (session start + turn/start).
@@ -391,7 +495,12 @@ async fn turn_start_accepts_local_image_input() -> Result<()> {
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -466,7 +575,12 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    // Default approval is untrusted to force elicitation on first turn.
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;
    create_config_toml(
        codex_home.as_path(),
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.as_path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -591,7 +705,12 @@ async fn turn_start_exec_approval_decline_v2() -> Result<()> {
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;
    create_config_toml(
        codex_home.as_path(),
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(codex_home.as_path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -738,7 +857,12 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
        create_final_assistant_message_sse_response("done second")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;
    create_config_toml(
        &codex_home,
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -776,6 +900,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
            personality: None,
            output_schema: None,
            collaboration_mode: None,
        })
@@ -806,6 +931,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
            model: Some("mock-model".to_string()),
            effort: Some(ReasoningEffort::Medium),
            summary: Some(ReasoningSummary::Auto),
            personality: None,
            output_schema: None,
            collaboration_mode: None,
        })
@@ -876,7 +1002,12 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
        create_final_assistant_message_sse_response("patch applied")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;
    create_config_toml(
        &codex_home,
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -1053,7 +1184,12 @@ async fn turn_start_file_change_approval_accept_for_session_persists_v2() -> Res
        create_final_assistant_message_sse_response("patch 2 applied")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;
    create_config_toml(
        &codex_home,
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -1229,7 +1365,12 @@ async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
        create_final_assistant_message_sse_response("patch declined")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;
    create_config_toml(
        &codex_home,
        &server.uri(),
        "untrusted",
        &BTreeMap::default(),
    )?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -1369,16 +1510,12 @@ async fn command_execution_notifications_include_process_id() -> Result<()> {
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    let config_toml = codex_home.path().join("config.toml");
    let mut config_contents = std::fs::read_to_string(&config_toml)?;
    config_contents.push_str(
        r#"
[features]
unified_exec = true
"#,
    );
    std::fs::write(&config_toml, config_contents)?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::from([(Feature::UnifiedExec, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -1502,7 +1639,24 @@ fn create_config_toml(
    codex_home: &Path,
    server_uri: &str,
    approval_policy: &str,
    feature_flags: &BTreeMap<Feature, bool>,
) -> std::io::Result<()> {
    let mut features = BTreeMap::from([(Feature::RemoteModels, false)]);
    for (feature, enabled) in feature_flags {
        features.insert(*feature, *enabled);
    }
    let feature_entries = features
        .into_iter()
        .map(|(feature, enabled)| {
            let key = FEATURES
                .iter()
                .find(|spec| spec.id == feature)
                .map(|spec| spec.key)
                .unwrap_or_else(|| panic!("missing feature key for {feature:?}"));
            format!("{key} = {enabled}")
        })
        .collect::<Vec<_>>()
        .join("\n");
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
@@ -1514,6 +1668,9 @@ sandbox_mode = "read-only"

model_provider = "mock_provider"

[features]
{feature_entries}

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
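// NOTE (illustrative): with the widened signature above, each call site
// chooses its feature flags; `remote_models = false` is always merged in as
// the baseline. Both shapes below are taken from this diff.
create_config_toml(codex_home.path(), &server.uri(), "never", &BTreeMap::default())?;
create_config_toml(
    codex_home.path(),
    &server.uri(),
    "never",
    &BTreeMap::from([(Feature::UnifiedExec, true)]),
)?;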
@@ -1,4 +1,5 @@
use crate::types::CodeTaskDetailsResponse;
use crate::types::ConfigFileResponse;
use crate::types::CreditStatusDetails;
use crate::types::PaginatedListTaskListItem;
use crate::types::RateLimitStatusPayload;
@@ -244,6 +245,20 @@ impl Client {
        self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
    }

    /// Fetch the managed requirements file from codex-backend.
    ///
    /// `GET /api/codex/config/requirements` (Codex API style) or
    /// `GET /wham/config/requirements` (ChatGPT backend-api style).
    pub async fn get_config_requirements_file(&self) -> Result<ConfigFileResponse> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/config/requirements", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/config/requirements", self.base_url),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<ConfigFileResponse>(&url, &ct, &body)
    }

    /// Create a new task (user turn) by POSTing to the appropriate backend path
    /// based on `path_style`. Returns the created task id.
    pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {

@@ -4,6 +4,7 @@ pub mod types;
pub use client::Client;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::ConfigFileResponse;
pub use types::PaginatedListTaskListItem;
pub use types::TaskListItem;
pub use types::TurnAttemptsSiblingTurnsResponse;

@@ -1,3 +1,4 @@
pub use codex_backend_openapi_models::models::ConfigFileResponse;
pub use codex_backend_openapi_models::models::CreditStatusDetails;
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::PlanType;

@@ -5,6 +5,7 @@ use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

use anyhow::Context;
use serde::Serialize;
use serde::de::DeserializeOwned;

/// Make a GET request to the ChatGPT backend API.
@@ -48,3 +49,37 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
        anyhow::bail!("Request failed with status {status}: {body}")
    }
}

pub(crate) async fn chatgpt_post_request<T: DeserializeOwned, P: Serialize>(
    config: &Config,
    access_token: &str,
    account_id: &str,
    path: &str,
    payload: &P,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    let client = create_client();
    let url = format!("{chatgpt_base_url}{path}");

    let response = client
        .post(&url)
        .bearer_auth(access_token)
        .header("chatgpt-account-id", account_id)
        .header("Content-Type", "application/json")
        .json(payload)
        .send()
        .await
        .context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response
            .json()
            .await
            .context("Failed to parse JSON response")?;
        Ok(result)
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {status}: {body}")
    }
}
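// NOTE (illustrative): a caller of the new POST helper supplies any
// Serialize payload and names the DeserializeOwned response type. The
// `Ping`/`Pong` types and the path below are stand-ins, not real API types
// or endpoints.
#[derive(serde::Serialize)]
struct Ping {
    message: String,
}

#[derive(serde::Deserialize)]
struct Pong {
    message: String,
}

async fn ping(config: &Config, token: &str, account_id: &str) -> anyhow::Result<Pong> {
    let payload = Ping {
        message: "hello".to_string(),
    };
    chatgpt_post_request(config, token, account_id, "/hypothetical/ping", &payload).await
}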
125 codex-rs/chatgpt/src/connectors.rs Normal file
@@ -0,0 +1,125 @@
use codex_core::config::Config;
use codex_core::features::Feature;
use serde::Deserialize;
use serde::Serialize;

use crate::chatgpt_client::chatgpt_post_request;
use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

pub use codex_core::connectors::ConnectorInfo;
pub use codex_core::connectors::connector_display_label;
use codex_core::connectors::connector_install_url;
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools;
use codex_core::connectors::merge_connectors;

#[derive(Debug, Serialize)]
struct ListConnectorsRequest {
    principals: Vec<Principal>,
}

#[derive(Debug, Serialize)]
struct Principal {
    #[serde(rename = "type")]
    principal_type: PrincipalType,
    id: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum PrincipalType {
    User,
}

#[derive(Debug, Deserialize)]
struct ListConnectorsResponse {
    connectors: Vec<ConnectorInfo>,
}

pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
        return Ok(Vec::new());
    }
    let (connectors_result, accessible_result) = tokio::join!(
        list_all_connectors(config),
        list_accessible_connectors_from_mcp_tools(config),
    );
    let connectors = connectors_result?;
    let accessible = accessible_result?;
    Ok(merge_connectors(connectors, accessible))
}

pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
        return Ok(Vec::new());
    }
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
        .await?;

    let token_data =
        get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;
    let user_id = token_data
        .id_token
        .chatgpt_user_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT user ID not available, please re-run `codex login`")
        })?;
    let account_id = token_data
        .id_token
        .chatgpt_account_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
        })?;
    let principal_id = format!("{user_id}__{account_id}");
    let request = ListConnectorsRequest {
        principals: vec![Principal {
            principal_type: PrincipalType::User,
            id: principal_id,
        }],
    };
    let response: ListConnectorsResponse = chatgpt_post_request(
        config,
        token_data.access_token.as_str(),
        account_id,
        "/aip/connectors/list_accessible?skip_actions=true&external_logos=true",
        &request,
    )
    .await?;
    let mut connectors = response.connectors;
    for connector in &mut connectors {
        let install_url = match connector.install_url.take() {
            Some(install_url) => install_url,
            None => connector_install_url(&connector.connector_name, &connector.connector_id),
        };
        connector.connector_name =
            normalize_connector_name(&connector.connector_name, &connector.connector_id);
        connector.connector_description =
            normalize_connector_value(connector.connector_description.as_deref());
        connector.install_url = Some(install_url);
        connector.is_accessible = false;
    }
    connectors.sort_by(|left, right| {
        left.connector_name
            .cmp(&right.connector_name)
            .then_with(|| left.connector_id.cmp(&right.connector_id))
    });
    Ok(connectors)
}

fn normalize_connector_name(name: &str, connector_id: &str) -> String {
    let trimmed = name.trim();
    if trimmed.is_empty() {
        connector_id.to_string()
    } else {
        trimmed.to_string()
    }
}

fn normalize_connector_value(value: Option<&str>) -> Option<String> {
    value
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(str::to_string)
}
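// NOTE (illustrative): the two normalizers above are pure, so their contract
// is easy to pin down. These unit tests mirror the behavior shown in the
// code; the module and test names are hypothetical.
#[cfg(test)]
mod normalize_tests {
    use super::*;

    #[test]
    fn blank_names_fall_back_to_connector_id() {
        assert_eq!(normalize_connector_name("  ", "conn-1"), "conn-1");
        assert_eq!(normalize_connector_name(" Drive ", "conn-1"), "Drive");
    }

    #[test]
    fn empty_descriptions_become_none() {
        assert_eq!(normalize_connector_value(Some("  ")), None);
        assert_eq!(normalize_connector_value(Some(" ok ")), Some("ok".to_string()));
        assert_eq!(normalize_connector_value(None), None);
    }
}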
@@ -1,4 +1,5 @@
pub mod apply_command;
mod chatgpt_client;
mod chatgpt_token;
pub mod connectors;
pub mod get_task;

@@ -147,7 +147,7 @@ struct ResumeCommand {
    session_id: Option<String>,

    /// Continue the most recent session without showing the picker.
    #[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
    #[arg(long = "last", default_value_t = false)]
    last: bool,

    /// Show all sessions (disables cwd filtering and shows CWD column).
@@ -396,8 +396,7 @@ fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
    if !status.success() {
        anyhow::bail!("`{cmd_str}` failed with status {status}");
    }
    println!();
    println!("🎉 Update ran successfully! Please restart Codex.");
    println!("\n🎉 Update ran successfully! Please restart Codex.");
    Ok(())
}

@@ -454,8 +453,8 @@ enum FeaturesSubcommand {
fn stage_str(stage: codex_core::features::Stage) -> &'static str {
    use codex_core::features::Stage;
    match stage {
        Stage::Beta => "experimental",
        Stage::Experimental { .. } => "beta",
        Stage::UnderDevelopment => "under development",
        Stage::Experimental { .. } => "experimental",
        Stage::Stable => "stable",
        Stage::Deprecated => "deprecated",
        Stage::Removed => "removed",
@@ -696,11 +695,20 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                overrides,
            )
            .await?;
            let mut rows = Vec::with_capacity(codex_core::features::FEATURES.len());
            let mut name_width = 0;
            let mut stage_width = 0;
            for def in codex_core::features::FEATURES.iter() {
                let name = def.key;
                let stage = stage_str(def.stage);
                let enabled = config.features.enabled(def.id);
                println!("{name}\t{stage}\t{enabled}");
                name_width = name_width.max(name.len());
                stage_width = stage_width.max(stage.len());
                rows.push((name, stage, enabled));
            }

            for (name, stage, enabled) in rows {
                println!("{name:<name_width$} {stage:<stage_width$} {enabled}");
            }
        }
    },
@@ -924,6 +932,24 @@ mod tests {
        finalize_fork_interactive(interactive, root_overrides, session_id, last, all, fork_cli)
    }

    #[test]
    fn exec_resume_last_accepts_prompt_positional() {
        let cli =
            MultitoolCli::try_parse_from(["codex", "exec", "--json", "resume", "--last", "2+2"])
                .expect("parse should succeed");

        let Some(Subcommand::Exec(exec)) = cli.subcommand else {
            panic!("expected exec subcommand");
        };
        let Some(codex_exec::Command::Resume(args)) = exec.command else {
            panic!("expected exec resume");
        };

        assert!(args.last);
        assert_eq!(args.session_id, None);
        assert_eq!(args.prompt.as_deref(), Some("2+2"));
    }

    fn app_server_from_args(args: &[&str]) -> AppServerCommand {
        let cli = MultitoolCli::try_parse_from(args).expect("parse");
        let Subcommand::AppServer(app_server) = cli.subcommand.expect("app-server present") else {

@@ -247,6 +247,7 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    };

    servers.insert(name.clone(), new_entry);
@@ -348,6 +349,11 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
        _ => bail!("OAuth login is only supported for streamable HTTP servers."),
    };

    let mut scopes = scopes;
    if scopes.is_empty() {
        scopes = server.scopes.clone().unwrap_or_default();
    }

    perform_oauth_login(
        &name,
        &url,

@@ -156,7 +156,7 @@ async fn connect_websocket(
    info!("connecting to websocket: {url}");

    let mut request = url
        .clone()
        .as_str()
        .into_client_request()
        .map_err(|err| ApiError::Stream(format!("failed to build websocket request: {err}")))?;
    request.headers_mut().extend(headers);
@@ -228,7 +228,7 @@ async fn run_websocket_response_stream(
        }
    };

    if let Err(err) = ws_stream.send(Message::Text(request_text)).await {
    if let Err(err) = ws_stream.send(Message::Text(request_text.into())).await {
        return Err(ApiError::Stream(format!(
            "failed to send websocket request: {err}"
        )));

@@ -15,13 +15,12 @@ pub(crate) fn subagent_header(source: &Option<SessionSource>) -> Option<String>
        return None;
    };
    match sub {
        codex_protocol::protocol::SubAgentSource::Review => Some("review".to_string()),
        codex_protocol::protocol::SubAgentSource::Compact => Some("compact".to_string()),
        codex_protocol::protocol::SubAgentSource::ThreadSpawn { .. } => {
            Some("collab_spawn".to_string())
        }
        codex_protocol::protocol::SubAgentSource::Other(label) => Some(label.clone()),
        other => Some(
            serde_json::to_value(other)
                .ok()
                .and_then(|v| v.as_str().map(std::string::ToString::to_string))
                .unwrap_or_else(|| "other".to_string()),
        ),
    }
}

@@ -291,7 +291,7 @@ pub fn process_responses_event(
            if let Ok(item) = serde_json::from_value::<ResponseItem>(item_val) {
                return Ok(Some(ResponseEvent::OutputItemAdded(item)));
            }
            debug!("failed to parse ResponseItem from output_item.done");
            debug!("failed to parse ResponseItem from output_item.added");
        }
    }
    "response.reasoning_summary_part.added" => {

@@ -0,0 +1,40 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigFileResponse {
    #[serde(rename = "contents", skip_serializing_if = "Option::is_none")]
    pub contents: Option<String>,
    #[serde(rename = "sha256", skip_serializing_if = "Option::is_none")]
    pub sha256: Option<String>,
    #[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")]
    pub updated_at: Option<String>,
    #[serde(rename = "updated_by_user_id", skip_serializing_if = "Option::is_none")]
    pub updated_by_user_id: Option<String>,
}

impl ConfigFileResponse {
    pub fn new(
        contents: Option<String>,
        sha256: Option<String>,
        updated_at: Option<String>,
        updated_by_user_id: Option<String>,
    ) -> ConfigFileResponse {
        ConfigFileResponse {
            contents,
            sha256,
            updated_at,
            updated_by_user_id,
        }
    }
}

@@ -3,6 +3,10 @@
// Currently export only the types referenced by the workspace
// The process for this will change

// Config
pub mod config_file_response;
pub use self::config_file_response::ConfigFileResponse;

// Cloud Tasks
pub mod code_task_details_response;
pub use self::code_task_details_response::CodeTaskDetailsResponse;

@@ -64,6 +64,7 @@
reqwest = { workspace = true, features = ["json", "stream"] }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_path_to_error = { workspace = true }
serde_yaml = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }

@@ -153,6 +153,9 @@
    "collaboration_modes": {
      "type": "boolean"
    },
    "connectors": {
      "type": "boolean"
    },
    "elevated_windows_sandbox": {
      "type": "boolean"
    },
@@ -347,6 +350,7 @@
    "description": "Initial collaboration mode to use when the TUI starts.",
    "enum": [
      "plan",
      "code",
      "pair_programming",
      "execute",
      "custom"
@@ -746,6 +750,13 @@
      },
      "type": "object"
    },
    "scopes": {
      "default": null,
      "items": {
        "type": "string"
      },
      "type": "array"
    },
    "startup_timeout_ms": {
      "default": null,
      "format": "uint64",
@@ -1135,6 +1146,9 @@
    "collaboration_modes": {
      "type": "boolean"
    },
    "connectors": {
      "type": "boolean"
    },
    "elevated_windows_sandbox": {
      "type": "boolean"
    },
@@ -1451,6 +1465,10 @@
      ],
      "description": "User-level skill config entries keyed by SKILL.md path."
    },
    "suppress_unstable_features_warning": {
      "description": "Suppress warnings about unstable (under development) features.",
      "type": "boolean"
    },
    "tool_output_token_limit": {
      "description": "Token budget applied when storing tool/function outputs in the context manager.",
      "format": "uint",
||||
File diff suppressed because one or more lines are too long
@@ -39,12 +39,20 @@ impl AgentControl {
        &self,
        config: crate::config::Config,
        prompt: String,
        session_source: Option<codex_protocol::protocol::SessionSource>,
    ) -> CodexResult<ThreadId> {
        let state = self.upgrade()?;
        let reservation = self.state.reserve_spawn_slot(config.agent_max_threads)?;

        // The same `AgentControl` is sent to spawn the thread.
        let new_thread = state.spawn_new_thread(config, self.clone()).await?;
        let new_thread = match session_source {
            Some(session_source) => {
                state
                    .spawn_new_thread_with_source(config, self.clone(), session_source)
                    .await?
            }
            None => state.spawn_new_thread(config, self.clone()).await?,
        };
        reservation.commit(new_thread.thread_id);

        // Notify a new thread has been created. This notification will be processed by clients
@@ -268,7 +276,7 @@ mod tests {
        let control = AgentControl::default();
        let (_home, config) = test_config().await;
        let err = control
            .spawn_agent(config, "hello".to_string())
            .spawn_agent(config, "hello".to_string(), None)
            .await
            .expect_err("spawn_agent should fail without a manager");
        assert_eq!(
@@ -370,7 +378,7 @@ mod tests {
        let harness = AgentControlHarness::new().await;
        let thread_id = harness
            .control
            .spawn_agent(harness.config.clone(), "spawned".to_string())
            .spawn_agent(harness.config.clone(), "spawned".to_string(), None)
            .await
            .expect("spawn_agent should succeed");
        let _thread = harness
@@ -417,12 +425,12 @@ mod tests {
            .expect("start thread");

        let first_agent_id = control
            .spawn_agent(config.clone(), "hello".to_string())
            .spawn_agent(config.clone(), "hello".to_string(), None)
            .await
            .expect("spawn_agent should succeed");

        let err = control
            .spawn_agent(config, "hello again".to_string())
            .spawn_agent(config, "hello again".to_string(), None)
            .await
            .expect_err("spawn_agent should respect max threads");
        let CodexErr::AgentLimitReached {
@@ -455,7 +463,7 @@ mod tests {
        let control = manager.agent_control();

        let first_agent_id = control
            .spawn_agent(config.clone(), "hello".to_string())
            .spawn_agent(config.clone(), "hello".to_string(), None)
            .await
            .expect("spawn_agent should succeed");
        let _ = control
@@ -464,7 +472,7 @@ mod tests {
            .expect("shutdown agent");

        let second_agent_id = control
            .spawn_agent(config.clone(), "hello again".to_string())
            .spawn_agent(config.clone(), "hello again".to_string(), None)
            .await
            .expect("spawn_agent should succeed after shutdown");
        let _ = control
@@ -490,12 +498,12 @@ mod tests {
        let cloned = control.clone();

        let first_agent_id = cloned
            .spawn_agent(config.clone(), "hello".to_string())
            .spawn_agent(config.clone(), "hello".to_string(), None)
            .await
            .expect("spawn_agent should succeed");

        let err = control
            .spawn_agent(config, "hello again".to_string())
            .spawn_agent(config, "hello again".to_string(), None)
            .await
            .expect_err("spawn_agent should respect shared guard");
        let CodexErr::AgentLimitReached { max_threads } = err else {
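A hedged sketch of the new call shape: a caller that wants to attribute a spawned thread passes an explicit source, while None preserves the old behavior. parent_id, parent_source, control, and config are assumed to be in scope.

let source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
    parent_thread_id: parent_id,                    // assumed: the spawning thread's id
    depth: next_thread_spawn_depth(&parent_source), // tag the child's depth
});
let thread_id = control
    .spawn_agent(config, "task".to_string(), Some(source))
    .await?;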
@@ -1,6 +1,8 @@
use crate::error::CodexErr;
use crate::error::Result;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use std::collections::HashSet;
use std::sync::Arc;
use std::sync::Mutex;
@@ -19,6 +21,25 @@ pub(crate) struct Guards {
    total_count: AtomicUsize,
}

/// Initial agent is depth 0.
pub(crate) const MAX_THREAD_SPAWN_DEPTH: i32 = 1;

fn session_depth(session_source: &SessionSource) -> i32 {
    match session_source {
        SessionSource::SubAgent(SubAgentSource::ThreadSpawn { depth, .. }) => *depth,
        SessionSource::SubAgent(_) => 0,
        _ => 0,
    }
}

pub(crate) fn next_thread_spawn_depth(session_source: &SessionSource) -> i32 {
    session_depth(session_source).saturating_add(1)
}

pub(crate) fn exceeds_thread_spawn_depth_limit(depth: i32) -> bool {
    depth > MAX_THREAD_SPAWN_DEPTH
}

impl Guards {
    pub(crate) fn reserve_spawn_slot(
        self: &Arc<Self>,
@@ -102,6 +123,30 @@ mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn session_depth_defaults_to_zero_for_root_sources() {
        assert_eq!(session_depth(&SessionSource::Cli), 0);
    }

    #[test]
    fn thread_spawn_depth_increments_and_enforces_limit() {
        let session_source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
            parent_thread_id: ThreadId::new(),
            depth: 1,
        });
        let child_depth = next_thread_spawn_depth(&session_source);
        assert_eq!(child_depth, 2);
        assert!(exceeds_thread_spawn_depth_limit(child_depth));
    }

    #[test]
    fn non_thread_spawn_subagents_default_to_depth_zero() {
        let session_source = SessionSource::SubAgent(SubAgentSource::Review);
        assert_eq!(session_depth(&session_source), 0);
        assert_eq!(next_thread_spawn_depth(&session_source), 1);
        assert!(!exceeds_thread_spawn_depth_limit(1));
    }

    #[test]
    fn reservation_drop_releases_slot() {
        let guards = Arc::new(Guards::default());
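Walking the helpers end to end shows how the depth-1 limit plays out; a minimal sketch, assuming it runs inside the guards module where these functions are visible:

let root = SessionSource::Cli;                    // roots sit at depth 0
let child_depth = next_thread_spawn_depth(&root); // 1: a root may spawn
assert!(!exceeds_thread_spawn_depth_limit(child_depth));

let child = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
    parent_thread_id: ThreadId::new(),
    depth: child_depth,
});
let grandchild_depth = next_thread_spawn_depth(&child); // 2: over the limit
assert!(exceeds_thread_spawn_depth_limit(grandchild_depth));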
@@ -1,10 +1,12 @@
pub(crate) mod control;
// Do not put in `pub` or `pub(crate)`. This code should not be used somewhere else.
mod guards;
pub(crate) mod role;
pub(crate) mod status;

pub(crate) use codex_protocol::protocol::AgentStatus;
pub(crate) use control::AgentControl;
pub(crate) use guards::MAX_THREAD_SPAWN_DEPTH;
pub(crate) use guards::exceeds_thread_spawn_depth_limit;
pub(crate) use guards::next_thread_spawn_depth;
pub(crate) use role::AgentRole;
pub(crate) use status::agent_status_from_event;
@@ -1,20 +1,22 @@
use crate::config::Config;
use crate::protocol::SandboxPolicy;
use codex_protocol::openai_models::ReasoningEffort;
use serde::Deserialize;
use serde::Serialize;

/// Base instructions for the orchestrator role.
const ORCHESTRATOR_PROMPT: &str = include_str!("../../templates/agents/orchestrator.md");
/// Base instructions for the worker role.
const WORKER_PROMPT: &str = include_str!("../../gpt-5.2-codex_prompt.md");
/// Default worker model override used by the worker role.
const WORKER_MODEL: &str = "gpt-5.2-codex";
/// Default model override used.
// TODO(jif) update when we have something smarter.
const EXPLORER_MODEL: &str = "gpt-5.2-codex";

/// Enumerated list of all supported agent roles.
const ALL_ROLES: [AgentRole; 3] = [
    AgentRole::Default,
    AgentRole::Orchestrator,
    AgentRole::Explorer,
    AgentRole::Worker,
    // TODO(jif) add when we have stable prompts + models
    // AgentRole::Orchestrator,
];

/// Hard-coded agent role selection used when spawning sub-agents.
@@ -27,6 +29,8 @@ pub enum AgentRole {
    Orchestrator,
    /// Task-executing agent with a fixed model override.
    Worker,
    /// Task-executing agent with a fixed model override.
    Explorer,
}

/// Immutable profile data that drives per-agent configuration overrides.
@@ -36,8 +40,12 @@ pub struct AgentProfile {
    pub base_instructions: Option<&'static str>,
    /// Optional model override.
    pub model: Option<&'static str>,
    /// Optional reasoning effort override.
    pub reasoning_effort: Option<ReasoningEffort>,
    /// Whether to force a read-only sandbox policy.
    pub read_only: bool,
    /// Description to include in the tool specs.
    pub description: &'static str,
}

impl AgentRole {
@@ -45,7 +53,19 @@ impl AgentRole {
    pub fn enum_values() -> Vec<String> {
        ALL_ROLES
            .iter()
            .filter_map(|role| serde_json::to_string(role).ok())
            .filter_map(|role| {
                let description = role.profile().description;
                serde_json::to_string(role)
                    .map(|role| {
                        let description = if !description.is_empty() {
                            format!(r#", "description": {description}"#)
                        } else {
                            String::new()
                        };
                        format!(r#"{{ "name": {role}{description}}}"#)
                    })
                    .ok()
            })
            .collect()
    }

@@ -58,8 +78,35 @@ impl AgentRole {
            ..Default::default()
        },
        AgentRole::Worker => AgentProfile {
            base_instructions: Some(WORKER_PROMPT),
            model: Some(WORKER_MODEL),
            // base_instructions: Some(WORKER_PROMPT),
            // model: Some(WORKER_MODEL),
            description: r#"Use for execution and production work.
Typical tasks:
- Implement part of a feature
- Fix tests or bugs
- Split large refactors into independent chunks
Rules:
- Explicitly assign **ownership** of the task (files / responsibility).
- Always tell workers they are **not alone in the codebase**, and they should ignore edits made by others without touching them"#,
            ..Default::default()
        },
        AgentRole::Explorer => AgentProfile {
            model: Some(EXPLORER_MODEL),
            reasoning_effort: Some(ReasoningEffort::Low),
            description: r#"Use for fast codebase understanding and information gathering.
`explorer` are extremely fast agents so use them as much as you can to speed up the resolution of the global task.
Typical tasks:
- Locate usages of a symbol or concept
- Understand how X is handled in Y
- Review a section of code for issues
- Assess impact of a potential change
Rules:
- Be explicit in what you are looking for. A good usage of `explorer` would mean that don't need to read the same code after the explorer send you the result.
- **Always** prefer asking explorers rather than exploring the codebase yourself.
- Spawn multiple explorers in parallel when useful and wait for all results.
- You can ask the `explorer` to return file name, lines, entire code snippets, ...
- Reuse the same explorer when it is relevant. If later in your process you have more questions on some code an explorer already covered, reuse this same explorer to be more efficient.
"#,
            ..Default::default()
        },
    }
@@ -74,6 +121,9 @@ impl AgentRole {
        if let Some(model) = profile.model {
            config.model = Some(model.to_string());
        }
        if let Some(reasoning_effort) = profile.reasoning_effort {
            config.model_reasoning_effort = Some(reasoning_effort)
        }
        if profile.read_only {
            config
                .sandbox_policy
@@ -42,6 +42,7 @@ pub(crate) async fn apply_patch(
        turn_context.approval_policy,
        &turn_context.sandbox_policy,
        &turn_context.cwd,
        turn_context.windows_sandbox_level,
    ) {
        SafetyCheck::AutoApprove {
            user_explicitly_approved,
@@ -996,6 +996,7 @@ mod tests {
            id_token: IdTokenInfo {
                email: Some("user@example.com".to_string()),
                chatgpt_plan_type: Some(InternalPlanType::Known(InternalKnownPlan::Pro)),
                chatgpt_user_id: Some("user-12345".to_string()),
                chatgpt_account_id: None,
                raw_jwt: fake_jwt,
            },
@@ -138,26 +138,12 @@ fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option<Ve
                words.push(child.utf8_text(src.as_bytes()).ok()?.to_owned());
            }
            "string" => {
                if child.child_count() == 3
                    && child.child(0)?.kind() == "\""
                    && child.child(1)?.kind() == "string_content"
                    && child.child(2)?.kind() == "\""
                {
                    words.push(child.child(1)?.utf8_text(src.as_bytes()).ok()?.to_owned());
                } else {
                    return None;
                }
                let parsed = parse_double_quoted_string(child, src)?;
                words.push(parsed);
            }
            "raw_string" => {
                let raw_string = child.utf8_text(src.as_bytes()).ok()?;
                let stripped = raw_string
                    .strip_prefix('\'')
                    .and_then(|s| s.strip_suffix('\''));
                if let Some(s) = stripped {
                    words.push(s.to_owned());
                } else {
                    return None;
                }
                let parsed = parse_raw_string(child, src)?;
                words.push(parsed);
            }
            "concatenation" => {
                // Handle concatenated arguments like -g"*.py"
@@ -170,28 +156,12 @@ fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option<Ve
                            .push_str(part.utf8_text(src.as_bytes()).ok()?.to_owned().as_str());
                    }
                    "string" => {
                        if part.child_count() == 3
                            && part.child(0)?.kind() == "\""
                            && part.child(1)?.kind() == "string_content"
                            && part.child(2)?.kind() == "\""
                        {
                            concatenated.push_str(
                                part.child(1)?
                                    .utf8_text(src.as_bytes())
                                    .ok()?
                                    .to_owned()
                                    .as_str(),
                            );
                        } else {
                            return None;
                        }
                        let parsed = parse_double_quoted_string(part, src)?;
                        concatenated.push_str(&parsed);
                    }
                    "raw_string" => {
                        let raw_string = part.utf8_text(src.as_bytes()).ok()?;
                        let stripped = raw_string
                            .strip_prefix('\'')
                            .and_then(|s| s.strip_suffix('\''))?;
                        concatenated.push_str(stripped);
                        let parsed = parse_raw_string(part, src)?;
                        concatenated.push_str(&parsed);
                    }
                    _ => return None,
                }
@@ -207,9 +177,40 @@ fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option<Ve
    Some(words)
}

fn parse_double_quoted_string(node: Node, src: &str) -> Option<String> {
    if node.kind() != "string" {
        return None;
    }

    let mut cursor = node.walk();
    for part in node.named_children(&mut cursor) {
        if part.kind() != "string_content" {
            return None;
        }
    }
    let raw = node.utf8_text(src.as_bytes()).ok()?;
    let stripped = raw
        .strip_prefix('"')
        .and_then(|text| text.strip_suffix('"'))?;
    Some(stripped.to_string())
}

fn parse_raw_string(node: Node, src: &str) -> Option<String> {
    if node.kind() != "raw_string" {
        return None;
    }

    let raw_string = node.utf8_text(src.as_bytes()).ok()?;
    let stripped = raw_string
        .strip_prefix('\'')
        .and_then(|s| s.strip_suffix('\''));
    stripped.map(str::to_owned)
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn parse_seq(src: &str) -> Option<Vec<Vec<String>>> {
        let tree = try_parse_shell(src)?;
@@ -250,6 +251,38 @@ mod tests {
        );
    }

    #[test]
    fn accepts_double_quoted_strings_with_newlines() {
        let cmds = parse_seq("git commit -m \"line1\nline2\"").unwrap();
        assert_eq!(
            cmds,
            vec![vec![
                "git".to_string(),
                "commit".to_string(),
                "-m".to_string(),
                "line1\nline2".to_string(),
            ]]
        );
    }

    #[test]
    fn accepts_mixed_quote_concatenation() {
        assert_eq!(
            parse_seq(r#"echo "/usr"'/'"local"/bin"#).unwrap(),
            vec![vec!["echo".to_string(), "/usr/local/bin".to_string()]]
        );
        assert_eq!(
            parse_seq(r#"echo '/usr'"/"'local'/bin"#).unwrap(),
            vec![vec!["echo".to_string(), "/usr/local/bin".to_string()]]
        );
    }

    #[test]
    fn rejects_double_quoted_strings_with_expansions() {
        assert!(parse_seq(r#"echo "hi ${USER}""#).is_none());
        assert!(parse_seq(r#"echo "$HOME""#).is_none());
    }

    #[test]
    fn accepts_numbers_as_words() {
        let cmds = parse_seq("echo 123 456").unwrap();
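The two extracted helpers both reduce to prefix/suffix stripping once the node kinds are validated; this standalone sketch isolates that rule without tree-sitter:

fn strip_double_quotes(raw: &str) -> Option<&str> {
    raw.strip_prefix('"').and_then(|s| s.strip_suffix('"'))
}

fn strip_single_quotes(raw: &str) -> Option<&str> {
    raw.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
}

fn main() {
    assert_eq!(strip_double_quotes("\"line1\nline2\""), Some("line1\nline2"));
    assert_eq!(strip_single_quotes("'/usr'"), Some("/usr"));
    assert_eq!(strip_double_quotes("'mismatched\""), None); // mismatched quotes are rejected
}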
@@ -226,13 +226,11 @@ impl ModelClient {

        let mut extra_headers = ApiHeaderMap::new();
        if let SessionSource::SubAgent(sub) = &self.state.session_source {
            let subagent = if let crate::protocol::SubAgentSource::Other(label) = sub {
                label.clone()
            } else {
                serde_json::to_value(sub)
                    .ok()
                    .and_then(|v| v.as_str().map(std::string::ToString::to_string))
                    .unwrap_or_else(|| "other".to_string())
            let subagent = match sub {
                crate::protocol::SubAgentSource::Review => "review".to_string(),
                crate::protocol::SubAgentSource::Compact => "compact".to_string(),
                crate::protocol::SubAgentSource::ThreadSpawn { .. } => "collab_spawn".to_string(),
                crate::protocol::SubAgentSource::Other(label) => label.clone(),
            };
            if let Ok(val) = HeaderValue::from_str(&subagent) {
                extra_headers.insert("x-openai-subagent", val);
@@ -627,11 +625,13 @@ fn build_api_prompt(prompt: &Prompt, instructions: String, tools_json: Vec<Value
    }
}

fn beta_feature_headers(config: &Config) -> ApiHeaderMap {
fn experimental_feature_headers(config: &Config) -> ApiHeaderMap {
    let enabled = FEATURES
        .iter()
        .filter_map(|spec| {
            if spec.stage.beta_menu_description().is_some() && config.features.enabled(spec.id) {
            if spec.stage.experimental_menu_description().is_some()
                && config.features.enabled(spec.id)
            {
                Some(spec.key)
            } else {
                None
@@ -652,16 +652,14 @@ fn build_responses_headers(
    config: &Config,
    turn_state: Option<&Arc<OnceLock<String>>>,
) -> ApiHeaderMap {
    let mut headers = beta_feature_headers(config);
    let mut headers = experimental_feature_headers(config);
    headers.insert(
        WEB_SEARCH_ELIGIBLE_HEADER,
        HeaderValue::from_static(
            if matches!(config.web_search_mode, Some(WebSearchMode::Disabled)) {
                "false"
            } else {
                "true"
            },
        ),
        HeaderValue::from_static(if config.web_search_mode == WebSearchMode::Disabled {
            "false"
        } else {
            "true"
        }),
    );
    if let Some(turn_state) = turn_state
        && let Some(state) = turn_state.get()
File diff suppressed because it is too large
@@ -57,6 +57,7 @@ pub(crate) async fn run_codex_thread_interactive(
        initial_history.unwrap_or(InitialHistory::New),
        SessionSource::SubAgent(SubAgentSource::Review),
        parent_session.services.agent_control.clone(),
        Vec::new(),
    )
    .await?;
    let codex = Arc::new(codex);
@@ -92,6 +93,7 @@ pub(crate) async fn run_codex_thread_interactive(
        tx_sub: tx_ops,
        rx_event: rx_sub,
        agent_status: codex.agent_status.clone(),
        session: Arc::clone(&codex.session),
    })
}

@@ -134,6 +136,7 @@ pub(crate) async fn run_codex_thread_one_shot(
    let (tx_bridge, rx_bridge) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY);
    let ops_tx = io.tx_sub.clone();
    let agent_status = io.agent_status.clone();
    let session = Arc::clone(&io.session);
    let io_for_bridge = io;
    tokio::spawn(async move {
        while let Ok(event) = io_for_bridge.next_event().await {
@@ -166,6 +169,7 @@ pub(crate) async fn run_codex_thread_one_shot(
        rx_event: rx_bridge,
        tx_sub: tx_closed,
        agent_status,
        session,
    })
}

@@ -442,15 +446,15 @@ mod tests {
        let (tx_events, rx_events) = bounded(1);
        let (tx_sub, rx_sub) = bounded(SUBMISSION_CHANNEL_CAPACITY);
        let (_agent_status_tx, agent_status) = watch::channel(AgentStatus::PendingInit);
        let (session, ctx, _rx_evt) = crate::codex::make_session_and_context_with_rx().await;
        let codex = Arc::new(Codex {
            next_id: AtomicU64::new(0),
            tx_sub,
            rx_event: rx_events,
            agent_status,
            session: Arc::clone(&session),
        });

        let (session, ctx, _rx_evt) = crate::codex::make_session_and_context_with_rx().await;

        let (tx_out, rx_out) = bounded(1);
        tx_out
            .send(Event {
@@ -4,18 +4,35 @@ use crate::error::Result as CodexResult;
use crate::protocol::Event;
use crate::protocol::Op;
use crate::protocol::Submission;
use codex_protocol::config_types::Personality;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use std::path::PathBuf;
use tokio::sync::watch;

#[derive(Clone, Debug)]
pub struct ThreadConfigSnapshot {
    pub model: String,
    pub model_provider_id: String,
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub cwd: PathBuf,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub personality: Option<Personality>,
    pub session_source: SessionSource,
}

pub struct CodexThread {
    codex: Codex,
    rollout_path: PathBuf,
    rollout_path: Option<PathBuf>,
}

/// Conduit for the bidirectional stream of messages that compose a thread
/// (formerly called a conversation) in Codex.
impl CodexThread {
    pub(crate) fn new(codex: Codex, rollout_path: PathBuf) -> Self {
    pub(crate) fn new(codex: Codex, rollout_path: Option<PathBuf>) -> Self {
        Self {
            codex,
            rollout_path,
@@ -43,7 +60,11 @@ impl CodexThread {
        self.codex.agent_status.clone()
    }

    pub fn rollout_path(&self) -> PathBuf {
    pub fn rollout_path(&self) -> Option<PathBuf> {
        self.rollout_path.clone()
    }

    pub async fn config_snapshot(&self) -> ThreadConfigSnapshot {
        self.codex.thread_config_snapshot().await
    }
}
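Callers now have to handle the ephemeral case where no rollout file exists; a minimal self-contained sketch with a hypothetical stand-in type (the real accessor lives on CodexThread):

use std::path::PathBuf;

struct Thread {
    rollout_path: Option<PathBuf>,
}

fn describe(thread: &Thread) -> String {
    match &thread.rollout_path {
        Some(path) => format!("rollout persisted at {}", path.display()),
        None => "ephemeral session: no rollout file".to_string(),
    }
}

fn main() {
    let ephemeral = Thread { rollout_path: None };
    println!("{}", describe(&ephemeral));
}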
@@ -10,7 +10,8 @@ use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::features::Feature;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::ContextCompactionEndedEvent;
use crate::protocol::ContextCompactionStartedEvent;
use crate::protocol::EventMsg;
use crate::protocol::TurnContextItem;
use crate::protocol::TurnStartedEvent;
@@ -20,6 +21,7 @@ use crate::truncate::TruncationPolicy;
use crate::truncate::approx_token_count;
use crate::truncate::truncate_text;
use crate::util::backoff;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
@@ -28,6 +30,7 @@ use codex_protocol::protocol::RolloutItem;
use codex_protocol::user_input::UserInput;
use futures::prelude::*;
use tracing::error;
use uuid::Uuid;

pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
pub const SUMMARY_PREFIX: &str = include_str!("../templates/compact/summary_prefix.md");
@@ -71,6 +74,9 @@ async fn run_compact_task_inner(
    turn_context: Arc<TurnContext>,
    input: Vec<UserInput>,
) {
    let compaction_item = compaction_turn_item();
    emit_compaction_started(&sess, &turn_context, &compaction_item).await;

    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);

    let mut history = sess.clone_history().await;
@@ -94,6 +100,7 @@ async fn run_compact_task_inner(
        approval_policy: turn_context.approval_policy,
        sandbox_policy: turn_context.sandbox_policy.clone(),
        model: turn_context.client.get_model(),
        personality: turn_context.personality,
        collaboration_mode: Some(collaboration_mode),
        effort: turn_context.client.get_reasoning_effort(),
        summary: turn_context.client.get_reasoning_summary(),
@@ -111,6 +118,7 @@ async fn run_compact_task_inner(
        let prompt = Prompt {
            input: turn_input,
            base_instructions: sess.get_base_instructions().await,
            personality: turn_context.personality,
            ..Default::default()
        };
        let attempt_result = drain_to_completed(&sess, turn_context.as_ref(), &prompt).await;
@@ -129,6 +137,7 @@ async fn run_compact_task_inner(
                break;
            }
            Err(CodexErr::Interrupted) => {
                emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                return;
            }
            Err(e @ CodexErr::ContextWindowExceeded) => {
@@ -145,6 +154,7 @@ async fn run_compact_task_inner(
                sess.set_total_tokens_full(turn_context.as_ref()).await;
                let event = EventMsg::Error(e.to_error_event(None));
                sess.send_event(&turn_context, event).await;
                emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                return;
            }
            Err(e) => {
@@ -162,6 +172,7 @@ async fn run_compact_task_inner(
                } else {
                    let event = EventMsg::Error(e.to_error_event(None));
                    sess.send_event(&turn_context, event).await;
                    emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                    return;
                }
            }
@@ -191,8 +202,7 @@ async fn run_compact_task_inner(
    });
    sess.persist_rollout_items(&[rollout_item]).await;

    let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
    sess.send_event(&turn_context, event).await;
    emit_compaction_ended(&sess, &turn_context, compaction_item).await;

    let warning = EventMsg::Warning(WarningEvent {
        message: "Heads up: Long threads and multiple compactions can cause the model to be less accurate. Start a new thread when possible to keep threads small and targeted.".to_string(),
@@ -200,6 +210,38 @@ async fn run_compact_task_inner(
    sess.send_event(&turn_context, warning).await;
}

fn compaction_turn_item() -> TurnItem {
    TurnItem::ContextCompaction(ContextCompactionItem {
        id: Uuid::new_v4().to_string(),
    })
}

pub(crate) async fn emit_compaction_started(
    sess: &Session,
    turn_context: &TurnContext,
    item: &TurnItem,
) {
    sess.send_event(
        turn_context,
        EventMsg::ContextCompactionStarted(ContextCompactionStartedEvent {}),
    )
    .await;
    sess.emit_turn_item_started(turn_context, item).await;
}

pub(crate) async fn emit_compaction_ended(
    sess: &Session,
    turn_context: &TurnContext,
    item: TurnItem,
) {
    sess.emit_turn_item_completed(turn_context, item).await;
    sess.send_event(
        turn_context,
        EventMsg::ContextCompactionEnded(ContextCompactionEndedEvent {}),
    )
    .await;
}

pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
    let mut pieces = Vec::new();
    for item in content {
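The shape the diff enforces is that every exit path, success, interruption, or error, stays bracketed by a started/ended pair; a toy sketch with hypothetical emit functions standing in for the session methods:

fn emit_started() { println!("compaction started"); }
fn emit_ended() { println!("compaction ended"); }

fn run_compaction(fail: bool) {
    emit_started();
    if fail {
        println!("error event");
        emit_ended(); // still emitted on the error path
        return;
    }
    println!("compacted");
    emit_ended();
}

fn main() {
    run_compaction(false);
    run_compaction(true);
}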
@@ -3,13 +3,17 @@ use std::sync::Arc;
use crate::Prompt;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::compact::emit_compaction_ended;
use crate::compact::emit_compaction_started;
use crate::error::Result as CodexResult;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::EventMsg;
use crate::protocol::RolloutItem;
use crate::protocol::TurnStartedEvent;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
use uuid::Uuid;

pub(crate) async fn run_inline_remote_auto_compact_task(
    sess: Arc<Session>,
@@ -28,12 +32,19 @@ pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Ar
}

async fn run_remote_compact_task_inner(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
        id: Uuid::new_v4().to_string(),
    });
    emit_compaction_started(sess, turn_context, &compaction_item).await;

    if let Err(err) = run_remote_compact_task_inner_impl(sess, turn_context).await {
        let event = EventMsg::Error(
            err.to_error_event(Some("Error running remote compact task".to_string())),
        );
        sess.send_event(turn_context, event).await;
    }

    emit_compaction_ended(sess, turn_context, compaction_item).await;
}

async fn run_remote_compact_task_inner_impl(
@@ -55,7 +66,7 @@ async fn run_remote_compact_task_inner_impl(
        tools: vec![],
        parallel_tool_calls: false,
        base_instructions: sess.get_base_instructions().await,
        personality: None,
        personality: turn_context.personality,
        output_schema: None,
    };

@@ -77,8 +88,5 @@ async fn run_remote_compact_task_inner_impl(
    sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
        .await;

    let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
    sess.send_event(turn_context, event).await;

    Ok(())
}
@@ -4,6 +4,7 @@ use crate::config::types::Notice;
use crate::path_utils::resolve_symlink_write_paths;
use crate::path_utils::write_atomically;
use anyhow::Context;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::openai_models::ReasoningEffort;
use std::collections::BTreeMap;
@@ -24,6 +25,8 @@ pub enum ConfigEdit {
        model: Option<String>,
        effort: Option<ReasoningEffort>,
    },
    /// Update the active (or default) model personality.
    SetModelPersonality { personality: Option<Personality> },
    /// Toggle the acknowledgement flag under `[notice]`.
    SetNoticeHideFullAccessWarning(bool),
    /// Toggle the Windows world-writable directories warning acknowledgement flag.
@@ -164,6 +167,11 @@ mod document_helpers {
        {
            entry["disabled_tools"] = array_from_iter(disabled_tools.iter().cloned());
        }
        if let Some(scopes) = &config.scopes
            && !scopes.is_empty()
        {
            entry["scopes"] = array_from_iter(scopes.iter().cloned());
        }

        entry
    }
@@ -269,6 +277,10 @@ impl ConfigDocument {
                );
                mutated
            }),
            ConfigEdit::SetModelPersonality { personality } => Ok(self.write_profile_value(
                &["model_personality"],
                personality.map(|personality| value(personality.to_string())),
            )),
            ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged) => Ok(self.write_value(
                Scope::Global,
                &[Notice::TABLE_KEY, "hide_full_access_warning"],
@@ -712,6 +724,12 @@ impl ConfigEditsBuilder {
        self
    }

    pub fn set_model_personality(mut self, personality: Option<Personality>) -> Self {
        self.edits
            .push(ConfigEdit::SetModelPersonality { personality });
        self
    }

    pub fn set_hide_full_access_warning(mut self, acknowledged: bool) -> Self {
        self.edits
            .push(ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged));
@@ -1360,6 +1378,7 @@ gpt-5 = "gpt-5.1"
                tool_timeout_sec: None,
                enabled_tools: Some(vec!["one".to_string(), "two".to_string()]),
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -1382,6 +1401,7 @@ gpt-5 = "gpt-5.1"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: Some(vec!["forbidden".to_string()]),
                scopes: None,
            },
        );

@@ -1447,6 +1467,7 @@ foo = { command = "cmd" }
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -1491,6 +1512,7 @@ foo = { command = "cmd" } # keep me
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -1534,6 +1556,7 @@ foo = { command = "cmd", args = ["--flag"] } # keep me
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -1578,6 +1601,7 @@ foo = { command = "cmd" }
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -38,6 +38,7 @@ use crate::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use crate::project_doc::LOCAL_PROJECT_DOC_FILENAME;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use crate::windows_sandbox::WindowsSandboxLevelExt;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_protocol::config_types::AltScreenMode;
@@ -49,6 +50,7 @@ use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::config_types::Verbosity;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::openai_models::ReasoningEffort;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -89,7 +91,7 @@ pub use codex_git::GhostSnapshotConfig;
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB
pub(crate) const DEFAULT_AGENT_MAX_THREADS: Option<usize> = None;
pub(crate) const DEFAULT_AGENT_MAX_THREADS: Option<usize> = Some(6);

pub const CONFIG_TOML_FILE: &str = "config.toml";

@@ -261,6 +263,9 @@ pub struct Config {
    /// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
    pub history: History,

    /// When true, session is not persisted on disk. Default to `false`
    pub ephemeral: bool,

    /// Optional URI-based file opener. If set, citations to files in the model
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: UriBasedFileOpener,
@@ -301,8 +306,8 @@ pub struct Config {
    /// model info's default preference.
    pub include_apply_patch_tool: bool,

    /// Explicit or feature-derived web search mode.
    pub web_search_mode: Option<WebSearchMode>,
    /// Explicit or feature-derived web search mode. Defaults to cached.
    pub web_search_mode: WebSearchMode,

    /// If set to `true`, used only the experimental unified exec tool.
    pub use_experimental_unified_exec_tool: bool,
@@ -313,6 +318,9 @@ pub struct Config {
    /// Centralized feature flags; source of truth for feature gating.
    pub features: Features,

    /// When `true`, suppress warnings about unstable (under development) features.
    pub suppress_unstable_features_warning: bool,

    /// The active profile name used to derive this `Config` (if any).
    pub active_profile: Option<String>,

@@ -410,9 +418,21 @@ impl ConfigBuilder {
        // relative paths to absolute paths based on the parent folder of the
        // respective config file, so we should be safe to deserialize without
        // AbsolutePathBufGuard here.
        let config_toml: ConfigToml = merged_toml
            .try_into()
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?;
        let config_toml: ConfigToml = match merged_toml.try_into() {
            Ok(config_toml) => config_toml,
            Err(err) => {
                if let Some(config_error) =
                    crate::config_loader::first_layer_config_error(&config_layer_stack).await
                {
                    return Err(crate::config_loader::io_error_from_config_error(
                        std::io::ErrorKind::InvalidData,
                        config_error,
                        Some(err),
                    ));
                }
                return Err(std::io::Error::new(std::io::ErrorKind::InvalidData, err));
            }
        };
        Config::load_config_with_layer_stack(
            config_toml,
            harness_overrides,
@@ -891,6 +911,9 @@ pub struct ConfigToml {
    #[schemars(schema_with = "crate::config::schema::features_schema")]
    pub features: Option<FeaturesToml>,

    /// Suppress warnings about unstable (under development) features.
    pub suppress_unstable_features_warning: Option<bool>,

    /// Settings for ghost snapshots (used for undo).
    #[serde(default)]
    pub ghost_snapshot: Option<GhostSnapshotToml>,
@@ -1035,6 +1058,7 @@ impl ConfigToml {
        &self,
        sandbox_mode_override: Option<SandboxMode>,
        profile_sandbox_mode: Option<SandboxMode>,
        windows_sandbox_level: WindowsSandboxLevel,
        resolved_cwd: &Path,
    ) -> SandboxPolicyResolution {
        let resolved_sandbox_mode = sandbox_mode_override
@@ -1073,7 +1097,7 @@ impl ConfigToml {
        if cfg!(target_os = "windows")
            && matches!(resolved_sandbox_mode, SandboxMode::WorkspaceWrite)
            // If the experimental Windows sandbox is enabled, do not force a downgrade.
            && crate::safety::get_platform_sandbox().is_none()
            && windows_sandbox_level == codex_protocol::config_types::WindowsSandboxLevel::Disabled
        {
            sandbox_policy = SandboxPolicy::new_read_only_policy();
            forced_auto_mode_downgraded_on_windows = true;
@@ -1141,10 +1165,12 @@ pub struct ConfigOverrides {
    pub codex_linux_sandbox_exe: Option<PathBuf>,
    pub base_instructions: Option<String>,
    pub developer_instructions: Option<String>,
    pub model_personality: Option<Personality>,
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
    pub show_raw_agent_reasoning: Option<bool>,
    pub tools_web_search_request: Option<bool>,
    pub ephemeral: Option<bool>,
    /// Additional directories that should be treated as writable roots for this session.
    pub additional_writable_roots: Vec<PathBuf>,
}
@@ -1177,22 +1203,27 @@ pub fn resolve_oss_provider(
    }
}

/// Resolve the web search mode from explicit config and feature flags.
/// Resolve the web search mode from explicit config, feature flags, and sandbox policy.
/// Live search is auto-enabled when sandbox policy is `DangerFullAccess`
fn resolve_web_search_mode(
    config_toml: &ConfigToml,
    config_profile: &ConfigProfile,
    features: &Features,
) -> Option<WebSearchMode> {
    sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
    if let Some(mode) = config_profile.web_search.or(config_toml.web_search) {
        return Some(mode);
        return mode;
    }
    if features.enabled(Feature::WebSearchCached) {
        return Some(WebSearchMode::Cached);
        return WebSearchMode::Cached;
    }
    if features.enabled(Feature::WebSearchRequest) {
        return Some(WebSearchMode::Live);
        return WebSearchMode::Live;
    }
    None
    if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
        return WebSearchMode::Live;
    }
    WebSearchMode::Cached
}

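The precedence the new signature encodes, explicit config, then feature flags, then sandbox policy, then the cached default, can be checked in isolation; a minimal sketch with local stand-ins for the codex-core types:

#[derive(Debug, PartialEq, Clone, Copy)]
enum Mode { Disabled, Cached, Live }

fn resolve(explicit: Option<Mode>, cached_flag: bool, live_flag: bool, full_access: bool) -> Mode {
    if let Some(mode) = explicit { return mode; } // explicit config always wins
    if cached_flag { return Mode::Cached; }
    if live_flag { return Mode::Live; }
    if full_access { return Mode::Live; }         // DangerFullAccess upgrades to live
    Mode::Cached                                  // new fallback default
}

fn main() {
    assert_eq!(resolve(None, false, false, true), Mode::Live);
    assert_eq!(resolve(Some(Mode::Cached), false, false, true), Mode::Cached);
    assert_eq!(resolve(None, false, false, false), Mode::Cached);
}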
impl Config {
@@ -1228,10 +1259,12 @@ impl Config {
            codex_linux_sandbox_exe,
            base_instructions,
            developer_instructions,
            model_personality,
            compact_prompt,
            include_apply_patch_tool: include_apply_patch_tool_override,
            show_raw_agent_reasoning,
            tools_web_search_request: override_tools_web_search_request,
            ephemeral,
            additional_writable_roots,
        } = overrides;

@@ -1259,17 +1292,6 @@ impl Config {
        };

        let features = Features::from_config(&cfg, &config_profile, feature_overrides);
        let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
        #[cfg(target_os = "windows")]
        {
            // Base flag controls sandbox on/off; elevated only applies when base is enabled.
            let sandbox_enabled = features.enabled(Feature::WindowsSandbox);
            crate::safety::set_windows_sandbox_enabled(sandbox_enabled);
            let elevated_enabled =
                sandbox_enabled && features.enabled(Feature::WindowsSandboxElevated);
            crate::safety::set_windows_elevated_sandbox_enabled(elevated_enabled);
        }

        let resolved_cwd = {
            use std::env;

@@ -1296,10 +1318,16 @@ impl Config {
            .get_active_project(&resolved_cwd)
            .unwrap_or(ProjectConfig { trust_level: None });

        let windows_sandbox_level = WindowsSandboxLevel::from_features(&features);
        let SandboxPolicyResolution {
            policy: mut sandbox_policy,
            forced_auto_mode_downgraded_on_windows,
        } = cfg.derive_sandbox_policy(sandbox_mode, config_profile.sandbox_mode, &resolved_cwd);
        } = cfg.derive_sandbox_policy(
            sandbox_mode,
            config_profile.sandbox_mode,
            windows_sandbox_level,
            &resolved_cwd,
        );
        if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = &mut sandbox_policy {
            for path in additional_writable_roots {
                if !writable_roots.iter().any(|existing| existing == &path) {
@@ -1319,6 +1347,8 @@ impl Config {
                AskForApproval::default()
            }
        });
        let web_search_mode =
            resolve_web_search_mode(&cfg, &config_profile, &features, &sandbox_policy);
        // TODO(dylan): We should be able to leverage ConfigLayerStack so that
        // we can reliably check this at every config level.
        let did_user_set_custom_approval_policy_or_sandbox_mode = approval_policy_override
@@ -1433,6 +1463,9 @@ impl Config {
            Self::try_read_non_empty_file(model_instructions_path, "model instructions file")?;
        let base_instructions = base_instructions.or(file_base_instructions);
        let developer_instructions = developer_instructions.or(cfg.developer_instructions);
        let model_personality = model_personality
            .or(config_profile.model_personality)
            .or(cfg.model_personality);

        let experimental_compact_prompt_path = config_profile
            .experimental_compact_prompt_file
@@ -1482,7 +1515,7 @@ impl Config {
            notify: cfg.notify,
            user_instructions,
            base_instructions,
            model_personality: config_profile.model_personality.or(cfg.model_personality),
            model_personality,
            developer_instructions,
            compact_prompt,
            // The config.toml omits "_mode" because it's a config file. However, "_mode"
@@ -1513,6 +1546,7 @@ impl Config {
            codex_home,
            config_layer_stack,
            history,
            ephemeral: ephemeral.unwrap_or_default(),
            file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode),
            codex_linux_sandbox_exe,

@@ -1541,6 +1575,9 @@ impl Config {
            use_experimental_unified_exec_tool,
            ghost_snapshot,
            features,
            suppress_unstable_features_warning: cfg
                .suppress_unstable_features_warning
                .unwrap_or(false),
            active_profile: active_profile_name,
            active_project,
            windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
@@ -1635,7 +1672,6 @@ impl Config {
    }

    pub fn set_windows_sandbox_globally(&mut self, value: bool) {
        crate::safety::set_windows_sandbox_enabled(value);
        if value {
            self.features.enable(Feature::WindowsSandbox);
        } else {
@@ -1645,7 +1681,6 @@ impl Config {
    }

    pub fn set_windows_elevated_sandbox_globally(&mut self, value: bool) {
        crate::safety::set_windows_elevated_sandbox_enabled(value);
        if value {
            self.features.enable(Feature::WindowsSandboxElevated);
        } else {
@@ -1749,6 +1784,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        }
    }

@@ -1766,6 +1802,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        }
    }

@@ -1837,6 +1874,7 @@ network_access = false # This should be ignored.
        let resolution = sandbox_full_access_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        assert_eq!(
@@ -1860,6 +1898,7 @@ network_access = true # This should be ignored.
        let resolution = sandbox_read_only_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        assert_eq!(
@@ -1891,6 +1930,7 @@ exclude_slash_tmp = true
        let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        if cfg!(target_os = "windows") {
@@ -1939,6 +1979,7 @@ trust_level = "trusted"
        let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        if cfg!(target_os = "windows") {
@@ -2230,12 +2271,15 @@ trust_level = "trusted"
    }

    #[test]
    fn web_search_mode_uses_none_if_unset() {
    fn web_search_mode_defaults_to_cached_if_unset() {
        let cfg = ConfigToml::default();
        let profile = ConfigProfile::default();
        let features = Features::with_defaults();

        assert_eq!(resolve_web_search_mode(&cfg, &profile, &features), None);
        assert_eq!(
            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
            WebSearchMode::Cached
        );
    }

    #[test]
@@ -2249,8 +2293,8 @@ trust_level = "trusted"
        features.enable(Feature::WebSearchCached);

        assert_eq!(
            resolve_web_search_mode(&cfg, &profile, &features),
            Some(WebSearchMode::Live)
            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
            WebSearchMode::Live
        );
    }

@@ -2265,11 +2309,50 @@ trust_level = "trusted"
        features.enable(Feature::WebSearchRequest);

        assert_eq!(
            resolve_web_search_mode(&cfg, &profile, &features),
            Some(WebSearchMode::Disabled)
            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
            WebSearchMode::Disabled
        );
    }

    #[test]
    fn danger_full_access_defaults_web_search_live_when_unset() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let cfg = ConfigToml {
            sandbox_mode: Some(SandboxMode::DangerFullAccess),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert_eq!(config.web_search_mode, WebSearchMode::Live);

        Ok(())
    }

    #[test]
    fn explicit_web_search_mode_wins_in_danger_full_access() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let cfg = ConfigToml {
            sandbox_mode: Some(SandboxMode::DangerFullAccess),
            web_search: Some(WebSearchMode::Cached),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert_eq!(config.web_search_mode, WebSearchMode::Cached);

        Ok(())
    }

    #[test]
    fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
@@ -2591,6 +2674,7 @@ profile = "project"
                tool_timeout_sec: Some(Duration::from_secs(5)),
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );

@@ -2745,6 +2829,7 @@ bearer_token = "secret"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -2814,6 +2899,7 @@ ZIG_VAR = "3"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -2863,6 +2949,7 @@ ZIG_VAR = "3"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -2910,6 +2997,7 @@ ZIG_VAR = "3"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -2973,6 +3061,7 @@ startup_timeout_sec = 2.0
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);
        apply_blocking(
@@ -3048,6 +3137,7 @@ X-Auth = "DOCS_AUTH"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -3076,6 +3166,7 @@ X-Auth = "DOCS_AUTH"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        );
        apply_blocking(
@@ -3142,6 +3233,7 @@ url = "https://example.com/mcp"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        ),
        (
@@ -3160,6 +3252,7 @@ url = "https://example.com/mcp"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        ),
    ]);
@@ -3241,6 +3334,7 @@ url = "https://example.com/mcp"
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

@@ -3284,6 +3378,7 @@ url = "https://example.com/mcp"
                tool_timeout_sec: None,
                enabled_tools: Some(vec!["allowed".to_string()]),
                disabled_tools: Some(vec!["blocked".to_string()]),
                scopes: None,
            },
        )]);

@@ -3670,10 +3765,11 @@ model_verbosity = "high"
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            project_doc_fallback_filenames: Vec::new(),
            tool_output_token_limit: None,
            agent_max_threads: None,
            agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
            codex_home: fixture.codex_home(),
            config_layer_stack: Default::default(),
            history: History::default(),
            ephemeral: false,
            file_opener: UriBasedFileOpener::VsCode,
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
@@ -3690,10 +3786,11 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: None,
            web_search_mode: WebSearchMode::Cached,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("o3".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3751,10 +3848,11 @@ model_verbosity = "high"
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            project_doc_fallback_filenames: Vec::new(),
            tool_output_token_limit: None,
            agent_max_threads: None,
            agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
            codex_home: fixture.codex_home(),
            config_layer_stack: Default::default(),
            history: History::default(),
            ephemeral: false,
            file_opener: UriBasedFileOpener::VsCode,
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
@@ -3771,10 +3869,11 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: None,
            web_search_mode: WebSearchMode::Cached,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("gpt3".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3847,10 +3946,11 @@ model_verbosity = "high"
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            project_doc_fallback_filenames: Vec::new(),
            tool_output_token_limit: None,
            agent_max_threads: None,
            agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
            codex_home: fixture.codex_home(),
            config_layer_stack: Default::default(),
            history: History::default(),
            ephemeral: false,
            file_opener: UriBasedFileOpener::VsCode,
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
@@ -3867,10 +3967,11 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: None,
            web_search_mode: WebSearchMode::Cached,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("zdr".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3929,10 +4030,11 @@ model_verbosity = "high"
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            project_doc_fallback_filenames: Vec::new(),
            tool_output_token_limit: None,
            agent_max_threads: None,
            agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
            codex_home: fixture.codex_home(),
            config_layer_stack: Default::default(),
            history: History::default(),
            ephemeral: false,
            file_opener: UriBasedFileOpener::VsCode,
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
@@ -3949,10 +4051,11 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: None,
            web_search_mode: WebSearchMode::Cached,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("gpt5".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -4133,7 +4236,12 @@ trust_level = "untrusted"
        let cfg = toml::from_str::<ConfigToml>(config_with_untrusted)
            .expect("TOML deserialization should succeed");

        let resolution = cfg.derive_sandbox_policy(None, None, &PathBuf::from("/tmp/test"));
        let resolution = cfg.derive_sandbox_policy(
            None,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );

        // Verify that untrusted projects get WorkspaceWrite (or ReadOnly on Windows due to downgrade)
        if cfg!(target_os = "windows") {
@@ -4,6 +4,7 @@ use crate::config::edit::ConfigEdit;
use crate::config::edit::ConfigEditsBuilder;
use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::config_loader::ConfigRequirementsToml;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::load_config_layers_state;
@@ -171,7 +172,7 @@ impl ConfigService {
    origins: layers.origins(),
    layers: params.include_layers.then(|| {
        layers
            .layers_high_to_low()
            .get_layers(ConfigLayerStackOrdering::HighestPrecedenceFirst, true)
            .iter()
            .map(|layer| layer.as_layer())
            .collect()

@@ -73,6 +73,10 @@ pub struct McpServerConfig {
    /// Explicit deny-list of tools. These tools will be removed after applying `enabled_tools`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disabled_tools: Option<Vec<String>>,

    /// Optional OAuth scopes to request during MCP login.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scopes: Option<Vec<String>>,
}

// Raw MCP config shape used for deserialization and JSON Schema generation.
@@ -113,6 +117,8 @@ pub(crate) struct RawMcpServerConfig {
    pub enabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub disabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub scopes: Option<Vec<String>>,
}

impl<'de> Deserialize<'de> for McpServerConfig {
@@ -134,6 +140,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {
        let enabled = raw.enabled.unwrap_or_else(default_enabled);
        let enabled_tools = raw.enabled_tools.clone();
        let disabled_tools = raw.disabled_tools.clone();
        let scopes = raw.scopes.clone();

        fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
        where
@@ -188,6 +195,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            disabled_reason: None,
            enabled_tools,
            disabled_tools,
            scopes,
        })
    }
}
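As a quick illustration of the new `scopes` field, a minimal sketch (not part of the diff; the `url` key and the exact transport validation are assumptions) of deserializing one MCP server entry through the custom impl above:

// Hypothetical TOML for a single MCP server; field names mirror RawMcpServerConfig.
let toml_src = r#"
    url = "https://example.com/mcp"
    enabled_tools = ["search"]
    disabled_tools = ["delete_file"]
    scopes = ["read:files", "write:files"]
"#;
let server: McpServerConfig = toml::from_str(toml_src).expect("valid MCP server config");
assert_eq!(
    server.scopes,
    Some(vec!["read:files".to_string(), "write:files".to_string()])
);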
@@ -16,7 +16,7 @@ Exported from `codex_core::config_loader`:
- `origins() -> HashMap<String, ConfigLayerMetadata>`
- `layers_high_to_low() -> Vec<ConfigLayer>`
- `with_user_config(user_config) -> ConfigLayerStack`
- `ConfigLayerEntry` (one layer’s `{name, config, version}`; `name` carries source metadata)
- `ConfigLayerEntry` (one layer’s `{name, config, version, disabled_reason}`; `name` carries source metadata)
- `LoaderOverrides` (test/override hooks for managed config sources)
- `merge_toml_values(base, overlay)` (public helper used elsewhere)

@@ -29,7 +29,9 @@ Precedence is **top overrides bottom**:
3. **Session flags** (CLI overrides, applied as dotted-path TOML writes)
4. **User** config (`config.toml`)

This is what `ConfigLayerStack::effective_config()` implements.
Layers with a `disabled_reason` are still surfaced for UI, but are ignored when
computing the effective config and origins metadata. This is what
`ConfigLayerStack::effective_config()` implements.

## Typical usage
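A minimal sketch of the precedence rule described above, using the exported `merge_toml_values` helper (the `stack` value is assumed to be an already-loaded `ConfigLayerStack`): layers are folded lowest-precedence-first so higher layers overwrite, and disabled layers are excluded.

let mut effective = toml::Value::Table(toml::map::Map::new());
for layer in stack.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false) {
    // Later (higher-precedence) layers overwrite keys written by earlier ones.
    merge_toml_values(&mut effective, &layer.config);
}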
codex-rs/core/src/config_loader/diagnostics.rs (Normal file, 388 lines)
@@ -0,0 +1,388 @@
//! Helpers for mapping config parse/validation failures to file locations and
//! rendering them in a user-friendly way.

use crate::config::CONFIG_TOML_FILE;
use crate::config::ConfigToml;
use codex_app_server_protocol::ConfigLayerSource;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use serde_path_to_error::Path as SerdePath;
use serde_path_to_error::Segment as SerdeSegment;
use std::fmt;
use std::fmt::Write;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use toml_edit::Document;
use toml_edit::Item;
use toml_edit::Table;
use toml_edit::Value;

use super::ConfigLayerEntry;
use super::ConfigLayerStack;
use super::ConfigLayerStackOrdering;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TextPosition {
    pub line: usize,
    pub column: usize,
}

/// Text range in 1-based line/column coordinates.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TextRange {
    pub start: TextPosition,
    pub end: TextPosition,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConfigError {
    pub path: PathBuf,
    pub range: TextRange,
    pub message: String,
}

impl ConfigError {
    pub fn new(path: PathBuf, range: TextRange, message: impl Into<String>) -> Self {
        Self {
            path,
            range,
            message: message.into(),
        }
    }
}

#[derive(Debug)]
pub struct ConfigLoadError {
    error: ConfigError,
    source: Option<toml::de::Error>,
}

impl ConfigLoadError {
    pub fn new(error: ConfigError, source: Option<toml::de::Error>) -> Self {
        Self { error, source }
    }

    pub fn config_error(&self) -> &ConfigError {
        &self.error
    }
}

impl fmt::Display for ConfigLoadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{}:{}:{}: {}",
            self.error.path.display(),
            self.error.range.start.line,
            self.error.range.start.column,
            self.error.message
        )
    }
}

impl std::error::Error for ConfigLoadError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.source
            .as_ref()
            .map(|err| err as &dyn std::error::Error)
    }
}

pub(crate) fn io_error_from_config_error(
    kind: io::ErrorKind,
    error: ConfigError,
    source: Option<toml::de::Error>,
) -> io::Error {
    io::Error::new(kind, ConfigLoadError::new(error, source))
}

pub(crate) fn config_error_from_toml(
    path: impl AsRef<Path>,
    contents: &str,
    err: toml::de::Error,
) -> ConfigError {
    let range = err
        .span()
        .map(|span| text_range_from_span(contents, span))
        .unwrap_or_else(default_range);
    ConfigError::new(path.as_ref().to_path_buf(), range, err.message())
}

pub(crate) fn config_error_from_config_toml(
    path: impl AsRef<Path>,
    contents: &str,
) -> Option<ConfigError> {
    let deserializer = match toml::de::Deserializer::parse(contents) {
        Ok(deserializer) => deserializer,
        Err(err) => return Some(config_error_from_toml(path, contents, err)),
    };

    let result: Result<ConfigToml, _> = serde_path_to_error::deserialize(deserializer);
    match result {
        Ok(_) => None,
        Err(err) => {
            let path_hint = err.path().clone();
            let toml_err: toml::de::Error = err.into_inner();
            let range = span_for_config_path(contents, &path_hint)
                .or_else(|| toml_err.span())
                .map(|span| text_range_from_span(contents, span))
                .unwrap_or_else(default_range);
            Some(ConfigError::new(
                path.as_ref().to_path_buf(),
                range,
                toml_err.message(),
            ))
        }
    }
}

pub(crate) async fn first_layer_config_error(layers: &ConfigLayerStack) -> Option<ConfigError> {
    // When the merged config fails schema validation, we surface the first concrete
    // per-file error to point users at a specific file and range rather than an
    // opaque merged-layer failure.
    first_layer_config_error_for_entries(
        layers.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false),
    )
    .await
}

pub(crate) async fn first_layer_config_error_from_entries(
    layers: &[ConfigLayerEntry],
) -> Option<ConfigError> {
    first_layer_config_error_for_entries(layers.iter()).await
}

async fn first_layer_config_error_for_entries<'a, I>(layers: I) -> Option<ConfigError>
where
    I: IntoIterator<Item = &'a ConfigLayerEntry>,
{
    for layer in layers {
        let Some(path) = config_path_for_layer(layer) else {
            continue;
        };
        let contents = match tokio::fs::read_to_string(&path).await {
            Ok(contents) => contents,
            Err(err) if err.kind() == io::ErrorKind::NotFound => continue,
            Err(err) => {
                tracing::debug!("Failed to read config file {}: {err}", path.display());
                continue;
            }
        };

        let Some(parent) = path.parent() else {
            tracing::debug!("Config file {} has no parent directory", path.display());
            continue;
        };
        let _guard = AbsolutePathBufGuard::new(parent);
        if let Some(error) = config_error_from_config_toml(&path, &contents) {
            return Some(error);
        }
    }

    None
}

fn config_path_for_layer(layer: &ConfigLayerEntry) -> Option<PathBuf> {
    match &layer.name {
        ConfigLayerSource::System { file } => Some(file.to_path_buf()),
        ConfigLayerSource::User { file } => Some(file.to_path_buf()),
        ConfigLayerSource::Project { dot_codex_folder } => {
            Some(dot_codex_folder.as_path().join(CONFIG_TOML_FILE))
        }
        ConfigLayerSource::LegacyManagedConfigTomlFromFile { file } => Some(file.to_path_buf()),
        ConfigLayerSource::Mdm { .. }
        | ConfigLayerSource::SessionFlags
        | ConfigLayerSource::LegacyManagedConfigTomlFromMdm => None,
    }
}

fn text_range_from_span(contents: &str, span: std::ops::Range<usize>) -> TextRange {
    let start = position_for_offset(contents, span.start);
    let end_index = if span.end > span.start {
        span.end - 1
    } else {
        span.end
    };
    let end = position_for_offset(contents, end_index);
    TextRange { start, end }
}

pub fn format_config_error(error: &ConfigError, contents: &str) -> String {
    let mut output = String::new();
    let start = error.range.start;
    let _ = writeln!(
        output,
        "{}:{}:{}: {}",
        error.path.display(),
        start.line,
        start.column,
        error.message
    );

    let line_index = start.line.saturating_sub(1);
    let line = match contents.lines().nth(line_index) {
        Some(line) => line.trim_end_matches('\r'),
        None => return output.trim_end().to_string(),
    };

    let line_number = start.line;
    let gutter = line_number.to_string().len();
    let _ = writeln!(output, "{:width$} |", "", width = gutter);
    let _ = writeln!(output, "{line_number:>gutter$} | {line}");

    let highlight_len = if error.range.end.line == error.range.start.line
        && error.range.end.column >= error.range.start.column
    {
        error.range.end.column - error.range.start.column + 1
    } else {
        1
    };
    let spaces = " ".repeat(start.column.saturating_sub(1));
    let carets = "^".repeat(highlight_len.max(1));
    let _ = writeln!(output, "{:width$} | {spaces}{carets}", "", width = gutter);
    output.trim_end().to_string()
}

pub fn format_config_error_with_source(error: &ConfigError) -> String {
    match std::fs::read_to_string(&error.path) {
        Ok(contents) => format_config_error(error, &contents),
        Err(_) => format_config_error(error, ""),
    }
}

fn position_for_offset(contents: &str, index: usize) -> TextPosition {
    let bytes = contents.as_bytes();
    if bytes.is_empty() {
        return TextPosition { line: 1, column: 1 };
    }

    let safe_index = index.min(bytes.len().saturating_sub(1));
    let column_offset = index.saturating_sub(safe_index);
    let index = safe_index;

    let line_start = bytes[..index]
        .iter()
        .rposition(|byte| *byte == b'\n')
        .map(|pos| pos + 1)
        .unwrap_or(0);
    let line = bytes[..line_start]
        .iter()
        .filter(|byte| **byte == b'\n')
        .count();

    let column = std::str::from_utf8(&bytes[line_start..=index])
        .map(|slice| slice.chars().count().saturating_sub(1))
        .unwrap_or_else(|_| index - line_start);
    let column = column + column_offset;

    TextPosition {
        line: line + 1,
        column: column + 1,
    }
}

fn default_range() -> TextRange {
    let position = TextPosition { line: 1, column: 1 };
    TextRange {
        start: position,
        end: position,
    }
}

enum TomlNode<'a> {
    Item(&'a Item),
    Table(&'a Table),
    Value(&'a Value),
}

fn span_for_path(contents: &str, path: &SerdePath) -> Option<std::ops::Range<usize>> {
    let doc = contents.parse::<Document<String>>().ok()?;
    let node = node_for_path(doc.as_item(), path)?;
    match node {
        TomlNode::Item(item) => item.span(),
        TomlNode::Table(table) => table.span(),
        TomlNode::Value(value) => value.span(),
    }
}

fn span_for_config_path(contents: &str, path: &SerdePath) -> Option<std::ops::Range<usize>> {
    if is_features_table_path(path)
        && let Some(span) = span_for_features_value(contents)
    {
        return Some(span);
    }
    span_for_path(contents, path)
}

fn is_features_table_path(path: &SerdePath) -> bool {
    let mut segments = path.iter();
    matches!(segments.next(), Some(SerdeSegment::Map { key }) if key == "features")
        && segments.next().is_none()
}

fn span_for_features_value(contents: &str) -> Option<std::ops::Range<usize>> {
    let doc = contents.parse::<Document<String>>().ok()?;
    let root = doc.as_item().as_table_like()?;
    let features_item = root.get("features")?;
    let features_table = features_item.as_table_like()?;
    for (_, item) in features_table.iter() {
        match item {
            Item::Value(Value::Boolean(_)) => continue,
            Item::Value(value) => return value.span(),
            Item::Table(table) => return table.span(),
            Item::ArrayOfTables(array) => return array.span(),
            Item::None => continue,
        }
    }
    None
}

fn node_for_path<'a>(item: &'a Item, path: &SerdePath) -> Option<TomlNode<'a>> {
    let segments: Vec<_> = path.iter().cloned().collect();
    let mut node = TomlNode::Item(item);
    let mut index = 0;
    while index < segments.len() {
        match &segments[index] {
            SerdeSegment::Map { key } | SerdeSegment::Enum { variant: key } => {
                if let Some(next) = map_child(&node, key) {
                    node = next;
                    index += 1;
                    continue;
                }

                if index + 1 < segments.len() {
                    index += 1;
                    continue;
                }
                return None;
            }
            SerdeSegment::Seq { index: seq_index } => {
                node = seq_child(&node, *seq_index)?;
                index += 1;
            }
            SerdeSegment::Unknown => return None,
        }
    }
    Some(node)
}

fn map_child<'a>(node: &TomlNode<'a>, key: &str) -> Option<TomlNode<'a>> {
    match node {
        TomlNode::Item(item) => {
            let table = item.as_table_like()?;
            table.get(key).map(TomlNode::Item)
        }
        TomlNode::Table(table) => table.get(key).map(TomlNode::Item),
        TomlNode::Value(Value::InlineTable(table)) => table.get(key).map(TomlNode::Value),
        _ => None,
    }
}

fn seq_child<'a>(node: &TomlNode<'a>, index: usize) -> Option<TomlNode<'a>> {
    match node {
        TomlNode::Item(Item::Value(Value::Array(array))) => array.get(index).map(TomlNode::Value),
        TomlNode::Item(Item::ArrayOfTables(array)) => array.get(index).map(TomlNode::Table),
        TomlNode::Value(Value::Array(array)) => array.get(index).map(TomlNode::Value),
        _ => None,
    }
}
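To make the rendering concrete, a sketch (path and values hypothetical) of what `format_config_error` above produces for a one-character span:

let error = ConfigError::new(
    PathBuf::from("/home/user/.codex/config.toml"),
    TextRange {
        start: TextPosition { line: 2, column: 11 },
        end: TextPosition { line: 2, column: 11 },
    },
    "invalid array",
);
let contents = "model = \"gpt-4\"\ninvalid = [";
println!("{}", format_config_error(&error, contents));
// /home/user/.codex/config.toml:2:11: invalid array
//   |
// 2 | invalid = [
//   |           ^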
@@ -1,4 +1,6 @@
use super::LoaderOverrides;
use super::diagnostics::config_error_from_toml;
use super::diagnostics::io_error_from_config_error;
#[cfg(target_os = "macos")]
use super::macos::load_managed_admin_config_layer;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -75,7 +77,12 @@ pub(super) async fn read_config_from_path(
        Ok(value) => Ok(Some(value)),
        Err(err) => {
            tracing::error!("Failed to parse {}: {err}", path.as_ref().display());
            Err(io::Error::new(io::ErrorKind::InvalidData, err))
            let config_error = config_error_from_toml(path.as_ref(), &contents, err.clone());
            Err(io_error_from_config_error(
                io::ErrorKind::InvalidData,
                config_error,
                Some(err),
            ))
        }
    },
    Err(err) if err.kind() == io::ErrorKind::NotFound => {
@@ -1,4 +1,5 @@
mod config_requirements;
mod diagnostics;
mod fingerprint;
mod layer_io;
#[cfg(target_os = "macos")]
@@ -34,6 +35,16 @@ pub use config_requirements::McpServerRequirement;
pub use config_requirements::RequirementSource;
pub use config_requirements::SandboxModeRequirement;
pub use config_requirements::Sourced;
pub use diagnostics::ConfigError;
pub use diagnostics::ConfigLoadError;
pub use diagnostics::TextPosition;
pub use diagnostics::TextRange;
pub(crate) use diagnostics::config_error_from_toml;
pub(crate) use diagnostics::first_layer_config_error;
pub(crate) use diagnostics::first_layer_config_error_from_entries;
pub use diagnostics::format_config_error;
pub use diagnostics::format_config_error_with_source;
pub(crate) use diagnostics::io_error_from_config_error;
pub use merge::merge_toml_values;
pub(crate) use overrides::build_cli_overrides_layer;
pub use state::ConfigLayerEntry;
@@ -67,9 +78,9 @@ const DEFAULT_PROJECT_ROOT_MARKERS: &[&str] = &[".git"];
/// - admin: managed preferences (*)
/// - system `/etc/codex/config.toml`
/// - user `${CODEX_HOME}/config.toml`
/// - cwd `${PWD}/config.toml` (only when the directory is trusted)
/// - tree parent directories up to root looking for `./.codex/config.toml` (trusted only)
/// - repo `$(git rev-parse --show-toplevel)/.codex/config.toml` (trusted only)
/// - cwd `${PWD}/config.toml` (loaded but disabled when the directory is untrusted)
/// - tree parent directories up to root looking for `./.codex/config.toml` (loaded but disabled when untrusted)
/// - repo `$(git rev-parse --show-toplevel)/.codex/config.toml` (loaded but disabled when untrusted)
/// - runtime e.g., --config flags, model selector in UI
///
/// (*) Only available on macOS via managed device profiles.
@@ -171,14 +182,51 @@ pub async fn load_config_layers_state(
        merge_toml_values(&mut merged_so_far, cli_overrides_layer);
    }

    let project_root_markers = project_root_markers_from_config(&merged_so_far)?
        .unwrap_or_else(default_project_root_markers);
    if let Some(project_root) =
        trusted_project_root(&merged_so_far, &cwd, &project_root_markers, codex_home).await?
    let project_root_markers = match project_root_markers_from_config(&merged_so_far) {
        Ok(markers) => markers.unwrap_or_else(default_project_root_markers),
        Err(err) => {
            if let Some(config_error) = first_layer_config_error_from_entries(&layers).await {
                return Err(io_error_from_config_error(
                    io::ErrorKind::InvalidData,
                    config_error,
                    None,
                ));
            }
            return Err(err);
        }
    };
    let project_trust_context = match project_trust_context(
        &merged_so_far,
        &cwd,
        &project_root_markers,
        codex_home,
        &user_file,
    )
    .await
    {
        let project_layers = load_project_layers(&cwd, &project_root).await?;
        layers.extend(project_layers);
    }
        Ok(context) => context,
        Err(err) => {
            let source = err
                .get_ref()
                .and_then(|err| err.downcast_ref::<toml::de::Error>())
                .cloned();
            if let Some(config_error) = first_layer_config_error_from_entries(&layers).await {
                return Err(io_error_from_config_error(
                    io::ErrorKind::InvalidData,
                    config_error,
                    source,
                ));
            }
            return Err(err);
        }
    };
    let project_layers = load_project_layers(
        &cwd,
        &project_trust_context.project_root,
        &project_trust_context,
    )
    .await?;
    layers.extend(project_layers);
}

// Add a layer for runtime overrides from the CLI or UI, if any exist.
@@ -243,11 +291,9 @@ async fn load_config_toml_for_required_layer(
    let toml_file = config_toml.as_ref();
    let toml_value = match tokio::fs::read_to_string(toml_file).await {
        Ok(contents) => {
            let config: TomlValue = toml::from_str(&contents).map_err(|e| {
                io::Error::new(
                    io::ErrorKind::InvalidData,
                    format!("Error parsing config file {}: {e}", toml_file.display()),
                )
            let config: TomlValue = toml::from_str(&contents).map_err(|err| {
                let config_error = config_error_from_toml(toml_file, &contents, err.clone());
                io_error_from_config_error(io::ErrorKind::InvalidData, config_error, Some(err))
            })?;
            let config_parent = toml_file.parent().ok_or_else(|| {
                io::Error::new(
@@ -402,42 +448,127 @@ fn default_project_root_markers() -> Vec<String> {
        .collect()
}

async fn trusted_project_root(
struct ProjectTrustContext {
    project_root: AbsolutePathBuf,
    project_root_key: String,
    repo_root_key: Option<String>,
    projects_trust: std::collections::HashMap<String, TrustLevel>,
    user_config_file: AbsolutePathBuf,
}

struct ProjectTrustDecision {
    trust_level: Option<TrustLevel>,
    trust_key: String,
}

impl ProjectTrustDecision {
    fn is_trusted(&self) -> bool {
        matches!(self.trust_level, Some(TrustLevel::Trusted))
    }
}

impl ProjectTrustContext {
    fn decision_for_dir(&self, dir: &AbsolutePathBuf) -> ProjectTrustDecision {
        let dir_key = dir.as_path().to_string_lossy().to_string();
        if let Some(trust_level) = self.projects_trust.get(&dir_key).copied() {
            return ProjectTrustDecision {
                trust_level: Some(trust_level),
                trust_key: dir_key,
            };
        }

        if let Some(trust_level) = self.projects_trust.get(&self.project_root_key).copied() {
            return ProjectTrustDecision {
                trust_level: Some(trust_level),
                trust_key: self.project_root_key.clone(),
            };
        }

        if let Some(repo_root_key) = self.repo_root_key.as_ref()
            && let Some(trust_level) = self.projects_trust.get(repo_root_key).copied()
        {
            return ProjectTrustDecision {
                trust_level: Some(trust_level),
                trust_key: repo_root_key.clone(),
            };
        }

        ProjectTrustDecision {
            trust_level: None,
            trust_key: self
                .repo_root_key
                .clone()
                .unwrap_or_else(|| self.project_root_key.clone()),
        }
    }

    fn disabled_reason_for_dir(&self, dir: &AbsolutePathBuf) -> Option<String> {
        let decision = self.decision_for_dir(dir);
        if decision.is_trusted() {
            return None;
        }

        let trust_key = decision.trust_key.as_str();
        let user_config_file = self.user_config_file.as_path().display();
        match decision.trust_level {
            Some(TrustLevel::Untrusted) => Some(format!(
                "{trust_key} is marked as untrusted in {user_config_file}. To load config.toml, mark it trusted."
            )),
            _ => Some(format!(
                "To load config.toml, add {trust_key} as a trusted project in {user_config_file}."
            )),
        }
    }
}

fn project_layer_entry(
    trust_context: &ProjectTrustContext,
    dot_codex_folder: &AbsolutePathBuf,
    layer_dir: &AbsolutePathBuf,
    config: TomlValue,
    config_toml_exists: bool,
) -> ConfigLayerEntry {
    let source = ConfigLayerSource::Project {
        dot_codex_folder: dot_codex_folder.clone(),
    };

    if config_toml_exists && let Some(reason) = trust_context.disabled_reason_for_dir(layer_dir) {
        ConfigLayerEntry::new_disabled(source, config, reason)
    } else {
        ConfigLayerEntry::new(source, config)
    }
}

async fn project_trust_context(
    merged_config: &TomlValue,
    cwd: &AbsolutePathBuf,
    project_root_markers: &[String],
    config_base_dir: &Path,
) -> io::Result<Option<AbsolutePathBuf>> {
    user_config_file: &AbsolutePathBuf,
) -> io::Result<ProjectTrustContext> {
    let config_toml = deserialize_config_toml_with_base(merged_config.clone(), config_base_dir)?;

    let project_root = find_project_root(cwd, project_root_markers).await?;
    let projects = config_toml.projects.unwrap_or_default();

    let cwd_key = cwd.as_path().to_string_lossy().to_string();
    let project_root_key = project_root.as_path().to_string_lossy().to_string();
    let repo_root_key = resolve_root_git_project_for_trust(cwd.as_path())
    let repo_root = resolve_root_git_project_for_trust(cwd.as_path());
    let repo_root_key = repo_root
        .as_ref()
        .map(|root| root.to_string_lossy().to_string());

    let trust_level = projects
        .get(&cwd_key)
        .and_then(|project| project.trust_level)
        .or_else(|| {
            projects
                .get(&project_root_key)
                .and_then(|project| project.trust_level)
        })
        .or_else(|| {
            repo_root_key
                .as_ref()
                .and_then(|root| projects.get(root))
                .and_then(|project| project.trust_level)
        });
    let projects_trust = projects
        .into_iter()
        .filter_map(|(key, project)| project.trust_level.map(|trust_level| (key, trust_level)))
        .collect();

    if matches!(trust_level, Some(TrustLevel::Trusted)) {
        Ok(Some(project_root))
    } else {
        Ok(None)
    }
    Ok(ProjectTrustContext {
        project_root,
        project_root_key,
        repo_root_key,
        projects_trust,
        user_config_file: user_config_file.clone(),
    })
}

/// Takes a `toml::Value` parsed from a config.toml file and walks through it,
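The lookup order implemented by `decision_for_dir` above is: the layer's own directory first, then the project root, then the repo root. A simplified standalone sketch of that precedence (keys reduced to plain strings for illustration; not the actual types):

use std::collections::HashMap;

fn trust_for(trust_map: &HashMap<String, TrustLevel>, candidates: &[&str]) -> Option<TrustLevel> {
    // The first key with an explicit entry wins; None means "unknown trust",
    // so the corresponding project layer is loaded but disabled.
    candidates.iter().find_map(|key| trust_map.get(*key).copied())
}

// trust_for(&trust_map, &["/repo/child", "/repo", "/repo"]) yields the first explicit level, or None.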
@@ -527,6 +658,7 @@ async fn find_project_root(
async fn load_project_layers(
    cwd: &AbsolutePathBuf,
    project_root: &AbsolutePathBuf,
    trust_context: &ProjectTrustContext,
) -> io::Result<Vec<ConfigLayerEntry>> {
    let mut dirs = cwd
        .as_path()
@@ -555,46 +687,57 @@ async fn load_project_layers(
            continue;
        }

        let layer_dir = AbsolutePathBuf::from_absolute_path(dir)?;
        let decision = trust_context.decision_for_dir(&layer_dir);
        let dot_codex_abs = AbsolutePathBuf::from_absolute_path(&dot_codex)?;
        let config_file = dot_codex_abs.join(CONFIG_TOML_FILE)?;
        match tokio::fs::read_to_string(&config_file).await {
            Ok(contents) => {
                let config: TomlValue = toml::from_str(&contents).map_err(|e| {
                    io::Error::new(
                        io::ErrorKind::InvalidData,
                        format!(
                            "Error parsing project config file {}: {e}",
                            config_file.as_path().display(),
                        ),
                    )
                })?;
                let config: TomlValue = match toml::from_str(&contents) {
                    Ok(config) => config,
                    Err(e) => {
                        if decision.is_trusted() {
                            let config_file_display = config_file.as_path().display();
                            return Err(io::Error::new(
                                io::ErrorKind::InvalidData,
                                format!(
                                    "Error parsing project config file {config_file_display}: {e}"
                                ),
                            ));
                        }
                        layers.push(project_layer_entry(
                            trust_context,
                            &dot_codex_abs,
                            &layer_dir,
                            TomlValue::Table(toml::map::Map::new()),
                            true,
                        ));
                        continue;
                    }
                };
                let config =
                    resolve_relative_paths_in_config_toml(config, dot_codex_abs.as_path())?;
                layers.push(ConfigLayerEntry::new(
                    ConfigLayerSource::Project {
                        dot_codex_folder: dot_codex_abs,
                    },
                    config,
                ));
                let entry =
                    project_layer_entry(trust_context, &dot_codex_abs, &layer_dir, config, true);
                layers.push(entry);
            }
            Err(err) => {
                if err.kind() == io::ErrorKind::NotFound {
                    // If there is no config.toml file, record an empty entry
                    // for this project layer, as this may still have subfolders
                    // that are significant in the overall ConfigLayerStack.
                    layers.push(ConfigLayerEntry::new(
                        ConfigLayerSource::Project {
                            dot_codex_folder: dot_codex_abs,
                        },
                    layers.push(project_layer_entry(
                        trust_context,
                        &dot_codex_abs,
                        &layer_dir,
                        TomlValue::Table(toml::map::Map::new()),
                        false,
                    ));
                } else {
                    let config_file_display = config_file.as_path().display();
                    return Err(io::Error::new(
                        err.kind(),
                        format!(
                            "Failed to read project config file {}: {err}",
                            config_file.as_path().display(),
                        ),
                        format!("Failed to read project config file {config_file_display}: {err}"),
                    ));
                }
            }
@@ -28,6 +28,7 @@ pub struct ConfigLayerEntry {
    pub name: ConfigLayerSource,
    pub config: TomlValue,
    pub version: String,
    pub disabled_reason: Option<String>,
}

impl ConfigLayerEntry {
@@ -37,9 +38,28 @@ impl ConfigLayerEntry {
            name,
            config,
            version,
            disabled_reason: None,
        }
    }

    pub fn new_disabled(
        name: ConfigLayerSource,
        config: TomlValue,
        disabled_reason: impl Into<String>,
    ) -> Self {
        let version = version_for_toml(&config);
        Self {
            name,
            config,
            version,
            disabled_reason: Some(disabled_reason.into()),
        }
    }

    pub fn is_disabled(&self) -> bool {
        self.disabled_reason.is_some()
    }

    pub fn metadata(&self) -> ConfigLayerMetadata {
        ConfigLayerMetadata {
            name: self.name.clone(),
@@ -52,6 +72,7 @@ impl ConfigLayerEntry {
            name: self.name.clone(),
            version: self.version.clone(),
            config: serde_json::to_value(&self.config).unwrap_or(JsonValue::Null),
            disabled_reason: self.disabled_reason.clone(),
        }
    }
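A short sketch of the new constructor in use, mirroring what `project_layer_entry` does for untrusted directories (the folder value and message text here are hypothetical):

let entry = ConfigLayerEntry::new_disabled(
    ConfigLayerSource::Project { dot_codex_folder },
    TomlValue::Table(toml::map::Map::new()),
    "To load config.toml, add /work/repo as a trusted project in ~/.codex/config.toml.",
);
assert!(entry.is_disabled());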
@@ -172,7 +193,7 @@ impl ConfigLayerStack {

    pub fn effective_config(&self) -> TomlValue {
        let mut merged = TomlValue::Table(toml::map::Map::new());
        for layer in &self.layers {
        for layer in self.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false) {
            merge_toml_values(&mut merged, &layer.config);
        }
        merged
@@ -182,7 +203,7 @@ impl ConfigLayerStack {
        let mut origins = HashMap::new();
        let mut path = Vec::new();

        for layer in &self.layers {
        for layer in self.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false) {
            record_origins(&layer.config, &layer.metadata(), &mut path, &mut origins);
        }

@@ -192,16 +213,25 @@ impl ConfigLayerStack {
    /// Returns the highest-precedence to lowest-precedence layers, so
    /// `ConfigLayerSource::SessionFlags` would be first, if present.
    pub fn layers_high_to_low(&self) -> Vec<&ConfigLayerEntry> {
        self.get_layers(ConfigLayerStackOrdering::HighestPrecedenceFirst)
        self.get_layers(ConfigLayerStackOrdering::HighestPrecedenceFirst, false)
    }

    /// Returns the highest-precedence to lowest-precedence layers, so
    /// `ConfigLayerSource::SessionFlags` would be first, if present.
    pub fn get_layers(&self, ordering: ConfigLayerStackOrdering) -> Vec<&ConfigLayerEntry> {
        match ordering {
            ConfigLayerStackOrdering::HighestPrecedenceFirst => self.layers.iter().rev().collect(),
            ConfigLayerStackOrdering::LowestPrecedenceFirst => self.layers.iter().collect(),
    pub fn get_layers(
        &self,
        ordering: ConfigLayerStackOrdering,
        include_disabled: bool,
    ) -> Vec<&ConfigLayerEntry> {
        let mut layers: Vec<&ConfigLayerEntry> = self
            .layers
            .iter()
            .filter(|layer| include_disabled || !layer.is_disabled())
            .collect();
        if ordering == ConfigLayerStackOrdering::HighestPrecedenceFirst {
            layers.reverse();
        }
        layers
    }
}
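Callers now choose whether disabled layers are visible. A sketch of the two typical call sites (a loaded `stack` is assumed): UI surfaces pass `true` so disabled layers can still be listed, while precedence-sensitive paths keep the filtered default.

// Everything, including disabled layers, for display purposes.
let for_ui = stack.get_layers(ConfigLayerStackOrdering::HighestPrecedenceFirst, true);
// Only the layers that actually contribute to the effective config.
let effective_inputs = stack.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false);
assert!(for_ui.len() >= effective_inputs.len());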
@@ -6,6 +6,7 @@ use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ProjectConfig;
use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLoadError;
use crate::config_loader::ConfigRequirements;
use crate::config_loader::config_requirements::ConfigRequirementsWithSources;
use crate::config_loader::fingerprint::version_for_toml;
@@ -21,6 +22,13 @@ use std::path::Path;
use tempfile::tempdir;
use toml::Value as TomlValue;

fn config_error_from_io(err: &std::io::Error) -> &super::ConfigError {
    err.get_ref()
        .and_then(|err| err.downcast_ref::<ConfigLoadError>())
        .map(ConfigLoadError::config_error)
        .expect("expected ConfigLoadError")
}

async fn make_config_for_test(
    codex_home: &Path,
    project_path: &Path,
@@ -44,6 +52,98 @@ async fn make_config_for_test(
    .await
}
#[tokio::test]
async fn returns_config_error_for_invalid_user_config_toml() {
    let tmp = tempdir().expect("tempdir");
    let contents = "model = \"gpt-4\"\ninvalid = [";
    let config_path = tmp.path().join(CONFIG_TOML_FILE);
    std::fs::write(&config_path, contents).expect("write config");

    let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
    let err = load_config_layers_state(
        tmp.path(),
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
    )
    .await
    .expect_err("expected error");

    let config_error = config_error_from_io(&err);
    let expected_toml_error = toml::from_str::<TomlValue>(contents).expect_err("parse error");
    let expected_config_error =
        super::config_error_from_toml(&config_path, contents, expected_toml_error);
    assert_eq!(config_error, &expected_config_error);
}

#[tokio::test]
async fn returns_config_error_for_invalid_managed_config_toml() {
    let tmp = tempdir().expect("tempdir");
    let managed_path = tmp.path().join("managed_config.toml");
    let contents = "model = \"gpt-4\"\ninvalid = [";
    std::fs::write(&managed_path, contents).expect("write managed config");

    let overrides = LoaderOverrides {
        managed_config_path: Some(managed_path.clone()),
        ..Default::default()
    };

    let cwd = AbsolutePathBuf::try_from(tmp.path()).expect("cwd");
    let err = load_config_layers_state(
        tmp.path(),
        Some(cwd),
        &[] as &[(String, TomlValue)],
        overrides,
    )
    .await
    .expect_err("expected error");

    let config_error = config_error_from_io(&err);
    let expected_toml_error = toml::from_str::<TomlValue>(contents).expect_err("parse error");
    let expected_config_error =
        super::config_error_from_toml(&managed_path, contents, expected_toml_error);
    assert_eq!(config_error, &expected_config_error);
}

#[tokio::test]
async fn returns_config_error_for_schema_error_in_user_config() {
    let tmp = tempdir().expect("tempdir");
    let contents = "model_context_window = \"not_a_number\"";
    let config_path = tmp.path().join(CONFIG_TOML_FILE);
    std::fs::write(&config_path, contents).expect("write config");

    let err = ConfigBuilder::default()
        .codex_home(tmp.path().to_path_buf())
        .fallback_cwd(Some(tmp.path().to_path_buf()))
        .build()
        .await
        .expect_err("expected error");

    let config_error = config_error_from_io(&err);
    let _guard = codex_utils_absolute_path::AbsolutePathBufGuard::new(tmp.path());
    let expected_config_error =
        super::diagnostics::config_error_from_config_toml(&config_path, contents)
            .expect("schema error");
    assert_eq!(config_error, &expected_config_error);
}

#[test]
fn schema_error_points_to_feature_value() {
    let tmp = tempdir().expect("tempdir");
    let contents = "[features]\ncollaboration_modes = \"true\"";
    let config_path = tmp.path().join(CONFIG_TOML_FILE);
    std::fs::write(&config_path, contents).expect("write config");

    let _guard = codex_utils_absolute_path::AbsolutePathBufGuard::new(tmp.path());
    let error = super::diagnostics::config_error_from_config_toml(&config_path, contents)
        .expect("schema error");

    let value_line = contents.lines().nth(1).expect("value line");
    let value_column = value_line.find("\"true\"").expect("value") + 1;
    assert_eq!(error.range.start.line, 2);
    assert_eq!(error.range.start.column, value_column);
}

#[tokio::test]
async fn merges_managed_config_layer_on_top() {
    let tmp = tempdir().expect("tempdir");
@@ -132,6 +232,7 @@ async fn returns_empty_when_all_layers_missing() {
        },
        config: TomlValue::Table(toml::map::Map::new()),
        version: version_for_toml(&TomlValue::Table(toml::map::Map::new())),
        disabled_reason: None,
    },
    user_layer,
);
@@ -546,6 +647,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
        },
        config: TomlValue::Table(toml::map::Map::new()),
        version: version_for_toml(&TomlValue::Table(toml::map::Map::new())),
        disabled_reason: None,
    }],
    project_layers
);
@@ -554,7 +656,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
}
#[tokio::test]
async fn project_layers_skipped_when_untrusted_or_unknown() -> std::io::Result<()> {
async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<()> {
    let tmp = tempdir()?;
    let project_root = tmp.path().join("project");
    let nested = project_root.join("child");
@@ -576,6 +678,13 @@ async fn project_layers_skipped_when_untrusted_or_unknown() -> std::io::Result<(
        None,
    )
    .await?;
    let untrusted_config_path = codex_home_untrusted.join(CONFIG_TOML_FILE);
    let untrusted_config_contents = tokio::fs::read_to_string(&untrusted_config_path).await?;
    tokio::fs::write(
        &untrusted_config_path,
        format!("foo = \"user\"\n{untrusted_config_contents}"),
    )
    .await?;

    let layers_untrusted = load_config_layers_state(
        &codex_home_untrusted,
@@ -584,16 +693,35 @@ async fn project_layers_skipped_when_untrusted_or_unknown() -> std::io::Result<(
        LoaderOverrides::default(),
    )
    .await?;
    let project_layers_untrusted = layers_untrusted
        .layers_high_to_low()
    let project_layers_untrusted: Vec<_> = layers_untrusted
        .get_layers(
            super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
            true,
        )
        .into_iter()
        .filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
        .count();
    assert_eq!(project_layers_untrusted, 0);
    assert_eq!(layers_untrusted.effective_config().get("foo"), None);
        .collect();
    assert_eq!(project_layers_untrusted.len(), 1);
    assert!(
        project_layers_untrusted[0].disabled_reason.is_some(),
        "expected untrusted project layer to be disabled"
    );
    assert_eq!(
        project_layers_untrusted[0].config.get("foo"),
        Some(&TomlValue::String("child".to_string()))
    );
    assert_eq!(
        layers_untrusted.effective_config().get("foo"),
        Some(&TomlValue::String("user".to_string()))
    );

    let codex_home_unknown = tmp.path().join("home_unknown");
    tokio::fs::create_dir_all(&codex_home_unknown).await?;
    tokio::fs::write(
        codex_home_unknown.join(CONFIG_TOML_FILE),
        "foo = \"user\"\n",
    )
    .await?;

    let layers_unknown = load_config_layers_state(
        &codex_home_unknown,
@@ -602,13 +730,92 @@ async fn project_layers_skipped_when_untrusted_or_unknown() -> std::io::Result<(
        LoaderOverrides::default(),
    )
    .await?;
    let project_layers_unknown = layers_unknown
        .layers_high_to_low()
    let project_layers_unknown: Vec<_> = layers_unknown
        .get_layers(
            super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
            true,
        )
        .into_iter()
        .filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
        .count();
    assert_eq!(project_layers_unknown, 0);
    assert_eq!(layers_unknown.effective_config().get("foo"), None);
        .collect();
    assert_eq!(project_layers_unknown.len(), 1);
    assert!(
        project_layers_unknown[0].disabled_reason.is_some(),
        "expected unknown-trust project layer to be disabled"
    );
    assert_eq!(
        project_layers_unknown[0].config.get("foo"),
        Some(&TomlValue::String("child".to_string()))
    );
    assert_eq!(
        layers_unknown.effective_config().get("foo"),
        Some(&TomlValue::String("user".to_string()))
    );

    Ok(())
}

#[tokio::test]
async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::Result<()> {
    let tmp = tempdir()?;
    let project_root = tmp.path().join("project");
    let nested = project_root.join("child");
    tokio::fs::create_dir_all(nested.join(".codex")).await?;
    tokio::fs::write(project_root.join(".git"), "gitdir: here").await?;
    tokio::fs::write(nested.join(".codex").join(CONFIG_TOML_FILE), "foo =").await?;

    let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
    let cases = [
        ("untrusted", Some(TrustLevel::Untrusted)),
        ("unknown", None),
    ];

    for (name, trust_level) in cases {
        let codex_home = tmp.path().join(format!("home_{name}"));
        tokio::fs::create_dir_all(&codex_home).await?;
        let config_path = codex_home.join(CONFIG_TOML_FILE);

        if let Some(trust_level) = trust_level {
            make_config_for_test(&codex_home, &project_root, trust_level, None).await?;
            let config_contents = tokio::fs::read_to_string(&config_path).await?;
            tokio::fs::write(&config_path, format!("foo = \"user\"\n{config_contents}")).await?;
        } else {
            tokio::fs::write(&config_path, "foo = \"user\"\n").await?;
        }

        let layers = load_config_layers_state(
            &codex_home,
            Some(cwd.clone()),
            &[] as &[(String, TomlValue)],
            LoaderOverrides::default(),
        )
        .await?;
        let project_layers: Vec<_> = layers
            .get_layers(
                super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
                true,
            )
            .into_iter()
            .filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
            .collect();
        assert_eq!(
            project_layers.len(),
            1,
            "expected one project layer for {name}"
        );
        assert!(
            project_layers[0].disabled_reason.is_some(),
            "expected {name} project layer to be disabled"
        );
        assert_eq!(
            project_layers[0].config,
            TomlValue::Table(toml::map::Map::new())
        );
        assert_eq!(
            layers.effective_config().get("foo"),
            Some(&TomlValue::String("user".to_string()))
        );
    }

    Ok(())
}
codex-rs/core/src/connectors.rs (Normal file, 227 lines)
@@ -0,0 +1,227 @@
use std::collections::HashMap;
use std::env;
use std::path::PathBuf;

use async_channel::unbounded;
use codex_protocol::protocol::SandboxPolicy;
use serde::Deserialize;
use serde::Serialize;
use tokio_util::sync::CancellationToken;

use crate::AuthManager;
use crate::SandboxState;
use crate::config::Config;
use crate::features::Feature;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp::with_codex_apps_mcp;
use crate::mcp_connection_manager::McpConnectionManager;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConnectorInfo {
    #[serde(rename = "id")]
    pub connector_id: String,
    #[serde(rename = "name")]
    pub connector_name: String,
    #[serde(default, rename = "description")]
    pub connector_description: Option<String>,
    #[serde(default, rename = "logo_url")]
    pub logo_url: Option<String>,
    #[serde(default, rename = "install_url")]
    pub install_url: Option<String>,
    #[serde(default)]
    pub is_accessible: bool,
}

pub async fn list_accessible_connectors_from_mcp_tools(
    config: &Config,
) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
        return Ok(Vec::new());
    }

    let auth_manager = auth_manager_from_config(config);
    let auth = auth_manager.auth().await;
    let mcp_servers = with_codex_apps_mcp(HashMap::new(), true, auth.as_ref(), config);
    if mcp_servers.is_empty() {
        return Ok(Vec::new());
    }

    let auth_status_entries =
        compute_auth_statuses(mcp_servers.iter(), config.mcp_oauth_credentials_store_mode).await;

    let mut mcp_connection_manager = McpConnectionManager::default();
    let (tx_event, rx_event) = unbounded();
    drop(rx_event);
    let cancel_token = CancellationToken::new();

    let sandbox_state = SandboxState {
        sandbox_policy: SandboxPolicy::ReadOnly,
        codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
        sandbox_cwd: env::current_dir().unwrap_or_else(|_| PathBuf::from("/")),
    };

    mcp_connection_manager
        .initialize(
            &mcp_servers,
            config.mcp_oauth_credentials_store_mode,
            auth_status_entries,
            tx_event,
            cancel_token.clone(),
            sandbox_state,
        )
        .await;

    let tools = mcp_connection_manager.list_all_tools().await;
    cancel_token.cancel();

    Ok(accessible_connectors_from_mcp_tools(&tools))
}

fn auth_manager_from_config(config: &Config) -> std::sync::Arc<AuthManager> {
    AuthManager::shared(
        config.codex_home.clone(),
        false,
        config.cli_auth_credentials_store_mode,
    )
}

pub fn connector_display_label(connector: &ConnectorInfo) -> String {
    format_connector_label(&connector.connector_name, &connector.connector_id)
}

pub(crate) fn accessible_connectors_from_mcp_tools(
    mcp_tools: &HashMap<String, crate::mcp_connection_manager::ToolInfo>,
) -> Vec<ConnectorInfo> {
    let tools = mcp_tools.values().filter_map(|tool| {
        if tool.server_name != CODEX_APPS_MCP_SERVER_NAME {
            return None;
        }
        let connector_id = tool.connector_id.as_deref()?;
        let connector_name = normalize_connector_value(tool.connector_name.as_deref());
        Some((connector_id.to_string(), connector_name))
    });
    collect_accessible_connectors(tools)
}

pub fn merge_connectors(
    connectors: Vec<ConnectorInfo>,
    accessible_connectors: Vec<ConnectorInfo>,
) -> Vec<ConnectorInfo> {
    let mut merged: HashMap<String, ConnectorInfo> = connectors
        .into_iter()
        .map(|mut connector| {
            connector.is_accessible = false;
            (connector.connector_id.clone(), connector)
        })
        .collect();

    for mut connector in accessible_connectors {
        connector.is_accessible = true;
        let connector_id = connector.connector_id.clone();
        if let Some(existing) = merged.get_mut(&connector_id) {
            existing.is_accessible = true;
            if existing.connector_name == existing.connector_id
                && connector.connector_name != connector.connector_id
            {
                existing.connector_name = connector.connector_name;
            }
            if existing.connector_description.is_none() && connector.connector_description.is_some()
            {
                existing.connector_description = connector.connector_description;
            }
            if existing.logo_url.is_none() && connector.logo_url.is_some() {
                existing.logo_url = connector.logo_url;
            }
        } else {
            merged.insert(connector_id, connector);
        }
    }

    let mut merged = merged.into_values().collect::<Vec<_>>();
    for connector in &mut merged {
        if connector.install_url.is_none() {
            connector.install_url = Some(connector_install_url(
                &connector.connector_name,
                &connector.connector_id,
            ));
        }
    }
    merged.sort_by(|left, right| {
        right
            .is_accessible
            .cmp(&left.is_accessible)
            .then_with(|| left.connector_name.cmp(&right.connector_name))
            .then_with(|| left.connector_id.cmp(&right.connector_id))
    });
    merged
}

fn collect_accessible_connectors<I>(tools: I) -> Vec<ConnectorInfo>
where
    I: IntoIterator<Item = (String, Option<String>)>,
{
    let mut connectors: HashMap<String, String> = HashMap::new();
    for (connector_id, connector_name) in tools {
        let connector_name = connector_name.unwrap_or_else(|| connector_id.clone());
        if let Some(existing_name) = connectors.get_mut(&connector_id) {
            if existing_name == &connector_id && connector_name != connector_id {
                *existing_name = connector_name;
            }
        } else {
            connectors.insert(connector_id, connector_name);
        }
    }
    let mut accessible: Vec<ConnectorInfo> = connectors
        .into_iter()
        .map(|(connector_id, connector_name)| ConnectorInfo {
            install_url: Some(connector_install_url(&connector_name, &connector_id)),
            connector_id,
            connector_name,
            connector_description: None,
            logo_url: None,
            is_accessible: true,
        })
        .collect();
    accessible.sort_by(|left, right| {
        right
            .is_accessible
            .cmp(&left.is_accessible)
            .then_with(|| left.connector_name.cmp(&right.connector_name))
            .then_with(|| left.connector_id.cmp(&right.connector_id))
    });
    accessible
}

fn normalize_connector_value(value: Option<&str>) -> Option<String> {
    value
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(str::to_string)
}

pub fn connector_install_url(name: &str, connector_id: &str) -> String {
    let slug = connector_name_slug(name);
    format!("https://chatgpt.com/apps/{slug}/{connector_id}")
}

fn connector_name_slug(name: &str) -> String {
    let mut normalized = String::with_capacity(name.len());
    for character in name.chars() {
        if character.is_ascii_alphanumeric() {
            normalized.push(character.to_ascii_lowercase());
        } else {
            normalized.push('-');
        }
    }
    let normalized = normalized.trim_matches('-');
    if normalized.is_empty() {
        "app".to_string()
    } else {
        normalized.to_string()
    }
}

fn format_connector_label(name: &str, _id: &str) -> String {
    name.to_string()
}
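A sketch of the slugging behavior above (the connector name and id here are made up):

assert_eq!(
    connector_install_url("Google Drive", "conn_123"),
    "https://chatgpt.com/apps/google-drive/conn_123"
);
// Names with no alphanumeric characters fall back to the "app" slug.
assert_eq!(connector_name_slug("***"), "app");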
@@ -86,7 +86,9 @@ impl ContextManager {
    // This is a coarse lower bound, not a tokenizer-accurate count.
    pub(crate) fn estimate_token_count(&self, turn_context: &TurnContext) -> Option<i64> {
        let model_info = turn_context.client.get_model_info();
        let personality = turn_context.client.config().model_personality;
        let personality = turn_context
            .personality
            .or(turn_context.client.config().model_personality);
        let base_instructions = model_info.get_model_instructions(personality);
        let base_tokens = i64::try_from(approx_token_count(&base_instructions)).unwrap_or(i64::MAX);
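The change above makes a per-turn personality override win over the config-level default. A restatement of the `Option::or` fallback with stand-in values (illustrative, not the real types):

let per_turn = Some("concise");
let config_default = Some("friendly");
assert_eq!(per_turn.or(config_default), Some("concise"));
// Only when the per-turn value is None does the config default apply.
assert_eq!(None.or(config_default), Some("friendly"));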
@@ -21,6 +21,7 @@ use crate::instructions::SkillInstructions;
use crate::instructions::UserInstructions;
use crate::session_prefix::is_session_prefix;
use crate::user_shell_command::is_user_shell_command_text;
use crate::web_search::web_search_action_detail;

fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
    if UserInstructions::is_user_instructions(message)
@@ -127,14 +128,17 @@ pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
            raw_content,
        }))
    }
    ResponseItem::WebSearchCall {
        id,
        action: WebSearchAction::Search { query },
        ..
    } => Some(TurnItem::WebSearch(WebSearchItem {
        id: id.clone().unwrap_or_default(),
        query: query.clone().unwrap_or_default(),
    })),
    ResponseItem::WebSearchCall { id, action, .. } => {
        let (action, query) = match action {
            Some(action) => (action.clone(), web_search_action_detail(action)),
            None => (WebSearchAction::Other, String::new()),
        };
        Some(TurnItem::WebSearch(WebSearchItem {
            id: id.clone().unwrap_or_default(),
            query,
            action,
        }))
    }
    _ => None,
}
}
@@ -144,6 +148,7 @@ mod tests {
    use super::parse_turn_item;
    use codex_protocol::items::AgentMessageContent;
    use codex_protocol::items::TurnItem;
    use codex_protocol::items::WebSearchItem;
    use codex_protocol::models::ContentItem;
    use codex_protocol::models::ReasoningItemContent;
    use codex_protocol::models::ReasoningItemReasoningSummary;
@@ -419,18 +424,102 @@ mod tests {
    let item = ResponseItem::WebSearchCall {
        id: Some("ws_1".to_string()),
        status: Some("completed".to_string()),
        action: WebSearchAction::Search {
        action: Some(WebSearchAction::Search {
            query: Some("weather".to_string()),
        },
        }),
    };

    let turn_item = parse_turn_item(&item).expect("expected web search turn item");

    match turn_item {
        TurnItem::WebSearch(search) => {
            assert_eq!(search.id, "ws_1");
            assert_eq!(search.query, "weather");
        }
        TurnItem::WebSearch(search) => assert_eq!(
            search,
            WebSearchItem {
                id: "ws_1".to_string(),
                query: "weather".to_string(),
                action: WebSearchAction::Search {
                    query: Some("weather".to_string()),
                },
            }
        ),
        other => panic!("expected TurnItem::WebSearch, got {other:?}"),
    }
}

#[test]
fn parses_web_search_open_page_call() {
    let item = ResponseItem::WebSearchCall {
        id: Some("ws_open".to_string()),
        status: Some("completed".to_string()),
        action: Some(WebSearchAction::OpenPage {
            url: Some("https://example.com".to_string()),
        }),
    };

    let turn_item = parse_turn_item(&item).expect("expected web search turn item");

    match turn_item {
        TurnItem::WebSearch(search) => assert_eq!(
            search,
            WebSearchItem {
                id: "ws_open".to_string(),
                query: "https://example.com".to_string(),
                action: WebSearchAction::OpenPage {
                    url: Some("https://example.com".to_string()),
                },
            }
        ),
        other => panic!("expected TurnItem::WebSearch, got {other:?}"),
    }
}

#[test]
fn parses_web_search_find_in_page_call() {
    let item = ResponseItem::WebSearchCall {
        id: Some("ws_find".to_string()),
        status: Some("completed".to_string()),
        action: Some(WebSearchAction::FindInPage {
            url: Some("https://example.com".to_string()),
            pattern: Some("needle".to_string()),
        }),
    };

    let turn_item = parse_turn_item(&item).expect("expected web search turn item");

    match turn_item {
        TurnItem::WebSearch(search) => assert_eq!(
            search,
            WebSearchItem {
                id: "ws_find".to_string(),
                query: "'needle' in https://example.com".to_string(),
                action: WebSearchAction::FindInPage {
                    url: Some("https://example.com".to_string()),
                    pattern: Some("needle".to_string()),
                },
            }
        ),
        other => panic!("expected TurnItem::WebSearch, got {other:?}"),
    }
}

#[test]
fn parses_partial_web_search_call_without_action_as_other() {
    let item = ResponseItem::WebSearchCall {
        id: Some("ws_partial".to_string()),
        status: Some("in_progress".to_string()),
        action: None,
    };

    let turn_item = parse_turn_item(&item).expect("expected web search turn item");
    match turn_item {
        TurnItem::WebSearch(search) => assert_eq!(
            search,
            WebSearchItem {
                id: "ws_partial".to_string(),
                query: String::new(),
                action: WebSearchAction::Other,
            }
        ),
        other => panic!("expected TurnItem::WebSearch, got {other:?}"),
    }
}

@@ -64,6 +64,7 @@ pub struct ExecParams {
    pub expiration: ExecExpiration,
    pub env: HashMap<String, String>,
    pub sandbox_permissions: SandboxPermissions,
    pub windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
    pub justification: Option<String>,
    pub arg0: Option<String>,
}

@@ -141,11 +142,15 @@ pub async fn process_exec_tool_call(
    codex_linux_sandbox_exe: &Option<PathBuf>,
    stdout_stream: Option<StdoutStream>,
) -> Result<ExecToolCallOutput> {
    let windows_sandbox_level = params.windows_sandbox_level;
    let sandbox_type = match &sandbox_policy {
        SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
            SandboxType::None
        }
        _ => get_platform_sandbox().unwrap_or(SandboxType::None),
        _ => get_platform_sandbox(
            windows_sandbox_level != codex_protocol::config_types::WindowsSandboxLevel::Disabled,
        )
        .unwrap_or(SandboxType::None),
    };
    tracing::debug!("Sandbox type: {sandbox_type:?}");

@@ -155,6 +160,7 @@ pub async fn process_exec_tool_call(
        expiration,
        env,
        sandbox_permissions,
        windows_sandbox_level,
        justification,
        arg0: _,
    } = params;

@@ -184,6 +190,7 @@ pub async fn process_exec_tool_call(
        sandbox_type,
        sandbox_cwd,
        codex_linux_sandbox_exe.as_ref(),
        windows_sandbox_level,
    )
    .map_err(CodexErr::from)?;

@@ -202,6 +209,7 @@ pub(crate) async fn execute_exec_env(
        env,
        expiration,
        sandbox,
        windows_sandbox_level,
        sandbox_permissions,
        justification,
        arg0,

@@ -213,6 +221,7 @@ pub(crate) async fn execute_exec_env(
        expiration,
        env,
        sandbox_permissions,
        windows_sandbox_level,
        justification,
        arg0,
    };

@@ -229,7 +238,7 @@ async fn exec_windows_sandbox(
    sandbox_policy: &SandboxPolicy,
) -> Result<RawExecToolCallOutput> {
    use crate::config::find_codex_home;
    use crate::safety::is_windows_elevated_sandbox_enabled;
    use codex_protocol::config_types::WindowsSandboxLevel;
    use codex_windows_sandbox::run_windows_sandbox_capture;
    use codex_windows_sandbox::run_windows_sandbox_capture_elevated;

@@ -238,6 +247,7 @@ async fn exec_windows_sandbox(
        cwd,
        env,
        expiration,
        windows_sandbox_level,
        ..
    } = params;
    // TODO(iceweasel-oai): run_windows_sandbox_capture should support all

@@ -255,7 +265,7 @@ async fn exec_windows_sandbox(
            "windows sandbox: failed to resolve codex_home: {err}"
        )))
    })?;
    let use_elevated = is_windows_elevated_sandbox_enabled();
    let use_elevated = matches!(windows_sandbox_level, WindowsSandboxLevel::Elevated);
    let spawn_res = tokio::task::spawn_blocking(move || {
        if use_elevated {
            run_windows_sandbox_capture_elevated(

@@ -312,20 +322,7 @@ async fn exec_windows_sandbox(
        text: stderr_text,
        truncated_after_lines: None,
    };
    // Best-effort aggregate: stdout then stderr (capped).
    let mut aggregated = Vec::with_capacity(
        stdout
            .text
            .len()
            .saturating_add(stderr.text.len())
            .min(EXEC_OUTPUT_MAX_BYTES),
    );
    append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
    append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES);
    let aggregated_output = StreamOutput {
        text: aggregated,
        truncated_after_lines: None,
    };
    let aggregated_output = aggregate_output(&stdout, &stderr);

    Ok(RawExecToolCallOutput {
        exit_status,

@@ -519,6 +516,39 @@ fn append_capped(dst: &mut Vec<u8>, src: &[u8], max_bytes: usize) {
    dst.extend_from_slice(&src[..take]);
}

fn aggregate_output(
    stdout: &StreamOutput<Vec<u8>>,
    stderr: &StreamOutput<Vec<u8>>,
) -> StreamOutput<Vec<u8>> {
    let total_len = stdout.text.len().saturating_add(stderr.text.len());
    let max_bytes = EXEC_OUTPUT_MAX_BYTES;
    let mut aggregated = Vec::with_capacity(total_len.min(max_bytes));

    if total_len <= max_bytes {
        aggregated.extend_from_slice(&stdout.text);
        aggregated.extend_from_slice(&stderr.text);
        return StreamOutput {
            text: aggregated,
            truncated_after_lines: None,
        };
    }

    // Under contention, reserve 1/3 for stdout and 2/3 for stderr; rebalance unused stderr to stdout.
    let want_stdout = stdout.text.len().min(max_bytes / 3);
    let want_stderr = stderr.text.len();
    let stderr_take = want_stderr.min(max_bytes.saturating_sub(want_stdout));
    let remaining = max_bytes.saturating_sub(want_stdout + stderr_take);
    let stdout_take = want_stdout + remaining.min(stdout.text.len().saturating_sub(want_stdout));

    aggregated.extend_from_slice(&stdout.text[..stdout_take]);
    aggregated.extend_from_slice(&stderr.text[..stderr_take]);

    StreamOutput {
        text: aggregated,
        truncated_after_lines: None,
    }
}
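
// A worked example of the contention split above, using a hypothetical
// 9-byte cap instead of the real `EXEC_OUTPUT_MAX_BYTES`: stdout is first
// reserved a third of the budget, stderr takes what it needs from the rest,
// and any unused stderr budget flows back to stdout.
#[cfg(test)]
#[test]
fn aggregate_split_worked_example() {
    let max_bytes = 9usize;
    let (stdout_len, stderr_len) = (9usize, 1usize);
    let want_stdout = stdout_len.min(max_bytes / 3); // 3
    let stderr_take = stderr_len.min(max_bytes - want_stdout); // 1
    let remaining = max_bytes - want_stdout - stderr_take; // 5
    // 3 reserved + 5 rebalanced = 8 bytes of stdout alongside 1 byte of stderr.
    let stdout_take = want_stdout + remaining.min(stdout_len - want_stdout);
    assert_eq!((stdout_take, stderr_take), (8, 1));
}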

#[derive(Clone, Debug)]
pub struct ExecToolCallOutput {
    pub exit_code: i32,

@@ -564,6 +594,7 @@ async fn exec(
        env,
        arg0,
        expiration,
        windows_sandbox_level: _,
        ..
    } = params;

@@ -683,20 +714,7 @@ async fn consume_truncated_output(
        Duration::from_millis(IO_DRAIN_TIMEOUT_MS),
    )
    .await?;
    // Best-effort aggregate: stdout then stderr (capped).
    let mut aggregated = Vec::with_capacity(
        stdout
            .text
            .len()
            .saturating_add(stderr.text.len())
            .min(EXEC_OUTPUT_MAX_BYTES),
    );
    append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
    append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES * 2);
    let aggregated_output = StreamOutput {
        text: aggregated,
        truncated_after_lines: None,
    };
    let aggregated_output = aggregate_output(&stdout, &stderr);

    Ok(RawExecToolCallOutput {
        exit_status,

@@ -771,6 +789,7 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::time::Duration;
    use tokio::io::AsyncWriteExt;

@@ -846,6 +865,85 @@ mod tests {
        assert_eq!(out.text.len(), EXEC_OUTPUT_MAX_BYTES);
    }

    #[test]
    fn aggregate_output_prefers_stderr_on_contention() {
        let stdout = StreamOutput {
            text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
            truncated_after_lines: None,
        };
        let stderr = StreamOutput {
            text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
            truncated_after_lines: None,
        };

        let aggregated = aggregate_output(&stdout, &stderr);
        let stdout_cap = EXEC_OUTPUT_MAX_BYTES / 3;
        let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_cap);

        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
        assert_eq!(aggregated.text[..stdout_cap], vec![b'a'; stdout_cap]);
        assert_eq!(aggregated.text[stdout_cap..], vec![b'b'; stderr_cap]);
    }

    #[test]
    fn aggregate_output_fills_remaining_capacity_with_stderr() {
        let stdout_len = EXEC_OUTPUT_MAX_BYTES / 10;
        let stdout = StreamOutput {
            text: vec![b'a'; stdout_len],
            truncated_after_lines: None,
        };
        let stderr = StreamOutput {
            text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
            truncated_after_lines: None,
        };

        let aggregated = aggregate_output(&stdout, &stderr);
        let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_len);

        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
        assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
        assert_eq!(aggregated.text[stdout_len..], vec![b'b'; stderr_cap]);
    }

    #[test]
    fn aggregate_output_rebalances_when_stderr_is_small() {
        let stdout = StreamOutput {
            text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
            truncated_after_lines: None,
        };
        let stderr = StreamOutput {
            text: vec![b'b'; 1],
            truncated_after_lines: None,
        };

        let aggregated = aggregate_output(&stdout, &stderr);
        let stdout_len = EXEC_OUTPUT_MAX_BYTES.saturating_sub(1);

        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
        assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
        assert_eq!(aggregated.text[stdout_len..], vec![b'b'; 1]);
    }

    #[test]
    fn aggregate_output_keeps_stdout_then_stderr_when_under_cap() {
        let stdout = StreamOutput {
            text: vec![b'a'; 4],
            truncated_after_lines: None,
        };
        let stderr = StreamOutput {
            text: vec![b'b'; 3],
            truncated_after_lines: None,
        };

        let aggregated = aggregate_output(&stdout, &stderr);
        let mut expected = Vec::new();
        expected.extend_from_slice(&stdout.text);
        expected.extend_from_slice(&stderr.text);

        assert_eq!(aggregated.text, expected);
        assert_eq!(aggregated.truncated_after_lines, None);
    }

    #[cfg(unix)]
    #[test]
    fn sandbox_detection_flags_sigsys_exit_code() {

@@ -878,6 +976,7 @@ mod tests {
        expiration: 500.into(),
        env,
        sandbox_permissions: SandboxPermissions::UseDefault,
        windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
        justification: None,
        arg0: None,
    };

@@ -923,6 +1022,7 @@ mod tests {
        expiration: ExecExpiration::Cancellation(cancel_token),
        env,
        sandbox_permissions: SandboxPermissions::UseDefault,
        windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
        justification: None,
        arg0: None,
    };

@@ -227,7 +227,9 @@ pub async fn load_exec_policy(config_stack: &ConfigLayerStack) -> Result<Policy,
    // from each layer, so that higher-precedence layers can override
    // rules defined in lower-precedence ones.
    let mut policy_paths = Vec::new();
    for layer in config_stack.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst) {
    // Include disabled project layers so .codex/rules still applies when
    // project config.toml is trust-disabled.
    for layer in config_stack.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, true) {
        if let Some(config_folder) = layer.config_folder() {
            #[expect(clippy::expect_used)]
            let policy_dir = config_folder.join(RULES_DIR_NAME).expect("safe join");

@@ -625,6 +627,47 @@ mod tests {
        );
    }

    #[tokio::test]
    async fn loads_rules_from_disabled_project_layers() -> anyhow::Result<()> {
        let project_dir = tempdir()?;
        let policy_dir = project_dir.path().join(RULES_DIR_NAME);
        fs::create_dir_all(&policy_dir)?;
        fs::write(
            policy_dir.join("disabled.rules"),
            r#"prefix_rule(pattern=["ls"], decision="forbidden")"#,
        )?;

        let project_dot_codex_folder = AbsolutePathBuf::from_absolute_path(project_dir.path())?;
        let layers = vec![ConfigLayerEntry::new_disabled(
            ConfigLayerSource::Project {
                dot_codex_folder: project_dot_codex_folder,
            },
            TomlValue::Table(Default::default()),
            "trust disabled",
        )];
        let config_stack = ConfigLayerStack::new(
            layers,
            ConfigRequirements::default(),
            ConfigRequirementsToml::default(),
        )?;

        let policy = load_exec_policy(&config_stack).await?;

        assert_eq!(
            Evaluation {
                decision: Decision::Forbidden,
                matched_rules: vec![RuleMatch::PrefixRuleMatch {
                    matched_prefix: vec!["ls".to_string()],
                    decision: Decision::Forbidden,
                    justification: None,
                }],
            },
            policy.check_multiple([vec!["ls".to_string()]].iter(), &|_| Decision::Allow)
        );

        Ok(())
    }

    #[tokio::test]
    async fn loads_policies_from_multiple_config_layers() -> anyhow::Result<()> {
        let user_dir = tempdir()?;

@@ -5,14 +5,20 @@
//! booleans through multiple types, call sites consult a single `Features`
//! container attached to `Config`.

use crate::config::CONFIG_TOML_FILE;
use crate::config::Config;
use crate::config::ConfigToml;
use crate::config::profile::ConfigProfile;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use crate::protocol::WarningEvent;
use codex_otel::OtelManager;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use toml::Value as TomlValue;

mod legacy;
pub(crate) use legacy::LegacyFeatureToggles;

@@ -21,8 +27,8 @@ pub(crate) use legacy::legacy_feature_keys;
/// High-level lifecycle stage for a feature.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Stage {
    /// Closed beta features to be used while developing or within the company.
    Beta,
    /// Features that are still under development, not ready for external use.
    UnderDevelopment,
    /// Experimental features made available to users through the `/experimental` menu.
    Experimental {
        name: &'static str,

@@ -38,14 +44,14 @@ pub enum Stage {
}

impl Stage {
    pub fn beta_menu_name(self) -> Option<&'static str> {
    pub fn experimental_menu_name(self) -> Option<&'static str> {
        match self {
            Stage::Experimental { name, .. } => Some(name),
            _ => None,
        }
    }

    pub fn beta_menu_description(self) -> Option<&'static str> {
    pub fn experimental_menu_description(self) -> Option<&'static str> {
        match self {
            Stage::Experimental {
                menu_description, ..

@@ -54,7 +60,7 @@ impl Stage {
        }
    }

    pub fn beta_announcement(self) -> Option<&'static str> {
    pub fn experimental_announcement(self) -> Option<&'static str> {
        match self {
            Stage::Experimental { announcement, .. } => Some(announcement),
            _ => None,

@@ -101,9 +107,11 @@ pub enum Feature {
    EnableRequestCompression,
    /// Enable collab tools.
    Collab,
    /// Enable connectors (apps).
    Connectors,
    /// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
    Steer,
    /// Enable collaboration modes (Plan, Pair Programming, Execute).
    /// Enable collaboration modes (Plan, Code, Pair Programming, Execute).
    CollaborationModes,
    /// Use the Responses API WebSocket transport for OpenAI by default.
    ResponsesWebsockets,

@@ -341,10 +349,10 @@ pub const FEATURES: &[FeatureSpec] = &[
    FeatureSpec {
        id: Feature::WebSearchCached,
        key: "web_search_cached",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    // Beta program. Rendered in the `/experimental` menu for users.
    // Experimental program. Rendered in the `/experimental` menu for users.
    FeatureSpec {
        id: Feature::UnifiedExec,
        key: "unified_exec",

@@ -368,43 +376,43 @@ pub const FEATURES: &[FeatureSpec] = &[
    FeatureSpec {
        id: Feature::ChildAgentsMd,
        key: "child_agents_md",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ApplyPatchFreeform,
        key: "apply_patch_freeform",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ExecPolicy,
        key: "exec_policy",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::WindowsSandbox,
        key: "experimental_windows_sandbox",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::WindowsSandboxElevated,
        key: "elevated_windows_sandbox",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::RemoteCompaction,
        key: "remote_compaction",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::RemoteModels,
        key: "remote_models",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: true,
    },
    FeatureSpec {

@@ -419,24 +427,26 @@ pub const FEATURES: &[FeatureSpec] = &[
        #[cfg(windows)]
        default_enabled: true,
        #[cfg(not(windows))]
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        #[cfg(not(windows))]
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::EnableRequestCompression,
        key: "enable_request_compression",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::Collab,
        key: "collab",
        stage: Stage::Experimental {
            name: "Multi-agents",
            menu_description: "Allow Codex to spawn and collaborate with other agents on request (formerly named `collab`).",
            announcement: "NEW! Codex can now spawn other agents and work with them to solve your problems. Enable in /experimental!",
        },
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::Connectors,
        key: "connectors",
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {

@@ -452,13 +462,64 @@ pub const FEATURES: &[FeatureSpec] = &[
    FeatureSpec {
        id: Feature::CollaborationModes,
        key: "collaboration_modes",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ResponsesWebsockets,
        key: "responses_websockets",
        stage: Stage::Beta,
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
];

/// Push a warning event if any under-development features are enabled.
pub fn maybe_push_unstable_features_warning(
    config: &Config,
    post_session_configured_events: &mut Vec<Event>,
) {
    if config.suppress_unstable_features_warning {
        return;
    }

    let mut under_development_feature_keys = Vec::new();
    if let Some(table) = config
        .config_layer_stack
        .effective_config()
        .get("features")
        .and_then(TomlValue::as_table)
    {
        for (key, value) in table {
            if value.as_bool() != Some(true) {
                continue;
            }
            let Some(spec) = FEATURES.iter().find(|spec| spec.key == key.as_str()) else {
                continue;
            };
            if !config.features.enabled(spec.id) {
                continue;
            }
            if matches!(spec.stage, Stage::UnderDevelopment) {
                under_development_feature_keys.push(spec.key.to_string());
            }
        }
    }

    if under_development_feature_keys.is_empty() {
        return;
    }

    let under_development_feature_keys = under_development_feature_keys.join(", ");
    let config_path = config
        .codex_home
        .join(CONFIG_TOML_FILE)
        .display()
        .to_string();
    let message = format!(
        "Under-development features enabled: {under_development_feature_keys}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {config_path}."
    );
    post_session_configured_events.push(Event {
        id: "".to_owned(),
        msg: EventMsg::Warning(WarningEvent { message }),
    });
}
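
// A small sketch of the selection the warning above performs: collect the key
// of every spec in the static table whose stage is still `UnderDevelopment`
// and which the caller reports as enabled. Hypothetical helper, not part of
// the diff; it assumes `Feature: Copy`.
fn under_development_keys(enabled: impl Fn(Feature) -> bool) -> Vec<&'static str> {
    FEATURES
        .iter()
        .filter(|spec| matches!(spec.stage, Stage::UnderDevelopment) && enabled(spec.id))
        .map(|spec| spec.key)
        .collect()
}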

@@ -15,11 +15,13 @@ pub mod codex;
mod codex_thread;
mod compact_remote;
pub use codex_thread::CodexThread;
pub use codex_thread::ThreadConfigSnapshot;
mod agent;
mod codex_delegate;
mod command_safety;
pub mod config;
pub mod config_loader;
pub mod connectors;
mod context_manager;
pub mod custom_prompts;
pub mod env;

@@ -67,6 +69,7 @@ mod event_mapping;
pub mod review_format;
pub mod review_prompts;
mod thread_manager;
pub mod web_search;
pub use codex_protocol::protocol::InitialHistory;
pub use thread_manager::NewThread;
pub use thread_manager::ThreadManager;

@@ -96,6 +99,7 @@ pub use rollout::INTERACTIVE_SESSION_SOURCES;
pub use rollout::RolloutRecorder;
pub use rollout::SESSIONS_SUBDIR;
pub use rollout::SessionMeta;
pub use rollout::find_archived_thread_path_by_id_str;
#[deprecated(note = "use find_thread_path_by_id_str")]
pub use rollout::find_conversation_path_by_id_str;
pub use rollout::find_thread_path_by_id_str;

@@ -106,6 +110,7 @@ pub use rollout::list::ThreadsPage;
pub use rollout::list::parse_cursor;
pub use rollout::list::read_head_for_summary;
pub use rollout::list::read_session_meta_line;
pub use rollout::rollout_date_parts;
mod function_tool;
mod state;
mod tasks;

@@ -121,9 +126,7 @@ pub use exec_policy::ExecPolicyError;
pub use exec_policy::check_execpolicy_for_warnings;
pub use exec_policy::load_exec_policy;
pub use safety::get_platform_sandbox;
pub use safety::is_windows_elevated_sandbox_enabled;
pub use safety::set_windows_elevated_sandbox_enabled;
pub use safety::set_windows_sandbox_enabled;
pub use tools::spec::parse_tool_input_schema;
// Re-export the protocol types from the standalone `codex-protocol` crate so existing
// `codex_core::protocol::...` references continue to work across the workspace.
pub use codex_protocol::protocol;

@@ -9,16 +9,136 @@ use codex_protocol::protocol::SandboxPolicy;
use mcp_types::Tool as McpTool;
use tokio_util::sync::CancellationToken;

use crate::AuthManager;
use crate::CodexAuth;
use crate::config::Config;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
use crate::features::Feature;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::mcp_connection_manager::SandboxState;

const MCP_TOOL_NAME_PREFIX: &str = "mcp";
const MCP_TOOL_NAME_DELIMITER: &str = "__";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps_mcp";
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";

fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
    match env::var(CODEX_CONNECTORS_TOKEN_ENV_VAR) {
        Ok(value) if !value.trim().is_empty() => Some(CODEX_CONNECTORS_TOKEN_ENV_VAR.to_string()),
        Ok(_) => None,
        Err(env::VarError::NotPresent) => None,
        Err(env::VarError::NotUnicode(_)) => Some(CODEX_CONNECTORS_TOKEN_ENV_VAR.to_string()),
    }
}

fn codex_apps_mcp_bearer_token(auth: Option<&CodexAuth>) -> Option<String> {
    let token = auth.and_then(|auth| auth.get_token().ok())?;
    let token = token.trim();
    if token.is_empty() {
        None
    } else {
        Some(token.to_string())
    }
}

fn codex_apps_mcp_http_headers(auth: Option<&CodexAuth>) -> Option<HashMap<String, String>> {
    let mut headers = HashMap::new();
    if let Some(token) = codex_apps_mcp_bearer_token(auth) {
        headers.insert("Authorization".to_string(), format!("Bearer {token}"));
    }
    if let Some(account_id) = auth.and_then(CodexAuth::get_account_id) {
        headers.insert("ChatGPT-Account-ID".to_string(), account_id);
    }
    if headers.is_empty() {
        None
    } else {
        Some(headers)
    }
}

fn codex_apps_mcp_url(base_url: &str) -> String {
    let mut base_url = base_url.trim_end_matches('/').to_string();
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    if base_url.contains("/backend-api") {
        format!("{base_url}/wham/apps")
    } else if base_url.contains("/api/codex") {
        format!("{base_url}/apps")
    } else {
        format!("{base_url}/api/codex/apps")
    }
}
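
// Example mappings that follow directly from `codex_apps_mcp_url` as written
// above (the example.com inputs are hypothetical base URLs for illustration):
#[cfg(test)]
#[test]
fn codex_apps_mcp_url_examples() {
    // ChatGPT hosts gain "/backend-api" before the apps suffix.
    assert_eq!(
        codex_apps_mcp_url("https://chatgpt.com/"),
        "https://chatgpt.com/backend-api/wham/apps"
    );
    // A base URL already pointing at an "/api/codex" root only gains "/apps".
    assert_eq!(
        codex_apps_mcp_url("https://example.com/api/codex"),
        "https://example.com/api/codex/apps"
    );
    // Anything else falls through to the full "/api/codex/apps" path.
    assert_eq!(
        codex_apps_mcp_url("https://example.com"),
        "https://example.com/api/codex/apps"
    );
}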

fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> McpServerConfig {
    let bearer_token_env_var = codex_apps_mcp_bearer_token_env_var();
    let http_headers = if bearer_token_env_var.is_some() {
        None
    } else {
        codex_apps_mcp_http_headers(auth)
    };
    let url = codex_apps_mcp_url(&config.chatgpt_base_url);

    McpServerConfig {
        transport: McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
            http_headers,
            env_http_headers: None,
        },
        enabled: true,
        disabled_reason: None,
        startup_timeout_sec: None,
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    }
}

pub(crate) fn with_codex_apps_mcp(
    mut servers: HashMap<String, McpServerConfig>,
    connectors_enabled: bool,
    auth: Option<&CodexAuth>,
    config: &Config,
) -> HashMap<String, McpServerConfig> {
    if connectors_enabled {
        servers.insert(
            CODEX_APPS_MCP_SERVER_NAME.to_string(),
            codex_apps_mcp_server_config(config, auth),
        );
    } else {
        servers.remove(CODEX_APPS_MCP_SERVER_NAME);
    }
    servers
}

pub(crate) fn effective_mcp_servers(
    config: &Config,
    auth: Option<&CodexAuth>,
) -> HashMap<String, McpServerConfig> {
    with_codex_apps_mcp(
        config.mcp_servers.get().clone(),
        config.features.enabled(Feature::Connectors),
        auth,
        config,
    )
}

pub async fn collect_mcp_snapshot(config: &Config) -> McpListToolsResponseEvent {
    if config.mcp_servers.is_empty() {
    let auth_manager = AuthManager::shared(
        config.codex_home.clone(),
        false,
        config.cli_auth_credentials_store_mode,
    );
    let auth = auth_manager.auth().await;
    let mcp_servers = effective_mcp_servers(config, auth.as_ref());
    if mcp_servers.is_empty() {
        return McpListToolsResponseEvent {
            tools: HashMap::new(),
            resources: HashMap::new(),

@@ -27,11 +147,8 @@ pub async fn collect_mcp_snapshot(config: &Config) -> McpListToolsResponseEvent
        };
    }

    let auth_status_entries = compute_auth_statuses(
        config.mcp_servers.iter(),
        config.mcp_oauth_credentials_store_mode,
    )
    .await;
    let auth_status_entries =
        compute_auth_statuses(mcp_servers.iter(), config.mcp_oauth_credentials_store_mode).await;

    let mut mcp_connection_manager = McpConnectionManager::default();
    let (tx_event, rx_event) = unbounded();

@@ -47,7 +164,7 @@ pub async fn collect_mcp_snapshot(config: &Config) -> McpListToolsResponseEvent

    mcp_connection_manager
        .initialize(
            &config.mcp_servers,
            &mcp_servers,
            config.mcp_oauth_credentials_store_mode,
            auth_status_entries.clone(),
            tx_event,

@@ -153,6 +153,8 @@ pub(crate) struct ToolInfo {
    pub(crate) server_name: String,
    pub(crate) tool_name: String,
    pub(crate) tool: Tool,
    pub(crate) connector_id: Option<String>,
    pub(crate) connector_name: Option<String>,
}

type ResponderMap = HashMap<(String, RequestId), oneshot::Sender<ElicitationResponse>>;

@@ -899,14 +901,16 @@ async fn list_tools_for_client(
    client: &Arc<RmcpClient>,
    timeout: Option<Duration>,
) -> Result<Vec<ToolInfo>> {
    let resp = client.list_tools(None, timeout).await?;
    let resp = client.list_tools_with_connector_ids(None, timeout).await?;
    Ok(resp
        .tools
        .into_iter()
        .map(|tool| ToolInfo {
            server_name: server_name.to_owned(),
            tool_name: tool.name.clone(),
            tool,
            tool_name: tool.tool.name.clone(),
            tool: tool.tool,
            connector_id: tool.connector_id,
            connector_name: tool.connector_name,
        })
        .collect())
}

@@ -1004,6 +1008,8 @@ mod tests {
            output_schema: None,
            title: None,
        },
        connector_id: None,
        connector_name: None,
    }
}

@@ -1176,6 +1182,7 @@ mod tests {
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    },
    auth_status: McpAuthStatus::Unsupported,
};

@@ -1221,6 +1228,7 @@ mod tests {
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    },
    auth_status: McpAuthStatus::Unsupported,
};

@@ -1,42 +1,64 @@
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::Settings;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::openai_models::ReasoningEffort;

const COLLABORATION_MODE_PLAN: &str = include_str!("../../templates/collaboration_mode/plan.md");
const COLLABORATION_MODE_CODE: &str = include_str!("../../templates/collaboration_mode/code.md");
const COLLABORATION_MODE_PAIR_PROGRAMMING: &str =
    include_str!("../../templates/collaboration_mode/pair_programming.md");
const COLLABORATION_MODE_EXECUTE: &str =
    include_str!("../../templates/collaboration_mode/execute.md");

pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
    vec![plan_preset(), pair_programming_preset(), execute_preset()]
pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
    vec![
        plan_preset(),
        code_preset(),
        pair_programming_preset(),
        execute_preset(),
    ]
}

#[cfg(any(test, feature = "test-support"))]
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
    builtin_collaboration_mode_presets()
}

fn plan_preset() -> CollaborationMode {
    CollaborationMode::Plan(Settings {
        model: "gpt-5.2-codex".to_string(),
        reasoning_effort: Some(ReasoningEffort::Medium),
        developer_instructions: Some(COLLABORATION_MODE_PLAN.to_string()),
    })
fn plan_preset() -> CollaborationModeMask {
    CollaborationModeMask {
        name: "Plan".to_string(),
        mode: Some(ModeKind::Plan),
        model: None,
        reasoning_effort: Some(Some(ReasoningEffort::Medium)),
        developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
    }
}

fn pair_programming_preset() -> CollaborationMode {
    CollaborationMode::PairProgramming(Settings {
        model: "gpt-5.2-codex".to_string(),
        reasoning_effort: Some(ReasoningEffort::Medium),
        developer_instructions: Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string()),
    })
fn code_preset() -> CollaborationModeMask {
    CollaborationModeMask {
        name: "Code".to_string(),
        mode: Some(ModeKind::Code),
        model: None,
        reasoning_effort: None,
        developer_instructions: Some(Some(COLLABORATION_MODE_CODE.to_string())),
    }
}

fn execute_preset() -> CollaborationMode {
    CollaborationMode::Execute(Settings {
        model: "gpt-5.2-codex".to_string(),
        reasoning_effort: Some(ReasoningEffort::XHigh),
        developer_instructions: Some(COLLABORATION_MODE_EXECUTE.to_string()),
    })
fn pair_programming_preset() -> CollaborationModeMask {
    CollaborationModeMask {
        name: "Pair Programming".to_string(),
        mode: Some(ModeKind::PairProgramming),
        model: None,
        reasoning_effort: Some(Some(ReasoningEffort::Medium)),
        developer_instructions: Some(Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string())),
    }
}

fn execute_preset() -> CollaborationModeMask {
    CollaborationModeMask {
        name: "Execute".to_string(),
        mode: Some(ModeKind::Execute),
        model: None,
        reasoning_effort: Some(Some(ReasoningEffort::High)),
        developer_instructions: Some(Some(COLLABORATION_MODE_EXECUTE.to_string())),
    }
}
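
// How a mask field is presumably applied (a sketch of the assumed semantics;
// the real merge logic lives with `CollaborationModeMask` in codex-protocol):
// `None` leaves the underlying field untouched, `Some(None)` clears it, and
// `Some(Some(v))` overrides it with `v`.
fn apply_reasoning_effort_mask(
    current: Option<ReasoningEffort>,
    mask: Option<Option<ReasoningEffort>>,
) -> Option<ReasoningEffort> {
    match mask {
        None => current,                        // field not masked
        Some(override_value) => override_value, // Some(None) clears, Some(Some(v)) sets
    }
}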

@@ -14,7 +14,7 @@ use crate::models_manager::model_presets::builtin_model_presets;
use codex_api::ModelsClient;
use codex_api::ReqwestTransport;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelsResponse;

@@ -30,9 +30,6 @@ use tracing::error;
const MODEL_CACHE_FILE: &str = "models_cache.json";
const DEFAULT_MODEL_CACHE_TTL: Duration = Duration::from_secs(300);
const MODELS_REFRESH_TIMEOUT: Duration = Duration::from_secs(5);
const OPENAI_DEFAULT_API_MODEL: &str = "gpt-5.2-codex";
const OPENAI_DEFAULT_CHATGPT_MODEL: &str = "gpt-5.2-codex";
const CODEX_AUTO_BALANCED_MODEL: &str = "codex-auto-balanced";

/// Strategy for refreshing available models.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]

@@ -94,7 +91,7 @@ impl ModelsManager {
    /// List collaboration mode presets.
    ///
    /// Returns a static set of presets seeded with the configured model.
    pub fn list_collaboration_modes(&self) -> Vec<CollaborationMode> {
    pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
        builtin_collaboration_mode_presets()
    }

@@ -110,7 +107,7 @@ impl ModelsManager {
    /// Get the model identifier to use, refreshing according to the specified strategy.
    ///
    /// If `model` is provided, returns it directly. Otherwise selects the default based on
    /// auth mode and available models (prefers `codex-auto-balanced` for ChatGPT auth).
    /// auth mode and available models.
    pub async fn get_default_model(
        &self,
        model: &Option<String>,

@@ -126,20 +123,14 @@ impl ModelsManager {
        {
            error!("failed to refresh available models: {err}");
        }
        // if codex-auto-balanced exists & signed in with chatgpt mode, return it, otherwise return the default model
        let auth_mode = self.auth_manager.get_auth_mode();
        let remote_models = self.get_remote_models(config).await;
        if auth_mode == Some(AuthMode::ChatGPT) {
            let has_auto_balanced = self
                .build_available_models(remote_models)
                .iter()
                .any(|model| model.model == CODEX_AUTO_BALANCED_MODEL && model.show_in_picker);
            if has_auto_balanced {
                return CODEX_AUTO_BALANCED_MODEL.to_string();
            }
            return OPENAI_DEFAULT_CHATGPT_MODEL.to_string();
        }
        OPENAI_DEFAULT_API_MODEL.to_string()
        let available = self.build_available_models(remote_models);
        available
            .iter()
            .find(|model| model.is_default)
            .or_else(|| available.first())
            .map(|model| model.model.clone())
            .unwrap_or_default()
    }

    // todo(aibrahim): look if we can tighten it to pub(crate)

@@ -336,7 +327,16 @@ impl ModelsManager {
    #[cfg(any(test, feature = "test-support"))]
    /// Get model identifier without consulting remote state or cache.
    pub fn get_model_offline(model: Option<&str>) -> String {
        model.unwrap_or(OPENAI_DEFAULT_CHATGPT_MODEL).to_string()
        if let Some(model) = model {
            return model.to_string();
        }
        let presets = builtin_model_presets(None);
        presets
            .iter()
            .find(|preset| preset.show_in_picker)
            .or_else(|| presets.first())
            .map(|preset| preset.model.clone())
            .unwrap_or_default()
    }

    #[cfg(any(test, feature = "test-support"))]
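
// The new default-model selection above reduces to: an explicit `model` wins,
// then the first entry flagged as default, then the first available entry at
// all. A sketch of that fallback chain over a plain slice of
// (slug, is_default) pairs, for illustration only:
fn pick_default_model(available: &[(&str, bool)]) -> String {
    available
        .iter()
        .find(|(_, is_default)| *is_default)
        .or_else(|| available.first())
        .map(|(slug, _)| (*slug).to_string())
        .unwrap_or_default()
}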

@@ -169,6 +169,16 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
    model_info!(
        slug,
        base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
        model_instructions_template: Some(ModelInstructionsTemplate {
            template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
            personality_messages: Some(PersonalityMessages(BTreeMap::from([(
                Personality::Friendly,
                PERSONALITY_FRIENDLY.to_string(),
            ), (
                Personality::Pragmatic,
                PERSONALITY_PRAGMATIC.to_string(),
            )]))),
        }),
        apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
        shell_type: ConfigShellToolType::ShellCommand,
        supports_parallel_tool_calls: true,

@@ -203,16 +213,6 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
        truncation_policy: TruncationPolicyConfig::tokens(10_000),
        context_window: Some(CONTEXT_WINDOW_272K),
        supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
        model_instructions_template: Some(ModelInstructionsTemplate {
            template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
            personality_messages: Some(PersonalityMessages(BTreeMap::from([(
                Personality::Friendly,
                PERSONALITY_FRIENDLY.to_string(),
            ), (
                Personality::Pragmatic,
                PERSONALITY_PRAGMATIC.to_string(),
            )]))),
        }),
    )
} else if slug.starts_with("gpt-5.1-codex-max") {
    model_info!(

@@ -36,6 +36,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Extra high reasoning depth for complex problems".to_string(),
        },
    ],
    supports_personality: true,
    is_default: true,
    upgrade: None,
    show_in_picker: true,

@@ -65,6 +66,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Extra high reasoning depth for complex problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: true,

@@ -87,6 +89,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            .to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: true,

@@ -116,6 +119,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Extra high reasoning depth for complex problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: true,

@@ -145,6 +149,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Extra high reasoning depth for complex problems".to_string(),
        },
    ],
    supports_personality: true,
    is_default: false,
    upgrade: None,
    show_in_picker: false,

@@ -174,6 +179,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Extra high reasoning depth for complex problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: None,
    show_in_picker: false,

@@ -200,6 +206,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Maximizes reasoning depth for complex or ambiguous problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: false,

@@ -221,6 +228,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Maximizes reasoning depth for complex or ambiguous problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: false,

@@ -247,6 +255,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            .to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: false,

@@ -276,6 +285,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Maximizes reasoning depth for complex or ambiguous problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: false,

@@ -301,6 +311,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
            description: "Maximizes reasoning depth for complex or ambiguous problems".to_string(),
        },
    ],
    supports_personality: false,
    is_default: false,
    upgrade: Some(gpt_52_codex_upgrade()),
    show_in_picker: false,

@@ -1,4 +1,5 @@
use std::cmp::Reverse;
use std::ffi::OsStr;
use std::io::{self};
use std::num::NonZero;
use std::ops::ControlFlow;

@@ -15,6 +16,7 @@ use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use uuid::Uuid;

use super::ARCHIVED_SESSIONS_SUBDIR;
use super::SESSIONS_SUBDIR;
use crate::protocol::EventMsg;
use codex_file_search as file_search;

@@ -72,6 +74,7 @@ struct HeadTailSummary {
/// Hard cap to bound worst‑case work per request.
const MAX_SCAN_FILES: usize = 10000;
const HEAD_RECORD_LIMIT: usize = 10;
const USER_EVENT_SCAN_LIMIT: usize = 200;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ThreadSortKey {

@@ -79,6 +82,19 @@ pub enum ThreadSortKey {
    UpdatedAt,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ThreadListLayout {
    NestedByDate,
    Flat,
}

pub(crate) struct ThreadListConfig<'a> {
    pub(crate) allowed_sources: &'a [SessionSource],
    pub(crate) model_providers: Option<&'a [String]>,
    pub(crate) default_provider: &'a str,
    pub(crate) layout: ThreadListLayout,
}
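
// For a flat directory of rollout files, a caller would pass
// `ThreadListLayout::Flat` instead of `NestedByDate`. A sketch mirroring the
// `get_threads` wrapper shown further down (the surrounding bindings are
// assumed to exist at the call site):
//
//     get_threads_in_root(
//         root,
//         page_size,
//         cursor,
//         sort_key,
//         ThreadListConfig {
//             allowed_sources,
//             model_providers,
//             default_provider,
//             layout: ThreadListLayout::Flat,
//         },
//     )
//     .await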
/// Pagination cursor identifying a file by timestamp and UUID.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Cursor {
|
||||
@@ -259,9 +275,29 @@ pub(crate) async fn get_threads(
|
||||
model_providers: Option<&[String]>,
|
||||
default_provider: &str,
|
||||
) -> io::Result<ThreadsPage> {
|
||||
let mut root = codex_home.to_path_buf();
|
||||
root.push(SESSIONS_SUBDIR);
|
||||
let root = codex_home.join(SESSIONS_SUBDIR);
|
||||
get_threads_in_root(
|
||||
root,
|
||||
page_size,
|
||||
cursor,
|
||||
sort_key,
|
||||
ThreadListConfig {
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
default_provider,
|
||||
layout: ThreadListLayout::NestedByDate,
|
||||
},
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub(crate) async fn get_threads_in_root(
|
||||
root: PathBuf,
|
||||
page_size: usize,
|
||||
cursor: Option<&Cursor>,
|
||||
sort_key: ThreadSortKey,
|
||||
config: ThreadListConfig<'_>,
|
||||
) -> io::Result<ThreadsPage> {
|
||||
if !root.exists() {
|
||||
return Ok(ThreadsPage {
|
||||
items: Vec::new(),
|
||||
@@ -273,18 +309,34 @@ pub(crate) async fn get_threads(
|
||||
|
||||
let anchor = cursor.cloned();
|
||||
|
||||
let provider_matcher =
|
||||
model_providers.and_then(|filters| ProviderMatcher::new(filters, default_provider));
|
||||
let provider_matcher = config
|
||||
.model_providers
|
||||
.and_then(|filters| ProviderMatcher::new(filters, config.default_provider));
|
||||
|
||||
let result = traverse_directories_for_paths(
|
||||
root.clone(),
|
||||
page_size,
|
||||
anchor,
|
||||
sort_key,
|
||||
allowed_sources,
|
||||
provider_matcher.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
let result = match config.layout {
|
||||
ThreadListLayout::NestedByDate => {
|
||||
traverse_directories_for_paths(
|
||||
root.clone(),
|
||||
page_size,
|
||||
anchor,
|
||||
sort_key,
|
||||
config.allowed_sources,
|
||||
provider_matcher.as_ref(),
|
||||
)
|
||||
.await?
|
||||
}
|
||||
ThreadListLayout::Flat => {
|
||||
traverse_flat_paths(
|
||||
root.clone(),
|
||||
page_size,
|
||||
anchor,
|
||||
sort_key,
|
||||
config.allowed_sources,
|
||||
provider_matcher.as_ref(),
|
||||
)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
@@ -324,6 +376,26 @@ async fn traverse_directories_for_paths(
|
||||
}
|
||||
}
|
||||
|
||||
async fn traverse_flat_paths(
|
||||
root: PathBuf,
|
||||
page_size: usize,
|
||||
anchor: Option<Cursor>,
|
||||
sort_key: ThreadSortKey,
|
||||
allowed_sources: &[SessionSource],
|
||||
provider_matcher: Option<&ProviderMatcher<'_>>,
|
||||
) -> io::Result<ThreadsPage> {
|
||||
match sort_key {
|
||||
ThreadSortKey::CreatedAt => {
|
||||
traverse_flat_paths_created(root, page_size, anchor, allowed_sources, provider_matcher)
|
||||
.await
|
||||
}
|
||||
ThreadSortKey::UpdatedAt => {
|
||||
traverse_flat_paths_updated(root, page_size, anchor, allowed_sources, provider_matcher)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Walk the rollout directory tree in reverse chronological order and
|
||||
/// collect items until the page fills or the scan cap is hit.
|
||||
///
|
||||
@@ -437,6 +509,116 @@ async fn traverse_directories_for_paths_updated(
|
||||
})
|
||||
}
|
||||
|
||||
async fn traverse_flat_paths_created(
|
||||
root: PathBuf,
|
||||
page_size: usize,
|
||||
anchor: Option<Cursor>,
|
||||
allowed_sources: &[SessionSource],
|
||||
provider_matcher: Option<&ProviderMatcher<'_>>,
|
||||
) -> io::Result<ThreadsPage> {
|
||||
let mut items: Vec<ThreadItem> = Vec::with_capacity(page_size);
|
||||
let mut scanned_files = 0usize;
|
||||
let mut anchor_state = AnchorState::new(anchor);
|
||||
let mut more_matches_available = false;
|
||||
|
||||
let files = collect_flat_rollout_files(&root, &mut scanned_files).await?;
|
||||
for (ts, id, path) in files.into_iter() {
|
||||
if anchor_state.should_skip(ts, id) {
|
||||
continue;
|
||||
}
|
||||
if items.len() == page_size {
|
||||
more_matches_available = true;
|
||||
break;
|
||||
}
|
||||
let updated_at = file_modified_time(&path)
|
||||
.await
|
||||
.unwrap_or(None)
|
||||
.and_then(format_rfc3339);
|
||||
if let Some(item) =
|
||||
build_thread_item(path, allowed_sources, provider_matcher, updated_at).await
|
||||
{
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
let reached_scan_cap = scanned_files >= MAX_SCAN_FILES;
|
||||
if reached_scan_cap && !items.is_empty() {
|
||||
more_matches_available = true;
|
||||
}
|
||||
|
||||
let next = if more_matches_available {
|
||||
build_next_cursor(&items, ThreadSortKey::CreatedAt)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(ThreadsPage {
|
||||
items,
|
||||
next_cursor: next,
|
||||
num_scanned_files: scanned_files,
|
||||
reached_scan_cap,
|
||||
})
|
||||
}
|
||||
|
||||
async fn traverse_flat_paths_updated(
|
||||
root: PathBuf,
|
||||
page_size: usize,
|
||||
anchor: Option<Cursor>,
|
||||
allowed_sources: &[SessionSource],
|
||||
provider_matcher: Option<&ProviderMatcher<'_>>,
|
||||
) -> io::Result<ThreadsPage> {
|
||||
let mut items: Vec<ThreadItem> = Vec::with_capacity(page_size);
|
||||
let mut scanned_files = 0usize;
|
||||
let mut anchor_state = AnchorState::new(anchor);
|
||||
let mut more_matches_available = false;
|
||||
|
||||
let candidates = collect_flat_files_by_updated_at(&root, &mut scanned_files).await?;
|
||||
let mut candidates = candidates;
|
||||
candidates.sort_by_key(|candidate| {
|
||||
let ts = candidate.updated_at.unwrap_or(OffsetDateTime::UNIX_EPOCH);
|
||||
(Reverse(ts), Reverse(candidate.id))
|
||||
});
|
||||
|
||||
for candidate in candidates.into_iter() {
|
||||
let ts = candidate.updated_at.unwrap_or(OffsetDateTime::UNIX_EPOCH);
|
||||
if anchor_state.should_skip(ts, candidate.id) {
|
||||
continue;
|
||||
}
|
||||
if items.len() == page_size {
|
||||
more_matches_available = true;
|
||||
break;
|
||||
}
|
||||
|
||||
let updated_at_fallback = candidate.updated_at.and_then(format_rfc3339);
|
||||
if let Some(item) = build_thread_item(
|
||||
candidate.path,
|
||||
allowed_sources,
|
||||
provider_matcher,
|
||||
updated_at_fallback,
|
||||
)
|
||||
.await
|
||||
{
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
let reached_scan_cap = scanned_files >= MAX_SCAN_FILES;
|
||||
if reached_scan_cap && !items.is_empty() {
|
||||
more_matches_available = true;
|
||||
}
|
||||
|
||||
let next = if more_matches_available {
|
||||
build_next_cursor(&items, ThreadSortKey::UpdatedAt)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(ThreadsPage {
|
||||
items,
|
||||
next_cursor: next,
|
||||
num_scanned_files: scanned_files,
|
||||
reached_scan_cap,
|
||||
})
|
||||
}
|
||||
|
||||
/// Pagination cursor token format: "<ts>|<uuid>" where `ts` uses
|
||||
/// YYYY-MM-DDThh-mm-ss (UTC, second precision).
|
||||
/// The cursor orders files by the requested sort key (timestamp desc, then UUID desc).
|
||||
@@ -558,6 +740,44 @@ where
|
||||
Ok(collected)
|
||||
}
|
||||
|
||||
async fn collect_flat_rollout_files(
|
||||
root: &Path,
|
||||
scanned_files: &mut usize,
|
||||
) -> io::Result<Vec<(OffsetDateTime, Uuid, PathBuf)>> {
|
||||
let mut dir = tokio::fs::read_dir(root).await?;
|
||||
let mut collected = Vec::new();
|
||||
while let Some(entry) = dir.next_entry().await? {
|
||||
if *scanned_files >= MAX_SCAN_FILES {
|
||||
break;
|
||||
}
|
||||
if !entry
|
||||
.file_type()
|
||||
.await
|
||||
.map(|ft| ft.is_file())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let file_name = entry.file_name();
|
||||
let Some(name_str) = file_name.to_str() else {
|
||||
continue;
|
||||
};
|
||||
if !name_str.starts_with("rollout-") || !name_str.ends_with(".jsonl") {
|
||||
continue;
|
||||
}
|
||||
let Some((ts, id)) = parse_timestamp_uuid_from_filename(name_str) else {
|
||||
continue;
|
||||
};
|
||||
*scanned_files += 1;
|
||||
if *scanned_files > MAX_SCAN_FILES {
|
||||
break;
|
||||
}
|
||||
collected.push((ts, id, entry.path()));
|
||||
}
|
||||
collected.sort_by_key(|(ts, sid, _path)| (Reverse(*ts), Reverse(*sid)));
|
||||
Ok(collected)
|
||||
}
|
||||
|
||||
async fn collect_rollout_day_files(
    day_path: &Path,
) -> io::Result<Vec<(OffsetDateTime, Uuid, PathBuf)>> {
@@ -610,6 +830,49 @@ async fn collect_files_by_updated_at(
    Ok(candidates)
}

async fn collect_flat_files_by_updated_at(
    root: &Path,
    scanned_files: &mut usize,
) -> io::Result<Vec<ThreadCandidate>> {
    let mut candidates = Vec::new();
    let mut dir = tokio::fs::read_dir(root).await?;
    while let Some(entry) = dir.next_entry().await? {
        if *scanned_files >= MAX_SCAN_FILES {
            break;
        }
        if !entry
            .file_type()
            .await
            .map(|ft| ft.is_file())
            .unwrap_or(false)
        {
            continue;
        }
        let file_name = entry.file_name();
        let Some(name_str) = file_name.to_str() else {
            continue;
        };
        if !name_str.starts_with("rollout-") || !name_str.ends_with(".jsonl") {
            continue;
        }
        let Some((_ts, id)) = parse_timestamp_uuid_from_filename(name_str) else {
            continue;
        };
        *scanned_files += 1;
        if *scanned_files > MAX_SCAN_FILES {
            break;
        }
        let updated_at = file_modified_time(&entry.path()).await.unwrap_or(None);
        candidates.push(ThreadCandidate {
            path: entry.path(),
            id,
            updated_at,
        });
    }

    Ok(candidates)
}

async fn walk_rollout_files(
    root: &Path,
    scanned_files: &mut usize,
@@ -683,14 +946,20 @@ async fn read_head_summary(path: &Path, head_limit: usize) -> io::Result<HeadTailSummary> {
    let reader = tokio::io::BufReader::new(file);
    let mut lines = reader.lines();
    let mut summary = HeadTailSummary::default();
    let mut lines_scanned = 0usize;

    while summary.head.len() < head_limit {
    while lines_scanned < head_limit
        || (summary.saw_session_meta
            && !summary.saw_user_event
            && lines_scanned < head_limit + USER_EVENT_SCAN_LIMIT)
    {
        let line_opt = lines.next_line().await?;
        let Some(line) = line_opt else { break };
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        lines_scanned += 1;

        let parsed: Result<RolloutLine, _> = serde_json::from_str(trimmed);
        let Ok(rollout_line) = parsed else { continue };
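The reworked loop bound reads a bit densely in diff form. A standalone restatement of the predicate (names follow the diff; the function itself is illustrative, not part of the crate): read up to `head_limit` lines, then keep going only while a session meta was seen without a user event yet, within an extra scan budget.

fn should_keep_scanning(
    lines_scanned: usize,
    head_limit: usize,
    saw_session_meta: bool,
    saw_user_event: bool,
    user_event_scan_limit: usize,
) -> bool {
    // Base budget, plus a bounded extension while hunting for the first
    // user event after the session meta line.
    lines_scanned < head_limit
        || (saw_session_meta
            && !saw_user_event
            && lines_scanned < head_limit + user_event_scan_limit)
}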
@@ -703,9 +972,11 @@ async fn read_head_summary(path: &Path, head_limit: usize) -> io::Result<HeadTailSummary> {
                    .created_at
                    .clone()
                    .or_else(|| Some(rollout_line.timestamp.clone()));
                if let Ok(val) = serde_json::to_value(session_meta_line) {
                summary.saw_session_meta = true;
                if summary.head.len() < head_limit
                    && let Ok(val) = serde_json::to_value(session_meta_line)
                {
                    summary.head.push(val);
                    summary.saw_session_meta = true;
                }
            }
            RolloutItem::ResponseItem(item) => {
@@ -713,7 +984,9 @@ async fn read_head_summary(path: &Path, head_limit: usize) -> io::Result<HeadTailSummary> {
                    .created_at
                    .clone()
                    .or_else(|| Some(rollout_line.timestamp.clone()));
                if let Ok(val) = serde_json::to_value(item) {
                if summary.head.len() < head_limit
                    && let Ok(val) = serde_json::to_value(item)
                {
                    summary.head.push(val);
                }
            }
@@ -783,11 +1056,9 @@ fn truncate_to_seconds(dt: OffsetDateTime) -> Option<OffsetDateTime> {
    dt.replace_nanosecond(0).ok()
}

/// Locate a recorded thread rollout file by its UUID string using the existing
/// paginated listing implementation. Returns `Ok(Some(path))` if found, `Ok(None)` if not present
/// or the id is invalid.
pub async fn find_thread_path_by_id_str(
async fn find_thread_path_by_id_str_in_subdir(
    codex_home: &Path,
    subdir: &str,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    // Validate UUID format early.
@@ -796,7 +1067,7 @@ pub async fn find_thread_path_by_id_str(
    }

    let mut root = codex_home.to_path_buf();
    root.push(SESSIONS_SUBDIR);
    root.push(subdir);
    if !root.exists() {
        return Ok(None);
    }
@@ -828,3 +1099,31 @@ pub async fn find_thread_path_by_id_str(
        .next()
        .map(|m| root.join(m.path)))
}

/// Locate a recorded thread rollout file by its UUID string using the existing
/// paginated listing implementation. Returns `Ok(Some(path))` if found, `Ok(None)` if not present
/// or the id is invalid.
pub async fn find_thread_path_by_id_str(
    codex_home: &Path,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    find_thread_path_by_id_str_in_subdir(codex_home, SESSIONS_SUBDIR, id_str).await
}

/// Locate an archived thread rollout file by its UUID string.
pub async fn find_archived_thread_path_by_id_str(
    codex_home: &Path,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    find_thread_path_by_id_str_in_subdir(codex_home, ARCHIVED_SESSIONS_SUBDIR, id_str).await
}

/// Extract the `YYYY/MM/DD` directory components from a rollout filename.
pub fn rollout_date_parts(file_name: &OsStr) -> Option<(String, String, String)> {
    let name = file_name.to_string_lossy();
    let date = name.strip_prefix("rollout-")?.get(..10)?;
    let year = date.get(..4)?.to_string();
    let month = date.get(5..7)?.to_string();
    let day = date.get(8..10)?.to_string();
    Some((year, month, day))
}
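As a quick illustration of how `rollout_date_parts` pairs with the `sessions/YYYY/MM/DD` layout used elsewhere in this diff. The path assembly below is illustrative and assumes the function above is in scope:

use std::ffi::OsStr;
use std::path::PathBuf;

fn main() {
    let name = OsStr::new("rollout-2025-03-01T09-00-00-123.jsonl");
    if let Some((year, month, day)) = rollout_date_parts(name) {
        // Rebuild the dated subdirectory such a file lives under.
        let dir = PathBuf::from("sessions").join(year).join(month).join(day);
        assert_eq!(
            dir,
            PathBuf::from("sessions").join("2025").join("03").join("01")
        );
    }
}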
@@ -15,9 +15,11 @@ pub(crate) mod truncation;

pub use codex_protocol::protocol::SessionMeta;
pub(crate) use error::map_session_init_error;
pub use list::find_archived_thread_path_by_id_str;
pub use list::find_thread_path_by_id_str;
#[deprecated(note = "use find_thread_path_by_id_str")]
pub use list::find_thread_path_by_id_str as find_conversation_path_by_id_str;
pub use list::rollout_date_parts;
pub use recorder::RolloutRecorder;
pub use recorder::RolloutRecorderParams;
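The `#[deprecated]` attribute on a `pub use` is a soft migration path: old callers keep compiling under the old name but get a warning. A minimal sketch of the pattern (module and item names here are made up):

mod list {
    pub fn find_thread_path_by_id_str() {}
}

pub use list::find_thread_path_by_id_str;
#[deprecated(note = "use find_thread_path_by_id_str")]
pub use list::find_thread_path_by_id_str as find_conversation_path_by_id_str;

fn main() {
    // Without the `allow`, rustc emits a deprecation warning here that
    // points callers at the note above.
    #[allow(deprecated)]
    find_conversation_path_by_id_str();
}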
@@ -42,7 +42,8 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
        | EventMsg::AgentReasoning(_)
        | EventMsg::AgentReasoningRawContent(_)
        | EventMsg::TokenCount(_)
        | EventMsg::ContextCompacted(_)
        | EventMsg::ContextCompactionStarted(_)
        | EventMsg::ContextCompactionEnded(_)
        | EventMsg::EnteredReviewMode(_)
        | EventMsg::ExitedReviewMode(_)
        | EventMsg::ThreadRolledBack(_)
@@ -68,6 +69,7 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
        | EventMsg::ExecCommandEnd(_)
        | EventMsg::ExecApprovalRequest(_)
        | EventMsg::RequestUserInput(_)
        | EventMsg::DynamicToolCallRequest(_)
        | EventMsg::ElicitationRequest(_)
        | EventMsg::ApplyPatchApprovalRequest(_)
        | EventMsg::BackgroundEvent(_)

@@ -19,11 +19,15 @@ use tokio::sync::oneshot;
use tracing::info;
use tracing::warn;

use super::ARCHIVED_SESSIONS_SUBDIR;
use super::SESSIONS_SUBDIR;
use super::list::Cursor;
use super::list::ThreadListConfig;
use super::list::ThreadListLayout;
use super::list::ThreadSortKey;
use super::list::ThreadsPage;
use super::list::get_threads;
use super::list::get_threads_in_root;
use super::policy::is_persisted_response_item;
use crate::config::Config;
use crate::default_client::originator;
@@ -119,6 +123,32 @@ impl RolloutRecorder {
        .await
    }

    /// List archived threads (rollout files) under the archived sessions directory.
    pub async fn list_archived_threads(
        codex_home: &Path,
        page_size: usize,
        cursor: Option<&Cursor>,
        sort_key: ThreadSortKey,
        allowed_sources: &[SessionSource],
        model_providers: Option<&[String]>,
        default_provider: &str,
    ) -> std::io::Result<ThreadsPage> {
        let root = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
        get_threads_in_root(
            root,
            page_size,
            cursor,
            sort_key,
            ThreadListConfig {
                allowed_sources,
                model_providers,
                default_provider,
                layout: ThreadListLayout::Flat,
            },
        )
        .await
    }

    /// Find the newest recorded thread path, optionally filtering to a matching cwd.
    #[allow(clippy::too_many_arguments)]
    pub async fn find_latest_thread_path(
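A hypothetical call site for the `list_archived_threads` API added above. The argument values are illustrative, and `INTERACTIVE_SESSION_SOURCES` plus the "test-provider" fallback are borrowed from the tests later in this diff:

// Inside the codex-core crate (the types are crate-internal).
async fn newest_archived_page(codex_home: &std::path::Path) -> std::io::Result<ThreadsPage> {
    RolloutRecorder::list_archived_threads(
        codex_home,
        10,                          // page size
        None,                        // no cursor: start from the newest entry
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        None,                        // no model-provider filter
        "test-provider",             // default provider for files without one
    )
    .await
}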
@@ -1,5 +1,6 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

use std::ffi::OsStr;
use std::fs::File;
use std::fs::FileTimes;
use std::fs::{self};
@@ -21,6 +22,7 @@ use crate::rollout::list::ThreadItem;
use crate::rollout::list::ThreadSortKey;
use crate::rollout::list::ThreadsPage;
use crate::rollout::list::get_threads;
use crate::rollout::rollout_date_parts;
use anyhow::Result;
use codex_protocol::ThreadId;
use codex_protocol::models::ContentItem;
@@ -43,6 +45,16 @@ fn provider_vec(providers: &[&str]) -> Vec<String> {
        .collect()
}

#[test]
fn rollout_date_parts_extracts_directory_components() {
    let file_name = OsStr::new("rollout-2025-03-01T09-00-00-123.jsonl");
    let parts = rollout_date_parts(file_name);
    assert_eq!(
        parts,
        Some(("2025".to_string(), "03".to_string(), "01".to_string()))
    );
}

fn write_session_file(
    root: &Path,
    ts_str: &str,
@@ -131,6 +143,63 @@ fn write_session_file_with_provider(
    Ok((dt, uuid))
}

fn write_session_file_with_delayed_user_event(
    root: &Path,
    ts_str: &str,
    uuid: Uuid,
    meta_lines_before_user: usize,
) -> std::io::Result<()> {
    let format: &[FormatItem] =
        format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]");
    let dt = PrimitiveDateTime::parse(ts_str, format)
        .unwrap()
        .assume_utc();
    let dir = root
        .join("sessions")
        .join(format!("{:04}", dt.year()))
        .join(format!("{:02}", u8::from(dt.month())))
        .join(format!("{:02}", dt.day()));
    fs::create_dir_all(&dir)?;

    let filename = format!("rollout-{ts_str}-{uuid}.jsonl");
    let file_path = dir.join(filename);
    let mut file = File::create(file_path)?;

    for i in 0..meta_lines_before_user {
        let id = if i == 0 {
            uuid
        } else {
            Uuid::from_u128(100 + i as u128)
        };
        let payload = serde_json::json!({
            "id": id,
            "timestamp": ts_str,
            "cwd": ".",
            "originator": "test_originator",
            "cli_version": "test_version",
            "source": "vscode",
            "model_provider": "test-provider",
        });
        let meta = serde_json::json!({
            "timestamp": ts_str,
            "type": "session_meta",
            "payload": payload,
        });
        writeln!(file, "{meta}")?;
    }

    let user_event = serde_json::json!({
        "timestamp": ts_str,
        "type": "event_msg",
        "payload": {"type": "user_message", "message": "Hello from user", "kind": "plain"}
    });
    writeln!(file, "{user_event}")?;

    let times = FileTimes::new().set_modified(dt.into());
    file.set_times(times)?;
    Ok(())
}

fn write_session_file_with_meta_payload(
    root: &Path,
    ts_str: &str,
@@ -539,6 +608,31 @@ async fn test_pagination_cursor() {
    assert_eq!(page3, expected_page3);
}

#[tokio::test]
async fn test_list_threads_scans_past_head_for_user_event() {
    let temp = TempDir::new().unwrap();
    let home = temp.path();

    let uuid = Uuid::from_u128(99);
    let ts = "2025-05-01T10-30-00";
    write_session_file_with_delayed_user_event(home, ts, uuid, 12).unwrap();

    let provider_filter = provider_vec(&[TEST_PROVIDER]);
    let page = get_threads(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
    )
    .await
    .unwrap();

    assert_eq!(page.items.len(), 1);
}

#[tokio::test]
async fn test_get_thread_contents() {
    let temp = TempDir::new().unwrap();
@@ -10,45 +10,7 @@ use crate::util::resolve_path;

use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;

#[cfg(target_os = "windows")]
use std::sync::atomic::AtomicBool;
#[cfg(target_os = "windows")]
use std::sync::atomic::Ordering;

#[cfg(target_os = "windows")]
static WINDOWS_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);
#[cfg(target_os = "windows")]
static WINDOWS_ELEVATED_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);

#[cfg(target_os = "windows")]
pub fn set_windows_sandbox_enabled(enabled: bool) {
    WINDOWS_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn set_windows_sandbox_enabled(_enabled: bool) {}

#[cfg(target_os = "windows")]
pub fn set_windows_elevated_sandbox_enabled(enabled: bool) {
    WINDOWS_ELEVATED_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn set_windows_elevated_sandbox_enabled(_enabled: bool) {}

#[cfg(target_os = "windows")]
pub fn is_windows_elevated_sandbox_enabled() -> bool {
    WINDOWS_ELEVATED_SANDBOX_ENABLED.load(Ordering::Relaxed)
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn is_windows_elevated_sandbox_enabled() -> bool {
    false
}
use codex_protocol::config_types::WindowsSandboxLevel;

#[derive(Debug, PartialEq)]
pub enum SafetyCheck {
@@ -67,6 +29,7 @@ pub fn assess_patch_safety(
    policy: AskForApproval,
    sandbox_policy: &SandboxPolicy,
    cwd: &Path,
    windows_sandbox_level: WindowsSandboxLevel,
) -> SafetyCheck {
    if action.is_empty() {
        return SafetyCheck::Reject {
@@ -104,7 +67,7 @@ pub fn assess_patch_safety(
    // Only auto‑approve when we can actually enforce a sandbox. Otherwise
    // fall back to asking the user because the patch may touch arbitrary
    // paths outside the project.
    match get_platform_sandbox() {
    match get_platform_sandbox(windows_sandbox_level != WindowsSandboxLevel::Disabled) {
        Some(sandbox_type) => SafetyCheck::AutoApprove {
            sandbox_type,
            user_explicitly_approved: false,
@@ -122,19 +85,17 @@ pub fn assess_patch_safety(
    }
}

pub fn get_platform_sandbox() -> Option<SandboxType> {
pub fn get_platform_sandbox(windows_sandbox_enabled: bool) -> Option<SandboxType> {
    if cfg!(target_os = "macos") {
        Some(SandboxType::MacosSeatbelt)
    } else if cfg!(target_os = "linux") {
        Some(SandboxType::LinuxSeccomp)
    } else if cfg!(target_os = "windows") {
        #[cfg(target_os = "windows")]
        {
            if WINDOWS_SANDBOX_ENABLED.load(Ordering::Relaxed) {
                return Some(SandboxType::WindowsRestrictedToken);
            }
        if windows_sandbox_enabled {
            Some(SandboxType::WindowsRestrictedToken)
        } else {
            None
        }
        None
    } else {
        None
    }
@@ -277,7 +238,13 @@ mod tests {
    };

    assert_eq!(
        assess_patch_safety(&add_inside, AskForApproval::OnRequest, &policy, &cwd),
        assess_patch_safety(
            &add_inside,
            AskForApproval::OnRequest,
            &policy,
            &cwd,
            WindowsSandboxLevel::Disabled
        ),
        SafetyCheck::AutoApprove {
            sandbox_type: SandboxType::None,
            user_explicitly_approved: false,
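The net effect of this file's changes: the process-global `AtomicBool` toggles are gone, and callers now pass the Windows sandbox state explicitly. A sketch of the new call pattern (the surrounding config plumbing is assumed, not shown in the diff):

use codex_protocol::config_types::WindowsSandboxLevel;

// `level` would come from resolved config; anything other than
// `Disabled` opts in to the Windows restricted-token sandbox.
fn pick_sandbox(level: WindowsSandboxLevel) -> Option<SandboxType> {
    get_platform_sandbox(level != WindowsSandboxLevel::Disabled)
}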
@@ -21,6 +21,7 @@ use crate::seatbelt::create_seatbelt_command_args;
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
use crate::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use crate::tools::sandboxing::SandboxablePreference;
use codex_protocol::config_types::WindowsSandboxLevel;
pub use codex_protocol::models::SandboxPermissions;
use std::collections::HashMap;
use std::path::Path;
@@ -44,6 +45,7 @@ pub struct ExecEnv {
    pub env: HashMap<String, String>,
    pub expiration: ExecExpiration,
    pub sandbox: SandboxType,
    pub windows_sandbox_level: WindowsSandboxLevel,
    pub sandbox_permissions: SandboxPermissions,
    pub justification: Option<String>,
    pub arg0: Option<String>,
@@ -76,19 +78,26 @@ impl SandboxManager {
        &self,
        policy: &SandboxPolicy,
        pref: SandboxablePreference,
        windows_sandbox_level: WindowsSandboxLevel,
    ) -> SandboxType {
        match pref {
            SandboxablePreference::Forbid => SandboxType::None,
            SandboxablePreference::Require => {
                // Require a platform sandbox when available; on Windows this
                // respects the experimental_windows_sandbox feature.
                crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None)
                crate::safety::get_platform_sandbox(
                    windows_sandbox_level != WindowsSandboxLevel::Disabled,
                )
                .unwrap_or(SandboxType::None)
            }
            SandboxablePreference::Auto => match policy {
                SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
                    SandboxType::None
                }
                _ => crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None),
                _ => crate::safety::get_platform_sandbox(
                    windows_sandbox_level != WindowsSandboxLevel::Disabled,
                )
                .unwrap_or(SandboxType::None),
            },
        }
    }
@@ -100,6 +109,7 @@ impl SandboxManager {
        sandbox: SandboxType,
        sandbox_policy_cwd: &Path,
        codex_linux_sandbox_exe: Option<&PathBuf>,
        windows_sandbox_level: WindowsSandboxLevel,
    ) -> Result<ExecEnv, SandboxTransformError> {
        let mut env = spec.env;
        if !policy.has_full_network_access() {
@@ -160,6 +170,7 @@ impl SandboxManager {
            env,
            expiration: spec.expiration,
            sandbox,
            windows_sandbox_level,
            sandbox_permissions: spec.sandbox_permissions,
            justification: spec.justification,
            arg0: arg0_override,

@@ -28,6 +28,7 @@ pub struct ShellSnapshot {
const SNAPSHOT_TIMEOUT: Duration = Duration::from_secs(10);
const SNAPSHOT_RETENTION: Duration = Duration::from_secs(60 * 60 * 24 * 7); // 7 days retention.
const SNAPSHOT_DIR: &str = "shell_snapshots";
const EXCLUDED_EXPORT_VARS: &[&str] = &["PWD", "OLDPWD"];

impl ShellSnapshot {
    pub fn start_snapshotting(
@@ -138,9 +139,9 @@ async fn write_shell_snapshot(shell_type: ShellType, output_path: &Path) -> Resu
async fn capture_snapshot(shell: &Shell) -> Result<String> {
    let shell_type = shell.shell_type.clone();
    match shell_type {
        ShellType::Zsh => run_shell_script(shell, zsh_snapshot_script()).await,
        ShellType::Bash => run_shell_script(shell, bash_snapshot_script()).await,
        ShellType::Sh => run_shell_script(shell, sh_snapshot_script()).await,
        ShellType::Zsh => run_shell_script(shell, &zsh_snapshot_script()).await,
        ShellType::Bash => run_shell_script(shell, &bash_snapshot_script()).await,
        ShellType::Sh => run_shell_script(shell, &sh_snapshot_script()).await,
        ShellType::PowerShell => run_shell_script(shell, powershell_snapshot_script()).await,
        ShellType::Cmd => bail!("Shell snapshotting is not yet supported for {shell_type:?}"),
    }
@@ -202,8 +203,13 @@ async fn run_script_with_timeout(
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn zsh_snapshot_script() -> &'static str {
    r##"if [[ -n "$ZDOTDIR" ]]; then
fn excluded_exports_regex() -> String {
    EXCLUDED_EXPORT_VARS.join("|")
}

fn zsh_snapshot_script() -> String {
    let excluded = excluded_exports_regex();
    let script = r##"if [[ -n "$ZDOTDIR" ]]; then
rc="$ZDOTDIR/.zshrc"
else
rc="$HOME/.zshrc"
@@ -229,6 +235,9 @@ export_lines=$(export -p | awk '
name=line
sub(/^(export|declare -x|typeset -x) /, "", name)
sub(/=.*/, "", name)
if (name ~ /^(EXCLUDED_EXPORTS)$/) {
next
}
if (name ~ /^[A-Za-z_][A-Za-z0-9_]*$/) {
print line
}
@@ -238,11 +247,13 @@ print "# exports $export_count"
if [[ -n "$export_lines" ]]; then
print -r -- "$export_lines"
fi
"##
"##;
    script.replace("EXCLUDED_EXPORTS", &excluded)
}

fn bash_snapshot_script() -> &'static str {
    r##"if [ -z "$BASH_ENV" ] && [ -r "$HOME/.bashrc" ]; then
fn bash_snapshot_script() -> String {
    let excluded = excluded_exports_regex();
    let script = r##"if [ -z "$BASH_ENV" ] && [ -r "$HOME/.bashrc" ]; then
. "$HOME/.bashrc"
fi
echo '# Snapshot file'
@@ -268,6 +279,9 @@ export_lines=$(export -p | awk '
name=line
sub(/^(export|declare -x|typeset -x) /, "", name)
sub(/=.*/, "", name)
if (name ~ /^(EXCLUDED_EXPORTS)$/) {
next
}
if (name ~ /^[A-Za-z_][A-Za-z0-9_]*$/) {
print line
}
@@ -277,11 +291,13 @@ echo "# exports $export_count"
if [ -n "$export_lines" ]; then
printf '%s\n' "$export_lines"
fi
"##
"##;
    script.replace("EXCLUDED_EXPORTS", &excluded)
}

fn sh_snapshot_script() -> &'static str {
    r##"if [ -n "$ENV" ] && [ -r "$ENV" ]; then
fn sh_snapshot_script() -> String {
    let excluded = excluded_exports_regex();
    let script = r##"if [ -n "$ENV" ] && [ -r "$ENV" ]; then
. "$ENV"
fi
echo '# Snapshot file'
@@ -320,6 +336,9 @@ if export -p >/dev/null 2>&1; then
name=line
sub(/^(export|declare -x|typeset -x) /, "", name)
sub(/=.*/, "", name)
if (name ~ /^(EXCLUDED_EXPORTS)$/) {
next
}
if (name ~ /^[A-Za-z_][A-Za-z0-9_]*$/) {
print line
}
@@ -334,13 +353,14 @@ else
echo "# exports $export_count"
env | sort | while IFS='=' read -r key value; do
case "$key" in
""|[0-9]*|*[!A-Za-z0-9_]* ) continue ;;
""|[0-9]*|*[!A-Za-z0-9_]*|EXCLUDED_EXPORTS) continue ;;
esac
escaped=$(printf "%s" "$value" | sed "s/'/'\"'\"'/g")
printf "export %s='%s'\n" "$key" "$escaped"
done
fi
"##
"##;
    script.replace("EXCLUDED_EXPORTS", &excluded)
}

fn powershell_snapshot_script() -> &'static str {
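The three POSIX-shell templates above share one trick: a literal `EXCLUDED_EXPORTS` placeholder inside an awk regex (and a `case` pattern) is textually replaced with a `|`-joined alternation built from `EXCLUDED_EXPORT_VARS`. A self-contained sketch of just that substitution:

const EXCLUDED_EXPORT_VARS: &[&str] = &["PWD", "OLDPWD"];

fn excluded_exports_regex() -> String {
    // "PWD|OLDPWD": a regex alternation usable inside /^(...)$/.
    EXCLUDED_EXPORT_VARS.join("|")
}

fn main() {
    let template = r#"if (name ~ /^(EXCLUDED_EXPORTS)$/) { next }"#;
    let rendered = template.replace("EXCLUDED_EXPORTS", &excluded_exports_regex());
    assert_eq!(rendered, r#"if (name ~ /^(PWD|OLDPWD)$/) { next }"#);
}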
@@ -444,8 +464,6 @@ mod tests {
    use pretty_assertions::assert_eq;
    #[cfg(unix)]
    use std::os::unix::ffi::OsStrExt;
    #[cfg(target_os = "linux")]
    use std::os::unix::fs::PermissionsExt;
    #[cfg(unix)]
    use std::process::Command;
    #[cfg(target_os = "linux")]
@@ -494,6 +512,7 @@ mod tests {
        .arg(bash_snapshot_script())
        .env("BASH_ENV", "/dev/null")
        .env("VALID_NAME", "ok")
        .env("PWD", "/tmp/stale")
        .env("NEXTEST_BIN_EXE_codex-write-config-schema", "/path/to/bin")
        .env("BAD-NAME", "broken")
        .output()?;
@@ -502,6 +521,7 @@ mod tests {

        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("VALID_NAME"));
        assert!(!stdout.contains("PWD=/tmp/stale"));
        assert!(!stdout.contains("NEXTEST_BIN_EXE_codex-write-config-schema"));
        assert!(!stdout.contains("BAD-NAME"));

@@ -540,27 +560,16 @@ mod tests {
        use tokio::time::sleep;

        let dir = tempdir()?;
        let shell_path = dir.path().join("hanging-shell.sh");
        let pid_path = dir.path().join("pid");

        let script = format!(
            "#!/bin/sh\n\
             echo $$ > {}\n\
             sleep 30\n",
            pid_path.display()
        );
        fs::write(&shell_path, script).await?;
        let mut permissions = std::fs::metadata(&shell_path)?.permissions();
        permissions.set_mode(0o755);
        std::fs::set_permissions(&shell_path, permissions)?;
        let script = format!("echo $$ > \"{}\"; sleep 30", pid_path.display());

        let shell = Shell {
            shell_type: ShellType::Sh,
            shell_path,
            shell_path: PathBuf::from("/bin/sh"),
            shell_snapshot: crate::shell::empty_shell_snapshot_receiver(),
        };

        let err = run_script_with_timeout(&shell, "ignored", Duration::from_millis(500), true)
        let err = run_script_with_timeout(&shell, &script, Duration::from_secs(1), true)
            .await
            .expect_err("snapshot shell should time out");
        assert!(
Some files were not shown because too many files have changed in this diff.