mirror of https://github.com/openai/codex.git
synced 2026-02-02 23:13:37 +00:00

Compare commits: exec-run-a...shareable- (135 commits)
| SHA1 |
|---|
| 4de870eebc |
| 33a313899d |
| 32dbd27fe5 |
| 36b60fb613 |
| 300460675c |
| bf35720298 |
| 0c9260770b |
| e5b3750be2 |
| 365864fe95 |
| 9b4ab55274 |
| 5f19e0d9ba |
| 45c1118890 |
| af04bd2324 |
| d7d08ec4a2 |
| b79bf69af6 |
| ca9d417633 |
| fbb3a30953 |
| 2d9ac8227a |
| 03aee7140f |
| 48f203120d |
| bdd8a7d58b |
| b7f26d74f0 |
| 3b1cddf001 |
| 798c4b3260 |
| 3e798c5a7d |
| e6c4f548ab |
| d6631fb5a9 |
| 89c5f3c4d4 |
| b654b7a9ae |
| 2945667dcc |
| d29129f352 |
| 4ba911d48c |
| 6a06726af2 |
| 714dc8d8bd |
| 780482da84 |
| 4d9ae3a298 |
| e70592f85a |
| b4b4763009 |
| be33de3f87 |
| 8cc338aecf |
| 335713f7e9 |
| b9cd089d1f |
| ecc66f4f52 |
| 9757e1418d |
| 52609c6f42 |
| ce3d764ae1 |
| 26590d7927 |
| 8497163363 |
| 83d7c44500 |
| 7b34cad1b1 |
| ff9fa56368 |
| fe920d7804 |
| 147e7118e0 |
| f7699e0487 |
| 66de985e4e |
| b7edeee8ca |
| 851617ff5a |
| b8156706e6 |
| 35e03a0716 |
| ad5f9e7370 |
| 96386755b6 |
| 74bd6d7178 |
| 2a624661ef |
| 231406bd04 |
| 3878c3dc7c |
| dabafe204a |
| 71b8d937ed |
| 996e09ca24 |
| 9f79365691 |
| fef3e36f67 |
| 3bb8e69dd3 |
| add648df82 |
| 1609f6aa81 |
| a90ab789c2 |
| 3f3916e595 |
| 19d8f71a98 |
| 3ae966edd8 |
| c7c2b3cf8d |
| 337643b00a |
| 28051d18c6 |
| 2f8a44baea |
| 30eb655ad1 |
| 700a29e157 |
| c40ad65bd8 |
| 894923ed5d |
| fc0fd85349 |
| 877b76bb9d |
| 538e1059a3 |
| 067922a734 |
| dd24ac6b26 |
| ddc704d4c6 |
| 3b726d9550 |
| 74ffbbe7c1 |
| 742f086ee6 |
| ab99df0694 |
| 509ff1c643 |
| cabb2085cc |
| 4db6da32a3 |
| 0adcd8aa86 |
| 28bd7db14a |
| 0c72d8fd6e |
| 7c96f2e84c |
| f45a8733bf |
| b655a092ba |
| b7bba3614e |
| 86adf53235 |
| 998e88b12a |
| c900de271a |
| a641a6427c |
| 5d13427ef4 |
| 394b967432 |
| 6a279f6d77 |
| 47aa1f3b6a |
| 73bd84dee0 |
| 32b062d0e1 |
| f29a0defa2 |
| 2e5aa809f4 |
| 6418e65356 |
| 764712c116 |
| 5ace350186 |
| a8f195828b |
| 313ee3003b |
| 159ff06281 |
| bdc4742bfc |
| 247fb2de64 |
| 6a02fdde76 |
| b77bf4d36d |
| 62266b13f8 |
| 09251387e0 |
| e471ebc5d2 |
| 375a5ef051 |
| fdc69df454 |
| 01d7f8095b |
| 3ba702c5b6 |
| 6316e57497 |
.bazelignore

@@ -1,3 +1,4 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git
codex-rs/target

.bazelrc (7 changed lines)
@@ -1,13 +1,19 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
# Dummy xcode config so we don't need to build xcode_locator in repo rule.
common --xcode_version_config=//:disable_xcode

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache
common --remote_cache_compression
startup --experimental_remote_repo_contents_cache

common --experimental_platform_in_output_dir

# Runfiles strategy rationale: codex-rs/utils/cargo-bin/README.md
common --noenable_runfiles

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
@@ -43,4 +49,3 @@ common --jobs=30
common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800

.codespellrc

@@ -1,6 +1,6 @@
[codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
ignore-words-list = ratatui,ser,iTerm,iterm2,iterm

.github/workflows/rust-ci.yml (96 changed lines, vendored)
@@ -59,7 +59,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
-- uses: dtolnay/rust-toolchain@1.92
+- uses: dtolnay/rust-toolchain@1.93
with:
components: rustfmt
- name: cargo fmt
@@ -77,7 +77,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
-- uses: dtolnay/rust-toolchain@1.92
+- uses: dtolnay/rust-toolchain@1.93
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear
@@ -177,11 +177,31 @@

steps:
- uses: actions/checkout@v6
-- uses: dtolnay/rust-toolchain@1.92
+- name: Install UBSan runtime (musl)
+if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
+shell: bash
+run: |
+set -euo pipefail
+if command -v apt-get >/dev/null 2>&1; then
+sudo apt-get update -y
+sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+fi
+- uses: dtolnay/rust-toolchain@1.93
with:
targets: ${{ matrix.target }}
components: clippy

+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Use hermetic Cargo home (musl)
+shell: bash
+run: |
+set -euo pipefail
+cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
+mkdir -p "${cargo_home}/bin"
+echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
+echo "${cargo_home}/bin" >> "$GITHUB_PATH"
+: > "${cargo_home}/config.toml"
+
- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
@@ -202,6 +222,10 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
+${{ github.workspace }}/.cargo-home/bin/
+${{ github.workspace }}/.cargo-home/registry/index/
+${{ github.workspace }}/.cargo-home/registry/cache/
+${{ github.workspace }}/.cargo-home/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -244,6 +268,14 @@
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Disable sccache wrapper (musl)
+shell: bash
+run: |
+set -euo pipefail
+echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
+echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Prepare APT cache directories (musl)
shell: bash
@@ -277,6 +309,58 @@
shell: bash
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Configure rustc UBSan wrapper (musl host)
+shell: bash
+run: |
+set -euo pipefail
+ubsan=""
+if command -v ldconfig >/dev/null 2>&1; then
+ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+fi
+wrapper_root="${RUNNER_TEMP:-/tmp}"
+wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+cat > "${wrapper}" <<EOF
+#!/usr/bin/env bash
+set -euo pipefail
+if [[ -n "${ubsan}" ]]; then
+export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+fi
+exec "\$1" "\${@:2}"
+EOF
+chmod +x "${wrapper}"
+echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Clear sanitizer flags (musl)
+shell: bash
+run: |
+set -euo pipefail
+# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+# Override any runner-level Cargo config rustflags as well.
+echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+sanitize_flags() {
+local input="$1"
+input="${input//-fsanitize=undefined/}"
+input="${input//-fno-sanitize-recover=undefined/}"
+input="${input//-fno-sanitize-trap=undefined/}"
+echo "$input"
+}
+
+cflags="$(sanitize_flags "${CFLAGS-}")"
+cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -322,6 +406,10 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
+${{ github.workspace }}/.cargo-home/bin/
+${{ github.workspace }}/.cargo-home/registry/index/
+${{ github.workspace }}/.cargo-home/registry/cache/
+${{ github.workspace }}/.cargo-home/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}

- name: Save sccache cache (fallback)
@@ -422,7 +510,7 @@
- name: Install DotSlash
uses: facebook/install-dotslash@v2

-- uses: dtolnay/rust-toolchain@1.92
+- uses: dtolnay/rust-toolchain@1.93
with:
targets: ${{ matrix.target }}

.github/workflows/rust-release.yml (84 changed lines, vendored)
@@ -21,7 +21,6 @@ jobs:
steps:
- uses: actions/checkout@v6
-- uses: dtolnay/rust-toolchain@1.92

- name: Validate tag matches Cargo.toml version
shell: bash
run: |
@@ -90,10 +89,30 @@

steps:
- uses: actions/checkout@v6
-- uses: dtolnay/rust-toolchain@1.92
+- name: Install UBSan runtime (musl)
+if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
+shell: bash
+run: |
+set -euo pipefail
+if command -v apt-get >/dev/null 2>&1; then
+sudo apt-get update -y
+sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+fi
+- uses: dtolnay/rust-toolchain@1.93
with:
targets: ${{ matrix.target }}

+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Use hermetic Cargo home (musl)
+shell: bash
+run: |
+set -euo pipefail
+cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
+mkdir -p "${cargo_home}/bin"
+echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
+echo "${cargo_home}/bin" >> "$GITHUB_PATH"
+: > "${cargo_home}/config.toml"
+
- uses: actions/cache@v5
with:
path: |
@@ -101,6 +120,10 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
+${{ github.workspace }}/.cargo-home/bin/
+${{ github.workspace }}/.cargo-home/registry/index/
+${{ github.workspace }}/.cargo-home/registry/cache/
+${{ github.workspace }}/.cargo-home/git/db/
${{ github.workspace }}/codex-rs/target/
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}
@@ -116,6 +139,58 @@
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Configure rustc UBSan wrapper (musl host)
+shell: bash
+run: |
+set -euo pipefail
+ubsan=""
+if command -v ldconfig >/dev/null 2>&1; then
+ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+fi
+wrapper_root="${RUNNER_TEMP:-/tmp}"
+wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+cat > "${wrapper}" <<EOF
+#!/usr/bin/env bash
+set -euo pipefail
+if [[ -n "${ubsan}" ]]; then
+export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+fi
+exec "\$1" "\${@:2}"
+EOF
+chmod +x "${wrapper}"
+echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+name: Clear sanitizer flags (musl)
+shell: bash
+run: |
+set -euo pipefail
+# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+# Override any runner-level Cargo config rustflags as well.
+echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+sanitize_flags() {
+local input="$1"
+input="${input//-fsanitize=undefined/}"
+input="${input//-fno-sanitize-recover=undefined/}"
+input="${input//-fno-sanitize-trap=undefined/}"
+echo "$input"
+}
+
+cflags="$(sanitize_flags "${CFLAGS-}")"
+cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
- name: Cargo build
shell: bash
run: |
@@ -252,6 +327,7 @@
# Path that contains the uncompressed binaries for the current
# ${{ matrix.target }}
dest="dist/${{ matrix.target }}"
+repo_root=$PWD

# We want to ship the raw Windows executables in the GitHub Release
# in addition to the compressed archives. Keep the originals for
@@ -303,7 +379,9 @@
cp "$dest/$base" "$bundle_dir/$base"
cp "$runner_src" "$bundle_dir/codex-command-runner.exe"
cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe"
-(cd "$bundle_dir" && 7z a "$dest/${base}.zip" .)
+# Use an absolute path so bundle zips land in the real dist
+# dir even when 7z runs from a temp directory.
+(cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .)
else
echo "warning: missing sandbox binaries; falling back to single-binary zip"
echo "warning: expected $runner_src and $setup_src"

.github/workflows/sdk.yml (2 changed lines, vendored)
@@ -24,7 +24,7 @@ jobs:
node-version: 22
cache: pnpm

-- uses: dtolnay/rust-toolchain@1.92
+- uses: dtolnay/rust-toolchain@1.93

- name: build codex
run: cargo build --bin codex

.github/workflows/shell-tool-mcp.yml (72 changed lines, vendored)
@@ -93,7 +93,17 @@
- name: Checkout repository
uses: actions/checkout@v6

-- uses: dtolnay/rust-toolchain@1.92
+- name: Install UBSan runtime (musl)
+if: ${{ matrix.install_musl }}
+shell: bash
+run: |
+set -euo pipefail
+if command -v apt-get >/dev/null 2>&1; then
+sudo apt-get update -y
+sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+fi
+
+- uses: dtolnay/rust-toolchain@1.93
with:
targets: ${{ matrix.target }}
@@ -109,6 +119,58 @@
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

+- if: ${{ matrix.install_musl }}
+name: Configure rustc UBSan wrapper (musl host)
+shell: bash
+run: |
+set -euo pipefail
+ubsan=""
+if command -v ldconfig >/dev/null 2>&1; then
+ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+fi
+wrapper_root="${RUNNER_TEMP:-/tmp}"
+wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+cat > "${wrapper}" <<EOF
+#!/usr/bin/env bash
+set -euo pipefail
+if [[ -n "${ubsan}" ]]; then
+export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+fi
+exec "\$1" "\${@:2}"
+EOF
+chmod +x "${wrapper}"
+echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+- if: ${{ matrix.install_musl }}
+name: Clear sanitizer flags (musl)
+shell: bash
+run: |
+set -euo pipefail
+# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+# Override any runner-level Cargo config rustflags as well.
+echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+sanitize_flags() {
+local input="$1"
+input="${input//-fsanitize=undefined/}"
+input="${input//-fno-sanitize-recover=undefined/}"
+input="${input//-fno-sanitize-trap=undefined/}"
+echo "$input"
+}
+
+cflags="$(sanitize_flags "${CFLAGS-}")"
+cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
- name: Build exec server binaries
run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
@@ -282,7 +344,6 @@
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
-version: 10.8.1
run_install: false

- name: Setup Node.js
@@ -375,12 +436,6 @@
id-token: write
contents: read
steps:
-- name: Setup pnpm
-uses: pnpm/action-setup@v4
-with:
-version: 10.8.1
-run_install: false
-
- name: Setup Node.js
uses: actions/setup-node@v6
with:
@@ -388,6 +443,7 @@
registry-url: https://registry.npmjs.org
scope: "@openai"

+# Trusted publishing requires npm CLI version 11.5.1 or later.
- name: Update npm
run: npm install -g npm@latest

@@ -11,6 +11,7 @@ In the codex-rs folder where the rust code lives:
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When possible, make `match` statements exhaustive and avoid wildcard arms.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
+- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
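
For reference, a minimal sketch of the style the first two lints above ask for (illustrative only, not part of the diff):

```rust
// Collapsed `if` (collapsible_if) and inlined `format!` args
// (uninlined_format_args), per the guidelines above.
fn describe(count: usize, label: &str) -> String {
    if count > 0 && !label.is_empty() {
        format!("{count} items tagged {label}")
    } else {
        "empty".to_string()
    }
}
```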

@@ -1,3 +1,7 @@
+load("@apple_support//xcode:xcode_config.bzl", "xcode_config")
+
+xcode_config(name = "disable_xcode")
+
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
# We only enable this on linux though for sanity, and because it breaks remote execution.

MODULE.bazel (14 changed lines)
@@ -27,6 +27,8 @@ register_toolchains(
"@toolchains_llvm_bootstrapped//toolchain:all",
)

+# Needed to disable xcode...
+bazel_dep(name = "apple_support", version = "2.1.0")
bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rust", version = "0.68.1")
@@ -53,7 +55,7 @@ rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
edition = "2024",
extra_target_triples = RUST_TRIPLES,
-versions = ["1.90.0"],
+versions = ["1.93.0"],
)
use_repo(rust, "rust_toolchains")
@@ -67,6 +69,11 @@ crate.from_cargo(
cargo_toml = "//codex-rs:Cargo.toml",
platform_triples = RUST_TRIPLES,
)
+crate.annotation(
+crate = "nucleo-matcher",
+strip_prefix = "matcher",
+version = "0.3.1",
+)

bazel_dep(name = "openssl", version = "3.5.4.bcr.0")
@@ -85,6 +92,11 @@ crate.annotation(

inject_repo(crate, "openssl")

+crate.annotation(
+crate = "runfiles",
+workspace_cargo_toml = "rust/runfiles/Cargo.toml",
+)
+
# Fix readme inclusions
crate.annotation(
crate = "windows-link",

MODULE.bazel.lock (119 changed lines, generated)
File diff suppressed because one or more lines are too long

PNPM.md (70 changed lines)
@@ -1,70 +0,0 @@
-# Migration to pnpm
-
-This project has been migrated from npm to pnpm to improve dependency management and developer experience.
-
-## Why pnpm?
-
-- **Faster installation**: pnpm is significantly faster than npm and yarn
-- **Disk space savings**: pnpm uses a content-addressable store to avoid duplication
-- **Phantom dependency prevention**: pnpm creates a strict node_modules structure
-- **Native workspaces support**: simplified monorepo management
-
-## How to use pnpm
-
-### Installation
-
-```bash
-# Global installation of pnpm
-npm install -g pnpm@10.8.1
-
-# Or with corepack (available with Node.js 22+)
-corepack enable
-corepack prepare pnpm@10.8.1 --activate
-```
-
-### Common commands
-
-| npm command | pnpm equivalent |
-| --------------- | ---------------- |
-| `npm install` | `pnpm install` |
-| `npm run build` | `pnpm run build` |
-| `npm test` | `pnpm test` |
-| `npm run lint` | `pnpm run lint` |
-
-### Workspace-specific commands
-
-| Action | Command |
-| ------------------------------------------ | ---------------------------------------- |
-| Run a command in a specific package | `pnpm --filter @openai/codex run build` |
-| Install a dependency in a specific package | `pnpm --filter @openai/codex add lodash` |
-| Run a command in all packages | `pnpm -r run test` |
-
-## Monorepo structure
-
-```
-codex/
-├── pnpm-workspace.yaml # Workspace configuration
-├── .npmrc # pnpm configuration
-├── package.json # Root dependencies and scripts
-├── codex-cli/ # Main package
-│ └── package.json # codex-cli specific dependencies
-└── docs/ # Documentation (future package)
-```
-
-## Configuration files
-
-- **pnpm-workspace.yaml**: Defines the packages included in the monorepo
-- **.npmrc**: Configures pnpm behavior
-- **Root package.json**: Contains shared scripts and dependencies
-
-## CI/CD
-
-CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.8.1 or higher.
-
-## Known issues
-
-If you encounter issues with pnpm, try the following solutions:
-
-1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
-2. Make sure you're using pnpm 10.8.1 or higher
-3. Verify that Node.js 22 or higher is installed

codex-cli/package-lock.json (18 changed lines, generated)
@@ -1,18 +0,0 @@
-{
-"name": "@openai/codex",
-"version": "0.0.0-dev",
-"lockfileVersion": 3,
-"packages": {
-"": {
-"name": "@openai/codex",
-"version": "0.0.0-dev",
-"license": "Apache-2.0",
-"bin": {
-"codex": "bin/codex.js"
-},
-"engines": {
-"node": ">=16"
-}
-}
-}
-}

codex-cli/package.json

@@ -17,5 +17,6 @@
"type": "git",
"url": "git+https://github.com/openai/codex.git",
"directory": "codex-cli"
}
},
"packageManager": "pnpm@10.28.2+sha512.41872f037ad22f7348e3b1debbaf7e867cfd448f2726d9cf74c08f19507c31d2c8e7a11525b983febc2df640b5438dee6023ebb1f84ed43cc2d654d2bc326264"
}

codex-rs/Cargo.lock (2635 changed lines, generated)
File diff suppressed because it is too large

codex-rs/Cargo.toml

@@ -47,6 +47,7 @@ members = [
"utils/string",
"codex-client",
"codex-api",
+"state",
]
resolver = "2"
@@ -91,6 +92,7 @@ codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
+codex-state = { path = "state" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-utils-absolute-path = { path = "utils/absolute-path" }
@@ -118,6 +120,8 @@ async-channel = "2.3.1"
async-stream = "0.3.6"
async-trait = "0.1.89"
axum = { version = "0.8", default-features = false }
+azure_core = "0.21"
+azure_identity = "0.21"
base64 = "0.22.1"
bytes = "1.10.1"
chardetng = "0.1.17"
@@ -126,6 +130,7 @@ clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
crossterm = "0.28.1"
+crossbeam-channel = "0.5.15"
ctor = "0.6.3"
derive_more = "2"
diffy = "0.4.2"
@@ -159,7 +164,7 @@ maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
notify = "8.2.0"
-nucleo-matcher = "0.3.1"
+nucleo = { git = "https://github.com/helix-editor/nucleo.git", rev = "4253de9faabb4e5c6d81d946a5e35a90f87347ee" }
once_cell = "1.20.2"
openssl-sys = "*"
opentelemetry = "0.31.0"
@@ -183,6 +188,7 @@ regex = "1.12.2"
regex-lite = "0.1.8"
reqwest = "0.12"
rmcp = { version = "0.12.0", default-features = false }
+runfiles = { git = "https://github.com/dzbarsky/rules_rust", rev = "b56cbaa8465e74127f1ea216f813cd377295ad81" }
schemars = "0.8.22"
seccompiler = "0.5.0"
sentry = "0.46.0"
@@ -198,6 +204,7 @@ semver = "1.0"
shlex = "1.3.0"
similar = "2.7.0"
socket2 = "0.6.1"
+sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
@@ -216,7 +223,7 @@ tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-nativ
tokio-util = "0.7.18"
toml = "0.9.5"
toml_edit = "0.24.0"
-tracing = "0.1.43"
+tracing = "0.1.44"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.22"
tracing-test = "0.2.5"

@@ -117,6 +117,10 @@
params: v2::ThreadArchiveParams,
response: v2::ThreadArchiveResponse,
},
+ThreadUnarchive => "thread/unarchive" {
+    params: v2::ThreadUnarchiveParams,
+    response: v2::ThreadUnarchiveResponse,
+},
ThreadRollback => "thread/rollback" {
    params: v2::ThreadRollbackParams,
    response: v2::ThreadRollbackResponse,
@@ -594,6 +598,7 @@ server_notification_definitions! {
ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
+/// Deprecated: Use `ContextCompaction` item type instead.
ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
ConfigWarning => "configWarning" (v2::ConfigWarningNotification),

@@ -27,10 +27,12 @@ use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
+use codex_protocol::protocol::SkillDependencies as CoreSkillDependencies;
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
use codex_protocol::protocol::SkillScope as CoreSkillScope;
+use codex_protocol::protocol::SkillToolDependency as CoreSkillToolDependency;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
@@ -84,6 +86,10 @@ macro_rules! v2_enum_from_core {
pub enum CodexErrorInfo {
    ContextWindowExceeded,
    UsageLimitExceeded,
+    ModelCap {
+        model: String,
+        reset_after_seconds: Option<u64>,
+    },
    HttpConnectionFailed {
        #[serde(rename = "httpStatusCode")]
        #[ts(rename = "httpStatusCode")]
@@ -120,6 +126,13 @@ impl From<CoreCodexErrorInfo> for CodexErrorInfo {
    match value {
        CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
        CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
+        CoreCodexErrorInfo::ModelCap {
+            model,
+            reset_after_seconds,
+        } => CodexErrorInfo::ModelCap {
+            model,
+            reset_after_seconds,
+        },
        CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
            CodexErrorInfo::HttpConnectionFailed { http_status_code }
        }
@@ -1001,6 +1014,8 @@ pub struct AppInfo {
    pub name: String,
    pub description: Option<String>,
    pub logo_url: Option<String>,
+    pub logo_url_dark: Option<String>,
+    pub distribution_channel: Option<String>,
    pub install_url: Option<String>,
    #[serde(default)]
    pub is_accessible: bool,
@@ -1223,6 +1238,20 @@
#[ts(export_to = "v2/")]
pub struct ThreadArchiveResponse {}

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadUnarchiveParams {
+    pub thread_id: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadUnarchiveResponse {
+    pub thread: Thread,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1260,11 +1289,32 @@ pub struct ThreadListParams {
    /// Optional provider filter; when set, only sessions recorded under these
    /// providers are returned. When present but empty, includes all providers.
    pub model_providers: Option<Vec<String>>,
    /// Optional source filter; when set, only sessions from these source kinds
    /// are returned. When omitted or empty, defaults to interactive sources.
    pub source_kinds: Option<Vec<ThreadSourceKind>>,
+    /// Optional archived filter; when set to true, only archived threads are returned.
+    /// If false or null, only non-archived threads are returned.
+    pub archived: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase", export_to = "v2/")]
pub enum ThreadSourceKind {
    Cli,
    #[serde(rename = "vscode")]
    #[ts(rename = "vscode")]
    VsCode,
    Exec,
    AppServer,
    SubAgent,
    SubAgentReview,
    SubAgentCompact,
    SubAgentThreadSpawn,
    SubAgentOther,
    Unknown,
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
@@ -1360,11 +1410,14 @@ pub struct SkillMetadata {
    pub description: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
-    /// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
+    /// Legacy short_description from SKILL.md. Prefer SKILL.json interface.short_description.
    pub short_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub interface: Option<SkillInterface>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    #[ts(optional)]
+    pub dependencies: Option<SkillDependencies>,
    pub path: PathBuf,
    pub scope: SkillScope,
    pub enabled: bool,
@@ -1388,6 +1441,35 @@ pub struct SkillInterface {
    pub default_prompt: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillToolDependency {
    #[serde(rename = "type")]
    #[ts(rename = "type")]
    pub r#type: String,
    pub value: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub transport: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub command: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1427,6 +1509,7 @@ impl From<CoreSkillMetadata> for SkillMetadata {
    description: value.description,
    short_description: value.short_description,
    interface: value.interface.map(SkillInterface::from),
+    dependencies: value.dependencies.map(SkillDependencies::from),
    path: value.path,
    scope: value.scope.into(),
    enabled: true,
@@ -1447,6 +1530,31 @@ impl From<CoreSkillInterface> for SkillInterface {
    }
}

impl From<CoreSkillDependencies> for SkillDependencies {
    fn from(value: CoreSkillDependencies) -> Self {
        Self {
            tools: value
                .tools
                .into_iter()
                .map(SkillToolDependency::from)
                .collect(),
        }
    }
}

impl From<CoreSkillToolDependency> for SkillToolDependency {
    fn from(value: CoreSkillToolDependency) -> Self {
        Self {
            r#type: value.r#type,
            value: value.value,
            description: value.description,
            transport: value.transport,
            command: value.command,
            url: value.url,
        }
    }
}

impl From<CoreSkillScope> for SkillScope {
    fn from(value: CoreSkillScope) -> Self {
        match value {
@@ -1802,6 +1910,10 @@ pub enum UserInput {
        name: String,
        path: PathBuf,
    },
+    Mention {
+        name: String,
+        path: String,
+    },
}

impl UserInput {
@@ -1817,6 +1929,7 @@
    UserInput::Image { url } => CoreUserInput::Image { image_url: url },
    UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
    UserInput::Skill { name, path } => CoreUserInput::Skill { name, path },
+    UserInput::Mention { name, path } => CoreUserInput::Mention { name, path },
        }
    }
}
@@ -1834,6 +1947,7 @@ impl From<CoreUserInput> for UserInput {
    CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
    CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
    CoreUserInput::Skill { name, path } => UserInput::Skill { name, path },
+    CoreUserInput::Mention { name, path } => UserInput::Mention { name, path },
    _ => unreachable!("unsupported user input variant"),
        }
    }
@@ -1934,6 +2048,9 @@ pub enum ThreadItem {
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    ExitedReviewMode { id: String, review: String },
+    #[serde(rename_all = "camelCase")]
+    #[ts(rename_all = "camelCase")]
+    ContextCompaction { id: String },
}

impl From<CoreTurnItem> for ThreadItem {
@@ -1962,6 +2079,9 @@
        id: search.id,
        query: search.query,
    },
+    CoreTurnItem::ContextCompaction(compaction) => {
+        ThreadItem::ContextCompaction { id: compaction.id }
+    }
        }
    }
}
@@ -2324,6 +2444,7 @@ pub struct WindowsWorldWritableWarningNotification {
    pub failed_scan: bool,
}

+/// Deprecated: Use `ContextCompaction` item type instead.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2420,6 +2541,10 @@ pub struct ToolRequestUserInputQuestion {
    pub id: String,
    pub header: String,
    pub question: String,
+    #[serde(default)]
+    pub is_other: bool,
+    #[serde(default)]
+    pub is_secret: bool,
    pub options: Option<Vec<ToolRequestUserInputOption>>,
}
@@ -2584,6 +2709,7 @@ mod tests {
    use codex_protocol::items::TurnItem;
    use codex_protocol::items::UserMessageItem;
    use codex_protocol::items::WebSearchItem;
+    use codex_protocol::models::WebSearchAction;
    use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
    use codex_protocol::user_input::UserInput as CoreUserInput;
    use pretty_assertions::assert_eq;
@@ -2627,6 +2753,10 @@
        name: "skill-creator".to_string(),
        path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
    },
+    CoreUserInput::Mention {
+        name: "Demo App".to_string(),
+        path: "app://demo-app".to_string(),
+    },
    ],
});
@@ -2649,6 +2779,10 @@
        name: "skill-creator".to_string(),
        path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
    },
+    UserInput::Mention {
+        name: "Demo App".to_string(),
+        path: "app://demo-app".to_string(),
+    },
    ],
}
);
@@ -2691,6 +2825,9 @@
let search_item = TurnItem::WebSearch(WebSearchItem {
    id: "search-1".to_string(),
    query: "docs".to_string(),
+    action: WebSearchAction::Search {
+        query: Some("docs".to_string()),
+    },
});

assert_eq!(

@@ -56,6 +56,7 @@ axum = { workspace = true, default-features = false, features = [
"tokio",
] }
base64 = { workspace = true }
codex-execpolicy = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }

@@ -13,6 +13,7 @@
- [Events](#events)
- [Approvals](#approvals)
- [Skills](#skills)
+- [Apps](#apps)
- [Auth endpoints](#auth-endpoints)

## Protocol
@@ -81,6 +82,7 @@ Example (from OpenAI's official VSCode extension):
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
+- `thread/unarchive` — move an archived rollout file back into the sessions directory; returns the restored `thread` on success.
- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
@@ -166,6 +168,7 @@ To branch from a stored session, call `thread/fork` with the `thread.id`. This c
- `limit` — server defaults to a reasonable page size if unset.
- `sortKey` — `created_at` (default) or `updated_at`.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
+- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).

Example:
@@ -223,6 +226,15 @@ Use `thread/archive` to move the persisted rollout (stored as a JSONL file on di

An archived thread will not appear in `thread/list` unless `archived` is set to `true`.

+### Example: Unarchive a thread
+
+Use `thread/unarchive` to move an archived rollout back into the sessions directory.
+
+```json
+{ "method": "thread/unarchive", "id": 24, "params": { "threadId": "thr_b" } }
+{ "id": 24, "result": { "thread": { "id": "thr_b" } } }
+```
+
### Example: Start a turn (send user input)

Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:
@@ -285,6 +297,26 @@ Invoke a skill explicitly by including `$<skill-name>` in the text input and add
} } }
```

+### Example: Start a turn (invoke an app)
+
+Invoke an app by including `$<app-slug>` in the text input and adding a `mention` input item with the app id in `app://<connector-id>` form.
+
+```json
+{ "method": "turn/start", "id": 34, "params": {
+"threadId": "thr_123",
+"input": [
+{ "type": "text", "text": "$demo-app Summarize the latest updates." },
+{ "type": "mention", "name": "Demo App", "path": "app://demo-app" }
+]
+} }
+{ "id": 34, "result": { "turn": {
+"id": "turn_458",
+"status": "inProgress",
+"items": [],
+"error": null
+} } }
+```
+
### Example: Interrupt an active turn

You can cancel a running Turn with `turn/interrupt`.
@@ -420,7 +452,8 @@ Today both notifications carry an empty `items` array even when item events were
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
-- `compacted` - `{threadId, turnId}` when codex compacts the conversation history. This can happen automatically.
+- `contextCompaction` — `{id}` emitted when codex compacts the conversation history. This can happen automatically.
+- `compacted` - `{threadId, turnId}` when codex compacts the conversation history. This can happen automatically. **Deprecated:** Use `contextCompaction` instead.

All items emit two shared lifecycle events:
@@ -571,6 +604,57 @@ To enable or disable a skill by path:
}
```

+## Apps
+
+Use `app/list` to fetch available apps (connectors). Each entry includes metadata like the app `id`, display `name`, `installUrl`, and whether it is currently accessible.
+
+```json
+{ "method": "app/list", "id": 50, "params": {
+"cursor": null,
+"limit": 50
+} }
+{ "id": 50, "result": {
+"data": [
+{
+"id": "demo-app",
+"name": "Demo App",
+"description": "Example connector for documentation.",
+"logoUrl": "https://example.com/demo-app.png",
+"logoUrlDark": null,
+"distributionChannel": null,
+"installUrl": "https://chatgpt.com/apps/demo-app/demo-app",
+"isAccessible": true
+}
+],
+"nextCursor": null
+} }
+```
+
+Invoke an app by inserting `$<app-slug>` in the text input. The slug is derived from the app name and lowercased with non-alphanumeric characters replaced by `-` (for example, "Demo App" becomes `$demo-app`). Add a `mention` input item (recommended) so the server uses the exact `app://<connector-id>` path rather than guessing by name.
+
+Example:
+
+```
+$demo-app Pull the latest updates from the team.
+```
+
+```json
+{
+"method": "turn/start",
+"id": 51,
+"params": {
+"threadId": "thread-1",
+"input": [
+{
+"type": "text",
+"text": "$demo-app Pull the latest updates from the team."
+},
+{ "type": "mention", "name": "Demo App", "path": "app://demo-app" }
+]
+}
+}
+```
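
The slug rule described above is simple enough to sketch. A hypothetical Rust version (illustrative only; the server's actual derivation may differ, e.g. for non-ASCII names):

```rust
// Lowercase the app name and collapse runs of non-alphanumeric
// characters into single `-` separators: "Demo App" -> "demo-app".
fn app_slug(name: &str) -> String {
    let mut slug = String::new();
    for ch in name.chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else if !slug.is_empty() && !slug.ends_with('-') {
            slug.push('-');
        }
    }
    slug.trim_end_matches('-').to_string()
}
```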

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.

@@ -278,6 +278,8 @@ pub(crate) async fn apply_bespoke_event_handling(
id: question.id,
header: question.header,
question: question.question,
+is_other: question.is_other,
+is_secret: question.is_secret,
options: question.options.map(|options| {
    options
        .into_iter()

@@ -13,7 +13,6 @@ use codex_app_server_protocol::AccountLoginCompletedNotification;
use codex_app_server_protocol::AccountUpdatedNotification;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::AppInfo as ApiAppInfo;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::ArchiveConversationParams;
@@ -111,9 +110,12 @@ use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadSortKey;
+use codex_app_server_protocol::ThreadSourceKind;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
+use codex_app_server_protocol::ThreadUnarchiveParams;
+use codex_app_server_protocol::ThreadUnarchiveResponse;
use codex_app_server_protocol::Turn;
use codex_app_server_protocol::TurnError;
use codex_app_server_protocol::TurnInterruptParams;
@@ -130,7 +132,6 @@ use codex_chatgpt::connectors;
use codex_core::AuthManager;
use codex_core::CodexThread;
use codex_core::Cursor as RolloutCursor;
-use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::InitialHistory;
use codex_core::NewThread;
use codex_core::RolloutRecorder;
@@ -151,6 +152,7 @@ use codex_core::error::CodexErr;
use codex_core::exec::ExecParams;
use codex_core::exec_env::create_env;
use codex_core::features::Feature;
+use codex_core::find_archived_thread_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use codex_core::git_info::git_diff_to_remote;
use codex_core::mcp::collect_mcp_snapshot;
@@ -164,7 +166,10 @@ use codex_core::protocol::ReviewTarget as CoreReviewTarget;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::read_head_for_summary;
use codex_core::read_session_meta_line;
+use codex_core::rollout_date_parts;
use codex_core::sandboxing::SandboxPermissions;
+use codex_core::state_db::get_state_db;
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
use codex_feedback::CodexFeedback;
use codex_login::ServerOptions as LoginServerOptions;
use codex_login::ShutdownHandle;
@@ -172,6 +177,7 @@ use codex_login::run_login_server;
use codex_protocol::ThreadId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::Personality;
+use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::dynamic_tools::DynamicToolSpec as CoreDynamicToolSpec;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
@@ -205,6 +211,9 @@ use tracing::info;
use tracing::warn;
use uuid::Uuid;

+use crate::filters::compute_source_filters;
+use crate::filters::source_kind_matches;
+
type PendingInterruptQueue = Vec<(RequestId, ApiVersion)>;
pub(crate) type PendingInterrupts = Arc<Mutex<HashMap<ThreadId, PendingInterruptQueue>>>;
@@ -404,6 +413,9 @@ impl CodexMessageProcessor {
ClientRequest::ThreadArchive { request_id, params } => {
    self.thread_archive(request_id, params).await;
}
+ClientRequest::ThreadUnarchive { request_id, params } => {
+    self.thread_unarchive(request_id, params).await;
+}
ClientRequest::ThreadRollback { request_id, params } => {
    self.thread_rollback(request_id, params).await;
}
@@ -1249,12 +1261,14 @@
let timeout_ms = params
    .timeout_ms
    .and_then(|timeout_ms| u64::try_from(timeout_ms).ok());
+let windows_sandbox_level = WindowsSandboxLevel::from_config(&self.config);
let exec_params = ExecParams {
    command: params.command,
    cwd,
    expiration: timeout_ms.into(),
    env,
    sandbox_permissions: SandboxPermissions::UseDefault,
+    windows_sandbox_level,
    justification: None,
    arg0: None,
};
@@ -1595,6 +1609,7 @@
}

async fn thread_archive(&mut self, request_id: RequestId, params: ThreadArchiveParams) {
+    // TODO(jif) mostly rewrite this using sqlite after phase 1
    let thread_id = match ThreadId::from_string(&params.thread_id) {
        Ok(id) => id,
        Err(err) => {
@@ -1643,6 +1658,157 @@
    }
}

+async fn thread_unarchive(&mut self, request_id: RequestId, params: ThreadUnarchiveParams) {
+    // TODO(jif) mostly rewrite this using sqlite after phase 1
+    let thread_id = match ThreadId::from_string(&params.thread_id) {
+        Ok(id) => id,
+        Err(err) => {
+            let error = JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!("invalid thread id: {err}"),
+                data: None,
+            };
+            self.outgoing.send_error(request_id, error).await;
+            return;
+        }
+    };
+
+    let archived_path = match find_archived_thread_path_by_id_str(
+        &self.config.codex_home,
+        &thread_id.to_string(),
+    )
+    .await
+    {
+        Ok(Some(path)) => path,
+        Ok(None) => {
+            let error = JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!("no archived rollout found for thread id {thread_id}"),
+                data: None,
+            };
+            self.outgoing.send_error(request_id, error).await;
+            return;
+        }
+        Err(err) => {
+            let error = JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!("failed to locate archived thread id {thread_id}: {err}"),
+                data: None,
+            };
+            self.outgoing.send_error(request_id, error).await;
+            return;
+        }
+    };
+
+    let rollout_path_display = archived_path.display().to_string();
+    let fallback_provider = self.config.model_provider_id.clone();
+    let state_db_ctx = get_state_db(&self.config, None).await;
+    let archived_folder = self
+        .config
+        .codex_home
+        .join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
+
+    let result: Result<Thread, JSONRPCErrorError> = async {
+        let canonical_archived_dir = tokio::fs::canonicalize(&archived_folder).await.map_err(
+            |err| JSONRPCErrorError {
+                code: INTERNAL_ERROR_CODE,
+                message: format!(
+                    "failed to unarchive thread: unable to resolve archived directory: {err}"
+                ),
+                data: None,
+            },
+        )?;
+        let canonical_rollout_path = tokio::fs::canonicalize(&archived_path).await;
+        let canonical_rollout_path = if let Ok(path) = canonical_rollout_path
+            && path.starts_with(&canonical_archived_dir)
+        {
+            path
+        } else {
+            return Err(JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!(
+                    "rollout path `{rollout_path_display}` must be in archived directory"
+                ),
+                data: None,
+            });
+        };
+
+        let required_suffix = format!("{thread_id}.jsonl");
+        let Some(file_name) = canonical_rollout_path.file_name().map(OsStr::to_owned) else {
+            return Err(JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!("rollout path `{rollout_path_display}` missing file name"),
+                data: None,
+            });
+        };
+        if !file_name
+            .to_string_lossy()
+            .ends_with(required_suffix.as_str())
+        {
+            return Err(JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!(
+                    "rollout path `{rollout_path_display}` does not match thread id {thread_id}"
+                ),
+                data: None,
+            });
+        }
+
+        let Some((year, month, day)) = rollout_date_parts(&file_name) else {
+            return Err(JSONRPCErrorError {
+                code: INVALID_REQUEST_ERROR_CODE,
+                message: format!(
+                    "rollout path `{rollout_path_display}` missing filename timestamp"
+                ),
+                data: None,
+            });
+        };
+
+        let sessions_folder = self.config.codex_home.join(codex_core::SESSIONS_SUBDIR);
+        let dest_dir = sessions_folder.join(year).join(month).join(day);
+        let restored_path = dest_dir.join(&file_name);
+        tokio::fs::create_dir_all(&dest_dir)
+            .await
+            .map_err(|err| JSONRPCErrorError {
+                code: INTERNAL_ERROR_CODE,
+                message: format!("failed to unarchive thread: {err}"),
+                data: None,
+            })?;
+        tokio::fs::rename(&canonical_rollout_path, &restored_path)
+            .await
+            .map_err(|err| JSONRPCErrorError {
+                code: INTERNAL_ERROR_CODE,
+                message: format!("failed to unarchive thread: {err}"),
+                data: None,
+            })?;
+        if let Some(ctx) = state_db_ctx {
+            let _ = ctx
+                .mark_unarchived(thread_id, restored_path.as_path())
+                .await;
+        }
+        let summary =
+            read_summary_from_rollout(restored_path.as_path(), fallback_provider.as_str())
+                .await
+                .map_err(|err| JSONRPCErrorError {
+                    code: INTERNAL_ERROR_CODE,
+                    message: format!("failed to read unarchived thread: {err}"),
+                    data: None,
+                })?;
+        Ok(summary_to_thread(summary))
+    }
+    .await;
+
+    match result {
+        Ok(thread) => {
+            let response = ThreadUnarchiveResponse { thread };
+            self.outgoing.send_response(request_id, response).await;
+        }
+        Err(err) => {
+            self.outgoing.send_error(request_id, err).await;
+        }
+    }
+}
|
||||
|
||||
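The canonicalize-then-`starts_with` pair above is the load-bearing safety step: it stops a crafted thread id (or a symlinked rollout file) from moving anything that does not actually live under the archived-sessions directory. A minimal sketch of the same containment check in isolation — the helper name and return shape are illustrative, not from the codebase:

use std::io;
use std::path::{Path, PathBuf};

// Accept `candidate` only if, after symlinks are resolved on both sides,
// it still sits under `root`; otherwise return Ok(None).
async fn canonical_within(root: &Path, candidate: &Path) -> io::Result<Option<PathBuf>> {
    let root = tokio::fs::canonicalize(root).await?;
    let candidate = tokio::fs::canonicalize(candidate).await?;
    Ok(candidate.starts_with(&root).then_some(candidate))
}

Note that both paths must be canonicalized: checking the raw `candidate` against a canonical `root` would still let a symlink inside the archive escape it.
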
async fn thread_rollback(&mut self, request_id: RequestId, params: ThreadRollbackParams) {
    let ThreadRollbackParams {
        thread_id,
@@ -1694,6 +1860,7 @@ impl CodexMessageProcessor {
    limit,
    sort_key,
    model_providers,
    source_kinds,
    archived,
} = params;

@@ -1710,6 +1877,7 @@ impl CodexMessageProcessor {
    requested_page_size,
    cursor,
    model_providers,
    source_kinds,
    core_sort_key,
    archived.unwrap_or(false),
)
@@ -2347,7 +2515,6 @@ impl CodexMessageProcessor {
};

let fallback_provider = self.config.model_provider_id.as_str();

match read_summary_from_rollout(&path, fallback_provider).await {
    Ok(summary) => {
        let response = GetConversationSummaryResponse { summary };
@@ -2387,6 +2554,7 @@ impl CodexMessageProcessor {
    requested_page_size,
    cursor,
    model_providers,
    None,
    CoreThreadSortKey::UpdatedAt,
    false,
)
@@ -2407,6 +2575,7 @@ impl CodexMessageProcessor {
    requested_page_size: usize,
    cursor: Option<String>,
    model_providers: Option<Vec<String>>,
    source_kinds: Option<Vec<ThreadSourceKind>>,
    sort_key: CoreThreadSortKey,
    archived: bool,
) -> Result<(Vec<ConversationSummary>, Option<String>), JSONRPCErrorError> {
@@ -2436,6 +2605,8 @@ impl CodexMessageProcessor {
    None => Some(vec![self.config.model_provider_id.clone()]),
};
let fallback_provider = self.config.model_provider_id.clone();
let (allowed_sources_vec, source_kind_filter) = compute_source_filters(source_kinds);
let allowed_sources = allowed_sources_vec.as_slice();

while remaining > 0 {
    let page_size = remaining.min(THREAD_LIST_MAX_LIMIT);
@@ -2445,7 +2616,7 @@ impl CodexMessageProcessor {
    page_size,
    cursor_obj.as_ref(),
    sort_key,
    INTERACTIVE_SESSION_SOURCES,
    allowed_sources,
    model_provider_filter.as_deref(),
    fallback_provider.as_str(),
)
@@ -2461,7 +2632,7 @@ impl CodexMessageProcessor {
    page_size,
    cursor_obj.as_ref(),
    sort_key,
    INTERACTIVE_SESSION_SOURCES,
    allowed_sources,
    model_provider_filter.as_deref(),
    fallback_provider.as_str(),
)
@@ -2490,6 +2661,11 @@ impl CodexMessageProcessor {
        updated_at,
    )
})
.filter(|summary| {
    source_kind_filter
        .as_ref()
        .is_none_or(|filter| source_kind_matches(&summary.source, filter))
})
.collect::<Vec<_>>();
if filtered.len() > remaining {
    filtered.truncate(remaining);
@@ -2700,6 +2876,8 @@ impl CodexMessageProcessor {
    }
};

let scopes = scopes.or_else(|| server.scopes.clone());

match perform_oauth_login_return_url(
    &name,
    &url,
@@ -3359,8 +3537,13 @@ impl CodexMessageProcessor {
    });
}

let mut state_db_ctx = None;

// If the thread is active, request shutdown and wait briefly.
if let Some(conversation) = self.thread_manager.remove_thread(&thread_id).await {
    if let Some(ctx) = conversation.state_db() {
        state_db_ctx = Some(ctx);
    }
    info!("thread {thread_id} was active; shutting down");
    // Request shutdown.
    match conversation.submit(Op::Shutdown).await {
@@ -3387,14 +3570,24 @@ impl CodexMessageProcessor {
    }
}

if state_db_ctx.is_none() {
    state_db_ctx = get_state_db(&self.config, None).await;
}

// Move the rollout file to archived.
let result: std::io::Result<()> = async {
let result: std::io::Result<()> = async move {
    let archive_folder = self
        .config
        .codex_home
        .join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
    tokio::fs::create_dir_all(&archive_folder).await?;
    tokio::fs::rename(&canonical_rollout_path, &archive_folder.join(&file_name)).await?;
    let archived_path = archive_folder.join(&file_name);
    tokio::fs::rename(&canonical_rollout_path, &archived_path).await?;
    if let Some(ctx) = state_db_ctx {
        let _ = ctx
            .mark_archived(thread_id, archived_path.as_path(), Utc::now())
            .await;
    }
    Ok(())
}
.await;
@@ -3518,7 +3711,7 @@ impl CodexMessageProcessor {
    }
};

if !config.features.enabled(Feature::Connectors) {
if !config.features.enabled(Feature::Apps) {
    self.outgoing
        .send_response(
            request_id,
@@ -3581,18 +3774,7 @@ impl CodexMessageProcessor {
}

let end = start.saturating_add(effective_limit).min(total);
let data = connectors[start..end]
    .iter()
    .cloned()
    .map(|connector| ApiAppInfo {
        id: connector.connector_id,
        name: connector.connector_name,
        description: connector.connector_description,
        logo_url: connector.logo_url,
        install_url: connector.install_url,
        is_accessible: connector.is_accessible,
    })
    .collect();
let data = connectors[start..end].to_vec();

let next_cursor = if end < total {
    Some(end.to_string())
@@ -3720,6 +3902,7 @@ impl CodexMessageProcessor {
    cwd: params.cwd,
    approval_policy: params.approval_policy.map(AskForApproval::to_core),
    sandbox_policy: params.sandbox_policy.map(|p| p.to_core()),
    windows_sandbox_level: None,
    model: params.model,
    effort: params.effort.map(Some),
    summary: params.summary,
@@ -4350,6 +4533,22 @@ fn skills_to_info(
        default_prompt: interface.default_prompt,
    }
}),
dependencies: skill.dependencies.clone().map(|dependencies| {
    codex_app_server_protocol::SkillDependencies {
        tools: dependencies
            .tools
            .into_iter()
            .map(|tool| codex_app_server_protocol::SkillToolDependency {
                r#type: tool.r#type,
                value: tool.value,
                description: tool.description,
                transport: tool.transport,
                command: tool.command,
                url: tool.url,
            })
            .collect(),
    }
}),
path: skill.path.clone(),
scope: skill.scope.into(),
enabled,

155  codex-rs/app-server/src/filters.rs  Normal file
@@ -0,0 +1,155 @@
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;

pub(crate) fn compute_source_filters(
    source_kinds: Option<Vec<ThreadSourceKind>>,
) -> (Vec<CoreSessionSource>, Option<Vec<ThreadSourceKind>>) {
    let Some(source_kinds) = source_kinds else {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    };

    if source_kinds.is_empty() {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    }

    let requires_post_filter = source_kinds.iter().any(|kind| {
        matches!(
            kind,
            ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown
        )
    });

    if requires_post_filter {
        (Vec::new(), Some(source_kinds))
    } else {
        let interactive_sources = source_kinds
            .iter()
            .filter_map(|kind| match kind {
                ThreadSourceKind::Cli => Some(CoreSessionSource::Cli),
                ThreadSourceKind::VsCode => Some(CoreSessionSource::VSCode),
                ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown => None,
            })
            .collect::<Vec<_>>();
        (interactive_sources, Some(source_kinds))
    }
}

pub(crate) fn source_kind_matches(source: &CoreSessionSource, filter: &[ThreadSourceKind]) -> bool {
    filter.iter().any(|kind| match kind {
        ThreadSourceKind::Cli => matches!(source, CoreSessionSource::Cli),
        ThreadSourceKind::VsCode => matches!(source, CoreSessionSource::VSCode),
        ThreadSourceKind::Exec => matches!(source, CoreSessionSource::Exec),
        ThreadSourceKind::AppServer => matches!(source, CoreSessionSource::Mcp),
        ThreadSourceKind::SubAgent => matches!(source, CoreSessionSource::SubAgent(_)),
        ThreadSourceKind::SubAgentReview => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Review)
            )
        }
        ThreadSourceKind::SubAgentCompact => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Compact)
            )
        }
        ThreadSourceKind::SubAgentThreadSpawn => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn { .. })
        ),
        ThreadSourceKind::SubAgentOther => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::Other(_))
        ),
        ThreadSourceKind::Unknown => matches!(source, CoreSessionSource::Unknown),
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::ThreadId;
    use pretty_assertions::assert_eq;
    use uuid::Uuid;

    #[test]
    fn compute_source_filters_defaults_to_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(None);

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_empty_means_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(Some(Vec::new()));

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_interactive_only_skips_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::Cli, ThreadSourceKind::VsCode];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(
            allowed_sources,
            vec![CoreSessionSource::Cli, CoreSessionSource::VSCode]
        );
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn compute_source_filters_subagent_variant_requires_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::SubAgentReview];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(allowed_sources, Vec::new());
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn source_kind_matches_distinguishes_subagent_variants() {
        let parent_thread_id =
            ThreadId::from_string(&Uuid::new_v4().to_string()).expect("valid thread id");
        let review = CoreSessionSource::SubAgent(CoreSubAgentSource::Review);
        let spawn = CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        });

        assert!(source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentReview]
        ));
        assert!(!source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(!source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentReview]
        ));
    }
}
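The two halves of this module are designed to be used together: the first value returned by compute_source_filters narrows the storage query, and the second drives a post-filter over the rows that come back. When a requested kind cannot be pre-filtered (any of the Exec/AppServer/SubAgent*/Unknown variants), allowed_sources is empty, so every row is fetched and matched precisely by source_kind_matches. A self-contained analog with stand-in types (the real code uses CoreSessionSource and ConversationSummary from the codex crates):

// Stand-in enum for illustration only.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Source { Cli, Exec }

fn main() {
    let rows = vec![Source::Cli, Source::Exec, Source::Cli];
    // Phase 1: coarse pre-filter the storage layer can apply; empty = fetch everything.
    let allowed: Vec<Source> = Vec::new();
    // Phase 2: precise post-filter applied to the fetched rows.
    let wanted = Some(vec![Source::Exec]);
    let filtered: Vec<Source> = rows
        .into_iter()
        .filter(|row| allowed.is_empty() || allowed.contains(row))
        .filter(|row| wanted.as_ref().is_none_or(|w| w.contains(row)))
        .collect();
    assert_eq!(filtered, vec![Source::Exec]);
}

This mirrors the caller in codex_message_processor.rs, which passes allowed_sources into the paged query and then filters each summary with source_kind_matches.
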
@@ -42,6 +42,7 @@ mod codex_message_processor;
mod config_api;
mod dynamic_tools;
mod error_code;
mod filters;
mod fuzzy_file_search;
mod message_processor;
mod models;
@@ -134,7 +135,7 @@ fn project_config_warning(config: &Config) -> Option<ConfigWarningNotification>
    .disabled_reason
    .as_ref()
    .map(ToString::to_string)
    .unwrap_or_else(|| "Config folder disabled.".to_string()),
    .unwrap_or_else(|| "config.toml is disabled.".to_string()),
));
}
}
@@ -143,7 +144,11 @@ fn project_config_warning(config: &Config) -> Option<ConfigWarningNotification>
    return None;
}

let mut message = "The following config folders are disabled:\n".to_string();
let mut message = concat!(
    "Project config.toml files are disabled in the following folders. ",
    "Settings in those files are ignored, but skills and exec policies still load.\n",
)
.to_string();
for (index, (folder, reason)) in disabled_folders.iter().enumerate() {
    let display_index = index + 1;
    message.push_str(&format!(" {display_index}. {folder}\n"));

72  codex-rs/app-server/tests/common/config.rs  Normal file
@@ -0,0 +1,72 @@
use codex_core::features::FEATURES;
use codex_core::features::Feature;
use std::collections::BTreeMap;
use std::path::Path;

pub fn write_mock_responses_config_toml(
    codex_home: &Path,
    server_uri: &str,
    feature_flags: &BTreeMap<Feature, bool>,
    auto_compact_limit: i64,
    requires_openai_auth: Option<bool>,
    model_provider_id: &str,
    compact_prompt: &str,
) -> std::io::Result<()> {
    // Phase 1: build the features block for config.toml.
    let mut features = BTreeMap::from([(Feature::RemoteModels, false)]);
    for (feature, enabled) in feature_flags {
        features.insert(*feature, *enabled);
    }
    let feature_entries = features
        .into_iter()
        .map(|(feature, enabled)| {
            let key = FEATURES
                .iter()
                .find(|spec| spec.id == feature)
                .map(|spec| spec.key)
                .unwrap_or_else(|| panic!("missing feature key for {feature:?}"));
            format!("{key} = {enabled}")
        })
        .collect::<Vec<_>>()
        .join("\n");
    // Phase 2: build provider-specific config bits.
    let requires_line = match requires_openai_auth {
        Some(true) => "requires_openai_auth = true\n".to_string(),
        Some(false) | None => String::new(),
    };
    let provider_block = if model_provider_id == "openai" {
        String::new()
    } else {
        format!(
            r#"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
{requires_line}
"#
        )
    };
    // Phase 3: write the final config file.
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
compact_prompt = "{compact_prompt}"
model_auto_compact_token_limit = {auto_compact_limit}

model_provider = "{model_provider_id}"

[features]
{feature_entries}
{provider_block}
"#
        ),
    )
}
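A typical call, mirroring how the compaction suite further down invokes this helper (the values shown are that suite's own constants; `server` is the mock responses server the test starts first):

let codex_home = TempDir::new()?;
write_mock_responses_config_toml(
    codex_home.path(),
    &server.uri(),                  // base URL of the mock responses server
    &BTreeMap::default(),           // no extra feature flags beyond the defaults
    1_000,                          // model_auto_compact_token_limit
    None,                           // omit requires_openai_auth from the provider block
    "mock_provider",                // non-"openai" id, so the provider block is emitted
    "Summarize the conversation.",  // compact_prompt
)?;

Passing "openai" as the provider id skips the [model_providers.mock_provider] block entirely, which the remote-compaction test relies on.
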
@@ -1,4 +1,5 @@
mod auth_fixtures;
mod config;
mod mcp_process;
mod mock_model_server;
mod models_cache;
@@ -10,6 +11,7 @@ pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use config::write_mock_responses_config_toml;
pub use core_test_support::format_with_current_shell;
pub use core_test_support::format_with_current_shell_display;
pub use core_test_support::format_with_current_shell_display_non_login;
@@ -30,6 +32,7 @@ pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_request_user_input_sse_response;
pub use responses::create_shell_command_sse_response;
pub use rollout::create_fake_rollout;
pub use rollout::create_fake_rollout_with_source;
pub use rollout::create_fake_rollout_with_text_elements;
pub use rollout::rollout_path;
use serde::de::DeserializeOwned;

@@ -53,6 +53,7 @@ use codex_app_server_protocol::ThreadReadParams;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnStartParams;
use codex_core::default_client::CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR;
@@ -365,6 +366,15 @@ impl McpProcess {
    self.send_request("thread/archive", params).await
}

/// Send a `thread/unarchive` JSON-RPC request.
pub async fn send_thread_unarchive_request(
    &mut self,
    params: ThreadUnarchiveParams,
) -> anyhow::Result<i64> {
    let params = Some(serde_json::to_value(params)?);
    self.send_request("thread/unarchive", params).await
}

/// Send a `thread/rollback` JSON-RPC request.
pub async fn send_thread_rollback_request(
    &mut self,

@@ -67,6 +67,7 @@ pub fn create_request_user_input_sse_response(call_id: &str) -> anyhow::Result<S
"id": "confirm_path",
"header": "Confirm",
"question": "Proceed with the plan?",
"isOther": false,
"options": [{
    "label": "Yes (Recommended)",
    "description": "Continue the current plan."

@@ -38,6 +38,27 @@ pub fn create_fake_rollout(
    preview: &str,
    model_provider: Option<&str>,
    git_info: Option<GitInfo>,
) -> Result<String> {
    create_fake_rollout_with_source(
        codex_home,
        filename_ts,
        meta_rfc3339,
        preview,
        model_provider,
        git_info,
        SessionSource::Cli,
    )
}

/// Create a minimal rollout file with an explicit session source.
pub fn create_fake_rollout_with_source(
    codex_home: &Path,
    filename_ts: &str,
    meta_rfc3339: &str,
    preview: &str,
    model_provider: Option<&str>,
    git_info: Option<GitInfo>,
    source: SessionSource,
) -> Result<String> {
    let uuid = Uuid::new_v4();
    let uuid_str = uuid.to_string();
@@ -57,7 +78,7 @@ pub fn create_fake_rollout(
    cwd: PathBuf::from("/"),
    originator: "codex".to_string(),
    cli_version: "0.0.0".to_string(),
    source: SessionSource::Cli,
    source,
    model_provider: model_provider.map(str::to_string),
    base_instructions: None,
};

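The new `source` parameter is what the thread-list filter tests below exercise. Seeding a non-interactive rollout looks like this (timestamps and preview text are arbitrary test values, taken from the thread_list suite):

let exec_id = create_fake_rollout_with_source(
    codex_home.path(),
    "2025-02-01T11-00-00",   // timestamp embedded in the rollout filename
    "2025-02-01T11:00:00Z",  // timestamp recorded in the session meta line
    "Exec",                  // preview text
    Some("mock_provider"),
    None,                    // no git info
    CoreSessionSource::Exec, // recorded source; excluded from interactive listings by default
)?;

The original create_fake_rollout keeps its old signature by delegating with SessionSource::Cli, so existing tests are unaffected.
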
@@ -48,8 +48,7 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
.await??;

let value = resp.result;
// The path separator on Windows affects the score.
let expected_score = if cfg!(windows) { 69 } else { 72 };
let expected_score = 72;

assert_eq!(
    value,
@@ -59,16 +58,9 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
    "root": root_path.clone(),
    "path": "abexy",
    "file_name": "abexy",
    "score": 88,
    "score": 84,
    "indices": [0, 1, 2],
},
{
    "root": root_path.clone(),
    "path": "abcde",
    "file_name": "abcde",
    "score": 74,
    "indices": [0, 1, 4],
},
{
    "root": root_path.clone(),
    "path": sub_abce_rel,
@@ -76,6 +68,13 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
    "score": expected_score,
    "indices": [4, 5, 7],
},
{
    "root": root_path.clone(),
    "path": "abcde",
    "file_name": "abcde",
    "score": 71,
    "indices": [0, 1, 4],
},
]
})
);

@@ -11,6 +11,7 @@ use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_execpolicy::Policy;
use codex_protocol::ThreadId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
@@ -358,6 +359,8 @@ fn assert_permissions_message(item: &ResponseItem) {
let expected = DeveloperInstructions::from_policy(
    &SandboxPolicy::DangerFullAccess,
    AskForApproval::Never,
    &Policy::empty(),
    false,
    &PathBuf::from("/tmp"),
)
.into_text();

@@ -13,14 +13,13 @@ use axum::extract::State;
use axum::http::HeaderMap;
use axum::http::StatusCode;
use axum::http::header::AUTHORIZATION;
use axum::routing::post;
use axum::routing::get;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::connectors::ConnectorInfo;
use pretty_assertions::assert_eq;
use rmcp::handler::server::ServerHandler;
use rmcp::model::JsonObject;
@@ -71,19 +70,23 @@ async fn list_apps_returns_empty_when_connectors_disabled() -> Result<()> {
#[tokio::test]
async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
        AppInfo {
            id: "beta".to_string(),
            name: "beta".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
@@ -127,6 +130,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
        },
@@ -135,6 +140,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
        },
@@ -150,19 +157,23 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
#[tokio::test]
async fn list_apps_paginates_results() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
        AppInfo {
            id: "beta".to_string(),
            name: "beta".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
@@ -206,6 +217,8 @@ async fn list_apps_paginates_results() -> Result<()> {
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
        }];
@@ -234,6 +247,8 @@ async fn list_apps_paginates_results() -> Result<()> {
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
        }];
@@ -289,13 +304,13 @@ impl ServerHandler for AppListMcpServer {
}

async fn start_apps_server(
    connectors: Vec<ConnectorInfo>,
    connectors: Vec<AppInfo>,
    tools: Vec<Tool>,
) -> Result<(String, JoinHandle<()>)> {
    let state = AppsServerState {
        expected_bearer: "Bearer chatgpt-token".to_string(),
        expected_account_id: "account-123".to_string(),
        response: json!({ "connectors": connectors }),
        response: json!({ "apps": connectors, "next_token": null }),
    };
    let state = Arc::new(state);
    let tools = Arc::new(tools);
@@ -313,7 +328,11 @@ async fn start_apps_server(
);

let router = Router::new()
    .route("/aip/connectors/list_accessible", post(list_connectors))
    .route("/connectors/directory/list", get(list_directory_connectors))
    .route(
        "/connectors/directory/list_workspace",
        get(list_directory_connectors),
    )
    .with_state(state)
    .nest_service("/api/codex/apps", mcp_service);

@@ -324,7 +343,7 @@ async fn start_apps_server(
    Ok((format!("http://{addr}"), handle))
}

async fn list_connectors(
async fn list_directory_connectors(
    State(state): State<Arc<AppsServerState>>,
    headers: HeaderMap,
) -> Result<impl axum::response::IntoResponse, StatusCode> {

282  codex-rs/app-server/tests/suite/v2/compaction.rs  Normal file
@@ -0,0 +1,282 @@
//! End-to-end compaction flow tests.
//!
//! Phases:
//! 1) Arrange: mock responses/compact endpoints + config.
//! 2) Act: start a thread and submit multiple turns to trigger auto-compaction.
//! 3) Assert: verify item/started + item/completed notifications for context compaction.

#![expect(clippy::expect_used)]

use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use app_test_support::write_mock_responses_config_toml;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::features::Feature;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::collections::BTreeMap;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const AUTO_COMPACT_LIMIT: i64 = 1_000;
const COMPACT_PROMPT: &str = "Summarize the conversation.";

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auto_compaction_local_emits_started_and_completed_items() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let sse1 = responses::sse(vec![
        responses::ev_assistant_message("m1", "FIRST_REPLY"),
        responses::ev_completed_with_tokens("r1", 70_000),
    ]);
    let sse2 = responses::sse(vec![
        responses::ev_assistant_message("m2", "SECOND_REPLY"),
        responses::ev_completed_with_tokens("r2", 330_000),
    ]);
    let sse3 = responses::sse(vec![
        responses::ev_assistant_message("m3", "LOCAL_SUMMARY"),
        responses::ev_completed_with_tokens("r3", 200),
    ]);
    let sse4 = responses::sse(vec![
        responses::ev_assistant_message("m4", "FINAL_REPLY"),
        responses::ev_completed_with_tokens("r4", 120),
    ]);
    responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3, sse4]).await;

    let codex_home = TempDir::new()?;
    write_mock_responses_config_toml(
        codex_home.path(),
        &server.uri(),
        &BTreeMap::default(),
        AUTO_COMPACT_LIMIT,
        None,
        "mock_provider",
        COMPACT_PROMPT,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_id = start_thread(&mut mcp).await?;
    for message in ["first", "second", "third"] {
        send_turn_and_wait(&mut mcp, &thread_id, message).await?;
    }

    let started = wait_for_context_compaction_started(&mut mcp).await?;
    let completed = wait_for_context_compaction_completed(&mut mcp).await?;

    let ThreadItem::ContextCompaction { id: started_id } = started.item else {
        unreachable!("started item should be context compaction");
    };
    let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
        unreachable!("completed item should be context compaction");
    };

    assert_eq!(started.thread_id, thread_id);
    assert_eq!(completed.thread_id, thread_id);
    assert_eq!(started_id, completed_id);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let sse1 = responses::sse(vec![
        responses::ev_assistant_message("m1", "FIRST_REPLY"),
        responses::ev_completed_with_tokens("r1", 70_000),
    ]);
    let sse2 = responses::sse(vec![
        responses::ev_assistant_message("m2", "SECOND_REPLY"),
        responses::ev_completed_with_tokens("r2", 330_000),
    ]);
    let sse3 = responses::sse(vec![
        responses::ev_assistant_message("m3", "FINAL_REPLY"),
        responses::ev_completed_with_tokens("r3", 120),
    ]);
    let responses_log = responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3]).await;

    let compacted_history = vec![
        ResponseItem::Message {
            id: None,
            role: "assistant".to_string(),
            content: vec![ContentItem::OutputText {
                text: "REMOTE_COMPACT_SUMMARY".to_string(),
            }],
            end_turn: None,
        },
        ResponseItem::Compaction {
            encrypted_content: "ENCRYPTED_COMPACTION_SUMMARY".to_string(),
        },
    ];
    let compact_mock = responses::mount_compact_json_once(
        &server,
        serde_json::json!({ "output": compacted_history }),
    )
    .await;

    let codex_home = TempDir::new()?;
    let mut features = BTreeMap::default();
    features.insert(Feature::RemoteCompaction, true);
    write_mock_responses_config_toml(
        codex_home.path(),
        &server.uri(),
        &features,
        AUTO_COMPACT_LIMIT,
        Some(true),
        "openai",
        COMPACT_PROMPT,
    )?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("access-chatgpt").plan_type("pro"),
        AuthCredentialsStoreMode::File,
    )?;

    let server_base_url = format!("{}/v1", server.uri());
    let mut mcp = McpProcess::new_with_env(
        codex_home.path(),
        &[
            ("OPENAI_BASE_URL", Some(server_base_url.as_str())),
            ("OPENAI_API_KEY", None),
        ],
    )
    .await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_id = start_thread(&mut mcp).await?;
    for message in ["first", "second", "third"] {
        send_turn_and_wait(&mut mcp, &thread_id, message).await?;
    }

    let started = wait_for_context_compaction_started(&mut mcp).await?;
    let completed = wait_for_context_compaction_completed(&mut mcp).await?;

    let ThreadItem::ContextCompaction { id: started_id } = started.item else {
        unreachable!("started item should be context compaction");
    };
    let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
        unreachable!("completed item should be context compaction");
    };

    assert_eq!(started.thread_id, thread_id);
    assert_eq!(completed.thread_id, thread_id);
    assert_eq!(started_id, completed_id);

    let compact_requests = compact_mock.requests();
    assert_eq!(compact_requests.len(), 1);
    assert_eq!(compact_requests[0].path(), "/v1/responses/compact");

    let response_requests = responses_log.requests();
    assert_eq!(response_requests.len(), 3);

    Ok(())
}

async fn start_thread(mcp: &mut McpProcess) -> Result<String> {
    let thread_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
    Ok(thread.id)
}

async fn send_turn_and_wait(mcp: &mut McpProcess, thread_id: &str, text: &str) -> Result<String> {
    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread_id.to_string(),
            input: vec![V2UserInput::Text {
                text: text.to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
    wait_for_turn_completed(mcp, &turn.id).await?;
    Ok(turn.id)
}

async fn wait_for_turn_completed(mcp: &mut McpProcess, turn_id: &str) -> Result<()> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("turn/completed"),
        )
        .await??;
        let completed: TurnCompletedNotification =
            serde_json::from_value(notification.params.clone().expect("turn/completed params"))?;
        if completed.turn.id == turn_id {
            return Ok(());
        }
    }
}

async fn wait_for_context_compaction_started(
    mcp: &mut McpProcess,
) -> Result<ItemStartedNotification> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("item/started"),
        )
        .await??;
        let started: ItemStartedNotification =
            serde_json::from_value(notification.params.clone().expect("item/started params"))?;
        if let ThreadItem::ContextCompaction { .. } = started.item {
            return Ok(started);
        }
    }
}

async fn wait_for_context_compaction_completed(
    mcp: &mut McpProcess,
) -> Result<ItemCompletedNotification> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("item/completed"),
        )
        .await??;
        let completed: ItemCompletedNotification =
            serde_json::from_value(notification.params.clone().expect("item/completed params"))?;
        if let ThreadItem::ContextCompaction { .. } = completed.item {
            return Ok(completed);
        }
    }
}
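The trigger mechanics in both tests are the same: each mocked turn reports a token total far above the configured model_auto_compact_token_limit of 1_000, so by the third turn the server is forced into a compaction pass. Reusing the suite's own helpers, the minimal arrangement looks roughly like this (a sketch, not an additional test):

// Two oversized turns; the counts from ev_completed_with_tokens are what get
// compared against model_auto_compact_token_limit.
let sse_turns = vec![
    responses::sse(vec![
        responses::ev_assistant_message("m1", "FIRST_REPLY"),
        responses::ev_completed_with_tokens("r1", 70_000), // already over the 1_000 limit
    ]),
    responses::sse(vec![
        responses::ev_assistant_message("m2", "SECOND_REPLY"),
        responses::ev_completed_with_tokens("r2", 330_000),
    ]),
];
responses::mount_sse_sequence(&server, sse_turns).await;

The local variant then serves the summary itself over the same responses endpoint, while the remote variant answers a single request to /v1/responses/compact with pre-compacted history.
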
@@ -2,6 +2,7 @@ mod account;
mod analytics;
mod app_list;
mod collaboration_mode_list;
mod compaction;
mod config_rpc;
mod dynamic_tools;
mod initialize;
@@ -18,5 +19,6 @@ mod thread_read;
mod thread_resume;
mod thread_rollback;
mod thread_start;
mod thread_unarchive;
mod turn_interrupt;
mod turn_start;

@@ -1,6 +1,7 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::create_fake_rollout_with_source;
use app_test_support::rollout_path;
use app_test_support::to_response;
use chrono::DateTime;
@@ -12,8 +13,12 @@ use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SessionSource;
use codex_app_server_protocol::ThreadListResponse;
use codex_app_server_protocol::ThreadSortKey;
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_protocol::ThreadId;
use codex_protocol::protocol::GitInfo as CoreGitInfo;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource;
use pretty_assertions::assert_eq;
use std::cmp::Reverse;
use std::fs;
@@ -38,9 +43,10 @@ async fn list_threads(
    cursor: Option<String>,
    limit: Option<u32>,
    providers: Option<Vec<String>>,
    source_kinds: Option<Vec<ThreadSourceKind>>,
    archived: Option<bool>,
) -> Result<ThreadListResponse> {
    list_threads_with_sort(mcp, cursor, limit, providers, None, archived).await
    list_threads_with_sort(mcp, cursor, limit, providers, source_kinds, None, archived).await
}

async fn list_threads_with_sort(
@@ -48,6 +54,7 @@ async fn list_threads_with_sort(
    cursor: Option<String>,
    limit: Option<u32>,
    providers: Option<Vec<String>>,
    source_kinds: Option<Vec<ThreadSourceKind>>,
    sort_key: Option<ThreadSortKey>,
    archived: Option<bool>,
) -> Result<ThreadListResponse> {
@@ -57,6 +64,7 @@ async fn list_threads_with_sort(
    limit,
    sort_key,
    model_providers: providers,
    source_kinds,
    archived,
})
.await?;
@@ -131,6 +139,7 @@ async fn thread_list_basic_empty() -> Result<()> {
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
assert!(data.is_empty());
@@ -194,6 +203,7 @@ async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
    Some(2),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(data1.len(), 2);
@@ -219,6 +229,7 @@ async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
    Some(2),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
assert!(data2.len() <= 2);
@@ -269,6 +280,7 @@ async fn thread_list_respects_provider_filter() -> Result<()> {
    Some(10),
    Some(vec!["other_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(data.len(), 1);
@@ -287,6 +299,207 @@ async fn thread_list_respects_provider_filter() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn thread_list_empty_source_kinds_defaults_to_interactive_only() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let cli_id = create_fake_rollout(
        codex_home.path(),
        "2025-02-01T10-00-00",
        "2025-02-01T10:00:00Z",
        "CLI",
        Some("mock_provider"),
        None,
    )?;
    let exec_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-01T11-00-00",
        "2025-02-01T11:00:00Z",
        "Exec",
        Some("mock_provider"),
        None,
        CoreSessionSource::Exec,
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let ThreadListResponse { data, next_cursor } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(Vec::new()),
        None,
    )
    .await?;

    assert_eq!(next_cursor, None);
    let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(ids, vec![cli_id.as_str()]);
    assert_ne!(cli_id, exec_id);
    assert_eq!(data[0].source, SessionSource::Cli);

    Ok(())
}

#[tokio::test]
async fn thread_list_filters_by_source_kind_subagent_thread_spawn() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let cli_id = create_fake_rollout(
        codex_home.path(),
        "2025-02-01T10-00-00",
        "2025-02-01T10:00:00Z",
        "CLI",
        Some("mock_provider"),
        None,
    )?;

    let parent_thread_id = ThreadId::from_string(&Uuid::new_v4().to_string())?;
    let subagent_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-01T11-00-00",
        "2025-02-01T11:00:00Z",
        "SubAgent",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        }),
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let ThreadListResponse { data, next_cursor } = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentThreadSpawn]),
        None,
    )
    .await?;

    assert_eq!(next_cursor, None);
    let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(ids, vec![subagent_id.as_str()]);
    assert_ne!(cli_id, subagent_id);
    assert!(matches!(data[0].source, SessionSource::SubAgent(_)));

    Ok(())
}

#[tokio::test]
async fn thread_list_filters_by_subagent_variant() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let parent_thread_id = ThreadId::from_string(&Uuid::new_v4().to_string())?;

    let review_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T09-00-00",
        "2025-02-02T09:00:00Z",
        "Review",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Review),
    )?;
    let compact_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T10-00-00",
        "2025-02-02T10:00:00Z",
        "Compact",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Compact),
    )?;
    let spawn_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T11-00-00",
        "2025-02-02T11:00:00Z",
        "Spawn",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        }),
    )?;
    let other_id = create_fake_rollout_with_source(
        codex_home.path(),
        "2025-02-02T12-00-00",
        "2025-02-02T12:00:00Z",
        "Other",
        Some("mock_provider"),
        None,
        CoreSessionSource::SubAgent(SubAgentSource::Other("custom".to_string())),
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;

    let review = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentReview]),
        None,
    )
    .await?;
    let review_ids: Vec<_> = review
        .data
        .iter()
        .map(|thread| thread.id.as_str())
        .collect();
    assert_eq!(review_ids, vec![review_id.as_str()]);

    let compact = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentCompact]),
        None,
    )
    .await?;
    let compact_ids: Vec<_> = compact
        .data
        .iter()
        .map(|thread| thread.id.as_str())
        .collect();
    assert_eq!(compact_ids, vec![compact_id.as_str()]);

    let spawn = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentThreadSpawn]),
        None,
    )
    .await?;
    let spawn_ids: Vec<_> = spawn.data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(spawn_ids, vec![spawn_id.as_str()]);

    let other = list_threads(
        &mut mcp,
        None,
        Some(10),
        Some(vec!["mock_provider".to_string()]),
        Some(vec![ThreadSourceKind::SubAgentOther]),
        None,
    )
    .await?;
    let other_ids: Vec<_> = other.data.iter().map(|thread| thread.id.as_str()).collect();
    assert_eq!(other_ids, vec![other_id.as_str()]);

    Ok(())
}

#[tokio::test]
async fn thread_list_fetches_until_limit_or_exhausted() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -319,6 +532,7 @@ async fn thread_list_fetches_until_limit_or_exhausted() -> Result<()> {
    Some(8),
    Some(vec!["target_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(
@@ -364,6 +578,7 @@ async fn thread_list_enforces_max_limit() -> Result<()> {
    Some(200),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(
@@ -410,6 +625,7 @@ async fn thread_list_stops_when_not_enough_filtered_results_exist() -> Result<()
    Some(10),
    Some(vec!["target_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(
@@ -457,6 +673,7 @@ async fn thread_list_includes_git_info() -> Result<()> {
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
let thread = data
@@ -516,6 +733,7 @@ async fn thread_list_default_sorts_by_created_at() -> Result<()> {
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
    None,
)
.await?;

@@ -575,6 +793,7 @@ async fn thread_list_sort_updated_at_orders_by_mtime() -> Result<()> {
    None,
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(ThreadSortKey::UpdatedAt),
    None,
)
@@ -639,6 +858,7 @@ async fn thread_list_updated_at_paginates_with_cursor() -> Result<()> {
    None,
    Some(2),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(ThreadSortKey::UpdatedAt),
    None,
)
@@ -655,6 +875,7 @@ async fn thread_list_updated_at_paginates_with_cursor() -> Result<()> {
    Some(cursor1),
    Some(2),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(ThreadSortKey::UpdatedAt),
    None,
)
@@ -696,6 +917,7 @@ async fn thread_list_created_at_tie_breaks_by_uuid() -> Result<()> {
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;

@@ -747,6 +969,7 @@ async fn thread_list_updated_at_tie_breaks_by_uuid() -> Result<()> {
    None,
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(ThreadSortKey::UpdatedAt),
    None,
)
@@ -787,6 +1010,7 @@ async fn thread_list_updated_at_uses_mtime() -> Result<()> {
    None,
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(ThreadSortKey::UpdatedAt),
    None,
)
@@ -846,6 +1070,7 @@ async fn thread_list_archived_filter() -> Result<()> {
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    None,
)
.await?;
assert_eq!(data.len(), 1);
@@ -856,6 +1081,7 @@ async fn thread_list_archived_filter() -> Result<()> {
    None,
    Some(10),
    Some(vec!["mock_provider".to_string()]),
    None,
    Some(true),
)
.await?;
@@ -878,6 +1104,7 @@ async fn thread_list_invalid_cursor_returns_error() -> Result<()> {
    limit: Some(2),
    sort_key: None,
    model_providers: Some(vec!["mock_provider".to_string()]),
    source_kinds: None,
    archived: None,
})
.await?;

@@ -2,7 +2,9 @@ use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout_with_text_elements;
|
||||
use app_test_support::create_mock_responses_server_repeating_assistant;
|
||||
use app_test_support::rollout_path;
|
||||
use app_test_support::to_response;
|
||||
use chrono::Utc;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SessionSource;
|
||||
@@ -22,6 +24,8 @@ use codex_protocol::user_input::TextElement;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs::FileTimes;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
@@ -147,6 +151,116 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
async fn thread_resume_without_overrides_does_not_change_updated_at_or_mtime() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;
    let thread_id = rollout.conversation_id.clone();

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread_id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.updated_at, rollout.expected_updated_at);

    let after_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert_eq!(after_modified, rollout.before_modified);

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert!(after_turn_modified > rollout.before_modified);

    Ok(())
}

#[tokio::test]
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: rollout.conversation_id.clone(),
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.updated_at, rollout.expected_updated_at);

    let after_resume_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert_eq!(after_resume_modified, rollout.before_modified);

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: rollout.conversation_id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert!(after_turn_modified > rollout.before_modified);

    Ok(())
}

#[tokio::test]
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -364,3 +478,51 @@ stream_max_retries = 0
        ),
    )
}

fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
    let parsed = chrono::DateTime::parse_from_rfc3339(updated_at_rfc3339)?.with_timezone(&Utc);
    let times = FileTimes::new().set_modified(parsed.into());
    std::fs::OpenOptions::new()
        .append(true)
        .open(path)?
        .set_times(times)?;
    Ok(())
}

struct RolloutFixture {
    conversation_id: String,
    rollout_file_path: PathBuf,
    before_modified: std::time::SystemTime,
    expected_updated_at: i64,
}

fn setup_rollout_fixture(codex_home: &Path, server_uri: &str) -> Result<RolloutFixture> {
    create_config_toml(codex_home, server_uri)?;

    let preview = "Saved user message";
    let filename_ts = "2025-01-05T12-00-00";
    let meta_rfc3339 = "2025-01-05T12:00:00Z";
    let expected_updated_at_rfc3339 = "2025-01-07T00:00:00Z";
    let conversation_id = create_fake_rollout_with_text_elements(
        codex_home,
        filename_ts,
        meta_rfc3339,
        preview,
        Vec::new(),
        Some("mock_provider"),
        None,
    )?;
    let rollout_file_path = rollout_path(codex_home, filename_ts, &conversation_id);
    set_rollout_mtime(rollout_file_path.as_path(), expected_updated_at_rfc3339)?;
    let before_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
    let expected_updated_at = chrono::DateTime::parse_from_rfc3339(expected_updated_at_rfc3339)?
        .with_timezone(&Utc)
        .timestamp();

    Ok(RolloutFixture {
        conversation_id,
        rollout_file_path,
        before_modified,
        expected_updated_at,
    })
}

codex-rs/app-server/tests/suite/v2/thread_unarchive.rs (new file, 101 lines)
@@ -0,0 +1,101 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadArchiveResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::ThreadUnarchiveResponse;
use codex_core::find_archived_thread_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);

#[tokio::test]
async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let rollout_path = find_thread_path_by_id_str(codex_home.path(), &thread.id)
        .await?
        .expect("expected rollout path for thread id to exist");

    let archive_id = mcp
        .send_thread_archive_request(ThreadArchiveParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let archive_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
    )
    .await??;
    let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;

    let archived_path = find_archived_thread_path_by_id_str(codex_home.path(), &thread.id)
        .await?
        .expect("expected archived rollout path for thread id to exist");
    let archived_path_display = archived_path.display();
    assert!(
        archived_path.exists(),
        "expected {archived_path_display} to exist"
    );

    let unarchive_id = mcp
        .send_thread_unarchive_request(ThreadUnarchiveParams {
            thread_id: thread.id.clone(),
        })
        .await?;
    let unarchive_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(unarchive_id)),
    )
    .await??;
    let _: ThreadUnarchiveResponse = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;

    let rollout_path_display = rollout_path.display();
    assert!(
        rollout_path.exists(),
        "expected rollout path {rollout_path_display} to be restored"
    );
    assert!(
        !archived_path.exists(),
        "expected archived rollout path {archived_path_display} to be moved"
    );

    Ok(())
}

fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(config_toml, config_contents())
}

fn config_contents() -> &'static str {
    r#"model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
"#
}
@@ -1,4 +1,5 @@
use crate::types::CodeTaskDetailsResponse;
use crate::types::ConfigFileResponse;
use crate::types::CreditStatusDetails;
use crate::types::PaginatedListTaskListItem;
use crate::types::RateLimitStatusPayload;
@@ -244,6 +245,20 @@ impl Client {
        self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
    }

    /// Fetch the managed requirements file from codex-backend.
    ///
    /// `GET /api/codex/config/requirements` (Codex API style) or
    /// `GET /wham/config/requirements` (ChatGPT backend-api style).
    pub async fn get_config_requirements_file(&self) -> Result<ConfigFileResponse> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/config/requirements", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/config/requirements", self.base_url),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<ConfigFileResponse>(&url, &ct, &body)
    }

    /// Create a new task (user turn) by POSTing to the appropriate backend path
    /// based on `path_style`. Returns the created task id.
    pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {

@@ -4,6 +4,7 @@ pub mod types;
pub use client::Client;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::ConfigFileResponse;
pub use types::PaginatedListTaskListItem;
pub use types::TaskListItem;
pub use types::TurnAttemptsSiblingTurnsResponse;

@@ -1,3 +1,4 @@
pub use codex_backend_openapi_models::models::ConfigFileResponse;
pub use codex_backend_openapi_models::models::CreditStatusDetails;
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::PlanType;

@@ -17,6 +17,8 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
codex-git = { workspace = true }
urlencoding = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

@@ -5,13 +5,21 @@ use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

use anyhow::Context;
use serde::Serialize;
use serde::de::DeserializeOwned;
use std::time::Duration;

/// Make a GET request to the ChatGPT backend API.
pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    config: &Config,
    path: String,
) -> anyhow::Result<T> {
    chatgpt_get_request_with_timeout(config, path, None).await
}

pub(crate) async fn chatgpt_get_request_with_timeout<T: DeserializeOwned>(
    config: &Config,
    path: String,
    timeout: Option<Duration>,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
@@ -28,48 +36,17 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
        anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
    });

    let response = client
    let mut request = client
        .get(&url)
        .bearer_auth(&token.access_token)
        .header("chatgpt-account-id", account_id?)
        .header("Content-Type", "application/json")
        .send()
        .await
        .context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response
            .json()
            .await
            .context("Failed to parse JSON response")?;
        Ok(result)
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {status}: {body}")
    }
}

pub(crate) async fn chatgpt_post_request<T: DeserializeOwned, P: Serialize>(
    config: &Config,
    access_token: &str,
    account_id: &str,
    path: &str,
    payload: &P,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    let client = create_client();
    let url = format!("{chatgpt_base_url}{path}");

    let response = client
        .post(&url)
        .bearer_auth(access_token)
        .header("chatgpt-account-id", account_id)
        .header("Content-Type", "application/json")
        .json(payload)
        .send()
        .await
        .context("Failed to send request")?;
        .header("Content-Type", "application/json");

    if let Some(timeout) = timeout {
        request = request.timeout(timeout);
    }

    let response = request.send().await.context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response

@@ -1,43 +1,45 @@
use std::collections::HashMap;

use codex_core::config::Config;
use codex_core::features::Feature;
use serde::Deserialize;
use serde::Serialize;
use std::time::Duration;

use crate::chatgpt_client::chatgpt_post_request;
use crate::chatgpt_client::chatgpt_get_request_with_timeout;
use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

pub use codex_core::connectors::ConnectorInfo;
pub use codex_core::connectors::AppInfo;
pub use codex_core::connectors::connector_display_label;
use codex_core::connectors::connector_install_url;
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools;
use codex_core::connectors::merge_connectors;

#[derive(Debug, Serialize)]
struct ListConnectorsRequest {
    principals: Vec<Principal>,
}

#[derive(Debug, Serialize)]
struct Principal {
    #[serde(rename = "type")]
    principal_type: PrincipalType,
    id: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum PrincipalType {
    User,
}

#[derive(Debug, Deserialize)]
struct ListConnectorsResponse {
    connectors: Vec<ConnectorInfo>,
struct DirectoryListResponse {
    apps: Vec<DirectoryApp>,
    #[serde(alias = "nextToken")]
    next_token: Option<String>,
}

pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
#[derive(Debug, Deserialize, Clone)]
struct DirectoryApp {
    id: String,
    name: String,
    description: Option<String>,
    #[serde(alias = "logoUrl")]
    logo_url: Option<String>,
    #[serde(alias = "logoUrlDark")]
    logo_url_dark: Option<String>,
    #[serde(alias = "distributionChannel")]
    distribution_channel: Option<String>,
    visibility: Option<String>,
}

const DIRECTORY_CONNECTORS_TIMEOUT: Duration = Duration::from_secs(60);

pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Ok(Vec::new());
    }
    let (connectors_result, accessible_result) = tokio::join!(
@@ -46,11 +48,12 @@ pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInf
    );
    let connectors = connectors_result?;
    let accessible = accessible_result?;
    Ok(merge_connectors(connectors, accessible))
    let merged = merge_connectors(connectors, accessible);
    Ok(filter_disallowed_connectors(merged))
}

pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Ok(Vec::new());
    }
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
@@ -58,56 +61,149 @@ pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<Connecto

    let token_data =
        get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;
    let user_id = token_data
        .id_token
        .chatgpt_user_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT user ID not available, please re-run `codex login`")
        })?;
    let account_id = token_data
        .id_token
        .chatgpt_account_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
        })?;
    let principal_id = format!("{user_id}__{account_id}");
    let request = ListConnectorsRequest {
        principals: vec![Principal {
            principal_type: PrincipalType::User,
            id: principal_id,
        }],
    };
    let response: ListConnectorsResponse = chatgpt_post_request(
        config,
        token_data.access_token.as_str(),
        account_id,
        "/aip/connectors/list_accessible?skip_actions=true&external_logos=true",
        &request,
    )
    .await?;
    let mut connectors = response.connectors;
    let mut apps = list_directory_connectors(config).await?;
    if token_data.id_token.is_workspace_account() {
        apps.extend(list_workspace_connectors(config).await?);
    }
    let mut connectors = merge_directory_apps(apps)
        .into_iter()
        .map(directory_app_to_app_info)
        .collect::<Vec<_>>();
    for connector in &mut connectors {
        let install_url = match connector.install_url.take() {
            Some(install_url) => install_url,
            None => connector_install_url(&connector.connector_name, &connector.connector_id),
            None => connector_install_url(&connector.name, &connector.id),
        };
        connector.connector_name =
            normalize_connector_name(&connector.connector_name, &connector.connector_id);
        connector.connector_description =
            normalize_connector_value(connector.connector_description.as_deref());
        connector.name = normalize_connector_name(&connector.name, &connector.id);
        connector.description = normalize_connector_value(connector.description.as_deref());
        connector.install_url = Some(install_url);
        connector.is_accessible = false;
    }
    connectors.sort_by(|left, right| {
        left.connector_name
            .cmp(&right.connector_name)
            .then_with(|| left.connector_id.cmp(&right.connector_id))
        left.name
            .cmp(&right.name)
            .then_with(|| left.id.cmp(&right.id))
    });
    Ok(connectors)
}

async fn list_directory_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
    let mut apps = Vec::new();
    let mut next_token: Option<String> = None;
    loop {
        let path = match next_token.as_deref() {
            Some(token) => {
                let encoded_token = urlencoding::encode(token);
                format!("/connectors/directory/list?tier=categorized&token={encoded_token}")
            }
            None => "/connectors/directory/list?tier=categorized".to_string(),
        };
        let response: DirectoryListResponse =
            chatgpt_get_request_with_timeout(config, path, Some(DIRECTORY_CONNECTORS_TIMEOUT))
                .await?;
        apps.extend(
            response
                .apps
                .into_iter()
                .filter(|app| !is_hidden_directory_app(app)),
        );
        next_token = response
            .next_token
            .map(|token| token.trim().to_string())
            .filter(|token| !token.is_empty());
        if next_token.is_none() {
            break;
        }
    }
    Ok(apps)
}

async fn list_workspace_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
    let response: anyhow::Result<DirectoryListResponse> = chatgpt_get_request_with_timeout(
        config,
        "/connectors/directory/list_workspace".to_string(),
        Some(DIRECTORY_CONNECTORS_TIMEOUT),
    )
    .await;
    match response {
        Ok(response) => Ok(response
            .apps
            .into_iter()
            .filter(|app| !is_hidden_directory_app(app))
            .collect()),
        Err(_) => Ok(Vec::new()),
    }
}

fn merge_directory_apps(apps: Vec<DirectoryApp>) -> Vec<DirectoryApp> {
    let mut merged: HashMap<String, DirectoryApp> = HashMap::new();
    for app in apps {
        if let Some(existing) = merged.get_mut(&app.id) {
            merge_directory_app(existing, app);
        } else {
            merged.insert(app.id.clone(), app);
        }
    }
    merged.into_values().collect()
}

fn merge_directory_app(existing: &mut DirectoryApp, incoming: DirectoryApp) {
    let DirectoryApp {
        id: _,
        name,
        description,
        logo_url,
        logo_url_dark,
        distribution_channel,
        visibility: _,
    } = incoming;

    let incoming_name_is_empty = name.trim().is_empty();
    if existing.name.trim().is_empty() && !incoming_name_is_empty {
        existing.name = name;
    }

    let incoming_description_present = description
        .as_deref()
        .map(|value| !value.trim().is_empty())
        .unwrap_or(false);
    let existing_description_present = existing
        .description
        .as_deref()
        .map(|value| !value.trim().is_empty())
        .unwrap_or(false);
    if !existing_description_present && incoming_description_present {
        existing.description = description;
    }

    if existing.logo_url.is_none() && logo_url.is_some() {
        existing.logo_url = logo_url;
    }
    if existing.logo_url_dark.is_none() && logo_url_dark.is_some() {
        existing.logo_url_dark = logo_url_dark;
    }
    if existing.distribution_channel.is_none() && distribution_channel.is_some() {
        existing.distribution_channel = distribution_channel;
    }
}

fn is_hidden_directory_app(app: &DirectoryApp) -> bool {
    matches!(app.visibility.as_deref(), Some("HIDDEN"))
}

fn directory_app_to_app_info(app: DirectoryApp) -> AppInfo {
    AppInfo {
        id: app.id,
        name: app.name,
        description: app.description,
        logo_url: app.logo_url,
        logo_url_dark: app.logo_url_dark,
        distribution_channel: app.distribution_channel,
        install_url: None,
        is_accessible: false,
    }
}

fn normalize_connector_name(name: &str, connector_id: &str) -> String {
    let trimmed = name.trim();
    if trimmed.is_empty() {
@@ -123,3 +219,87 @@ fn normalize_connector_value(value: Option<&str>) -> Option<String> {
        .filter(|value| !value.is_empty())
        .map(str::to_string)
}

const ALLOWED_APPS_SDK_APPS: &[&str] = &["asdk_app_69781557cc1481919cf5e9824fa2e792"];
const DISALLOWED_CONNECTOR_IDS: &[&str] = &["asdk_app_6938a94a61d881918ef32cb999ff937c"];
const DISALLOWED_CONNECTOR_PREFIX: &str = "connector_openai_";

fn filter_disallowed_connectors(connectors: Vec<AppInfo>) -> Vec<AppInfo> {
    // TODO: Support Apps SDK connectors.
    connectors
        .into_iter()
        .filter(is_connector_allowed)
        .collect()
}

fn is_connector_allowed(connector: &AppInfo) -> bool {
    let connector_id = connector.id.as_str();
    if connector_id.starts_with(DISALLOWED_CONNECTOR_PREFIX)
        || DISALLOWED_CONNECTOR_IDS.contains(&connector_id)
    {
        return false;
    }
    if connector_id.starts_with("asdk_app_") {
        return ALLOWED_APPS_SDK_APPS.contains(&connector_id);
    }
    true
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn app(id: &str) -> AppInfo {
        AppInfo {
            id: id.to_string(),
            name: id.to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        }
    }

    #[test]
    fn filters_internal_asdk_connectors() {
        let filtered = filter_disallowed_connectors(vec![app("asdk_app_hidden"), app("alpha")]);
        assert_eq!(filtered, vec![app("alpha")]);
    }

    #[test]
    fn allows_whitelisted_asdk_connectors() {
        let filtered = filter_disallowed_connectors(vec![
            app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
            app("beta"),
        ]);
        assert_eq!(
            filtered,
            vec![
                app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
                app("beta")
            ]
        );
    }

    #[test]
    fn filters_openai_connectors() {
        let filtered = filter_disallowed_connectors(vec![
            app("connector_openai_foo"),
            app("connector_openai_bar"),
            app("gamma"),
        ]);
        assert_eq!(filtered, vec![app("gamma")]);
    }

    #[test]
    fn filters_disallowed_connector_ids() {
        let filtered = filter_disallowed_connectors(vec![
            app("asdk_app_6938a94a61d881918ef32cb999ff937c"),
            app("delta"),
        ]);
        assert_eq!(filtered, vec![app("delta")]);
    }
}

@@ -136,7 +136,8 @@ async fn run_command_under_sandbox(
    if let SandboxType::Windows = sandbox_type {
        #[cfg(target_os = "windows")]
        {
            use codex_core::features::Feature;
            use codex_core::windows_sandbox::WindowsSandboxLevelExt;
            use codex_protocol::config_types::WindowsSandboxLevel;
            use codex_windows_sandbox::run_windows_sandbox_capture;
            use codex_windows_sandbox::run_windows_sandbox_capture_elevated;

@@ -147,8 +148,10 @@ async fn run_command_under_sandbox(
            let env_map = env.clone();
            let command_vec = command.clone();
            let base_dir = config.codex_home.clone();
            let use_elevated = config.features.enabled(Feature::WindowsSandbox)
                && config.features.enabled(Feature::WindowsSandboxElevated);
            let use_elevated = matches!(
                WindowsSandboxLevel::from_config(&config),
                WindowsSandboxLevel::Elevated
            );

            // Preflight audit is invoked elsewhere at the appropriate times.
            let res = tokio::task::spawn_blocking(move || {

@@ -39,6 +39,9 @@ use crate::mcp_cmd::McpCli;

use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::find_codex_home;
use codex_core::features::Stage;
use codex_core::features::is_known_feature_key;
use codex_core::terminal::TerminalName;

@@ -147,7 +150,7 @@ struct ResumeCommand {
    session_id: Option<String>,

    /// Continue the most recent session without showing the picker.
    #[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
    #[arg(long = "last", default_value_t = false)]
    last: bool,

    /// Show all sessions (disables cwd filtering and shows CWD column).
@@ -448,13 +451,23 @@ struct FeaturesCli {
enum FeaturesSubcommand {
    /// List known features with their stage and effective state.
    List,
    /// Enable a feature in config.toml.
    Enable(FeatureSetArgs),
    /// Disable a feature in config.toml.
    Disable(FeatureSetArgs),
}

#[derive(Debug, Parser)]
struct FeatureSetArgs {
    /// Feature key to update (for example: unified_exec).
    feature: String,
}

fn stage_str(stage: codex_core::features::Stage) -> &'static str {
    use codex_core::features::Stage;
    match stage {
        Stage::Beta => "experimental",
        Stage::Experimental { .. } => "beta",
        Stage::UnderDevelopment => "under development",
        Stage::Experimental { .. } => "experimental",
        Stage::Stable => "stable",
        Stage::Deprecated => "deprecated",
        Stage::Removed => "removed",
@@ -711,12 +724,69 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                    println!("{name:<name_width$} {stage:<stage_width$} {enabled}");
                }
            }
            FeaturesSubcommand::Enable(FeatureSetArgs { feature }) => {
                enable_feature_in_config(&interactive, &feature).await?;
            }
            FeaturesSubcommand::Disable(FeatureSetArgs { feature }) => {
                disable_feature_in_config(&interactive, &feature).await?;
            }
        },
    }

    Ok(())
}

async fn enable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
    FeatureToggles::validate_feature(feature)?;
    let codex_home = find_codex_home()?;
    ConfigEditsBuilder::new(&codex_home)
        .with_profile(interactive.config_profile.as_deref())
        .set_feature_enabled(feature, true)
        .apply()
        .await?;
    println!("Enabled feature `{feature}` in config.toml.");
    maybe_print_under_development_feature_warning(&codex_home, interactive, feature);
    Ok(())
}

async fn disable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
    FeatureToggles::validate_feature(feature)?;
    let codex_home = find_codex_home()?;
    ConfigEditsBuilder::new(&codex_home)
        .with_profile(interactive.config_profile.as_deref())
        .set_feature_enabled(feature, false)
        .apply()
        .await?;
    println!("Disabled feature `{feature}` in config.toml.");
    Ok(())
}

fn maybe_print_under_development_feature_warning(
    codex_home: &std::path::Path,
    interactive: &TuiCli,
    feature: &str,
) {
    if interactive.config_profile.is_some() {
        return;
    }

    let Some(spec) = codex_core::features::FEATURES
        .iter()
        .find(|spec| spec.key == feature)
    else {
        return;
    };
    if !matches!(spec.stage, Stage::UnderDevelopment) {
        return;
    }

    let config_path = codex_home.join(codex_core::config::CONFIG_TOML_FILE);
    eprintln!(
        "Under-development features enabled: {feature}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {}.",
        config_path.display()
    );
}

/// Prepend root-level overrides so they have lower precedence than
/// CLI-specific ones specified after the subcommand (if any).
fn prepend_config_flags(
@@ -932,6 +1002,24 @@ mod tests {
        finalize_fork_interactive(interactive, root_overrides, session_id, last, all, fork_cli)
    }

    #[test]
    fn exec_resume_last_accepts_prompt_positional() {
        let cli =
            MultitoolCli::try_parse_from(["codex", "exec", "--json", "resume", "--last", "2+2"])
                .expect("parse should succeed");

        let Some(Subcommand::Exec(exec)) = cli.subcommand else {
            panic!("expected exec subcommand");
        };
        let Some(codex_exec::Command::Resume(args)) = exec.command else {
            panic!("expected exec resume");
        };

        assert!(args.last);
        assert_eq!(args.session_id, None);
        assert_eq!(args.prompt.as_deref(), Some("2+2"));
    }

    fn app_server_from_args(args: &[&str]) -> AppServerCommand {
        let cli = MultitoolCli::try_parse_from(args).expect("parse");
        let Subcommand::AppServer(app_server) = cli.subcommand.expect("app-server present") else {
@@ -1153,6 +1241,32 @@ mod tests {
        assert!(app_server.analytics_default_enabled);
    }

    #[test]
    fn features_enable_parses_feature_name() {
        let cli = MultitoolCli::try_parse_from(["codex", "features", "enable", "unified_exec"])
            .expect("parse should succeed");
        let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
            panic!("expected features subcommand");
        };
        let FeaturesSubcommand::Enable(FeatureSetArgs { feature }) = sub else {
            panic!("expected features enable");
        };
        assert_eq!(feature, "unified_exec");
    }

    #[test]
    fn features_disable_parses_feature_name() {
        let cli = MultitoolCli::try_parse_from(["codex", "features", "disable", "shell_tool"])
            .expect("parse should succeed");
        let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
            panic!("expected features subcommand");
        };
        let FeaturesSubcommand::Disable(FeatureSetArgs { feature }) = sub else {
            panic!("expected features disable");
        };
        assert_eq!(feature, "shell_tool");
    }

    #[test]
    fn feature_toggles_known_features_generate_overrides() {
        let toggles = FeatureToggles {

@@ -13,11 +13,12 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::auth::McpOAuthLoginSupport;
use codex_core::mcp::auth::compute_auth_statuses;
use codex_core::mcp::auth::oauth_login_support;
use codex_core::protocol::McpAuthStatus;
use codex_rmcp_client::delete_oauth_tokens;
use codex_rmcp_client::perform_oauth_login;
use codex_rmcp_client::supports_oauth_login;

/// Subcommands:
/// - `list` — list configured servers (with `--json`)
@@ -247,6 +248,7 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    };

    servers.insert(name.clone(), new_entry);
@@ -259,33 +261,25 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re

    println!("Added global MCP server '{name}'.");

    if let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var: None,
        http_headers,
        env_http_headers,
    } = transport
    {
        match supports_oauth_login(&url).await {
            Ok(true) => {
                println!("Detected OAuth support. Starting OAuth flow…");
                perform_oauth_login(
                    &name,
                    &url,
                    config.mcp_oauth_credentials_store_mode,
                    http_headers.clone(),
                    env_http_headers.clone(),
                    &Vec::new(),
                    config.mcp_oauth_callback_port,
                )
                .await?;
                println!("Successfully logged in.");
            }
            Ok(false) => {}
            Err(_) => println!(
                "MCP server may or may not require login. Run `codex mcp login {name}` to login."
            ),
    match oauth_login_support(&transport).await {
        McpOAuthLoginSupport::Supported(oauth_config) => {
            println!("Detected OAuth support. Starting OAuth flow…");
            perform_oauth_login(
                &name,
                &oauth_config.url,
                config.mcp_oauth_credentials_store_mode,
                oauth_config.http_headers,
                oauth_config.env_http_headers,
                &Vec::new(),
                config.mcp_oauth_callback_port,
            )
            .await?;
            println!("Successfully logged in.");
        }
        McpOAuthLoginSupport::Unsupported => {}
        McpOAuthLoginSupport::Unknown(_) => println!(
            "MCP server may or may not require login. Run `codex mcp login {name}` to login."
        ),
    }

    Ok(())
@@ -348,6 +342,11 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
        _ => bail!("OAuth login is only supported for streamable HTTP servers."),
    };

    let mut scopes = scopes;
    if scopes.is_empty() {
        scopes = server.scopes.clone().unwrap_or_default();
    }

    perform_oauth_login(
        &name,
        &url,

codex-rs/cli/tests/features.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use std::path::Path;

use anyhow::Result;
use predicates::str::contains;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
    let mut cmd = assert_cmd::Command::new(codex_utils_cargo_bin::cargo_bin("codex")?);
    cmd.env("CODEX_HOME", codex_home);
    Ok(cmd)
}

#[tokio::test]
async fn features_enable_writes_feature_flag_to_config() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut cmd = codex_command(codex_home.path())?;
    cmd.args(["features", "enable", "unified_exec"])
        .assert()
        .success()
        .stdout(contains("Enabled feature `unified_exec` in config.toml."));

    let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
    assert!(config.contains("[features]"));
    assert!(config.contains("unified_exec = true"));

    Ok(())
}

#[tokio::test]
async fn features_disable_writes_feature_flag_to_config() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut cmd = codex_command(codex_home.path())?;
    cmd.args(["features", "disable", "shell_tool"])
        .assert()
        .success()
        .stdout(contains("Disabled feature `shell_tool` in config.toml."));

    let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
    assert!(config.contains("[features]"));
    assert!(config.contains("shell_tool = false"));

    Ok(())
}

#[tokio::test]
async fn features_enable_under_development_feature_prints_warning() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut cmd = codex_command(codex_home.path())?;
    cmd.args(["features", "enable", "sqlite"])
        .assert()
        .success()
        .stderr(contains("Under-development features enabled: sqlite."));

    Ok(())
}
@@ -291,7 +291,7 @@ pub fn process_responses_event(
                if let Ok(item) = serde_json::from_value::<ResponseItem>(item_val) {
                    return Ok(Some(ResponseEvent::OutputItemAdded(item)));
                }
                debug!("failed to parse ResponseItem from output_item.done");
                debug!("failed to parse ResponseItem from output_item.added");
            }
        }
        "response.reasoning_summary_part.added" => {

@@ -0,0 +1,40 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigFileResponse {
    #[serde(rename = "contents", skip_serializing_if = "Option::is_none")]
    pub contents: Option<String>,
    #[serde(rename = "sha256", skip_serializing_if = "Option::is_none")]
    pub sha256: Option<String>,
    #[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")]
    pub updated_at: Option<String>,
    #[serde(rename = "updated_by_user_id", skip_serializing_if = "Option::is_none")]
    pub updated_by_user_id: Option<String>,
}

impl ConfigFileResponse {
    pub fn new(
        contents: Option<String>,
        sha256: Option<String>,
        updated_at: Option<String>,
        updated_by_user_id: Option<String>,
    ) -> ConfigFileResponse {
        ConfigFileResponse {
            contents,
            sha256,
            updated_at,
            updated_by_user_id,
        }
    }
}
@@ -3,6 +3,10 @@
// Currently export only the types referenced by the workspace
// The process for this will change

// Config
pub mod config_file_response;
pub use self::config_file_response::ConfigFileResponse;

// Cloud Tasks
pub mod code_task_details_response;
pub use self::code_task_details_response::CodeTaskDetailsResponse;

@@ -21,6 +21,8 @@ anyhow = { workspace = true }
arc-swap = "1.8.0"
async-channel = { workspace = true }
async-trait = { workspace = true }
azure_core = { workspace = true }
azure_identity = { workspace = true }
base64 = { workspace = true }
chardetng = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
@@ -37,6 +39,7 @@ codex-keyring-store = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-state = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
@@ -55,6 +58,7 @@ indoc = { workspace = true }
keyring = { workspace = true, features = ["crypto-rust"] }
libc = { workspace = true }
mcp-types = { workspace = true }
multimap = { workspace = true }
once_cell = { workspace = true }
os_info = { workspace = true }
rand = { workspace = true }

@@ -144,6 +144,9 @@
    "apply_patch_freeform": {
      "type": "boolean"
    },
    "apps": {
      "type": "boolean"
    },
    "child_agents_md": {
      "type": "boolean"
    },
@@ -180,6 +183,9 @@
    "include_apply_patch_tool": {
      "type": "boolean"
    },
    "personality": {
      "type": "boolean"
    },
    "powershell_utf8": {
      "type": "boolean"
    },
@@ -189,6 +195,9 @@
    "remote_models": {
      "type": "boolean"
    },
    "request_rule": {
      "type": "boolean"
    },
    "responses_websockets": {
      "type": "boolean"
    },
@@ -198,6 +207,15 @@
    "shell_tool": {
      "type": "boolean"
    },
    "skill_env_var_dependency_prompt": {
      "type": "boolean"
    },
    "skill_mcp_dependency_install": {
      "type": "boolean"
    },
    "sqlite": {
      "type": "boolean"
    },
    "steer": {
      "type": "boolean"
    },
@@ -425,6 +443,11 @@
      "minimum": 0.0,
      "type": "integer"
    },
    "supports_websockets": {
      "default": false,
      "description": "Whether this provider supports the Responses API WebSocket transport.",
      "type": "boolean"
    },
    "wire_api": {
      "allOf": [
        {
@@ -441,7 +464,6 @@
      "type": "object"
    },
    "Notice": {
      "additionalProperties": false,
      "description": "Settings for notices we display to users via the tui and app-server clients (primarily the Codex IDE extension). NOTE: these are different from notifications - notices are warnings, NUX screens, acknowledgements, etc.",
      "properties": {
        "hide_full_access_warning": {
@@ -475,6 +497,14 @@
      },
      "type": "object"
    },
    "NotificationMethod": {
      "enum": [
        "auto",
        "osc9",
        "bel"
      ],
      "type": "string"
    },
    "Notifications": {
      "anyOf": [
        {
@@ -750,6 +780,13 @@
          },
          "type": "object"
        },
        "scopes": {
          "default": null,
          "items": {
            "type": "string"
          },
          "type": "array"
        },
        "startup_timeout_ms": {
          "default": null,
          "format": "uint64",
@@ -976,6 +1013,15 @@
      "default": null,
      "description": "Start the TUI in the specified collaboration mode (plan/execute/etc.). Defaults to unset."
    },
    "notification_method": {
      "allOf": [
        {
          "$ref": "#/definitions/NotificationMethod"
        }
      ],
      "default": "auto",
      "description": "Notification method to use for unfocused terminal notifications. Defaults to `auto`."
    },
    "notifications": {
      "allOf": [
        {
@@ -1040,13 +1086,6 @@
      ],
      "type": "string"
    },
    {
      "description": "Experimental: Responses API over WebSocket transport.",
      "enum": [
        "responses_websocket"
      ],
      "type": "string"
    },
    {
      "description": "Regular Chat Completions compatible with `/v1/chat/completions`.",
      "enum": [
@@ -1130,6 +1169,9 @@
    "apply_patch_freeform": {
      "type": "boolean"
    },
    "apps": {
      "type": "boolean"
    },
    "child_agents_md": {
      "type": "boolean"
    },
@@ -1166,6 +1208,9 @@
    "include_apply_patch_tool": {
      "type": "boolean"
    },
    "personality": {
      "type": "boolean"
    },
    "powershell_utf8": {
      "type": "boolean"
    },
@@ -1175,6 +1220,9 @@
    "remote_models": {
      "type": "boolean"
    },
    "request_rule": {
      "type": "boolean"
    },
    "responses_websockets": {
      "type": "boolean"
    },
@@ -1184,6 +1232,15 @@
    "shell_tool": {
      "type": "boolean"
    },
    "skill_env_var_dependency_prompt": {
      "type": "boolean"
    },
    "skill_mcp_dependency_install": {
      "type": "boolean"
    },
    "sqlite": {
      "type": "boolean"
    },
    "steer": {
      "type": "boolean"
    },
@@ -1431,6 +1488,10 @@
      ],
      "description": "Sandbox configuration to apply if `sandbox` is `WorkspaceWrite`."
    },
    "session_object_storage_url": {
      "description": "Base URL for the object store used by /share (enterprise/self-hosted).",
      "type": "string"
    },
    "shell_environment_policy": {
      "allOf": [
        {
@@ -1458,6 +1519,10 @@
      ],
      "description": "User-level skill config entries keyed by SKILL.md path."
    },
    "suppress_unstable_features_warning": {
      "description": "Suppress warnings about unstable (under development) features.",
      "type": "boolean"
    },
    "tool_output_token_limit": {
      "description": "Token budget applied when storing tool/function outputs in the context manager.",
      "format": "uint",

@@ -44,6 +44,8 @@ pub struct AgentProfile {
    pub reasoning_effort: Option<ReasoningEffort>,
    /// Whether to force a read-only sandbox policy.
    pub read_only: bool,
    /// Description to include in the tool specs.
    pub description: &'static str,
}

impl AgentRole {
@@ -51,7 +53,19 @@ impl AgentRole {
    pub fn enum_values() -> Vec<String> {
        ALL_ROLES
            .iter()
            .filter_map(|role| serde_json::to_string(role).ok())
            .filter_map(|role| {
                let description = role.profile().description;
                serde_json::to_string(role)
                    .map(|role| {
                        let description = if !description.is_empty() {
                            format!(r#", "description": {description}"#)
                        } else {
                            String::new()
                        };
                        format!(r#"{{ "name": {role}{description}}}"#)
                    })
                    .ok()
            })
            .collect()
    }

@@ -66,11 +80,33 @@ impl AgentRole {
            AgentRole::Worker => AgentProfile {
                // base_instructions: Some(WORKER_PROMPT),
                // model: Some(WORKER_MODEL),
                description: r#"Use for execution and production work.
Typical tasks:
- Implement part of a feature
- Fix tests or bugs
- Split large refactors into independent chunks
Rules:
- Explicitly assign **ownership** of the task (files / responsibility).
- Always tell workers they are **not alone in the codebase**, and that they should ignore edits made by others without touching them."#,
                ..Default::default()
            },
            AgentRole::Explorer => AgentProfile {
                model: Some(EXPLORER_MODEL),
                reasoning_effort: Some(ReasoningEffort::Low),
                description: r#"Use for fast codebase understanding and information gathering.
`explorer` agents are extremely fast, so use them as much as you can to speed up the resolution of the global task.
Typical tasks:
- Locate usages of a symbol or concept
- Understand how X is handled in Y
- Review a section of code for issues
- Assess impact of a potential change
Rules:
- Be explicit about what you are looking for. Good usage of an `explorer` means you don't need to read the same code after the explorer sends you the result.
- **Always** prefer asking explorers rather than exploring the codebase yourself.
- Spawn multiple explorers in parallel when useful and wait for all results.
- You can ask the `explorer` to return file names, lines, entire code snippets, ...
- Reuse the same explorer when it is relevant. If later in your process you have more questions about code an explorer already covered, reuse that same explorer to be more efficient.
"#,
                ..Default::default()
            },
        }

@@ -9,6 +9,7 @@ use serde::Deserialize;

use crate::auth::CodexAuth;
use crate::error::CodexErr;
use crate::error::ModelCapError;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use crate::error::UsageLimitReachedError;
@@ -49,6 +50,23 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
    } else if status == http::StatusCode::INTERNAL_SERVER_ERROR {
        CodexErr::InternalServerError
    } else if status == http::StatusCode::TOO_MANY_REQUESTS {
        if let Some(model) = headers
            .as_ref()
            .and_then(|map| map.get(MODEL_CAP_MODEL_HEADER))
            .and_then(|value| value.to_str().ok())
            .map(str::to_string)
        {
            let reset_after_seconds = headers
                .as_ref()
                .and_then(|map| map.get(MODEL_CAP_RESET_AFTER_HEADER))
                .and_then(|value| value.to_str().ok())
                .and_then(|value| value.parse::<u64>().ok());
            return CodexErr::ModelCap(ModelCapError {
                model,
                reset_after_seconds,
            });
        }

        if let Ok(err) = serde_json::from_str::<UsageErrorResponse>(&body_text) {
            if err.error.error_type.as_deref() == Some("usage_limit_reached") {
                let rate_limits = headers.as_ref().and_then(parse_rate_limit);
@@ -92,6 +110,42 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
    }
}

const MODEL_CAP_MODEL_HEADER: &str = "x-codex-model-cap-model";
const MODEL_CAP_RESET_AFTER_HEADER: &str = "x-codex-model-cap-reset-after-seconds";

#[cfg(test)]
mod tests {
    use super::*;
    use codex_api::TransportError;
    use http::HeaderMap;
    use http::StatusCode;

    #[test]
    fn map_api_error_maps_model_cap_headers() {
        let mut headers = HeaderMap::new();
        headers.insert(
            MODEL_CAP_MODEL_HEADER,
            http::HeaderValue::from_static("boomslang"),
        );
        headers.insert(
            MODEL_CAP_RESET_AFTER_HEADER,
            http::HeaderValue::from_static("120"),
        );
        let err = map_api_error(ApiError::Transport(TransportError::Http {
            status: StatusCode::TOO_MANY_REQUESTS,
            url: Some("http://example.com/v1/responses".to_string()),
            headers: Some(headers),
            body: Some(String::new()),
        }));

        let CodexErr::ModelCap(model_cap) = err else {
            panic!("expected CodexErr::ModelCap, got {err:?}");
        };
        assert_eq!(model_cap.model, "boomslang");
        assert_eq!(model_cap.reset_after_seconds, Some(120));
    }
}

fn extract_request_id(headers: Option<&HeaderMap>) -> Option<String> {
    headers.and_then(|map| {
        ["cf-ray", "x-request-id", "x-oai-request-id"]

@@ -42,6 +42,7 @@ pub(crate) async fn apply_patch(
        turn_context.approval_policy,
        &turn_context.sandbox_policy,
        &turn_context.cwd,
        turn_context.windows_sandbox_level,
    ) {
        SafetyCheck::AutoApprove {
            user_explicitly_approved,

@@ -65,6 +65,7 @@ use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;
use crate::tools::spec::create_tools_json_for_chat_completions_api;
use crate::tools::spec::create_tools_json_for_responses_api;
use crate::transport_manager::TransportManager;

pub const WEB_SEARCH_ELIGIBLE_HEADER: &str = "x-oai-web-search-eligible";
pub const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
@@ -80,6 +81,7 @@ struct ModelClientState {
    effort: Option<ReasoningEffortConfig>,
    summary: ReasoningSummaryConfig,
    session_source: SessionSource,
    transport_manager: TransportManager,
}

#[derive(Debug, Clone)]
@@ -91,6 +93,7 @@ pub struct ModelClientSession {
    state: Arc<ModelClientState>,
    connection: Option<ApiWebSocketConnection>,
    websocket_last_items: Vec<ResponseItem>,
    transport_manager: TransportManager,
    /// Turn state for sticky routing.
    ///
    /// This is an `OnceLock` that stores the turn state value received from the server
@@ -116,6 +119,7 @@ impl ModelClient {
        summary: ReasoningSummaryConfig,
        conversation_id: ThreadId,
        session_source: SessionSource,
        transport_manager: TransportManager,
    ) -> Self {
        Self {
            state: Arc::new(ModelClientState {
@@ -128,6 +132,7 @@ impl ModelClient {
                effort,
                summary,
                session_source,
                transport_manager,
            }),
        }
    }
@@ -137,6 +142,7 @@ impl ModelClient {
            state: Arc::clone(&self.state),
            connection: None,
            websocket_last_items: Vec::new(),
            transport_manager: self.state.transport_manager.clone(),
            turn_state: Arc::new(OnceLock::new()),
        }
    }
@@ -171,6 +177,10 @@ impl ModelClient {
        self.state.session_source.clone()
    }

    pub(crate) fn transport_manager(&self) -> TransportManager {
        self.state.transport_manager.clone()
    }

    /// Returns the currently configured model slug.
    pub fn get_model(&self) -> String {
        self.state.model_info.slug.clone()
@@ -250,9 +260,18 @@ impl ModelClientSession {
|
||||
/// For Chat providers, the underlying stream is optionally aggregated
|
||||
/// based on the `show_raw_agent_reasoning` flag in the config.
|
||||
pub async fn stream(&mut self, prompt: &Prompt) -> Result<ResponseStream> {
|
||||
match self.state.provider.wire_api {
|
||||
WireApi::Responses => self.stream_responses_api(prompt).await,
|
||||
WireApi::ResponsesWebsocket => self.stream_responses_websocket(prompt).await,
|
||||
let wire_api = self.state.provider.wire_api;
|
||||
match wire_api {
|
||||
WireApi::Responses => {
|
||||
let websocket_enabled = self.responses_websocket_enabled()
|
||||
&& !self.transport_manager.disable_websockets();
|
||||
|
||||
if websocket_enabled {
|
||||
self.stream_responses_websocket(prompt).await
|
||||
} else {
|
||||
self.stream_responses_api(prompt).await
|
||||
}
|
||||
}
|
||||
WireApi::Chat => {
|
||||
let api_stream = self.stream_chat_completions(prompt).await?;
|
||||
|
||||
@@ -271,6 +290,34 @@ impl ModelClientSession {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn try_switch_fallback_transport(&mut self) -> bool {
|
||||
let websocket_enabled = self.responses_websocket_enabled();
|
||||
let activated = self
|
||||
.transport_manager
|
||||
.activate_http_fallback(websocket_enabled);
|
||||
if activated {
|
||||
warn!("falling back to HTTP");
|
||||
self.state.otel_manager.counter(
|
||||
"codex.transport.fallback_to_http",
|
||||
1,
|
||||
&[("from_wire_api", "responses_websocket")],
|
||||
);
|
||||
|
||||
self.connection = None;
|
||||
self.websocket_last_items.clear();
|
||||
}
|
||||
activated
|
||||
}
|
||||
|
||||
fn responses_websocket_enabled(&self) -> bool {
|
||||
self.state.provider.supports_websockets
|
||||
&& self
|
||||
.state
|
||||
.config
|
||||
.features
|
||||
.enabled(Feature::ResponsesWebsockets)
|
||||
}
|
||||
|
||||
fn build_responses_request(&self, prompt: &Prompt) -> Result<ApiPrompt> {
|
||||
let instructions = prompt.base_instructions.text.clone();
|
||||
let tools_json: Vec<Value> = create_tools_json_for_responses_api(&prompt.tools)?;
|
||||
@@ -625,11 +672,13 @@ fn build_api_prompt(prompt: &Prompt, instructions: String, tools_json: Vec<Value
|
||||
}
|
||||
}
|
||||
|
||||
fn beta_feature_headers(config: &Config) -> ApiHeaderMap {
|
||||
fn experimental_feature_headers(config: &Config) -> ApiHeaderMap {
|
||||
let enabled = FEATURES
|
||||
.iter()
|
||||
.filter_map(|spec| {
|
||||
if spec.stage.beta_menu_description().is_some() && config.features.enabled(spec.id) {
|
||||
if spec.stage.experimental_menu_description().is_some()
|
||||
&& config.features.enabled(spec.id)
|
||||
{
|
||||
Some(spec.key)
|
||||
} else {
|
||||
None
|
||||
@@ -650,7 +699,7 @@ fn build_responses_headers(
|
||||
config: &Config,
|
||||
turn_state: Option<&Arc<OnceLock<String>>>,
|
||||
) -> ApiHeaderMap {
|
||||
let mut headers = beta_feature_headers(config);
|
||||
let mut headers = experimental_feature_headers(config);
|
||||
headers.insert(
|
||||
WEB_SEARCH_ELIGIBLE_HEADER,
|
||||
HeaderValue::from_static(
|
||||
|
||||
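A minimal sketch of the transport gating these hunks introduce: WebSockets are used only when the provider advertises support, the feature flag is on, and no HTTP fallback has been activated. `TransportFlags` below is a hypothetical stand-in for the provider/feature/`TransportManager` state, not a codex-core type.

```rust
// Hypothetical flattened view of the state consulted by ModelClientSession.
struct TransportFlags {
    provider_supports_websockets: bool,        // ModelProviderInfo::supports_websockets
    feature_enabled: bool,                     // Feature::ResponsesWebsockets
    websockets_disabled_by_fallback: bool,     // TransportManager::disable_websockets()
}

fn use_websocket_transport(flags: &TransportFlags) -> bool {
    // Mirrors `responses_websocket_enabled() && !disable_websockets()` above.
    flags.provider_supports_websockets
        && flags.feature_enabled
        && !flags.websockets_disabled_by_fallback
}

fn main() {
    let mut flags = TransportFlags {
        provider_supports_websockets: true,
        feature_enabled: true,
        websockets_disabled_by_fallback: false,
    };
    assert!(use_websocket_transport(&flags));

    // Once try_switch_fallback_transport() activates the HTTP fallback,
    // subsequent turns take the plain Responses API path.
    flags.websockets_disabled_by_fallback = true;
    assert!(!use_websocket_transport(&flags));
}
```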
(File diff suppressed because it is too large.)
@@ -12,6 +12,8 @@ use codex_protocol::protocol::SessionSource;
use std::path::PathBuf;
use tokio::sync::watch;

use crate::state_db::StateDbHandle;

#[derive(Clone, Debug)]
pub struct ThreadConfigSnapshot {
    pub model: String,
@@ -64,6 +66,10 @@ impl CodexThread {
        self.rollout_path.clone()
    }

    pub fn state_db(&self) -> Option<StateDbHandle> {
        self.codex.state_db()
    }

    pub async fn config_snapshot(&self) -> ThreadConfigSnapshot {
        self.codex.thread_config_snapshot().await
    }
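A small sketch of the Option-returning accessor pattern the hunk above adds. The types here are simplified stand-ins, not the real codex-core `CodexThread` and `StateDbHandle`.

```rust
// Simplified stand-ins for the codex-core types.
#[derive(Clone)]
struct StateDbHandle;

struct CodexThread {
    state_db: Option<StateDbHandle>,
}

impl CodexThread {
    // Mirrors the new accessor: a cloneable handle, None when no state DB
    // is configured for the session.
    fn state_db(&self) -> Option<StateDbHandle> {
        self.state_db.clone()
    }
}

fn main() {
    let thread = CodexThread { state_db: None };
    assert!(thread.state_db().is_none());
}
```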
@@ -10,7 +10,6 @@ use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::features::Feature;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::EventMsg;
use crate::protocol::TurnContextItem;
use crate::protocol::TurnStartedEvent;
@@ -20,6 +19,7 @@ use crate::truncate::TruncationPolicy;
use crate::truncate::approx_token_count;
use crate::truncate::truncate_text;
use crate::util::backoff;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
@@ -71,6 +71,9 @@ async fn run_compact_task_inner(
    turn_context: Arc<TurnContext>,
    input: Vec<UserInput>,
) {
    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
    sess.emit_turn_item_started(&turn_context, &compaction_item)
        .await;
    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);

    let mut history = sess.clone_history().await;
@@ -193,9 +196,8 @@ async fn run_compact_task_inner(
    });
    sess.persist_rollout_items(&[rollout_item]).await;

    let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
    sess.send_event(&turn_context, event).await;

    sess.emit_turn_item_completed(&turn_context, compaction_item)
        .await;
    let warning = EventMsg::Warning(WarningEvent {
        message: "Heads up: Long threads and multiple compactions can cause the model to be less accurate. Start a new thread when possible to keep threads small and targeted.".to_string(),
    });

@@ -5,10 +5,11 @@ use crate::codex::Session;
use crate::codex::TurnContext;
use crate::error::Result as CodexResult;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::EventMsg;
use crate::protocol::RolloutItem;
use crate::protocol::TurnStartedEvent;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;

pub(crate) async fn run_inline_remote_auto_compact_task(
@@ -40,6 +41,9 @@ async fn run_remote_compact_task_inner_impl(
    sess: &Arc<Session>,
    turn_context: &Arc<TurnContext>,
) -> CodexResult<()> {
    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
    sess.emit_turn_item_started(turn_context, &compaction_item)
        .await;
    let history = sess.clone_history().await;

    // Required to keep `/undo` available after compaction
@@ -77,8 +81,7 @@ async fn run_remote_compact_task_inner_impl(
    sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
        .await;

    let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
    sess.send_event(turn_context, event).await;

    sess.emit_turn_item_completed(turn_context, compaction_item)
        .await;
    Ok(())
}
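Both compaction paths now follow the same started/completed bracketing around a `ContextCompaction` turn item. A minimal sketch of that pattern, with a simplified emitter standing in for `Session` (the real emit calls and item types are in the diffs above):

```rust
// Simplified stand-in for the Session event-emitting surface.
trait TurnItemEmitter {
    fn emit_started(&self, item: &str);
    fn emit_completed(&self, item: &str);
}

fn run_compaction(emitter: &impl TurnItemEmitter) {
    // "started" is emitted before any work so clients can render an
    // in-progress compaction item...
    emitter.emit_started("context_compaction");
    // ... the history is compacted and the rollout persisted here ...
    // ...and "completed" is emitted once the compacted state is durable.
    emitter.emit_completed("context_compaction");
}

struct StdoutEmitter;
impl TurnItemEmitter for StdoutEmitter {
    fn emit_started(&self, item: &str) {
        println!("started: {item}");
    }
    fn emit_completed(&self, item: &str) {
        println!("completed: {item}");
    }
}

fn main() {
    run_compaction(&StdoutEmitter);
}
```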
@@ -167,6 +167,11 @@ mod document_helpers {
        {
            entry["disabled_tools"] = array_from_iter(disabled_tools.iter().cloned());
        }
        if let Some(scopes) = &config.scopes
            && !scopes.is_empty()
        {
            entry["scopes"] = array_from_iter(scopes.iter().cloned());
        }

        entry
    }
@@ -1373,6 +1378,7 @@ gpt-5 = "gpt-5.1"
            tool_timeout_sec: None,
            enabled_tools: Some(vec!["one".to_string(), "two".to_string()]),
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -1395,6 +1401,7 @@ gpt-5 = "gpt-5.1"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: Some(vec!["forbidden".to_string()]),
            scopes: None,
        },
    );

@@ -1460,6 +1467,7 @@ foo = { command = "cmd" }
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -1504,6 +1512,7 @@ foo = { command = "cmd" } # keep me
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -1547,6 +1556,7 @@ foo = { command = "cmd", args = ["--flag"] } # keep me
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -1591,6 +1601,7 @@ foo = { command = "cmd" }
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -7,6 +7,7 @@ use crate::config::types::McpServerConfig;
use crate::config::types::McpServerDisabledReason;
use crate::config::types::McpServerTransportConfig;
use crate::config::types::Notice;
use crate::config::types::NotificationMethod;
use crate::config::types::Notifications;
use crate::config::types::OtelConfig;
use crate::config::types::OtelConfigToml;
@@ -38,6 +39,7 @@ use crate::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use crate::project_doc::LOCAL_PROJECT_DOC_FILENAME;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use crate::windows_sandbox::WindowsSandboxLevelExt;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_protocol::config_types::AltScreenMode;
@@ -49,6 +51,7 @@ use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::config_types::Verbosity;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::openai_models::ReasoningEffort;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
@@ -190,10 +193,13 @@ pub struct Config {
    /// If unset the feature is disabled.
    pub notify: Option<Vec<String>>,

    /// TUI notifications preference. When set, the TUI will send OSC 9 notifications on approvals
    /// and turn completions when not focused.
    /// TUI notifications preference. When set, the TUI will send terminal notifications on
    /// approvals and turn completions when not focused.
    pub tui_notifications: Notifications,

    /// Notification method for terminal notifications (osc9 or bel).
    pub tui_notification_method: NotificationMethod,

    /// Enable ASCII animations and shimmer effects in the TUI.
    pub animations: bool,

@@ -293,6 +299,9 @@ pub struct Config {
    /// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
    pub chatgpt_base_url: String,

    /// Optional base URL for storing shared session rollouts in an object store.
    pub session_object_storage_url: Option<String>,

    /// When set, restricts ChatGPT login to a specific workspace identifier.
    pub forced_chatgpt_workspace_id: Option<String>,

@@ -316,6 +325,9 @@ pub struct Config {
    /// Centralized feature flags; source of truth for feature gating.
    pub features: Features,

    /// When `true`, suppress warnings about unstable (under development) features.
    pub suppress_unstable_features_warning: bool,

    /// The active profile name used to derive this `Config` (if any).
    pub active_profile: Option<String>,

@@ -832,6 +844,9 @@ pub struct ConfigToml {
    /// When unset, Codex will bind to an ephemeral port chosen by the OS.
    pub mcp_oauth_callback_port: Option<u16>,

    /// Base URL for the object store used by /share (enterprise/self-hosted).
    pub session_object_storage_url: Option<String>,

    /// User-defined provider entries that extend/override the built-in list.
    #[serde(default)]
    pub model_providers: HashMap<String, ModelProviderInfo>,
@@ -906,6 +921,9 @@ pub struct ConfigToml {
    #[schemars(schema_with = "crate::config::schema::features_schema")]
    pub features: Option<FeaturesToml>,

    /// Suppress warnings about unstable (under development) features.
    pub suppress_unstable_features_warning: Option<bool>,

    /// Settings for ghost snapshots (used for undo).
    #[serde(default)]
    pub ghost_snapshot: Option<GhostSnapshotToml>,
@@ -1050,6 +1068,7 @@ impl ConfigToml {
        &self,
        sandbox_mode_override: Option<SandboxMode>,
        profile_sandbox_mode: Option<SandboxMode>,
        windows_sandbox_level: WindowsSandboxLevel,
        resolved_cwd: &Path,
    ) -> SandboxPolicyResolution {
        let resolved_sandbox_mode = sandbox_mode_override
@@ -1088,7 +1107,7 @@ impl ConfigToml {
        if cfg!(target_os = "windows")
            && matches!(resolved_sandbox_mode, SandboxMode::WorkspaceWrite)
            // If the experimental Windows sandbox is enabled, do not force a downgrade.
            && crate::safety::get_platform_sandbox().is_none()
            && windows_sandbox_level == codex_protocol::config_types::WindowsSandboxLevel::Disabled
        {
            sandbox_policy = SandboxPolicy::new_read_only_policy();
            forced_auto_mode_downgraded_on_windows = true;
@@ -1212,6 +1231,20 @@ fn resolve_web_search_mode(
    None
}

pub(crate) fn resolve_web_search_mode_for_turn(
    explicit_mode: Option<WebSearchMode>,
    sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
    if let Some(mode) = explicit_mode {
        return mode;
    }
    if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
        WebSearchMode::Live
    } else {
        WebSearchMode::Cached
    }
}

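A standalone restatement of `resolve_web_search_mode_for_turn` above, with local enums standing in for the codex-protocol types, to make the defaults easy to check: an explicit mode always wins, `DangerFullAccess` defaults to live search, and every sandboxed policy defaults to cached.

```rust
#[derive(Debug, PartialEq)]
enum WebSearchMode {
    Live,
    Cached,
}

#[allow(dead_code)]
enum SandboxPolicy {
    ReadOnly,
    WorkspaceWrite,
    DangerFullAccess,
}

fn resolve_for_turn(explicit: Option<WebSearchMode>, sandbox: &SandboxPolicy) -> WebSearchMode {
    if let Some(mode) = explicit {
        return mode; // an explicit setting always wins
    }
    match sandbox {
        SandboxPolicy::DangerFullAccess => WebSearchMode::Live,
        _ => WebSearchMode::Cached, // sandboxed turns default to cached search
    }
}

fn main() {
    assert_eq!(resolve_for_turn(None, &SandboxPolicy::ReadOnly), WebSearchMode::Cached);
    assert_eq!(resolve_for_turn(None, &SandboxPolicy::DangerFullAccess), WebSearchMode::Live);
    assert_eq!(
        resolve_for_turn(Some(WebSearchMode::Cached), &SandboxPolicy::DangerFullAccess),
        WebSearchMode::Cached
    );
}
```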
impl Config {
    #[cfg(test)]
    fn load_from_base_config_with_overrides(
@@ -1278,17 +1311,6 @@ impl Config {
        };

        let features = Features::from_config(&cfg, &config_profile, feature_overrides);
        let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
        #[cfg(target_os = "windows")]
        {
            // Base flag controls sandbox on/off; elevated only applies when base is enabled.
            let sandbox_enabled = features.enabled(Feature::WindowsSandbox);
            crate::safety::set_windows_sandbox_enabled(sandbox_enabled);
            let elevated_enabled =
                sandbox_enabled && features.enabled(Feature::WindowsSandboxElevated);
            crate::safety::set_windows_elevated_sandbox_enabled(elevated_enabled);
        }

        let resolved_cwd = {
            use std::env;

@@ -1315,10 +1337,16 @@ impl Config {
            .get_active_project(&resolved_cwd)
            .unwrap_or(ProjectConfig { trust_level: None });

        let windows_sandbox_level = WindowsSandboxLevel::from_features(&features);
        let SandboxPolicyResolution {
            policy: mut sandbox_policy,
            forced_auto_mode_downgraded_on_windows,
        } = cfg.derive_sandbox_policy(sandbox_mode, config_profile.sandbox_mode, &resolved_cwd);
        } = cfg.derive_sandbox_policy(
            sandbox_mode,
            config_profile.sandbox_mode,
            windows_sandbox_level,
            &resolved_cwd,
        );
        if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = &mut sandbox_policy {
            for path in additional_writable_roots {
                if !writable_roots.iter().any(|existing| existing == &path) {
@@ -1338,6 +1366,7 @@ impl Config {
                AskForApproval::default()
            }
        });
        let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
        // TODO(dylan): We should be able to leverage ConfigLayerStack so that
        // we can reliably check this at every config level.
        let did_user_set_custom_approval_policy_or_sandbox_mode = approval_policy_override
@@ -1349,12 +1378,6 @@ impl Config {
            || cfg.sandbox_mode.is_some();

        let mut model_providers = built_in_model_providers();
        if features.enabled(Feature::ResponsesWebsockets)
            && let Some(provider) = model_providers.get_mut("openai")
            && provider.is_openai()
        {
            provider.wire_api = crate::model_provider_info::WireApi::ResponsesWebsocket;
        }
        // Merge user-defined providers into the built-in list.
        for (key, provider) in cfg.model_providers.into_iter() {
            model_providers.entry(key).or_insert(provider);
@@ -1428,6 +1451,16 @@ impl Config {
            }
        });

        let session_object_storage_url =
            cfg.session_object_storage_url.as_ref().and_then(|value| {
                let trimmed = value.trim();
                if trimmed.is_empty() {
                    None
                } else {
                    Some(trimmed.to_string())
                }
            });

        let forced_login_method = cfg.forced_login_method;

        let model = model.or(config_profile.model).or(cfg.model);
@@ -1557,6 +1590,7 @@ impl Config {
                .chatgpt_base_url
                .or(cfg.chatgpt_base_url)
                .unwrap_or("https://chatgpt.com/backend-api/".to_string()),
            session_object_storage_url,
            forced_chatgpt_workspace_id,
            forced_login_method,
            include_apply_patch_tool: include_apply_patch_tool_flag,
@@ -1564,6 +1598,9 @@ impl Config {
            use_experimental_unified_exec_tool,
            ghost_snapshot,
            features,
            suppress_unstable_features_warning: cfg
                .suppress_unstable_features_warning
                .unwrap_or(false),
            active_profile: active_profile_name,
            active_project,
            windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
@@ -1585,6 +1622,11 @@ impl Config {
                .as_ref()
                .map(|t| t.notifications.clone())
                .unwrap_or_default(),
            tui_notification_method: cfg
                .tui
                .as_ref()
                .map(|t| t.notification_method)
                .unwrap_or_default(),
            animations: cfg.tui.as_ref().map(|t| t.animations).unwrap_or(true),
            show_tooltips: cfg.tui.as_ref().map(|t| t.show_tooltips).unwrap_or(true),
            experimental_mode: cfg.tui.as_ref().and_then(|t| t.experimental_mode),
@@ -1657,20 +1699,19 @@ impl Config {
        }
    }

    pub fn set_windows_sandbox_globally(&mut self, value: bool) {
        crate::safety::set_windows_sandbox_enabled(value);
    pub fn set_windows_sandbox_enabled(&mut self, value: bool) {
        if value {
            self.features.enable(Feature::WindowsSandbox);
            self.forced_auto_mode_downgraded_on_windows = false;
        } else {
            self.features.disable(Feature::WindowsSandbox);
        }
        self.forced_auto_mode_downgraded_on_windows = !value;
    }

    pub fn set_windows_elevated_sandbox_globally(&mut self, value: bool) {
        crate::safety::set_windows_elevated_sandbox_enabled(value);
    pub fn set_windows_elevated_sandbox_enabled(&mut self, value: bool) {
        if value {
            self.features.enable(Feature::WindowsSandboxElevated);
            self.forced_auto_mode_downgraded_on_windows = false;
        } else {
            self.features.disable(Feature::WindowsSandboxElevated);
        }
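One plausible reading of the renamed toggles above: enabling or disabling the Windows sandbox is now expressed through feature flags on the `Config` rather than a process-wide global in `crate::safety`, and re-enabling the sandbox lifts the earlier read-only downgrade. A hedged sketch with a simplified `Features` stand-in:

```rust
use std::collections::HashSet;

#[allow(dead_code)]
#[derive(Hash, PartialEq, Eq, Clone, Copy)]
enum Feature {
    WindowsSandbox,
    WindowsSandboxElevated,
}

#[derive(Default)]
struct Features(HashSet<Feature>);

impl Features {
    fn enable(&mut self, f: Feature) {
        self.0.insert(f);
    }
    fn disable(&mut self, f: Feature) {
        self.0.remove(&f);
    }
    fn enabled(&self, f: Feature) -> bool {
        self.0.contains(&f)
    }
}

struct Config {
    features: Features,
    forced_auto_mode_downgraded_on_windows: bool,
}

impl Config {
    fn set_windows_sandbox_enabled(&mut self, value: bool) {
        if value {
            self.features.enable(Feature::WindowsSandbox);
            // Re-enabling the sandbox lifts the read-only downgrade.
            self.forced_auto_mode_downgraded_on_windows = false;
        } else {
            self.features.disable(Feature::WindowsSandbox);
        }
    }
}

fn main() {
    let mut config = Config {
        features: Features::default(),
        forced_auto_mode_downgraded_on_windows: true,
    };
    config.set_windows_sandbox_enabled(true);
    assert!(config.features.enabled(Feature::WindowsSandbox));
    assert!(!config.forced_auto_mode_downgraded_on_windows);
}
```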
@@ -1744,6 +1785,7 @@ mod tests {
    use crate::config::types::FeedbackConfigToml;
    use crate::config::types::HistoryPersistence;
    use crate::config::types::McpServerTransportConfig;
    use crate::config::types::NotificationMethod;
    use crate::config::types::Notifications;
    use crate::config_loader::RequirementSource;
    use crate::features::Feature;
@@ -1772,6 +1814,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        }
    }

@@ -1789,6 +1832,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        }
    }

@@ -1838,6 +1882,7 @@ persistence = "none"
            tui,
            Tui {
                notifications: Notifications::Enabled(true),
                notification_method: NotificationMethod::Auto,
                animations: true,
                show_tooltips: true,
                experimental_mode: None,
@@ -1860,6 +1905,7 @@ network_access = false # This should be ignored.
        let resolution = sandbox_full_access_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        assert_eq!(
@@ -1883,6 +1929,7 @@ network_access = true # This should be ignored.
        let resolution = sandbox_read_only_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        assert_eq!(
@@ -1914,6 +1961,7 @@ exclude_slash_tmp = true
        let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        if cfg!(target_os = "windows") {
@@ -1962,6 +2010,7 @@ trust_level = "trusted"
        let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
            sandbox_mode_override,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );
        if cfg!(target_os = "windows") {
@@ -2253,7 +2302,7 @@ trust_level = "trusted"
    }

    #[test]
    fn web_search_mode_uses_none_if_unset() {
    fn web_search_mode_defaults_to_none_if_unset() {
        let cfg = ConfigToml::default();
        let profile = ConfigProfile::default();
        let features = Features::with_defaults();
@@ -2293,6 +2342,30 @@ trust_level = "trusted"
        );
    }

    #[test]
    fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
        let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::ReadOnly);

        assert_eq!(mode, WebSearchMode::Cached);
    }

    #[test]
    fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
        let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::DangerFullAccess);

        assert_eq!(mode, WebSearchMode::Live);
    }

    #[test]
    fn web_search_mode_for_turn_prefers_explicit_value() {
        let mode = resolve_web_search_mode_for_turn(
            Some(WebSearchMode::Cached),
            &SandboxPolicy::DangerFullAccess,
        );

        assert_eq!(mode, WebSearchMode::Cached);
    }

    #[test]
    fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
@@ -2493,7 +2566,7 @@ profile = "project"
    }

    #[test]
    fn responses_websockets_feature_updates_openai_provider() -> std::io::Result<()> {
    fn responses_websockets_feature_does_not_change_wire_api() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let mut entries = BTreeMap::new();
        entries.insert("responses_websockets".to_string(), true);
@@ -2510,7 +2583,7 @@ profile = "project"

        assert_eq!(
            config.model_provider.wire_api,
            crate::model_provider_info::WireApi::ResponsesWebsocket
            crate::model_provider_info::WireApi::Responses
        );

        Ok(())
@@ -2614,6 +2687,7 @@ profile = "project"
            tool_timeout_sec: Some(Duration::from_secs(5)),
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );

@@ -2768,6 +2842,7 @@ bearer_token = "secret"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -2837,6 +2912,7 @@ ZIG_VAR = "3"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -2886,6 +2962,7 @@ ZIG_VAR = "3"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -2933,6 +3010,7 @@ ZIG_VAR = "3"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -2996,6 +3074,7 @@ startup_timeout_sec = 2.0
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);
    apply_blocking(
@@ -3071,6 +3150,7 @@ X-Auth = "DOCS_AUTH"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -3099,6 +3179,7 @@ X-Auth = "DOCS_AUTH"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    );
    apply_blocking(
@@ -3165,6 +3246,7 @@ url = "https://example.com/mcp"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
        ),
        (
@@ -3183,6 +3265,7 @@ url = "https://example.com/mcp"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
        ),
    ]);
@@ -3264,6 +3347,7 @@ url = "https://example.com/mcp"
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
    )]);

@@ -3307,6 +3391,7 @@ url = "https://example.com/mcp"
            tool_timeout_sec: None,
            enabled_tools: Some(vec!["allowed".to_string()]),
            disabled_tools: Some(vec!["blocked".to_string()]),
            scopes: None,
        },
    )]);

@@ -3618,6 +3703,7 @@ model_verbosity = "high"
            stream_max_retries: Some(10),
            stream_idle_timeout_ms: Some(300_000),
            requires_openai_auth: false,
            supports_websockets: false,
        };
        let model_provider_map = {
            let mut model_provider_map = built_in_model_providers();
@@ -3708,6 +3794,7 @@ model_verbosity = "high"
            model_verbosity: None,
            model_personality: None,
            chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
            session_object_storage_url: None,
            base_instructions: None,
            developer_instructions: None,
            compact_prompt: None,
@@ -3718,6 +3805,7 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("o3".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3725,6 +3813,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -3790,6 +3879,7 @@ model_verbosity = "high"
            model_verbosity: None,
            model_personality: None,
            chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
            session_object_storage_url: None,
            base_instructions: None,
            developer_instructions: None,
            compact_prompt: None,
@@ -3800,6 +3890,7 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("gpt3".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3807,6 +3898,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -3887,6 +3979,7 @@ model_verbosity = "high"
            model_verbosity: None,
            model_personality: None,
            chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
            session_object_storage_url: None,
            base_instructions: None,
            developer_instructions: None,
            compact_prompt: None,
@@ -3897,6 +3990,7 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("zdr".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3904,6 +3998,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -3970,6 +4065,7 @@ model_verbosity = "high"
            model_verbosity: Some(Verbosity::High),
            model_personality: None,
            chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
            session_object_storage_url: None,
            base_instructions: None,
            developer_instructions: None,
            compact_prompt: None,
@@ -3980,6 +4076,7 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
            suppress_unstable_features_warning: false,
            active_profile: Some("gpt5".to_string()),
            active_project: ProjectConfig { trust_level: None },
            windows_wsl_setup_acknowledged: false,
@@ -3987,6 +4084,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -4160,7 +4258,12 @@ trust_level = "untrusted"
        let cfg = toml::from_str::<ConfigToml>(config_with_untrusted)
            .expect("TOML deserialization should succeed");

        let resolution = cfg.derive_sandbox_policy(None, None, &PathBuf::from("/tmp/test"));
        let resolution = cfg.derive_sandbox_policy(
            None,
            None,
            WindowsSandboxLevel::Disabled,
            &PathBuf::from("/tmp/test"),
        );

        // Verify that untrusted projects get WorkspaceWrite (or ReadOnly on Windows due to downgrade)
        if cfg!(target_os = "windows") {
@@ -4339,13 +4442,17 @@ mcp_oauth_callback_port = 5678

#[cfg(test)]
mod notifications_tests {
    use crate::config::types::NotificationMethod;
    use crate::config::types::Notifications;
    use assert_matches::assert_matches;
    use serde::Deserialize;

    #[derive(Deserialize, Debug, PartialEq)]
    struct TuiTomlTest {
        #[serde(default)]
        notifications: Notifications,
        #[serde(default)]
        notification_method: NotificationMethod,
    }

    #[derive(Deserialize, Debug, PartialEq)]
@@ -4376,4 +4483,15 @@ mod notifications_tests {
            Notifications::Custom(ref v) if v == &vec!["foo".to_string()]
        );
    }

    #[test]
    fn test_tui_notification_method() {
        let toml = r#"
[tui]
notification_method = "bel"
"#;
        let parsed: RootTomlTest =
            toml::from_str(toml).expect("deserialize notification_method=\"bel\"");
        assert_eq!(parsed.tui.notification_method, NotificationMethod::Bel);
    }
}
@@ -73,6 +73,10 @@ pub struct McpServerConfig {
    /// Explicit deny-list of tools. These tools will be removed after applying `enabled_tools`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disabled_tools: Option<Vec<String>>,

    /// Optional OAuth scopes to request during MCP login.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scopes: Option<Vec<String>>,
}

// Raw MCP config shape used for deserialization and JSON Schema generation.
@@ -113,6 +117,8 @@ pub(crate) struct RawMcpServerConfig {
    pub enabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub disabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub scopes: Option<Vec<String>>,
}

impl<'de> Deserialize<'de> for McpServerConfig {
@@ -134,6 +140,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {
        let enabled = raw.enabled.unwrap_or_else(default_enabled);
        let enabled_tools = raw.enabled_tools.clone();
        let disabled_tools = raw.disabled_tools.clone();
        let scopes = raw.scopes.clone();

        fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
        where
@@ -188,6 +195,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            disabled_reason: None,
            enabled_tools,
            disabled_tools,
            scopes,
        })
    }
}
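A hedged example of what the new `scopes` field looks like in a user's `config.toml`; the server name and scope strings below are hypothetical, and the snippet only checks that the shape round-trips through a generic TOML value.

```rust
fn main() {
    // Hypothetical MCP server entry requesting two OAuth scopes at login.
    let toml = r#"
        [mcp_servers.docs]
        url = "https://example.com/mcp"
        scopes = ["files.read", "files.write"]
    "#;
    let value: toml::Value = toml::from_str(toml).expect("valid TOML");
    let scopes = value["mcp_servers"]["docs"]["scopes"]
        .as_array()
        .expect("scopes is an array");
    assert_eq!(scopes.len(), 2);
}
```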
@@ -420,6 +428,25 @@ impl Default for Notifications {
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)]
#[serde(rename_all = "lowercase")]
pub enum NotificationMethod {
    #[default]
    Auto,
    Osc9,
    Bel,
}

impl fmt::Display for NotificationMethod {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            NotificationMethod::Auto => write!(f, "auto"),
            NotificationMethod::Osc9 => write!(f, "osc9"),
            NotificationMethod::Bel => write!(f, "bel"),
        }
    }
}

/// Collection of settings that are specific to the TUI.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
@@ -429,6 +456,11 @@ pub struct Tui {
    #[serde(default)]
    pub notifications: Notifications,

    /// Notification method to use for unfocused terminal notifications.
    /// Defaults to `auto`.
    #[serde(default)]
    pub notification_method: NotificationMethod,

    /// Enable animations (welcome screen, shimmer effects, spinners).
    /// Defaults to `true`.
    #[serde(default = "default_true")]
@@ -464,7 +496,6 @@ const fn default_true() -> bool {
/// (primarily the Codex IDE extension). NOTE: these are different from
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct Notice {
    /// Tracks whether the user has acknowledged the full access warning prompt.
    pub hide_full_access_warning: Option<bool>,
@@ -6,6 +6,8 @@ mod layer_io;
mod macos;
mod merge;
mod overrides;
#[cfg(test)]
mod requirements_exec_policy;
mod state;

#[cfg(test)]
@@ -512,10 +514,10 @@ impl ProjectTrustContext {
        let user_config_file = self.user_config_file.as_path().display();
        match decision.trust_level {
            Some(TrustLevel::Untrusted) => Some(format!(
                "{trust_key} is marked as untrusted in {user_config_file}. Mark it trusted to enable project config folders."
                "{trust_key} is marked as untrusted in {user_config_file}. To load config.toml, mark it trusted."
            )),
            _ => Some(format!(
                "Add {trust_key} as a trusted project in {user_config_file}."
                "To load config.toml, add {trust_key} as a trusted project in {user_config_file}."
            )),
        }
    }
@@ -526,21 +528,16 @@ fn project_layer_entry(
    dot_codex_folder: &AbsolutePathBuf,
    layer_dir: &AbsolutePathBuf,
    config: TomlValue,
    config_toml_exists: bool,
) -> ConfigLayerEntry {
    match trust_context.disabled_reason_for_dir(layer_dir) {
        Some(reason) => ConfigLayerEntry::new_disabled(
            ConfigLayerSource::Project {
                dot_codex_folder: dot_codex_folder.clone(),
            },
            config,
            reason,
        ),
        None => ConfigLayerEntry::new(
            ConfigLayerSource::Project {
                dot_codex_folder: dot_codex_folder.clone(),
            },
            config,
        ),
    let source = ConfigLayerSource::Project {
        dot_codex_folder: dot_codex_folder.clone(),
    };

    if config_toml_exists && let Some(reason) = trust_context.disabled_reason_for_dir(layer_dir) {
        ConfigLayerEntry::new_disabled(source, config, reason)
    } else {
        ConfigLayerEntry::new(source, config)
    }
}

@@ -715,13 +712,15 @@ async fn load_project_layers(
                    &dot_codex_abs,
                    &layer_dir,
                    TomlValue::Table(toml::map::Map::new()),
                    true,
                ));
                continue;
            }
        };
        let config =
            resolve_relative_paths_in_config_toml(config, dot_codex_abs.as_path())?;
        let entry = project_layer_entry(trust_context, &dot_codex_abs, &layer_dir, config);
        let entry =
            project_layer_entry(trust_context, &dot_codex_abs, &layer_dir, config, true);
        layers.push(entry);
    }
    Err(err) => {
@@ -734,6 +733,7 @@ async fn load_project_layers(
                &dot_codex_abs,
                &layer_dir,
                TomlValue::Table(toml::map::Map::new()),
                false,
            ));
        } else {
            let config_file_display = config_file.as_path().display();
codex-rs/core/src/config_loader/requirements_exec_policy.rs (new file, 188 lines)
@@ -0,0 +1,188 @@
use codex_execpolicy::Decision;
use codex_execpolicy::Policy;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use codex_execpolicy::rule::RuleRef;
use multimap::MultiMap;
use serde::Deserialize;
use std::sync::Arc;
use thiserror::Error;

/// TOML types for expressing exec policy requirements.
///
/// These types are kept separate from `ConfigRequirementsToml` and are
/// converted into `codex-execpolicy` rules.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyTomlRoot {
    pub exec_policy: RequirementsExecPolicyToml,
}

/// TOML representation of `[exec_policy]` within `requirements.toml`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyToml {
    pub prefix_rules: Vec<RequirementsExecPolicyPrefixRuleToml>,
}

/// A TOML representation of the `prefix_rule(...)` Starlark builtin.
///
/// This mirrors the builtin defined in `execpolicy/src/parser.rs`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPrefixRuleToml {
    pub pattern: Vec<RequirementsExecPolicyPatternTokenToml>,
    pub decision: Option<RequirementsExecPolicyDecisionToml>,
    pub justification: Option<String>,
}

/// TOML-friendly representation of a pattern token.
///
/// Starlark supports either a string token or a list of alternative tokens at
/// each position, but TOML arrays cannot mix strings and arrays. Using an
/// array of tables sidesteps that restriction.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPatternTokenToml {
    pub token: Option<String>,
    pub any_of: Option<Vec<String>>,
}

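A small illustration of the array-of-tables encoding the doc comment above describes: each pattern position is a table carrying either `token` or `any_of`, so mixed positions never require a heterogeneous TOML array. The `PatternToken` struct below is a local mirror of the TOML shape, not the crate's type.

```rust
use serde::Deserialize;

// Local mirror of the pattern-token TOML shape for illustration only.
#[derive(Debug, Deserialize)]
struct PatternToken {
    token: Option<String>,
    any_of: Option<Vec<String>>,
}

#[derive(Debug, Deserialize)]
struct Pattern {
    pattern: Vec<PatternToken>,
}

fn main() {
    let parsed: Pattern = toml::from_str(
        r#"pattern = [{ token = "git" }, { any_of = ["push", "commit"] }]"#,
    )
    .expect("array of tables parses");
    assert_eq!(parsed.pattern.len(), 2);
    // First position is a single token, second is a set of alternatives.
    assert!(parsed.pattern[0].token.is_some());
    assert!(parsed.pattern[1].any_of.is_some());
}
```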
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum RequirementsExecPolicyDecisionToml {
    Allow,
    Prompt,
    Forbidden,
}

impl RequirementsExecPolicyDecisionToml {
    fn as_decision(self) -> Decision {
        match self {
            Self::Allow => Decision::Allow,
            Self::Prompt => Decision::Prompt,
            Self::Forbidden => Decision::Forbidden,
        }
    }
}

#[derive(Debug, Error)]
pub enum RequirementsExecPolicyParseError {
    #[error("exec policy prefix_rules cannot be empty")]
    EmptyPrefixRules,

    #[error("exec policy prefix_rule at index {rule_index} has an empty pattern")]
    EmptyPattern { rule_index: usize },

    #[error(
        "exec policy prefix_rule at index {rule_index} has an invalid pattern token at index {token_index}: {reason}"
    )]
    InvalidPatternToken {
        rule_index: usize,
        token_index: usize,
        reason: String,
    },

    #[error("exec policy prefix_rule at index {rule_index} has an empty justification")]
    EmptyJustification { rule_index: usize },
}

impl RequirementsExecPolicyToml {
    /// Convert requirements TOML exec policy rules into the internal `.rules`
    /// representation used by `codex-execpolicy`.
    pub fn to_policy(&self) -> Result<Policy, RequirementsExecPolicyParseError> {
        if self.prefix_rules.is_empty() {
            return Err(RequirementsExecPolicyParseError::EmptyPrefixRules);
        }

        let mut rules_by_program: MultiMap<String, RuleRef> = MultiMap::new();

        for (rule_index, rule) in self.prefix_rules.iter().enumerate() {
            if let Some(justification) = &rule.justification
                && justification.trim().is_empty()
            {
                return Err(RequirementsExecPolicyParseError::EmptyJustification { rule_index });
            }

            if rule.pattern.is_empty() {
                return Err(RequirementsExecPolicyParseError::EmptyPattern { rule_index });
            }

            let pattern_tokens = rule
                .pattern
                .iter()
                .enumerate()
                .map(|(token_index, token)| parse_pattern_token(token, rule_index, token_index))
                .collect::<Result<Vec<_>, _>>()?;

            let decision = rule
                .decision
                .map(RequirementsExecPolicyDecisionToml::as_decision)
                .unwrap_or(Decision::Allow);
            let justification = rule.justification.clone();

            let (first_token, remaining_tokens) = pattern_tokens
                .split_first()
                .ok_or(RequirementsExecPolicyParseError::EmptyPattern { rule_index })?;

            let rest: Arc<[PatternToken]> = remaining_tokens.to_vec().into();

            for head in first_token.alternatives() {
                let rule: RuleRef = Arc::new(PrefixRule {
                    pattern: PrefixPattern {
                        first: Arc::from(head.as_str()),
                        rest: rest.clone(),
                    },
                    decision,
                    justification: justification.clone(),
                });
                rules_by_program.insert(head.clone(), rule);
            }
        }

        Ok(Policy::new(rules_by_program))
    }
}

fn parse_pattern_token(
    token: &RequirementsExecPolicyPatternTokenToml,
    rule_index: usize,
    token_index: usize,
) -> Result<PatternToken, RequirementsExecPolicyParseError> {
    match (&token.token, &token.any_of) {
        (Some(single), None) => {
            if single.trim().is_empty() {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "token cannot be empty".to_string(),
                });
            }
            Ok(PatternToken::Single(single.clone()))
        }
        (None, Some(alternatives)) => {
            if alternatives.is_empty() {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "any_of cannot be empty".to_string(),
                });
            }
            if alternatives.iter().any(|alt| alt.trim().is_empty()) {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "any_of cannot include empty tokens".to_string(),
                });
            }
            Ok(PatternToken::Alts(alternatives.clone()))
        }
        (Some(_), Some(_)) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
            rule_index,
            token_index,
            reason: "set either token or any_of, not both".to_string(),
        }),
        (None, None) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
            rule_index,
            token_index,
            reason: "set either token or any_of".to_string(),
        }),
    }
}
@@ -911,3 +911,165 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
mod requirements_exec_policy_tests {
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyDecisionToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyPatternTokenToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyPrefixRuleToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyTomlRoot;
|
||||
use codex_execpolicy::Decision;
|
||||
use codex_execpolicy::Evaluation;
|
||||
use codex_execpolicy::RuleMatch;
|
||||
use pretty_assertions::assert_eq;
|
||||
use toml::from_str;
|
||||
|
||||
fn tokens(cmd: &[&str]) -> Vec<String> {
|
||||
cmd.iter().map(std::string::ToString::to_string).collect()
|
||||
}
|
||||
|
||||
fn allow_all(_: &[String]) -> Decision {
|
||||
Decision::Allow
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_single_prefix_rule_from_raw_toml() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
|
||||
assert_eq!(
|
||||
parsed,
|
||||
RequirementsExecPolicyTomlRoot {
|
||||
exec_policy: RequirementsExecPolicyToml {
|
||||
prefix_rules: vec![RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("rm".to_string()),
|
||||
any_of: None,
|
||||
}],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
|
||||
justification: None,
|
||||
}],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_multiple_prefix_rules_from_raw_toml() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
{ pattern = [{ token = "git" }, { any_of = ["push", "commit"] }], decision = "prompt", justification = "review changes before push or commit" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
|
||||
assert_eq!(
|
||||
parsed,
|
||||
RequirementsExecPolicyTomlRoot {
|
||||
exec_policy: RequirementsExecPolicyToml {
|
||||
prefix_rules: vec![
|
||||
RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("rm".to_string()),
|
||||
any_of: None,
|
||||
}],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
|
||||
justification: None,
|
||||
},
|
||||
RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("git".to_string()),
|
||||
any_of: None,
|
||||
},
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: None,
|
||||
any_of: Some(vec!["push".to_string(), "commit".to_string()]),
|
||||
},
|
||||
],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Prompt),
|
||||
justification: Some("review changes before push or commit".to_string()),
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn converts_rules_toml_into_internal_policy_representation() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
let policy = parsed.exec_policy.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["rm", "-rf", "/tmp"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Forbidden,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["rm"]),
|
||||
decision: Decision::Forbidden,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn head_any_of_expands_into_multiple_program_rules() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ any_of = ["git", "hg"] }, { token = "status" }], decision = "prompt" },
|
||||
]
|
||||
"#;
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
let policy = parsed.exec_policy.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["git", "status"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["git", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["hg", "status"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["hg", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,9 +3,8 @@ use std::env;
use std::path::PathBuf;

use async_channel::unbounded;
pub use codex_app_server_protocol::AppInfo;
use codex_protocol::protocol::SandboxPolicy;
use serde::Deserialize;
use serde::Serialize;
use tokio_util::sync::CancellationToken;

use crate::AuthManager;
@@ -15,28 +14,13 @@ use crate::features::Feature;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp::with_codex_apps_mcp;
use crate::mcp_connection_manager::DEFAULT_STARTUP_TIMEOUT;
use crate::mcp_connection_manager::McpConnectionManager;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConnectorInfo {
    #[serde(rename = "id")]
    pub connector_id: String,
    #[serde(rename = "name")]
    pub connector_name: String,
    #[serde(default, rename = "description")]
    pub connector_description: Option<String>,
    #[serde(default, rename = "logo_url")]
    pub logo_url: Option<String>,
    #[serde(default, rename = "install_url")]
    pub install_url: Option<String>,
    #[serde(default)]
    pub is_accessible: bool,
}

pub async fn list_accessible_connectors_from_mcp_tools(
    config: &Config,
) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
) -> anyhow::Result<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Ok(Vec::new());
    }

@@ -72,6 +56,13 @@ pub async fn list_accessible_connectors_from_mcp_tools(
    )
    .await;

    if let Some(cfg) = mcp_servers.get(CODEX_APPS_MCP_SERVER_NAME) {
        let timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
        mcp_connection_manager
            .wait_for_server_ready(CODEX_APPS_MCP_SERVER_NAME, timeout)
            .await;
    }

    let tools = mcp_connection_manager.list_all_tools().await;
    cancel_token.cancel();

@@ -86,13 +77,17 @@ fn auth_manager_from_config(config: &Config) -> std::sync::Arc<AuthManager> {
    )
}

pub fn connector_display_label(connector: &ConnectorInfo) -> String {
    format_connector_label(&connector.connector_name, &connector.connector_id)
pub fn connector_display_label(connector: &AppInfo) -> String {
    format_connector_label(&connector.name, &connector.id)
}

pub fn connector_mention_slug(connector: &AppInfo) -> String {
    connector_name_slug(&connector_display_label(connector))
}

pub(crate) fn accessible_connectors_from_mcp_tools(
    mcp_tools: &HashMap<String, crate::mcp_connection_manager::ToolInfo>,
) -> Vec<ConnectorInfo> {
) -> Vec<AppInfo> {
    let tools = mcp_tools.values().filter_map(|tool| {
        if tool.server_name != CODEX_APPS_MCP_SERVER_NAME {
            return None;
@@ -105,34 +100,37 @@ pub(crate) fn accessible_connectors_from_mcp_tools(
}

pub fn merge_connectors(
    connectors: Vec<ConnectorInfo>,
    accessible_connectors: Vec<ConnectorInfo>,
) -> Vec<ConnectorInfo> {
    let mut merged: HashMap<String, ConnectorInfo> = connectors
    connectors: Vec<AppInfo>,
    accessible_connectors: Vec<AppInfo>,
) -> Vec<AppInfo> {
    let mut merged: HashMap<String, AppInfo> = connectors
        .into_iter()
        .map(|mut connector| {
            connector.is_accessible = false;
            (connector.connector_id.clone(), connector)
            (connector.id.clone(), connector)
        })
        .collect();

    for mut connector in accessible_connectors {
        connector.is_accessible = true;
        let connector_id = connector.connector_id.clone();
        let connector_id = connector.id.clone();
        if let Some(existing) = merged.get_mut(&connector_id) {
            existing.is_accessible = true;
            if existing.connector_name == existing.connector_id
                && connector.connector_name != connector.connector_id
            {
                existing.connector_name = connector.connector_name;
            if existing.name == existing.id && connector.name != connector.id {
                existing.name = connector.name;
            }
            if existing.connector_description.is_none() && connector.connector_description.is_some()
            {
                existing.connector_description = connector.connector_description;
            if existing.description.is_none() && connector.description.is_some() {
                existing.description = connector.description;
            }
            if existing.logo_url.is_none() && connector.logo_url.is_some() {
                existing.logo_url = connector.logo_url;
            }
            if existing.logo_url_dark.is_none() && connector.logo_url_dark.is_some() {
                existing.logo_url_dark = connector.logo_url_dark;
            }
            if existing.distribution_channel.is_none() && connector.distribution_channel.is_some() {
                existing.distribution_channel = connector.distribution_channel;
            }
        } else {
            merged.insert(connector_id, connector);
        }
@@ -141,23 +139,20 @@ pub fn merge_connectors(
    let mut merged = merged.into_values().collect::<Vec<_>>();
    for connector in &mut merged {
        if connector.install_url.is_none() {
            connector.install_url = Some(connector_install_url(
                &connector.connector_name,
                &connector.connector_id,
            ));
            connector.install_url = Some(connector_install_url(&connector.name, &connector.id));
        }
    }
    merged.sort_by(|left, right| {
        right
            .is_accessible
            .cmp(&left.is_accessible)
            .then_with(|| left.connector_name.cmp(&right.connector_name))
            .then_with(|| left.connector_id.cmp(&right.connector_id))
            .then_with(|| left.name.cmp(&right.name))
            .then_with(|| left.id.cmp(&right.id))
    });
    merged
}

fn collect_accessible_connectors<I>(tools: I) -> Vec<ConnectorInfo>
fn collect_accessible_connectors<I>(tools: I) -> Vec<AppInfo>
where
    I: IntoIterator<Item = (String, Option<String>)>,
{
@@ -172,14 +167,16 @@ where
        connectors.insert(connector_id, connector_name);
    }
    }
    let mut accessible: Vec<ConnectorInfo> = connectors
    let mut accessible: Vec<AppInfo> = connectors
        .into_iter()
        .map(|(connector_id, connector_name)| ConnectorInfo {
            install_url: Some(connector_install_url(&connector_name, &connector_id)),
            connector_id,
            connector_name,
            connector_description: None,
        .map(|(connector_id, connector_name)| AppInfo {
            id: connector_id.clone(),
            name: connector_name.clone(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some(connector_install_url(&connector_name, &connector_id)),
            is_accessible: true,
        })
        .collect();
@@ -187,8 +184,8 @@ where
        right
            .is_accessible
            .cmp(&left.is_accessible)
            .then_with(|| left.connector_name.cmp(&right.connector_name))
            .then_with(|| left.connector_id.cmp(&right.connector_id))
            .then_with(|| left.name.cmp(&right.name))
            .then_with(|| left.id.cmp(&right.id))
    });
    accessible
}
@@ -205,7 +202,7 @@ pub fn connector_install_url(name: &str, connector_id: &str) -> String {
    format!("https://chatgpt.com/apps/{slug}/{connector_id}")
}

fn connector_name_slug(name: &str) -> String {
pub fn connector_name_slug(name: &str) -> String {
    let mut normalized = String::with_capacity(name.len());
    for character in name.chars() {
        if character.is_ascii_alphanumeric() {
@@ -95,6 +95,12 @@ pub fn originator() -> Originator {
    get_originator_value(None)
}

pub fn is_first_party_originator(originator_value: &str) -> bool {
    originator_value == DEFAULT_ORIGINATOR
        || originator_value == "codex_vscode"
        || originator_value.starts_with("Codex ")
}

pub fn get_codex_user_agent() -> String {
    let build_version = env!("CARGO_PKG_VERSION");
    let os_info = os_info::get();
@@ -185,6 +191,7 @@ fn is_sandboxed() -> bool {
mod tests {
    use super::*;
    use core_test_support::skip_if_no_network;
    use pretty_assertions::assert_eq;

    #[test]
    fn test_get_codex_user_agent() {
@@ -194,6 +201,15 @@ mod tests {
        assert!(user_agent.starts_with(&prefix));
    }

    #[test]
    fn is_first_party_originator_matches_known_values() {
        assert_eq!(is_first_party_originator(DEFAULT_ORIGINATOR), true);
        assert_eq!(is_first_party_originator("codex_vscode"), true);
        assert_eq!(is_first_party_originator("Codex Something Else"), true);
        assert_eq!(is_first_party_originator("codex_cli"), false);
        assert_eq!(is_first_party_originator("Other"), false);
    }

    #[tokio::test]
    async fn test_create_client_sets_default_headers() {
        skip_if_no_network!();

@@ -113,6 +113,9 @@ pub enum CodexErr {
     #[error("{0}")]
     UsageLimitReached(UsageLimitReachedError),

+    #[error("{0}")]
+    ModelCap(ModelCapError),
+
     #[error("{0}")]
     ResponseStreamFailed(ResponseStreamFailed),

@@ -205,7 +208,8 @@ impl CodexErr {
             | CodexErr::AgentLimitReached { .. }
             | CodexErr::Spawn
             | CodexErr::SessionConfiguredNotFirstEvent
-            | CodexErr::UsageLimitReached(_) => false,
+            | CodexErr::UsageLimitReached(_)
+            | CodexErr::ModelCap(_) => false,
             CodexErr::Stream(..)
             | CodexErr::Timeout
             | CodexErr::UnexpectedStatus(_)
@@ -394,6 +398,30 @@ impl std::fmt::Display for UsageLimitReachedError {
     }
 }

+#[derive(Debug)]
+pub struct ModelCapError {
+    pub(crate) model: String,
+    pub(crate) reset_after_seconds: Option<u64>,
+}
+
+impl std::fmt::Display for ModelCapError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut message = format!(
+            "Model {} is at capacity. Please try a different model.",
+            self.model
+        );
+        if let Some(seconds) = self.reset_after_seconds {
+            message.push_str(&format!(
+                " Try again in {}.",
+                format_duration_short(seconds)
+            ));
+        } else {
+            message.push_str(" Try again later.");
+        }
+        write!(f, "{message}")
+    }
+}
+
 fn retry_suffix(resets_at: Option<&DateTime<Utc>>) -> String {
     if let Some(resets_at) = resets_at {
         let formatted = format_retry_timestamp(resets_at);
@@ -425,6 +453,18 @@ fn format_retry_timestamp(resets_at: &DateTime<Utc>) -> String {
     }
 }

+fn format_duration_short(seconds: u64) -> String {
+    if seconds < 60 {
+        "less than a minute".to_string()
+    } else if seconds < 3600 {
+        format!("{}m", seconds / 60)
+    } else if seconds < 86_400 {
+        format!("{}h", seconds / 3600)
+    } else {
+        format!("{}d", seconds / 86_400)
+    }
+}
+
 fn day_suffix(day: u32) -> &'static str {
     match day {
         11..=13 => "th",
@@ -488,6 +528,10 @@ impl CodexErr {
             CodexErr::UsageLimitReached(_)
             | CodexErr::QuotaExceeded
             | CodexErr::UsageNotIncluded => CodexErrorInfo::UsageLimitExceeded,
+            CodexErr::ModelCap(err) => CodexErrorInfo::ModelCap {
+                model: err.model.clone(),
+                reset_after_seconds: err.reset_after_seconds,
+            },
             CodexErr::RetryLimit(_) => CodexErrorInfo::ResponseTooManyFailedAttempts {
                 http_status_code: self.http_status_code_value(),
             },
@@ -631,6 +675,45 @@ mod tests {
         );
     }

+    #[test]
+    fn model_cap_error_formats_message() {
+        let err = ModelCapError {
+            model: "boomslang".to_string(),
+            reset_after_seconds: Some(120),
+        };
+        assert_eq!(
+            err.to_string(),
+            "Model boomslang is at capacity. Please try a different model. Try again in 2m."
+        );
+    }
+
+    #[test]
+    fn model_cap_error_formats_message_without_reset() {
+        let err = ModelCapError {
+            model: "boomslang".to_string(),
+            reset_after_seconds: None,
+        };
+        assert_eq!(
+            err.to_string(),
+            "Model boomslang is at capacity. Please try a different model. Try again later."
+        );
+    }
+
+    #[test]
+    fn model_cap_error_maps_to_protocol() {
+        let err = CodexErr::ModelCap(ModelCapError {
+            model: "boomslang".to_string(),
+            reset_after_seconds: Some(30),
+        });
+        assert_eq!(
+            err.to_codex_protocol_error(),
+            CodexErrorInfo::ModelCap {
+                model: "boomslang".to_string(),
+                reset_after_seconds: Some(30),
+            }
+        );
+    }
+
     #[test]
     fn sandbox_denied_uses_aggregated_output_when_stderr_empty() {
         let output = ExecToolCallOutput {
@@ -21,6 +21,7 @@ use crate::instructions::SkillInstructions;
 use crate::instructions::UserInstructions;
 use crate::session_prefix::is_session_prefix;
 use crate::user_shell_command::is_user_shell_command_text;
+use crate::web_search::web_search_action_detail;

 fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
     if UserInstructions::is_user_instructions(message)
@@ -127,14 +128,17 @@ pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
                 raw_content,
             }))
         }
-        ResponseItem::WebSearchCall {
-            id,
-            action: WebSearchAction::Search { query },
-            ..
-        } => Some(TurnItem::WebSearch(WebSearchItem {
-            id: id.clone().unwrap_or_default(),
-            query: query.clone().unwrap_or_default(),
-        })),
+        ResponseItem::WebSearchCall { id, action, .. } => {
+            let (action, query) = match action {
+                Some(action) => (action.clone(), web_search_action_detail(action)),
+                None => (WebSearchAction::Other, String::new()),
+            };
+            Some(TurnItem::WebSearch(WebSearchItem {
+                id: id.clone().unwrap_or_default(),
+                query,
+                action,
+            }))
+        }
         _ => None,
     }
 }
@@ -144,6 +148,7 @@ mod tests {
     use super::parse_turn_item;
     use codex_protocol::items::AgentMessageContent;
     use codex_protocol::items::TurnItem;
+    use codex_protocol::items::WebSearchItem;
     use codex_protocol::models::ContentItem;
     use codex_protocol::models::ReasoningItemContent;
     use codex_protocol::models::ReasoningItemReasoningSummary;
@@ -419,18 +424,102 @@ mod tests {
         let item = ResponseItem::WebSearchCall {
             id: Some("ws_1".to_string()),
             status: Some("completed".to_string()),
-            action: WebSearchAction::Search {
+            action: Some(WebSearchAction::Search {
                 query: Some("weather".to_string()),
-            },
+            }),
         };

         let turn_item = parse_turn_item(&item).expect("expected web search turn item");

         match turn_item {
-            TurnItem::WebSearch(search) => {
-                assert_eq!(search.id, "ws_1");
-                assert_eq!(search.query, "weather");
-            }
+            TurnItem::WebSearch(search) => assert_eq!(
+                search,
+                WebSearchItem {
+                    id: "ws_1".to_string(),
+                    query: "weather".to_string(),
+                    action: WebSearchAction::Search {
+                        query: Some("weather".to_string()),
+                    },
+                }
+            ),
             other => panic!("expected TurnItem::WebSearch, got {other:?}"),
         }
     }

+    #[test]
+    fn parses_web_search_open_page_call() {
+        let item = ResponseItem::WebSearchCall {
+            id: Some("ws_open".to_string()),
+            status: Some("completed".to_string()),
+            action: Some(WebSearchAction::OpenPage {
+                url: Some("https://example.com".to_string()),
+            }),
+        };
+
+        let turn_item = parse_turn_item(&item).expect("expected web search turn item");
+
+        match turn_item {
+            TurnItem::WebSearch(search) => assert_eq!(
+                search,
+                WebSearchItem {
+                    id: "ws_open".to_string(),
+                    query: "https://example.com".to_string(),
+                    action: WebSearchAction::OpenPage {
+                        url: Some("https://example.com".to_string()),
+                    },
+                }
+            ),
+            other => panic!("expected TurnItem::WebSearch, got {other:?}"),
+        }
+    }
+
+    #[test]
+    fn parses_web_search_find_in_page_call() {
+        let item = ResponseItem::WebSearchCall {
+            id: Some("ws_find".to_string()),
+            status: Some("completed".to_string()),
+            action: Some(WebSearchAction::FindInPage {
+                url: Some("https://example.com".to_string()),
+                pattern: Some("needle".to_string()),
+            }),
+        };
+
+        let turn_item = parse_turn_item(&item).expect("expected web search turn item");
+
+        match turn_item {
+            TurnItem::WebSearch(search) => assert_eq!(
+                search,
+                WebSearchItem {
+                    id: "ws_find".to_string(),
+                    query: "'needle' in https://example.com".to_string(),
+                    action: WebSearchAction::FindInPage {
+                        url: Some("https://example.com".to_string()),
+                        pattern: Some("needle".to_string()),
+                    },
+                }
+            ),
+            other => panic!("expected TurnItem::WebSearch, got {other:?}"),
+        }
+    }
+
+    #[test]
+    fn parses_partial_web_search_call_without_action_as_other() {
+        let item = ResponseItem::WebSearchCall {
+            id: Some("ws_partial".to_string()),
+            status: Some("in_progress".to_string()),
+            action: None,
+        };
+
+        let turn_item = parse_turn_item(&item).expect("expected web search turn item");
+        match turn_item {
+            TurnItem::WebSearch(search) => assert_eq!(
+                search,
+                WebSearchItem {
+                    id: "ws_partial".to_string(),
+                    query: String::new(),
+                    action: WebSearchAction::Other,
+                }
+            ),
+            other => panic!("expected TurnItem::WebSearch, got {other:?}"),
+        }
+    }
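The tests above pin down the `query` string for each action shape. A self-contained sketch of the mapping they imply (inferred from the expectations; the real `web_search_action_detail` helper lives in `crate::web_search` and is not shown in this diff, so treat the exact formatting as an assumption):

// Local stand-in enum so the sketch compiles on its own.
enum Action {
    Search { query: Option<String> },
    OpenPage { url: Option<String> },
    FindInPage { url: Option<String>, pattern: Option<String> },
}

fn detail_sketch(action: &Action) -> String {
    match action {
        // Search: surface the query itself, e.g. "weather".
        Action::Search { query } => query.clone().unwrap_or_default(),
        // OpenPage: surface the URL, e.g. "https://example.com".
        Action::OpenPage { url } => url.clone().unwrap_or_default(),
        // FindInPage: "'<pattern>' in <url>".
        Action::FindInPage { url, pattern } => format!(
            "'{}' in {}",
            pattern.clone().unwrap_or_default(),
            url.clone().unwrap_or_default()
        ),
    }
}

fn main() {
    let action = Action::FindInPage {
        url: Some("https://example.com".to_string()),
        pattern: Some("needle".to_string()),
    };
    assert_eq!(detail_sketch(&action), "'needle' in https://example.com");
}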
@@ -64,6 +64,7 @@ pub struct ExecParams {
     pub expiration: ExecExpiration,
     pub env: HashMap<String, String>,
     pub sandbox_permissions: SandboxPermissions,
+    pub windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
     pub justification: Option<String>,
     pub arg0: Option<String>,
 }
@@ -141,11 +142,15 @@ pub async fn process_exec_tool_call(
     codex_linux_sandbox_exe: &Option<PathBuf>,
     stdout_stream: Option<StdoutStream>,
 ) -> Result<ExecToolCallOutput> {
+    let windows_sandbox_level = params.windows_sandbox_level;
     let sandbox_type = match &sandbox_policy {
         SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
             SandboxType::None
         }
-        _ => get_platform_sandbox().unwrap_or(SandboxType::None),
+        _ => get_platform_sandbox(
+            windows_sandbox_level != codex_protocol::config_types::WindowsSandboxLevel::Disabled,
+        )
+        .unwrap_or(SandboxType::None),
     };
     tracing::debug!("Sandbox type: {sandbox_type:?}");

@@ -155,6 +160,7 @@ pub async fn process_exec_tool_call(
         expiration,
         env,
         sandbox_permissions,
+        windows_sandbox_level,
         justification,
         arg0: _,
     } = params;
@@ -184,6 +190,7 @@ pub async fn process_exec_tool_call(
         sandbox_type,
         sandbox_cwd,
         codex_linux_sandbox_exe.as_ref(),
+        windows_sandbox_level,
     )
     .map_err(CodexErr::from)?;

@@ -202,6 +209,7 @@ pub(crate) async fn execute_exec_env(
         env,
         expiration,
         sandbox,
+        windows_sandbox_level,
         sandbox_permissions,
         justification,
         arg0,
@@ -213,6 +221,7 @@ pub(crate) async fn execute_exec_env(
         expiration,
         env,
         sandbox_permissions,
+        windows_sandbox_level,
         justification,
         arg0,
     };
@@ -223,13 +232,79 @@ pub(crate) async fn execute_exec_env(
     finalize_exec_result(raw_output_result, sandbox, duration)
 }

+#[cfg(target_os = "windows")]
+fn extract_create_process_as_user_error_code(err: &str) -> Option<String> {
+    let marker = "CreateProcessAsUserW failed: ";
+    let start = err.find(marker)? + marker.len();
+    let tail = &err[start..];
+    let digits: String = tail.chars().take_while(char::is_ascii_digit).collect();
+    if digits.is_empty() {
+        None
+    } else {
+        Some(digits)
+    }
+}
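A self-contained demo of the marker parse above; the failure string is illustrative:

fn extract_code(err: &str) -> Option<String> {
    let marker = "CreateProcessAsUserW failed: ";
    let start = err.find(marker)? + marker.len();
    // Collect the decimal digits immediately after the marker.
    let digits: String = err[start..]
        .chars()
        .take_while(char::is_ascii_digit)
        .collect();
    (!digits.is_empty()).then_some(digits)
}

fn main() {
    assert_eq!(
        extract_code("spawn error: CreateProcessAsUserW failed: 5 (Access is denied)"),
        Some("5".to_string())
    );
    assert_eq!(extract_code("unrelated error"), None);
}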
+#[cfg(target_os = "windows")]
+fn windowsapps_path_kind(path: &str) -> &'static str {
+    let lower = path.to_ascii_lowercase();
+    if lower.contains("\\program files\\windowsapps\\") {
+        return "windowsapps_package";
+    }
+    if lower.contains("\\appdata\\local\\microsoft\\windowsapps\\") {
+        return "windowsapps_alias";
+    }
+    if lower.contains("\\windowsapps\\") {
+        return "windowsapps_other";
+    }
+    "other"
+}
+
+#[cfg(target_os = "windows")]
+fn record_windows_sandbox_spawn_failure(
+    command_path: Option<&str>,
+    windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
+    err: &str,
+) {
+    let Some(error_code) = extract_create_process_as_user_error_code(err) else {
+        return;
+    };
+    let path = command_path.unwrap_or("unknown");
+    let exe = Path::new(path)
+        .file_name()
+        .and_then(|name| name.to_str())
+        .unwrap_or("unknown")
+        .to_ascii_lowercase();
+    let path_kind = windowsapps_path_kind(path);
+    let level = if matches!(
+        windows_sandbox_level,
+        codex_protocol::config_types::WindowsSandboxLevel::Elevated
+    ) {
+        "elevated"
+    } else {
+        "legacy"
+    };
+    if let Some(metrics) = codex_otel::metrics::global() {
+        let _ = metrics.counter(
+            "codex.windows_sandbox.createprocessasuserw_failed",
+            1,
+            &[
+                ("error_code", error_code.as_str()),
+                ("path_kind", path_kind),
+                ("exe", exe.as_str()),
+                ("level", level),
+            ],
+        );
+    }
+}
+
 #[cfg(target_os = "windows")]
 async fn exec_windows_sandbox(
     params: ExecParams,
     sandbox_policy: &SandboxPolicy,
 ) -> Result<RawExecToolCallOutput> {
     use crate::config::find_codex_home;
-    use crate::safety::is_windows_elevated_sandbox_enabled;
+    use codex_protocol::config_types::WindowsSandboxLevel;
     use codex_windows_sandbox::run_windows_sandbox_capture;
     use codex_windows_sandbox::run_windows_sandbox_capture_elevated;

@@ -238,6 +313,7 @@ async fn exec_windows_sandbox(
         cwd,
         env,
         expiration,
+        windows_sandbox_level,
         ..
     } = params;
     // TODO(iceweasel-oai): run_windows_sandbox_capture should support all
@@ -255,7 +331,9 @@ async fn exec_windows_sandbox(
             "windows sandbox: failed to resolve codex_home: {err}"
         )))
     })?;
-    let use_elevated = is_windows_elevated_sandbox_enabled();
+    let command_path = command.first().cloned();
+    let sandbox_level = windows_sandbox_level;
+    let use_elevated = matches!(sandbox_level, WindowsSandboxLevel::Elevated);
     let spawn_res = tokio::task::spawn_blocking(move || {
         if use_elevated {
             run_windows_sandbox_capture_elevated(
@@ -284,6 +362,11 @@ async fn exec_windows_sandbox(
     let capture = match spawn_res {
         Ok(Ok(v)) => v,
         Ok(Err(err)) => {
+            record_windows_sandbox_spawn_failure(
+                command_path.as_deref(),
+                sandbox_level,
+                &err.to_string(),
+            );
             return Err(CodexErr::Io(io::Error::other(format!(
                 "windows sandbox: {err}"
             ))));
@@ -312,20 +395,7 @@ async fn exec_windows_sandbox(
         text: stderr_text,
         truncated_after_lines: None,
     };
-    // Best-effort aggregate: stdout then stderr (capped).
-    let mut aggregated = Vec::with_capacity(
-        stdout
-            .text
-            .len()
-            .saturating_add(stderr.text.len())
-            .min(EXEC_OUTPUT_MAX_BYTES),
-    );
-    append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
-    append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES);
-    let aggregated_output = StreamOutput {
-        text: aggregated,
-        truncated_after_lines: None,
-    };
+    let aggregated_output = aggregate_output(&stdout, &stderr);

     Ok(RawExecToolCallOutput {
         exit_status,
@@ -519,6 +589,39 @@ fn append_capped(dst: &mut Vec<u8>, src: &[u8], max_bytes: usize) {
     dst.extend_from_slice(&src[..take]);
 }

+fn aggregate_output(
+    stdout: &StreamOutput<Vec<u8>>,
+    stderr: &StreamOutput<Vec<u8>>,
+) -> StreamOutput<Vec<u8>> {
+    let total_len = stdout.text.len().saturating_add(stderr.text.len());
+    let max_bytes = EXEC_OUTPUT_MAX_BYTES;
+    let mut aggregated = Vec::with_capacity(total_len.min(max_bytes));
+
+    if total_len <= max_bytes {
+        aggregated.extend_from_slice(&stdout.text);
+        aggregated.extend_from_slice(&stderr.text);
+        return StreamOutput {
+            text: aggregated,
+            truncated_after_lines: None,
+        };
+    }
+
+    // Under contention, reserve 1/3 for stdout and 2/3 for stderr; rebalance unused stderr to stdout.
+    let want_stdout = stdout.text.len().min(max_bytes / 3);
+    let want_stderr = stderr.text.len();
+    let stderr_take = want_stderr.min(max_bytes.saturating_sub(want_stdout));
+    let remaining = max_bytes.saturating_sub(want_stdout + stderr_take);
+    let stdout_take = want_stdout + remaining.min(stdout.text.len().saturating_sub(want_stdout));
+
+    aggregated.extend_from_slice(&stdout.text[..stdout_take]);
+    aggregated.extend_from_slice(&stderr.text[..stderr_take]);
+
+    StreamOutput {
+        text: aggregated,
+        truncated_after_lines: None,
+    }
+}
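A standalone worked example of the contention arithmetic above, using a tiny illustrative cap (the real EXEC_OUTPUT_MAX_BYTES is much larger):

fn main() {
    const MAX: usize = 9;
    // Both streams overflow the cap: stdout keeps 1/3, stderr gets the rest.
    let (stdout_len, stderr_len) = (9usize, 9usize);
    let want_stdout = stdout_len.min(MAX / 3); // 3
    let stderr_take = stderr_len.min(MAX - want_stdout); // 6
    let remaining = MAX - want_stdout - stderr_take; // 0
    let stdout_take = want_stdout + remaining.min(stdout_len - want_stdout); // 3
    assert_eq!((stdout_take, stderr_take), (3, 6));

    // With a short stderr (1 byte), the unused budget flows back to stdout.
    let (stdout_len, stderr_len) = (9usize, 1usize);
    let want_stdout = stdout_len.min(MAX / 3); // 3
    let stderr_take = stderr_len.min(MAX - want_stdout); // 1
    let remaining = MAX - want_stdout - stderr_take; // 5
    let stdout_take = want_stdout + remaining.min(stdout_len - want_stdout); // 8
    assert_eq!((stdout_take, stderr_take), (8, 1));
}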
 #[derive(Clone, Debug)]
 pub struct ExecToolCallOutput {
     pub exit_code: i32,
@@ -564,6 +667,7 @@ async fn exec(
         env,
         arg0,
         expiration,
+        windows_sandbox_level: _,
         ..
     } = params;

@@ -683,20 +787,7 @@ async fn consume_truncated_output(
         Duration::from_millis(IO_DRAIN_TIMEOUT_MS),
     )
     .await?;
-    // Best-effort aggregate: stdout then stderr (capped).
-    let mut aggregated = Vec::with_capacity(
-        stdout
-            .text
-            .len()
-            .saturating_add(stderr.text.len())
-            .min(EXEC_OUTPUT_MAX_BYTES),
-    );
-    append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
-    append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES * 2);
-    let aggregated_output = StreamOutput {
-        text: aggregated,
-        truncated_after_lines: None,
-    };
+    let aggregated_output = aggregate_output(&stdout, &stderr);

     Ok(RawExecToolCallOutput {
         exit_status,
@@ -771,6 +862,7 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use pretty_assertions::assert_eq;
     use std::time::Duration;
     use tokio::io::AsyncWriteExt;

@@ -846,6 +938,85 @@ mod tests {
         assert_eq!(out.text.len(), EXEC_OUTPUT_MAX_BYTES);
     }

+    #[test]
+    fn aggregate_output_prefers_stderr_on_contention() {
+        let stdout = StreamOutput {
+            text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
+            truncated_after_lines: None,
+        };
+        let stderr = StreamOutput {
+            text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
+            truncated_after_lines: None,
+        };
+
+        let aggregated = aggregate_output(&stdout, &stderr);
+        let stdout_cap = EXEC_OUTPUT_MAX_BYTES / 3;
+        let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_cap);
+
+        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
+        assert_eq!(aggregated.text[..stdout_cap], vec![b'a'; stdout_cap]);
+        assert_eq!(aggregated.text[stdout_cap..], vec![b'b'; stderr_cap]);
+    }
+
+    #[test]
+    fn aggregate_output_fills_remaining_capacity_with_stderr() {
+        let stdout_len = EXEC_OUTPUT_MAX_BYTES / 10;
+        let stdout = StreamOutput {
+            text: vec![b'a'; stdout_len],
+            truncated_after_lines: None,
+        };
+        let stderr = StreamOutput {
+            text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
+            truncated_after_lines: None,
+        };
+
+        let aggregated = aggregate_output(&stdout, &stderr);
+        let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_len);
+
+        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
+        assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
+        assert_eq!(aggregated.text[stdout_len..], vec![b'b'; stderr_cap]);
+    }
+
+    #[test]
+    fn aggregate_output_rebalances_when_stderr_is_small() {
+        let stdout = StreamOutput {
+            text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
+            truncated_after_lines: None,
+        };
+        let stderr = StreamOutput {
+            text: vec![b'b'; 1],
+            truncated_after_lines: None,
+        };
+
+        let aggregated = aggregate_output(&stdout, &stderr);
+        let stdout_len = EXEC_OUTPUT_MAX_BYTES.saturating_sub(1);
+
+        assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
+        assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
+        assert_eq!(aggregated.text[stdout_len..], vec![b'b'; 1]);
+    }
+
+    #[test]
+    fn aggregate_output_keeps_stdout_then_stderr_when_under_cap() {
+        let stdout = StreamOutput {
+            text: vec![b'a'; 4],
+            truncated_after_lines: None,
+        };
+        let stderr = StreamOutput {
+            text: vec![b'b'; 3],
+            truncated_after_lines: None,
+        };
+
+        let aggregated = aggregate_output(&stdout, &stderr);
+        let mut expected = Vec::new();
+        expected.extend_from_slice(&stdout.text);
+        expected.extend_from_slice(&stderr.text);
+
+        assert_eq!(aggregated.text, expected);
+        assert_eq!(aggregated.truncated_after_lines, None);
+    }
+
     #[cfg(unix)]
     #[test]
     fn sandbox_detection_flags_sigsys_exit_code() {
@@ -878,6 +1049,7 @@ mod tests {
             expiration: 500.into(),
             env,
             sandbox_permissions: SandboxPermissions::UseDefault,
+            windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
             justification: None,
             arg0: None,
         };
@@ -923,6 +1095,7 @@ mod tests {
             expiration: ExecExpiration::Cancellation(cancel_token),
            env,
            sandbox_permissions: SandboxPermissions::UseDefault,
+            windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
            justification: None,
            arg0: None,
        };
@@ -87,6 +87,15 @@ pub(crate) struct ExecPolicyManager {
     policy: ArcSwap<Policy>,
 }

+pub(crate) struct ExecApprovalRequest<'a> {
+    pub(crate) features: &'a Features,
+    pub(crate) command: &'a [String],
+    pub(crate) approval_policy: AskForApproval,
+    pub(crate) sandbox_policy: &'a SandboxPolicy,
+    pub(crate) sandbox_permissions: SandboxPermissions,
+    pub(crate) prefix_rule: Option<Vec<String>>,
+}
+
 impl ExecPolicyManager {
     pub(crate) fn new(policy: Arc<Policy>) -> Self {
         Self {
@@ -112,12 +121,16 @@ impl ExecPolicyManager {

     pub(crate) async fn create_exec_approval_requirement_for_command(
         &self,
-        features: &Features,
-        command: &[String],
-        approval_policy: AskForApproval,
-        sandbox_policy: &SandboxPolicy,
-        sandbox_permissions: SandboxPermissions,
+        req: ExecApprovalRequest<'_>,
     ) -> ExecApprovalRequirement {
+        let ExecApprovalRequest {
+            features,
+            command,
+            approval_policy,
+            sandbox_policy,
+            sandbox_permissions,
+            prefix_rule,
+        } = req;
         let exec_policy = self.current();
         let commands =
             parse_shell_lc_plain_commands(command).unwrap_or_else(|| vec![command.to_vec()]);
@@ -131,6 +144,12 @@ impl ExecPolicyManager {
         };
         let evaluation = exec_policy.check_multiple(commands.iter(), &exec_policy_fallback);

+        let requested_amendment = derive_requested_execpolicy_amendment(
+            features,
+            prefix_rule.as_ref(),
+            &evaluation.matched_rules,
+        );
+
         match evaluation.decision {
             Decision::Forbidden => ExecApprovalRequirement::Forbidden {
                 reason: derive_forbidden_reason(command, &evaluation),
@@ -144,9 +163,11 @@ impl ExecPolicyManager {
                 ExecApprovalRequirement::NeedsApproval {
                     reason: derive_prompt_reason(command, &evaluation),
                     proposed_execpolicy_amendment: if features.enabled(Feature::ExecPolicy) {
-                        try_derive_execpolicy_amendment_for_prompt_rules(
-                            &evaluation.matched_rules,
-                        )
+                        requested_amendment.or_else(|| {
+                            try_derive_execpolicy_amendment_for_prompt_rules(
+                                &evaluation.matched_rules,
+                            )
+                        })
                     } else {
                         None
                     },
@@ -382,6 +403,30 @@ fn try_derive_execpolicy_amendment_for_allow_rules(
     })
 }

+fn derive_requested_execpolicy_amendment(
+    features: &Features,
+    prefix_rule: Option<&Vec<String>>,
+    matched_rules: &[RuleMatch],
+) -> Option<ExecPolicyAmendment> {
+    if !features.enabled(Feature::ExecPolicy) {
+        return None;
+    }
+
+    let prefix_rule = prefix_rule?;
+    if prefix_rule.is_empty() {
+        return None;
+    }
+
+    if matched_rules
+        .iter()
+        .any(|rule_match| is_policy_match(rule_match) && rule_match.decision() == Decision::Prompt)
+    {
+        return None;
+    }
+
+    Some(ExecPolicyAmendment::new(prefix_rule.clone()))
+}
+
 /// Only return a reason when a policy rule drove the prompt decision.
 fn derive_prompt_reason(command_args: &[String], evaluation: &Evaluation) -> Option<String> {
     let command = render_shlex_command(command_args);
@@ -756,13 +801,14 @@ prefix_rule(pattern=["rm"], decision="forbidden")

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &forbidden_script,
-            AskForApproval::OnRequest,
-            &SandboxPolicy::DangerFullAccess,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &forbidden_script,
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::DangerFullAccess,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -790,17 +836,18 @@ prefix_rule(

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &[
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &[
                 "rm".to_string(),
                 "-rf".to_string(),
                 "/some/important/folder".to_string(),
             ],
-            AskForApproval::OnRequest,
-            &SandboxPolicy::DangerFullAccess,
-            SandboxPermissions::UseDefault,
-        )
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::DangerFullAccess,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -823,13 +870,14 @@ prefix_rule(

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::OnRequest,
-            &SandboxPolicy::DangerFullAccess,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::DangerFullAccess,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -853,13 +901,14 @@ prefix_rule(

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::Never,
-            &SandboxPolicy::DangerFullAccess,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::Never,
+            sandbox_policy: &SandboxPolicy::DangerFullAccess,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -876,13 +925,14 @@ prefix_rule(

     let manager = ExecPolicyManager::default();
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::UnlessTrusted,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::UnlessTrusted,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -894,6 +944,40 @@ prefix_rule(
         );
     }

+    #[tokio::test]
+    async fn request_rule_uses_prefix_rule() {
+        let command = vec![
+            "cargo".to_string(),
+            "install".to_string(),
+            "cargo-insta".to_string(),
+        ];
+        let manager = ExecPolicyManager::default();
+        let mut features = Features::with_defaults();
+        features.enable(Feature::RequestRule);
+
+        let requirement = manager
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &command,
+                approval_policy: AskForApproval::OnRequest,
+                sandbox_policy: &SandboxPolicy::ReadOnly,
+                sandbox_permissions: SandboxPermissions::RequireEscalated,
+                prefix_rule: Some(vec!["cargo".to_string(), "install".to_string()]),
+            })
+            .await;
+
+        assert_eq!(
+            requirement,
+            ExecApprovalRequirement::NeedsApproval {
+                reason: None,
+                proposed_execpolicy_amendment: Some(ExecPolicyAmendment::new(vec![
+                    "cargo".to_string(),
+                    "install".to_string(),
+                ])),
+            }
+        );
+    }
+
     #[tokio::test]
     async fn heuristics_apply_when_other_commands_match_policy() {
         let policy_src = r#"prefix_rule(pattern=["apple"], decision="allow")"#;
@@ -910,13 +994,14 @@ prefix_rule(

     assert_eq!(
         ExecPolicyManager::new(policy)
-            .create_exec_approval_requirement_for_command(
-                &Features::with_defaults(),
-                &command,
-                AskForApproval::UnlessTrusted,
-                &SandboxPolicy::DangerFullAccess,
-                SandboxPermissions::UseDefault,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &Features::with_defaults(),
+                command: &command,
+                approval_policy: AskForApproval::UnlessTrusted,
+                sandbox_policy: &SandboxPolicy::DangerFullAccess,
+                sandbox_permissions: SandboxPermissions::UseDefault,
+                prefix_rule: None,
+            })
             .await,
         ExecApprovalRequirement::NeedsApproval {
             reason: None,
@@ -984,13 +1069,14 @@ prefix_rule(

     let manager = ExecPolicyManager::default();
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::UnlessTrusted,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::UnlessTrusted,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1011,13 +1097,14 @@ prefix_rule(

     let manager = ExecPolicyManager::default();
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &features,
-            &command,
-            AskForApproval::UnlessTrusted,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &features,
+            command: &command,
+            approval_policy: AskForApproval::UnlessTrusted,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1041,13 +1128,14 @@ prefix_rule(

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::OnRequest,
-            &SandboxPolicy::DangerFullAccess,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::DangerFullAccess,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1068,13 +1156,14 @@ prefix_rule(
     ];
     let manager = ExecPolicyManager::default();
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::UnlessTrusted,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::UnlessTrusted,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1106,13 +1195,14 @@ prefix_rule(

     assert_eq!(
         ExecPolicyManager::new(policy)
-            .create_exec_approval_requirement_for_command(
-                &Features::with_defaults(),
-                &command,
-                AskForApproval::UnlessTrusted,
-                &SandboxPolicy::ReadOnly,
-                SandboxPermissions::UseDefault,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &Features::with_defaults(),
+                command: &command,
+                approval_policy: AskForApproval::UnlessTrusted,
+                sandbox_policy: &SandboxPolicy::ReadOnly,
+                sandbox_permissions: SandboxPermissions::UseDefault,
+                prefix_rule: None,
+            })
             .await,
         ExecApprovalRequirement::NeedsApproval {
             reason: None,
@@ -1129,13 +1219,14 @@ prefix_rule(

     let manager = ExecPolicyManager::default();
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::OnRequest,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1159,13 +1250,14 @@ prefix_rule(

     let manager = ExecPolicyManager::new(policy);
     let requirement = manager
-        .create_exec_approval_requirement_for_command(
-            &Features::with_defaults(),
-            &command,
-            AskForApproval::OnRequest,
-            &SandboxPolicy::ReadOnly,
-            SandboxPermissions::UseDefault,
-        )
+        .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+            features: &Features::with_defaults(),
+            command: &command,
+            approval_policy: AskForApproval::OnRequest,
+            sandbox_policy: &SandboxPolicy::ReadOnly,
+            sandbox_permissions: SandboxPermissions::UseDefault,
+            prefix_rule: None,
+        })
         .await;

     assert_eq!(
@@ -1226,13 +1318,14 @@ prefix_rule(
     assert_eq!(
         expected_req,
         policy
-            .create_exec_approval_requirement_for_command(
-                &features,
-                &sneaky_command,
-                AskForApproval::OnRequest,
-                &SandboxPolicy::ReadOnly,
-                permissions,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &sneaky_command,
+                approval_policy: AskForApproval::OnRequest,
+                sandbox_policy: &SandboxPolicy::ReadOnly,
+                sandbox_permissions: permissions,
+                prefix_rule: None,
+            })
             .await,
         "{pwsh_approval_reason}"
     );
@@ -1249,13 +1342,14 @@ prefix_rule(
             ]))),
         },
         policy
-            .create_exec_approval_requirement_for_command(
-                &features,
-                &dangerous_command,
-                AskForApproval::OnRequest,
-                &SandboxPolicy::ReadOnly,
-                permissions,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &dangerous_command,
+                approval_policy: AskForApproval::OnRequest,
+                sandbox_policy: &SandboxPolicy::ReadOnly,
+                sandbox_permissions: permissions,
+                prefix_rule: None,
+            })
             .await,
         r#"On all platforms, a forbidden command should require approval
 (unless AskForApproval::Never is specified)."#
@@ -1268,13 +1362,14 @@ prefix_rule(
             reason: "`rm -rf /important/data` rejected: blocked by policy".to_string(),
         },
         policy
-            .create_exec_approval_requirement_for_command(
-                &features,
-                &dangerous_command,
-                AskForApproval::Never,
-                &SandboxPolicy::ReadOnly,
-                permissions,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &dangerous_command,
+                approval_policy: AskForApproval::Never,
+                sandbox_policy: &SandboxPolicy::ReadOnly,
+                sandbox_permissions: permissions,
+                prefix_rule: None,
+            })
            .await,
        r#"On all platforms, a forbidden command should require approval
 (unless AskForApproval::Never is specified)."#
@@ -5,14 +5,20 @@
 //! booleans through multiple types, call sites consult a single `Features`
 //! container attached to `Config`.

+use crate::config::CONFIG_TOML_FILE;
 use crate::config::Config;
 use crate::config::ConfigToml;
 use crate::config::profile::ConfigProfile;
+use crate::protocol::Event;
+use crate::protocol::EventMsg;
+use crate::protocol::WarningEvent;
 use codex_otel::OtelManager;
 use schemars::JsonSchema;
 use serde::Deserialize;
 use serde::Serialize;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
+use toml::Value as TomlValue;

 mod legacy;
 pub(crate) use legacy::LegacyFeatureToggles;
@@ -21,8 +27,8 @@ pub(crate) use legacy::legacy_feature_keys;
 /// High-level lifecycle stage for a feature.
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum Stage {
-    /// Closed beta features to be used while developing or within the company.
-    Beta,
+    /// Features that are still under development, not ready for external use
+    UnderDevelopment,
     /// Experimental features made available to users through the `/experimental` menu
     Experimental {
         name: &'static str,
@@ -38,14 +44,14 @@ pub enum Stage {
 }

 impl Stage {
-    pub fn beta_menu_name(self) -> Option<&'static str> {
+    pub fn experimental_menu_name(self) -> Option<&'static str> {
         match self {
             Stage::Experimental { name, .. } => Some(name),
             _ => None,
         }
     }

-    pub fn beta_menu_description(self) -> Option<&'static str> {
+    pub fn experimental_menu_description(self) -> Option<&'static str> {
         match self {
             Stage::Experimental {
                 menu_description, ..
@@ -54,7 +60,7 @@ impl Stage {
         }
     }

-    pub fn beta_announcement(self) -> Option<&'static str> {
+    pub fn experimental_announcement(self) -> Option<&'static str> {
         match self {
             Stage::Experimental { announcement, .. } => Some(announcement),
             _ => None,
@@ -83,6 +89,8 @@ pub enum Feature {
     WebSearchCached,
     /// Gate the execpolicy enforcement for shell/unified exec.
     ExecPolicy,
+    /// Allow the model to request approval and propose exec rules.
+    RequestRule,
     /// Enable Windows sandbox (restricted token) on Windows.
     WindowsSandbox,
     /// Use the elevated Windows sandbox pipeline (setup + runner).
@@ -93,6 +101,8 @@ pub enum Feature {
     RemoteModels,
     /// Experimental shell snapshotting.
     ShellSnapshot,
+    /// Persist rollout metadata to a local SQLite database.
+    Sqlite,
     /// Append additional AGENTS.md guidance to user instructions.
     ChildAgentsMd,
     /// Enforce UTF8 output in Powershell.
@@ -101,12 +111,18 @@ pub enum Feature {
     EnableRequestCompression,
     /// Enable collab tools.
     Collab,
-    /// Enable connectors (apps).
-    Connectors,
+    /// Enable apps.
+    Apps,
     /// Allow prompting and installing missing MCP dependencies.
     SkillMcpDependencyInstall,
+    /// Prompt for missing skill env var dependencies.
+    SkillEnvVarDependencyPrompt,
     /// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
     Steer,
     /// Enable collaboration modes (Plan, Code, Pair Programming, Execute).
     CollaborationModes,
+    /// Enable personality selection in the TUI.
+    Personality,
+    /// Use the Responses API WebSocket transport for OpenAI by default.
+    ResponsesWebsockets,
 }
@@ -136,6 +152,8 @@ impl Feature {
 pub struct LegacyFeatureUsage {
     pub alias: String,
     pub feature: Feature,
+    pub summary: String,
+    pub details: Option<String>,
 }

 /// Holds the effective set of enabled features.
@@ -192,9 +210,12 @@ impl Features {
     }

     pub fn record_legacy_usage_force(&mut self, alias: &str, feature: Feature) {
+        let (summary, details) = legacy_usage_notice(alias, feature);
         self.legacy_usages.insert(LegacyFeatureUsage {
             alias: alias.to_string(),
             feature,
+            summary,
+            details,
         });
     }

@@ -205,10 +226,8 @@ impl Features {
         self.record_legacy_usage_force(alias, feature);
     }

-    pub fn legacy_feature_usages(&self) -> impl Iterator<Item = (&str, Feature)> + '_ {
-        self.legacy_usages
-            .iter()
-            .map(|usage| (usage.alias.as_str(), usage.feature))
+    pub fn legacy_feature_usages(&self) -> impl Iterator<Item = &LegacyFeatureUsage> + '_ {
+        self.legacy_usages.iter()
     }

     pub fn emit_metrics(&self, otel: &OtelManager) {
@@ -229,6 +248,21 @@ impl Features {
     /// Apply a table of key -> bool toggles (e.g. from TOML).
     pub fn apply_map(&mut self, m: &BTreeMap<String, bool>) {
         for (k, v) in m {
+            match k.as_str() {
+                "web_search_request" => {
+                    self.record_legacy_usage_force(
+                        "features.web_search_request",
+                        Feature::WebSearchRequest,
+                    );
+                }
+                "web_search_cached" => {
+                    self.record_legacy_usage_force(
+                        "features.web_search_cached",
+                        Feature::WebSearchCached,
+                    );
+                }
+                _ => {}
+            }
             match feature_for_key(k) {
                 Some(feat) => {
                     if k != feat.key() {
@@ -289,6 +323,42 @@ impl Features {
     }
 }

+fn legacy_usage_notice(alias: &str, feature: Feature) -> (String, Option<String>) {
+    let canonical = feature.key();
+    match feature {
+        Feature::WebSearchRequest | Feature::WebSearchCached => {
+            let label = match alias {
+                "web_search" => "[features].web_search",
+                "tools.web_search" => "[tools].web_search",
+                "features.web_search_request" | "web_search_request" => {
+                    "[features].web_search_request"
+                }
+                "features.web_search_cached" | "web_search_cached" => {
+                    "[features].web_search_cached"
+                }
+                _ => alias,
+            };
+            let summary = format!("`{label}` is deprecated. Use `web_search` instead.");
+            (summary, Some(web_search_details().to_string()))
+        }
+        _ => {
+            let summary = format!("`{alias}` is deprecated. Use `[features].{canonical}` instead.");
+            let details = if alias == canonical {
+                None
+            } else {
+                Some(format!(
+                    "Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
+                ))
+            };
+            (summary, details)
+        }
+    }
+}
+
+fn web_search_details() -> &'static str {
+    "Set `web_search` to `\"live\"`, `\"cached\"`, or `\"disabled\"` in config.toml."
+}
+
 /// Keys accepted in `[features]` tables.
 fn feature_for_key(key: &str) -> Option<Feature> {
     for spec in FEATURES {
@@ -337,16 +407,16 @@ pub const FEATURES: &[FeatureSpec] = &[
     FeatureSpec {
         id: Feature::WebSearchRequest,
         key: "web_search_request",
-        stage: Stage::Stable,
+        stage: Stage::Deprecated,
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::WebSearchCached,
         key: "web_search_cached",
-        stage: Stage::Beta,
+        stage: Stage::Deprecated,
         default_enabled: false,
     },
-    // Beta program. Rendered in the `/experimental` menu for users.
+    // Experimental program. Rendered in the `/experimental` menu for users.
     FeatureSpec {
         id: Feature::UnifiedExec,
         key: "unified_exec",
@@ -367,46 +437,58 @@ pub const FEATURES: &[FeatureSpec] = &[
         },
         default_enabled: false,
     },
+    FeatureSpec {
+        id: Feature::Sqlite,
+        key: "sqlite",
+        stage: Stage::UnderDevelopment,
+        default_enabled: false,
+    },
     FeatureSpec {
         id: Feature::ChildAgentsMd,
         key: "child_agents_md",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::ApplyPatchFreeform,
         key: "apply_patch_freeform",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::ExecPolicy,
         key: "exec_policy",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: true,
     },
+    FeatureSpec {
+        id: Feature::RequestRule,
+        key: "request_rule",
+        stage: Stage::UnderDevelopment,
+        default_enabled: false,
+    },
     FeatureSpec {
         id: Feature::WindowsSandbox,
         key: "experimental_windows_sandbox",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::WindowsSandboxElevated,
         key: "elevated_windows_sandbox",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::RemoteCompaction,
         key: "remote_compaction",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: true,
     },
     FeatureSpec {
         id: Feature::RemoteModels,
         key: "remote_models",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: true,
     },
     FeatureSpec {
@@ -421,26 +503,42 @@ pub const FEATURES: &[FeatureSpec] = &[
         #[cfg(windows)]
         default_enabled: true,
         #[cfg(not(windows))]
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         #[cfg(not(windows))]
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::EnableRequestCompression,
         key: "enable_request_compression",
-        stage: Stage::Beta,
-        default_enabled: false,
+        stage: Stage::Stable,
+        default_enabled: true,
     },
     FeatureSpec {
         id: Feature::Collab,
         key: "collab",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
     FeatureSpec {
-        id: Feature::Connectors,
-        key: "connectors",
-        stage: Stage::Beta,
+        id: Feature::Apps,
+        key: "apps",
+        stage: Stage::Experimental {
+            name: "Apps",
+            menu_description: "Use a connected ChatGPT App using \"$\". Install Apps via /apps command. Restart Codex after enabling.",
+            announcement: "NEW: Use ChatGPT Apps (Connectors) in Codex via $ mentions. Enable in /experimental and restart Codex!",
+        },
         default_enabled: false,
     },
     FeatureSpec {
         id: Feature::SkillMcpDependencyInstall,
         key: "skill_mcp_dependency_install",
         stage: Stage::Stable,
         default_enabled: true,
     },
+    FeatureSpec {
+        id: Feature::SkillEnvVarDependencyPrompt,
+        key: "skill_env_var_dependency_prompt",
+        stage: Stage::UnderDevelopment,
+        default_enabled: false,
+    },
     FeatureSpec {
@@ -456,13 +554,70 @@ pub const FEATURES: &[FeatureSpec] = &[
     FeatureSpec {
         id: Feature::CollaborationModes,
         key: "collaboration_modes",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
+    FeatureSpec {
+        id: Feature::Personality,
+        key: "personality",
+        stage: Stage::UnderDevelopment,
+        default_enabled: false,
+    },
     FeatureSpec {
         id: Feature::ResponsesWebsockets,
         key: "responses_websockets",
-        stage: Stage::Beta,
+        stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
 ];

+/// Push a warning event if any under-development features are enabled.
+pub fn maybe_push_unstable_features_warning(
+    config: &Config,
+    post_session_configured_events: &mut Vec<Event>,
+) {
+    if config.suppress_unstable_features_warning {
+        return;
+    }
+
+    let mut under_development_feature_keys = Vec::new();
+    if let Some(table) = config
+        .config_layer_stack
+        .effective_config()
+        .get("features")
+        .and_then(TomlValue::as_table)
+    {
+        for (key, value) in table {
+            if value.as_bool() != Some(true) {
+                continue;
+            }
+            let Some(spec) = FEATURES.iter().find(|spec| spec.key == key.as_str()) else {
+                continue;
+            };
+            if !config.features.enabled(spec.id) {
+                continue;
+            }
+            if matches!(spec.stage, Stage::UnderDevelopment) {
+                under_development_feature_keys.push(spec.key.to_string());
+            }
+        }
+    }
+
+    if under_development_feature_keys.is_empty() {
+        return;
+    }
+
+    let under_development_feature_keys = under_development_feature_keys.join(", ");
+    let config_path = config
+        .codex_home
+        .join(CONFIG_TOML_FILE)
+        .display()
+        .to_string();
+    let message = format!(
+        "Under-development features enabled: {under_development_feature_keys}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {config_path}."
+    );
+    post_session_configured_events.push(Event {
+        id: "".to_owned(),
+        msg: EventMsg::Warning(WarningEvent { message }),
+    });
+}
@@ -9,6 +9,10 @@ struct Alias {
 }

 const ALIASES: &[Alias] = &[
+    Alias {
+        legacy_key: "connectors",
+        feature: Feature::Apps,
+    },
     Alias {
         legacy_key: "enable_experimental_windows_sandbox",
         feature: Feature::WindowsSandbox,
@@ -38,10 +38,12 @@ pub mod landlock;
 pub mod mcp;
 mod mcp_connection_manager;
 pub mod models_manager;
+mod transport_manager;
 pub use mcp_connection_manager::MCP_SANDBOX_STATE_CAPABILITY;
 pub use mcp_connection_manager::MCP_SANDBOX_STATE_METHOD;
 pub use mcp_connection_manager::SandboxState;
 mod mcp_tool_call;
+mod mentions;
 mod message_history;
 mod model_provider_info;
 pub mod parse_command;
@@ -49,6 +51,7 @@ pub mod path_utils;
 pub mod powershell;
 pub mod sandboxing;
 mod session_prefix;
+pub mod session_share;
 mod stream_events_utils;
 mod text_encoding;
 pub mod token_data;
@@ -69,6 +72,7 @@ mod event_mapping;
 pub mod review_format;
 pub mod review_prompts;
 mod thread_manager;
+pub mod web_search;
 pub use codex_protocol::protocol::InitialHistory;
 pub use thread_manager::NewThread;
 pub use thread_manager::ThreadManager;
@@ -90,6 +94,7 @@ pub mod shell;
 pub mod shell_snapshot;
 pub mod skills;
 pub mod spawn;
+pub mod state_db;
 pub mod terminal;
 mod tools;
 pub mod turn_diff_tracker;
@@ -98,6 +103,7 @@ pub use rollout::INTERACTIVE_SESSION_SOURCES;
 pub use rollout::RolloutRecorder;
 pub use rollout::SESSIONS_SUBDIR;
 pub use rollout::SessionMeta;
+pub use rollout::find_archived_thread_path_by_id_str;
 #[deprecated(note = "use find_thread_path_by_id_str")]
 pub use rollout::find_conversation_path_by_id_str;
 pub use rollout::find_thread_path_by_id_str;
@@ -108,6 +114,8 @@ pub use rollout::list::ThreadsPage;
 pub use rollout::list::parse_cursor;
 pub use rollout::list::read_head_for_summary;
 pub use rollout::list::read_session_meta_line;
+pub use rollout::rollout_date_parts;
+pub use transport_manager::TransportManager;
 mod function_tool;
 mod state;
 mod tasks;
@@ -123,9 +131,6 @@ pub use exec_policy::ExecPolicyError;
 pub use exec_policy::check_execpolicy_for_warnings;
 pub use exec_policy::load_exec_policy;
 pub use safety::get_platform_sandbox;
-pub use safety::is_windows_elevated_sandbox_enabled;
-pub use safety::set_windows_elevated_sandbox_enabled;
-pub use safety::set_windows_sandbox_enabled;
 pub use tools::spec::parse_tool_input_schema;
 // Re-export the protocol types from the standalone `codex-protocol` crate so existing
 // `codex_core::protocol::...` references continue to work across the workspace.
@@ -4,12 +4,53 @@ use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::determine_streamable_http_auth_status;
use codex_rmcp_client::supports_oauth_login;
use futures::future::join_all;
use tracing::warn;

use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;

#[derive(Debug, Clone)]
pub struct McpOAuthLoginConfig {
    pub url: String,
    pub http_headers: Option<HashMap<String, String>>,
    pub env_http_headers: Option<HashMap<String, String>>,
}

#[derive(Debug)]
pub enum McpOAuthLoginSupport {
    Supported(McpOAuthLoginConfig),
    Unsupported,
    Unknown(anyhow::Error),
}

pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
    let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var,
        http_headers,
        env_http_headers,
    } = transport
    else {
        return McpOAuthLoginSupport::Unsupported;
    };

    if bearer_token_env_var.is_some() {
        return McpOAuthLoginSupport::Unsupported;
    }

    match supports_oauth_login(url).await {
        Ok(true) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
            url: url.clone(),
            http_headers: http_headers.clone(),
            env_http_headers: env_http_headers.clone(),
        }),
        Ok(false) => McpOAuthLoginSupport::Unsupported,
        Err(err) => McpOAuthLoginSupport::Unknown(err),
    }
}
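
// A hedged usage sketch (caller code, not part of this change): a command that
// decides whether to start an OAuth flow might branch on the three-way result
// like this. `handle_login` and the log strings are illustrative assumptions.
async fn handle_login(transport: &McpServerTransportConfig) {
    match oauth_login_support(transport).await {
        // The server advertises OAuth support: start the login flow.
        McpOAuthLoginSupport::Supported(cfg) => {
            println!("starting OAuth login against {}", cfg.url);
        }
        // Stdio transports and bearer-token configs land here.
        McpOAuthLoginSupport::Unsupported => {
            println!("this server does not use OAuth login");
        }
        // The probe itself failed, so support is indeterminate.
        McpOAuthLoginSupport::Unknown(err) => {
            eprintln!("could not determine OAuth support: {err}");
        }
    }
}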

#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
    pub config: McpServerConfig,

@@ -1,7 +1,12 @@
pub mod auth;
mod skill_dependencies;

pub(crate) use skill_dependencies::maybe_prompt_and_install_mcp_dependencies;

use std::collections::HashMap;
use std::env;
use std::path::PathBuf;
use std::time::Duration;

use async_channel::unbounded;
use codex_protocol::protocol::McpListToolsResponseEvent;
@@ -21,7 +26,7 @@ use crate::mcp_connection_manager::SandboxState;

const MCP_TOOL_NAME_PREFIX: &str = "mcp";
const MCP_TOOL_NAME_DELIMITER: &str = "__";
-pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps_mcp";
+pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";

fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
@@ -93,10 +98,11 @@ fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> Mc
        },
        enabled: true,
        disabled_reason: None,
-       startup_timeout_sec: None,
+       startup_timeout_sec: Some(Duration::from_secs(30)),
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
        scopes: None,
    }
}

@@ -123,7 +129,7 @@ pub(crate) fn effective_mcp_servers(
) -> HashMap<String, McpServerConfig> {
    with_codex_apps_mcp(
        config.mcp_servers.get().clone(),
-       config.features.enabled(Feature::Connectors),
+       config.features.enabled(Feature::Apps),
        auth,
        config,
    )

codex-rs/core/src/mcp/skill_dependencies.rs (new file, 519 lines)
@@ -0,0 +1,519 @@
use std::collections::HashMap;
use std::collections::HashSet;

use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_protocol::request_user_input::RequestUserInputQuestion;
use codex_protocol::request_user_input::RequestUserInputQuestionOption;
use codex_protocol::request_user_input::RequestUserInputResponse;
use codex_rmcp_client::perform_oauth_login;
use tokio_util::sync::CancellationToken;
use tracing::warn;

use super::auth::McpOAuthLoginSupport;
use super::auth::oauth_login_support;
use super::effective_mcp_servers;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::config::Config;
use crate::config::edit::ConfigEditsBuilder;
use crate::config::load_global_mcp_servers;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
use crate::default_client::is_first_party_originator;
use crate::default_client::originator;
use crate::features::Feature;
use crate::skills::SkillMetadata;
use crate::skills::model::SkillToolDependency;

const SKILL_MCP_DEPENDENCY_PROMPT_ID: &str = "skill_mcp_dependency_install";
const MCP_DEPENDENCY_OPTION_INSTALL: &str = "Install";
const MCP_DEPENDENCY_OPTION_SKIP: &str = "Continue anyway";

fn is_full_access_mode(turn_context: &TurnContext) -> bool {
    matches!(turn_context.approval_policy, AskForApproval::Never)
        && matches!(
            turn_context.sandbox_policy,
            SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. }
        )
}

fn format_missing_mcp_dependencies(missing: &HashMap<String, McpServerConfig>) -> String {
    let mut names = missing.keys().cloned().collect::<Vec<_>>();
    names.sort();
    names.join(", ")
}

async fn filter_prompted_mcp_dependencies(
    sess: &Session,
    missing: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let prompted = sess.mcp_dependency_prompted().await;
    if prompted.is_empty() {
        return missing.clone();
    }

    missing
        .iter()
        .filter(|(name, config)| !prompted.contains(&canonical_mcp_server_key(name, config)))
        .map(|(name, config)| (name.clone(), config.clone()))
        .collect()
}

async fn should_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    missing: &HashMap<String, McpServerConfig>,
    cancellation_token: &CancellationToken,
) -> bool {
    if is_full_access_mode(turn_context) {
        return true;
    }

    let server_list = format_missing_mcp_dependencies(missing);
    let question = RequestUserInputQuestion {
        id: SKILL_MCP_DEPENDENCY_PROMPT_ID.to_string(),
        header: "Install MCP servers?".to_string(),
        question: format!(
            "The following MCP servers are required by the selected skills but are not installed yet: {server_list}. Install them now?"
        ),
        is_other: false,
        is_secret: false,
        options: Some(vec![
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_INSTALL.to_string(),
                description:
                    "Install and enable the missing MCP servers in your global config."
                        .to_string(),
            },
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_SKIP.to_string(),
                description: "Skip installation for now and do not show again for these MCP servers in this session."
                    .to_string(),
            },
        ]),
    };
    let args = RequestUserInputArgs {
        questions: vec![question],
    };
    let sub_id = &turn_context.sub_id;
    let call_id = format!("mcp-deps-{sub_id}");
    let response_fut = sess.request_user_input(turn_context, call_id, args);
    let response = tokio::select! {
        biased;
        _ = cancellation_token.cancelled() => {
            let empty = RequestUserInputResponse {
                answers: HashMap::new(),
            };
            sess.notify_user_input_response(sub_id, empty.clone()).await;
            empty
        }
        response = response_fut => response.unwrap_or_else(|| RequestUserInputResponse {
            answers: HashMap::new(),
        }),
    };

    let install = response
        .answers
        .get(SKILL_MCP_DEPENDENCY_PROMPT_ID)
        .is_some_and(|answer| {
            answer
                .answers
                .iter()
                .any(|entry| entry == MCP_DEPENDENCY_OPTION_INSTALL)
        });

    let prompted_keys = missing
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config));
    sess.record_mcp_dependency_prompted(prompted_keys).await;

    install
}

pub(crate) async fn maybe_prompt_and_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    cancellation_token: &CancellationToken,
    mentioned_skills: &[SkillMetadata],
) {
    let originator_value = originator().value;
    if !is_first_party_originator(originator_value.as_str()) {
        // Only support first-party clients for now.
        return;
    }

    let config = turn_context.client.config();
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let unprompted_missing = filter_prompted_mcp_dependencies(sess, &missing).await;
    if unprompted_missing.is_empty() {
        return;
    }

    if should_install_mcp_dependencies(sess, turn_context, &unprompted_missing, cancellation_token)
        .await
    {
        maybe_install_mcp_dependencies(sess, turn_context, config.as_ref(), mentioned_skills).await;
    }
}

pub(crate) async fn maybe_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    config: &Config,
    mentioned_skills: &[SkillMetadata],
) {
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let codex_home = config.codex_home.clone();
    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let mut servers = match load_global_mcp_servers(&codex_home).await {
        Ok(servers) => servers,
        Err(err) => {
            warn!("failed to load MCP servers while installing skill dependencies: {err}");
            return;
        }
    };

    let mut updated = false;
    let mut added = Vec::new();
    for (name, config) in missing {
        if servers.contains_key(&name) {
            continue;
        }
        servers.insert(name.clone(), config.clone());
        added.push((name, config));
        updated = true;
    }

    if !updated {
        return;
    }

    if let Err(err) = ConfigEditsBuilder::new(&codex_home)
        .replace_mcp_servers(&servers)
        .apply()
        .await
    {
        warn!("failed to persist MCP dependencies for mentioned skills: {err}");
        return;
    }

    for (name, server_config) in added {
        let oauth_config = match oauth_login_support(&server_config.transport).await {
            McpOAuthLoginSupport::Supported(config) => config,
            McpOAuthLoginSupport::Unsupported => continue,
            McpOAuthLoginSupport::Unknown(err) => {
                warn!("MCP server may or may not require login for dependency {name}: {err}");
                continue;
            }
        };

        sess.notify_background_event(
            turn_context,
            format!(
                "Authenticating MCP {name}... Follow instructions in your browser if prompted."
            ),
        )
        .await;

        if let Err(err) = perform_oauth_login(
            &name,
            &oauth_config.url,
            config.mcp_oauth_credentials_store_mode,
            oauth_config.http_headers,
            oauth_config.env_http_headers,
            &[],
            config.mcp_oauth_callback_port,
        )
        .await
        {
            warn!("failed to login to MCP dependency {name}: {err}");
        }
    }

    // Refresh from the effective merged MCP map (global + repo + managed) and
    // overlay the updated global servers so we don't drop repo-scoped servers.
    let auth = sess.services.auth_manager.auth().await;
    let mut refresh_servers = effective_mcp_servers(config, auth.as_ref());
    for (name, server_config) in &servers {
        refresh_servers
            .entry(name.clone())
            .or_insert_with(|| server_config.clone());
    }
    sess.refresh_mcp_servers_now(
        turn_context,
        refresh_servers,
        config.mcp_oauth_credentials_store_mode,
    )
    .await;
}

fn canonical_mcp_key(transport: &str, identifier: &str, fallback: &str) -> String {
    let identifier = identifier.trim();
    if identifier.is_empty() {
        fallback.to_string()
    } else {
        format!("mcp__{transport}__{identifier}")
    }
}

fn canonical_mcp_server_key(name: &str, config: &McpServerConfig) -> String {
    match &config.transport {
        McpServerTransportConfig::Stdio { command, .. } => {
            canonical_mcp_key("stdio", command, name)
        }
        McpServerTransportConfig::StreamableHttp { url, .. } => {
            canonical_mcp_key("streamable_http", url, name)
        }
    }
}
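
// A test-style sketch (not part of this change; would live in the tests
// module) of the canonical-key scheme: keys identify a server by transport
// target rather than alias, so renamed entries that point at the same
// endpoint still collide. Values are illustrative.
#[test]
fn canonical_key_sketch() {
    assert_eq!(
        canonical_mcp_key("streamable_http", "https://example.com/mcp", "github"),
        "mcp__streamable_http__https://example.com/mcp"
    );
    // A blank identifier falls back to the configured alias.
    assert_eq!(canonical_mcp_key("stdio", "   ", "local"), "local");
}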

fn canonical_mcp_dependency_key(dependency: &SkillToolDependency) -> Result<String, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(canonical_mcp_key("streamable_http", url, &dependency.value));
    }
    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(canonical_mcp_key("stdio", command, &dependency.value));
    }
    Err(format!("unsupported transport {transport}"))
}

pub(crate) fn collect_missing_mcp_dependencies(
    mentioned_skills: &[SkillMetadata],
    installed: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let mut missing = HashMap::new();
    let installed_keys: HashSet<String> = installed
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config))
        .collect();
    let mut seen_canonical_keys = HashSet::new();

    for skill in mentioned_skills {
        let Some(dependencies) = skill.dependencies.as_ref() else {
            continue;
        };

        for tool in &dependencies.tools {
            if !tool.r#type.eq_ignore_ascii_case("mcp") {
                continue;
            }
            let dependency_key = match canonical_mcp_dependency_key(tool) {
                Ok(key) => key,
                Err(err) => {
                    let dependency = tool.value.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };
            if installed_keys.contains(&dependency_key)
                || seen_canonical_keys.contains(&dependency_key)
            {
                continue;
            }

            let config = match mcp_dependency_to_server_config(tool) {
                Ok(config) => config,
                Err(err) => {
                    let dependency = dependency_key.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };

            missing.insert(tool.value.clone(), config);
            seen_canonical_keys.insert(dependency_key);
        }
    }

    missing
}

fn mcp_dependency_to_server_config(
    dependency: &SkillToolDependency,
) -> Result<McpServerConfig, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::StreamableHttp {
                url: url.clone(),
                bearer_token_env_var: None,
                http_headers: None,
                env_http_headers: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::Stdio {
                command: command.clone(),
                args: Vec::new(),
                env: None,
                env_vars: Vec::new(),
                cwd: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    Err(format!("unsupported transport {transport}"))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::skills::model::SkillDependencies;
    use codex_protocol::protocol::SkillScope;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    fn skill_with_tools(tools: Vec<SkillToolDependency>) -> SkillMetadata {
        SkillMetadata {
            name: "skill".to_string(),
            description: "skill".to_string(),
            short_description: None,
            interface: None,
            dependencies: Some(SkillDependencies { tools }),
            path: PathBuf::from("skill"),
            scope: SkillScope::User,
        }
    }

    #[test]
    fn collect_missing_respects_canonical_installed_key() {
        let url = "https://example.com/mcp".to_string();
        let skills = vec![skill_with_tools(vec![SkillToolDependency {
            r#type: "mcp".to_string(),
            value: "github".to_string(),
            description: None,
            transport: Some("streamable_http".to_string()),
            command: None,
            url: Some(url.clone()),
        }])];
        let installed = HashMap::from([(
            "alias".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &installed),
            HashMap::new()
        );
    }

    #[test]
    fn collect_missing_dedupes_by_canonical_key_but_preserves_original_name() {
        let url = "https://example.com/one".to_string();
        let skills = vec![skill_with_tools(vec![
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-one".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-two".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
        ])];

        let expected = HashMap::from([(
            "alias-one".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &HashMap::new()),
            expected
        );
    }
}

@@ -14,6 +14,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;

use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::McpAuthStatusEntry;
use anyhow::Context;
use anyhow::Result;
@@ -436,13 +437,33 @@ impl McpConnectionManager {
        .await
    }

    pub(crate) async fn wait_for_server_ready(&self, server_name: &str, timeout: Duration) -> bool {
        let Some(async_managed_client) = self.clients.get(server_name) else {
            return false;
        };

        match tokio::time::timeout(timeout, async_managed_client.client()).await {
            Ok(Ok(_)) => true,
            Ok(Err(_)) | Err(_) => false,
        }
    }
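
    // Hedged caller sketch (not part of this change): bound the wait on the
    // apps server before relying on its tools. The timeout value and function
    // name are illustrative assumptions.
    async fn wait_for_apps_server_sketch(manager: &McpConnectionManager) -> bool {
        let ready = manager
            .wait_for_server_ready(CODEX_APPS_MCP_SERVER_NAME, Duration::from_secs(30))
            .await;
        if !ready {
            tracing::warn!("codex apps MCP server not ready; proceeding without it");
        }
        ready
    }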

    /// Returns a single map that contains all tools. Each key is the
    /// fully-qualified name for the tool.
    #[instrument(level = "trace", skip_all)]
    pub async fn list_all_tools(&self) -> HashMap<String, ToolInfo> {
        let mut tools = HashMap::new();
-       for managed_client in self.clients.values() {
-           if let Ok(client) = managed_client.client().await {
+       for (server_name, managed_client) in &self.clients {
+           let client = if server_name == CODEX_APPS_MCP_SERVER_NAME {
+               // Avoid blocking on codex_apps_mcp startup; use tools only when ready.
+               match managed_client.client.clone().now_or_never() {
+                   Some(Ok(client)) => Some(client),
+                   _ => None,
+               }
+           } else {
+               managed_client.client().await.ok()
+           };
+           if let Some(client) = client {
                tools.extend(qualify_tools(filter_tools(
                    client.tools,
                    client.tool_filter,
@@ -1182,6 +1203,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
        auth_status: McpAuthStatus::Unsupported,
    };
@@ -1227,6 +1249,7 @@ mod tests {
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        },
        auth_status: McpAuthStatus::Unsupported,
    };

codex-rs/core/src/mentions.rs (new file, 64 lines)
@@ -0,0 +1,64 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;

use codex_protocol::user_input::UserInput;

use crate::connectors;
use crate::skills::SkillMetadata;
use crate::skills::injection::extract_tool_mentions;

pub(crate) struct CollectedToolMentions {
    pub(crate) plain_names: HashSet<String>,
    pub(crate) paths: HashSet<String>,
}

pub(crate) fn collect_tool_mentions_from_messages(messages: &[String]) -> CollectedToolMentions {
    let mut plain_names = HashSet::new();
    let mut paths = HashSet::new();
    for message in messages {
        let mentions = extract_tool_mentions(message);
        plain_names.extend(mentions.plain_names().map(str::to_string));
        paths.extend(mentions.paths().map(str::to_string));
    }
    CollectedToolMentions { plain_names, paths }
}
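
// Hedged usage sketch (not part of this change): aggregate mentions across a
// turn's user messages. The mention syntax in the strings below is a guess
// for illustration only.
fn mentions_sketch() {
    let messages = vec![
        "run the $deploy skill".to_string(),
        "then $deploy again".to_string(),
    ];
    let mentions = collect_tool_mentions_from_messages(&messages);
    // Both fields are sets, so the repeated mention collapses to one entry.
    assert!(mentions.plain_names.len() <= 1 + mentions.paths.len().max(1));
}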

pub(crate) fn collect_explicit_app_paths(input: &[UserInput]) -> Vec<String> {
    input
        .iter()
        .filter_map(|item| match item {
            UserInput::Mention { path, .. } => Some(path.clone()),
            _ => None,
        })
        .collect()
}

pub(crate) fn build_skill_name_counts(
    skills: &[SkillMetadata],
    disabled_paths: &HashSet<PathBuf>,
) -> (HashMap<String, usize>, HashMap<String, usize>) {
    let mut exact_counts: HashMap<String, usize> = HashMap::new();
    let mut lower_counts: HashMap<String, usize> = HashMap::new();
    for skill in skills {
        if disabled_paths.contains(&skill.path) {
            continue;
        }
        *exact_counts.entry(skill.name.clone()).or_insert(0) += 1;
        *lower_counts
            .entry(skill.name.to_ascii_lowercase())
            .or_insert(0) += 1;
    }
    (exact_counts, lower_counts)
}
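
// Illustrative note (not part of this change): keeping both maps lets callers
// distinguish exact duplicates from case-only collisions. With enabled skills
// named "Deploy" and "deploy":
//   exact_counts == {"Deploy": 1, "deploy": 1}
//   lower_counts == {"deploy": 2}
// so a case-insensitive lookup can flag "deploy" as ambiguous even though each
// exact name is unique.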

pub(crate) fn build_connector_slug_counts(
    connectors: &[connectors::AppInfo],
) -> HashMap<String, usize> {
    let mut counts: HashMap<String, usize> = HashMap::new();
    for connector in connectors {
        let slug = connectors::connector_mention_slug(connector);
        *counts.entry(slug).or_insert(0) += 1;
    }
    counts
}

@@ -43,10 +43,6 @@ pub enum WireApi {
    /// The Responses API exposed by OpenAI at `/v1/responses`.
    Responses,

    /// Experimental: Responses API over WebSocket transport.
    #[serde(rename = "responses_websocket")]
    ResponsesWebsocket,

    /// Regular Chat Completions compatible with `/v1/chat/completions`.
    #[default]
    Chat,
@@ -105,6 +101,10 @@ pub struct ModelProviderInfo {
    /// and API key (if needed) comes from the "env_key" environment variable.
    #[serde(default)]
    pub requires_openai_auth: bool,

    /// Whether this provider supports the Responses API WebSocket transport.
    #[serde(default)]
    pub supports_websockets: bool,
}

impl ModelProviderInfo {
@@ -162,7 +162,6 @@ impl ModelProviderInfo {
        query_params: self.query_params.clone(),
        wire: match self.wire_api {
            WireApi::Responses => ApiWireApi::Responses,
            WireApi::ResponsesWebsocket => ApiWireApi::Responses,
            WireApi::Chat => ApiWireApi::Chat,
        },
        headers,
@@ -254,6 +253,7 @@ impl ModelProviderInfo {
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: true,
        supports_websockets: true,
    }
}

@@ -332,6 +332,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> M
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}

@@ -360,6 +361,7 @@ base_url = "http://localhost:11434/v1"
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -390,6 +392,7 @@ query_params = { api-version = "2025-04-01-preview" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -423,6 +426,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -454,6 +458,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };
    let api = provider.to_api_provider(None).expect("api provider");
    assert!(
@@ -476,6 +481,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };
    let named_api = named_provider.to_api_provider(None).expect("api provider");
    assert!(named_api.is_azure_responses_endpoint());
@@ -500,6 +506,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };
    let api = provider.to_api_provider(None).expect("api provider");
    assert!(

@@ -28,7 +28,7 @@ fn plan_preset() -> CollaborationModeMask {
        name: "Plan".to_string(),
        mode: Some(ModeKind::Plan),
        model: None,
-       reasoning_effort: Some(Some(ReasoningEffort::High)),
+       reasoning_effort: Some(Some(ReasoningEffort::Medium)),
        developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
    }
}

@@ -432,6 +432,7 @@ mod tests {
        stream_max_retries: Some(0),
        stream_idle_timeout_ms: Some(5_000),
        requires_openai_auth: false,
        supports_websockets: false,
    }
}

@@ -213,6 +213,16 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
        truncation_policy: TruncationPolicyConfig::tokens(10_000),
        context_window: Some(CONTEXT_WINDOW_272K),
        supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
        model_instructions_template: Some(ModelInstructionsTemplate {
            template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
            personality_messages: Some(PersonalityMessages(BTreeMap::from([(
                Personality::Friendly,
                PERSONALITY_FRIENDLY.to_string(),
            ), (
                Personality::Pragmatic,
                PERSONALITY_PRAGMATIC.to_string(),
            )]))),
        }),
    )
} else if slug.starts_with("gpt-5.1-codex-max") {
    model_info!(
@@ -259,9 +269,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
        truncation_policy: TruncationPolicyConfig::tokens(10_000),
        context_window: Some(CONTEXT_WINDOW_272K),
    )
-} else if (slug.starts_with("gpt-5.2") || slug.starts_with("boomslang"))
-    && !slug.contains("codex")
-{
+} else if slug.starts_with("gpt-5.2") || slug.starts_with("boomslang") {
    model_info!(
        slug,
        apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
@@ -276,7 +284,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
        context_window: Some(CONTEXT_WINDOW_272K),
        supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh_non_codex(),
    )
-} else if slug.starts_with("gpt-5.1") && !slug.contains("codex") {
+} else if slug.starts_with("gpt-5.1") {
    model_info!(
        slug,
        apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),

@@ -1,4 +1,6 @@
use async_trait::async_trait;
use std::cmp::Reverse;
use std::ffi::OsStr;
use std::io::{self};
use std::num::NonZero;
use std::ops::ControlFlow;
@@ -6,8 +8,6 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use async_trait::async_trait;
use time::OffsetDateTime;
use time::PrimitiveDateTime;
use time::format_description::FormatItem;
@@ -15,9 +15,12 @@ use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use uuid::Uuid;

use super::ARCHIVED_SESSIONS_SUBDIR;
use super::SESSIONS_SUBDIR;
use crate::protocol::EventMsg;
use crate::state_db;
use codex_file_search as file_search;
use codex_protocol::ThreadId;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMetaLine;
@@ -792,7 +795,7 @@ async fn collect_rollout_day_files(
    Ok(day_files)
}

-fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
+pub(crate) fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
    // Expected: rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl
    let core = name.strip_prefix("rollout-")?.strip_suffix(".jsonl")?;

@@ -1054,11 +1057,9 @@ fn truncate_to_seconds(dt: OffsetDateTime) -> Option<OffsetDateTime> {
    dt.replace_nanosecond(0).ok()
}

/// Locate a recorded thread rollout file by its UUID string using the existing
/// paginated listing implementation. Returns `Ok(Some(path))` if found, `Ok(None)` if not present
/// or the id is invalid.
-pub async fn find_thread_path_by_id_str(
+async fn find_thread_path_by_id_str_in_subdir(
    codex_home: &Path,
    subdir: &str,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    // Validate UUID format early.
@@ -1067,7 +1068,7 @@ pub async fn find_thread_path_by_id_str(
    }

    let mut root = codex_home.to_path_buf();
-   root.push(SESSIONS_SUBDIR);
+   root.push(subdir);
    if !root.exists() {
        return Ok(None);
    }
@@ -1093,9 +1094,65 @@ pub async fn find_thread_path_by_id_str(
    )
    .map_err(|e| io::Error::other(format!("file search failed: {e}")))?;

-   Ok(results
+   let found = results
        .matches
        .into_iter()
        .next()
-       .map(|m| root.join(m.path)))
+       .map(|m| root.join(m.path));

    // Checking if DB is at parity.
    // TODO(jif): sqlite migration phase 1
    let archived_only = match subdir {
        SESSIONS_SUBDIR => Some(false),
        ARCHIVED_SESSIONS_SUBDIR => Some(true),
        _ => None,
    };
    let state_db_ctx = state_db::open_if_present(codex_home, "").await;
    if let Some(state_db_ctx) = state_db_ctx.as_deref()
        && let Ok(thread_id) = ThreadId::from_string(id_str)
    {
        let db_path = state_db::find_rollout_path_by_id(
            Some(state_db_ctx),
            thread_id,
            archived_only,
            "find_path_query",
        )
        .await;
        let canonical_path = found.as_deref();
        if db_path.as_deref() != canonical_path {
            tracing::warn!(
                "state db path mismatch for thread {thread_id:?}: canonical={canonical_path:?} db={db_path:?}"
            );
            state_db::record_discrepancy("find_thread_path_by_id_str_in_subdir", "path_mismatch");
        }
    }
    Ok(found)
}

/// Locate a recorded thread rollout file by its UUID string using the existing
/// paginated listing implementation. Returns `Ok(Some(path))` if found, `Ok(None)` if not present
/// or the id is invalid.
pub async fn find_thread_path_by_id_str(
    codex_home: &Path,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    find_thread_path_by_id_str_in_subdir(codex_home, SESSIONS_SUBDIR, id_str).await
}

/// Locate an archived thread rollout file by its UUID string.
pub async fn find_archived_thread_path_by_id_str(
    codex_home: &Path,
    id_str: &str,
) -> io::Result<Option<PathBuf>> {
    find_thread_path_by_id_str_in_subdir(codex_home, ARCHIVED_SESSIONS_SUBDIR, id_str).await
}
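
// Hedged usage sketch (not part of this change): resolve an id against live
// sessions first, then the archive. The home path and id are invented.
async fn locate_thread_sketch() -> io::Result<Option<PathBuf>> {
    let codex_home = Path::new("/home/user/.codex");
    let id = "67e55044-10b1-426f-9247-bb680e5fe0c8";
    match find_thread_path_by_id_str(codex_home, id).await? {
        Some(path) => Ok(Some(path)),
        None => find_archived_thread_path_by_id_str(codex_home, id).await,
    }
}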

/// Extract the `YYYY/MM/DD` directory components from a rollout filename.
pub fn rollout_date_parts(file_name: &OsStr) -> Option<(String, String, String)> {
    let name = file_name.to_string_lossy();
    let date = name.strip_prefix("rollout-")?.get(..10)?;
    let year = date.get(..4)?.to_string();
    let month = date.get(5..7)?.to_string();
    let day = date.get(8..10)?.to_string();
    Some((year, month, day))
}
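
// Test-style sketch (not from this change) of the slicing above: the
// "rollout-" prefix is stripped and the leading "YYYY-MM-DD" is split into
// its three components. The filename is illustrative.
#[test]
fn rollout_date_parts_sketch() {
    let parts = rollout_date_parts(OsStr::new(
        "rollout-2026-01-27T12-34-56-67e55044-10b1-426f-9247-bb680e5fe0c8.jsonl",
    ));
    let expected = Some(("2026".to_string(), "01".to_string(), "27".to_string()));
    assert_eq!(parts, expected);
}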

codex-rs/core/src/rollout/metadata.rs (new file, 360 lines)
@@ -0,0 +1,360 @@
use crate::config::Config;
use crate::rollout;
use crate::rollout::list::parse_timestamp_uuid_from_filename;
use crate::rollout::recorder::RolloutRecorder;
use chrono::DateTime;
use chrono::NaiveDateTime;
use chrono::Timelike;
use chrono::Utc;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use codex_state::BackfillStats;
use codex_state::DB_ERROR_METRIC;
use codex_state::DB_METRIC_BACKFILL;
use codex_state::ExtractionOutcome;
use codex_state::ThreadMetadataBuilder;
use codex_state::apply_rollout_item;
use std::cmp::Reverse;
use std::path::Path;
use std::path::PathBuf;
use tracing::info;
use tracing::warn;

const ROLLOUT_PREFIX: &str = "rollout-";
const ROLLOUT_SUFFIX: &str = ".jsonl";

pub(crate) fn builder_from_session_meta(
    session_meta: &SessionMetaLine,
    rollout_path: &Path,
) -> Option<ThreadMetadataBuilder> {
    let created_at = parse_timestamp_to_utc(session_meta.meta.timestamp.as_str())?;
    let mut builder = ThreadMetadataBuilder::new(
        session_meta.meta.id,
        rollout_path.to_path_buf(),
        created_at,
        session_meta.meta.source.clone(),
    );
    builder.model_provider = session_meta.meta.model_provider.clone();
    builder.cwd = session_meta.meta.cwd.clone();
    builder.sandbox_policy = SandboxPolicy::ReadOnly;
    builder.approval_mode = AskForApproval::OnRequest;
    if let Some(git) = session_meta.git.as_ref() {
        builder.git_sha = git.commit_hash.clone();
        builder.git_branch = git.branch.clone();
        builder.git_origin_url = git.repository_url.clone();
    }
    Some(builder)
}

pub(crate) fn builder_from_items(
    items: &[RolloutItem],
    rollout_path: &Path,
) -> Option<ThreadMetadataBuilder> {
    if let Some(session_meta) = items.iter().find_map(|item| match item {
        RolloutItem::SessionMeta(meta_line) => Some(meta_line),
        RolloutItem::ResponseItem(_)
        | RolloutItem::Compacted(_)
        | RolloutItem::TurnContext(_)
        | RolloutItem::EventMsg(_) => None,
    }) && let Some(builder) = builder_from_session_meta(session_meta, rollout_path)
    {
        return Some(builder);
    }

    let file_name = rollout_path.file_name()?.to_str()?;
    if !file_name.starts_with(ROLLOUT_PREFIX) || !file_name.ends_with(ROLLOUT_SUFFIX) {
        return None;
    }
    let (created_ts, uuid) = parse_timestamp_uuid_from_filename(file_name)?;
    let created_at =
        DateTime::<Utc>::from_timestamp(created_ts.unix_timestamp(), 0)?.with_nanosecond(0)?;
    let id = ThreadId::from_string(&uuid.to_string()).ok()?;
    Some(ThreadMetadataBuilder::new(
        id,
        rollout_path.to_path_buf(),
        created_at,
        SessionSource::default(),
    ))
}

pub(crate) async fn extract_metadata_from_rollout(
    rollout_path: &Path,
    default_provider: &str,
    otel: Option<&OtelManager>,
) -> anyhow::Result<ExtractionOutcome> {
    let (items, _thread_id, parse_errors) =
        RolloutRecorder::load_rollout_items(rollout_path).await?;
    if items.is_empty() {
        return Err(anyhow::anyhow!(
            "empty session file: {}",
            rollout_path.display()
        ));
    }
    let builder = builder_from_items(items.as_slice(), rollout_path).ok_or_else(|| {
        anyhow::anyhow!(
            "rollout missing metadata builder: {}",
            rollout_path.display()
        )
    })?;
    let mut metadata = builder.build(default_provider);
    for item in &items {
        apply_rollout_item(&mut metadata, item, default_provider);
    }
    if let Some(updated_at) = file_modified_time_utc(rollout_path).await {
        metadata.updated_at = updated_at;
    }
    if parse_errors > 0
        && let Some(otel) = otel
    {
        otel.counter(
            DB_ERROR_METRIC,
            parse_errors as i64,
            &[("stage", "extract_metadata_from_rollout")],
        );
    }
    Ok(ExtractionOutcome {
        metadata,
        parse_errors,
    })
}

pub(crate) async fn backfill_sessions(
    runtime: &codex_state::StateRuntime,
    config: &Config,
    otel: Option<&OtelManager>,
) {
    let sessions_root = config.codex_home.join(rollout::SESSIONS_SUBDIR);
    let archived_root = config.codex_home.join(rollout::ARCHIVED_SESSIONS_SUBDIR);
    let mut rollout_paths: Vec<(PathBuf, bool)> = Vec::new();
    for (root, archived) in [(sessions_root, false), (archived_root, true)] {
        if !tokio::fs::try_exists(&root).await.unwrap_or(false) {
            continue;
        }
        match collect_rollout_paths(&root).await {
            Ok(paths) => {
                rollout_paths.extend(paths.into_iter().map(|path| (path, archived)));
            }
            Err(err) => {
                warn!(
                    "failed to collect rollout paths under {}: {err}",
                    root.display()
                );
            }
        }
    }
    rollout_paths.sort_by_key(|(path, _archived)| {
        let parsed = path
            .file_name()
            .and_then(|name| name.to_str())
            .and_then(parse_timestamp_uuid_from_filename)
            .unwrap_or((time::OffsetDateTime::UNIX_EPOCH, uuid::Uuid::nil()));
        (Reverse(parsed.0), Reverse(parsed.1))
    });
    let mut stats = BackfillStats {
        scanned: 0,
        upserted: 0,
        failed: 0,
    };
    for (path, archived) in rollout_paths {
        stats.scanned = stats.scanned.saturating_add(1);
        match extract_metadata_from_rollout(&path, config.model_provider_id.as_str(), otel).await {
            Ok(outcome) => {
                if outcome.parse_errors > 0
                    && let Some(otel) = otel
                {
                    otel.counter(
                        DB_ERROR_METRIC,
                        outcome.parse_errors as i64,
                        &[("stage", "backfill_sessions")],
                    );
                }
                let mut metadata = outcome.metadata;
                if archived && metadata.archived_at.is_none() {
                    let fallback_archived_at = metadata.updated_at;
                    metadata.archived_at = file_modified_time_utc(&path)
                        .await
                        .or(Some(fallback_archived_at));
                }
                if let Err(err) = runtime.upsert_thread(&metadata).await {
                    stats.failed = stats.failed.saturating_add(1);
                    warn!("failed to upsert rollout {}: {err}", path.display());
                } else {
                    stats.upserted = stats.upserted.saturating_add(1);
                }
            }
            Err(err) => {
                stats.failed = stats.failed.saturating_add(1);
                warn!("failed to extract rollout {}: {err}", path.display());
            }
        }
    }

    info!(
        "state db backfill scanned={}, upserted={}, failed={}",
        stats.scanned, stats.upserted, stats.failed
    );
    if let Some(otel) = otel {
        otel.counter(
            DB_METRIC_BACKFILL,
            stats.upserted as i64,
            &[("status", "upserted")],
        );
        otel.counter(
            DB_METRIC_BACKFILL,
            stats.failed as i64,
            &[("status", "failed")],
        );
    }
}

async fn file_modified_time_utc(path: &Path) -> Option<DateTime<Utc>> {
    let modified = tokio::fs::metadata(path).await.ok()?.modified().ok()?;
    let updated_at: DateTime<Utc> = modified.into();
    updated_at.with_nanosecond(0)
}

fn parse_timestamp_to_utc(ts: &str) -> Option<DateTime<Utc>> {
    const FILENAME_TS_FORMAT: &str = "%Y-%m-%dT%H-%M-%S";
    if let Ok(naive) = NaiveDateTime::parse_from_str(ts, FILENAME_TS_FORMAT) {
        let dt = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
        return dt.with_nanosecond(0);
    }
    if let Ok(dt) = DateTime::parse_from_rfc3339(ts) {
        return dt.with_timezone(&Utc).with_nanosecond(0);
    }
    None
}
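
// Test-style sketch (not from this change): both accepted timestamp shapes
// resolve to the same UTC second.
#[test]
fn parse_timestamp_to_utc_sketch() {
    let filename_style = parse_timestamp_to_utc("2026-01-27T12-34-56").expect("filename format");
    let rfc3339_style = parse_timestamp_to_utc("2026-01-27T12:34:56Z").expect("rfc3339 format");
    assert_eq!(filename_style, rfc3339_style);
}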

async fn collect_rollout_paths(root: &Path) -> std::io::Result<Vec<PathBuf>> {
    let mut stack = vec![root.to_path_buf()];
    let mut paths = Vec::new();
    while let Some(dir) = stack.pop() {
        let mut read_dir = match tokio::fs::read_dir(&dir).await {
            Ok(read_dir) => read_dir,
            Err(err) => {
                warn!("failed to read directory {}: {err}", dir.display());
                continue;
            }
        };
        while let Some(entry) = read_dir.next_entry().await? {
            let path = entry.path();
            let file_type = entry.file_type().await?;
            if file_type.is_dir() {
                stack.push(path);
                continue;
            }
            if !file_type.is_file() {
                continue;
            }
            let file_name = entry.file_name();
            let Some(name) = file_name.to_str() else {
                continue;
            };
            if name.starts_with(ROLLOUT_PREFIX) && name.ends_with(ROLLOUT_SUFFIX) {
                paths.push(path);
            }
        }
    }
    Ok(paths)
}
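
// Hedged usage sketch (not part of this change): the explicit stack makes
// this an iterative depth-first walk over the YYYY/MM/DD session tree,
// avoiding a recursive async fn. The path below is invented.
async fn collect_sketch() -> std::io::Result<()> {
    let root = Path::new("/home/user/.codex/sessions");
    let paths = collect_rollout_paths(root).await?;
    println!("found {} rollout-*.jsonl files", paths.len());
    Ok(())
}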

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::DateTime;
    use chrono::NaiveDateTime;
    use chrono::Timelike;
    use chrono::Utc;
    use codex_protocol::ThreadId;
    use codex_protocol::protocol::CompactedItem;
    use codex_protocol::protocol::RolloutItem;
    use codex_protocol::protocol::RolloutLine;
    use codex_protocol::protocol::SessionMeta;
    use codex_protocol::protocol::SessionMetaLine;
    use codex_protocol::protocol::SessionSource;
    use codex_state::ThreadMetadataBuilder;
    use pretty_assertions::assert_eq;
    use std::fs::File;
    use std::io::Write;
    use tempfile::tempdir;
    use uuid::Uuid;

    #[tokio::test]
    async fn extract_metadata_from_rollout_uses_session_meta() {
        let dir = tempdir().expect("tempdir");
        let uuid = Uuid::new_v4();
        let id = ThreadId::from_string(&uuid.to_string()).expect("thread id");
        let path = dir
            .path()
            .join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));

        let session_meta = SessionMeta {
            id,
            forked_from_id: None,
            timestamp: "2026-01-27T12:34:56Z".to_string(),
            cwd: dir.path().to_path_buf(),
            originator: "cli".to_string(),
            cli_version: "0.0.0".to_string(),
            source: SessionSource::default(),
            model_provider: Some("openai".to_string()),
            base_instructions: None,
        };
        let session_meta_line = SessionMetaLine {
            meta: session_meta,
            git: None,
        };
        let rollout_line = RolloutLine {
            timestamp: "2026-01-27T12:34:56Z".to_string(),
            item: RolloutItem::SessionMeta(session_meta_line.clone()),
        };
        let json = serde_json::to_string(&rollout_line).expect("rollout json");
        let mut file = File::create(&path).expect("create rollout");
        writeln!(file, "{json}").expect("write rollout");

        let outcome = extract_metadata_from_rollout(&path, "openai", None)
            .await
            .expect("extract");

        let builder =
            builder_from_session_meta(&session_meta_line, path.as_path()).expect("builder");
        let mut expected = builder.build("openai");
        apply_rollout_item(&mut expected, &rollout_line.item, "openai");
        expected.updated_at = file_modified_time_utc(&path).await.expect("mtime");

        assert_eq!(outcome.metadata, expected);
        assert_eq!(outcome.parse_errors, 0);
    }

    #[test]
    fn builder_from_items_falls_back_to_filename() {
        let dir = tempdir().expect("tempdir");
        let uuid = Uuid::new_v4();
        let path = dir
            .path()
            .join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));
        let items = vec![RolloutItem::Compacted(CompactedItem {
            message: "noop".to_string(),
            replacement_history: None,
        })];

        let builder = builder_from_items(items.as_slice(), path.as_path()).expect("builder");
        let naive = NaiveDateTime::parse_from_str("2026-01-27T12-34-56", "%Y-%m-%dT%H-%M-%S")
            .expect("timestamp");
        let created_at = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
            .with_nanosecond(0)
            .expect("nanosecond");
        let expected = ThreadMetadataBuilder::new(
            ThreadId::from_string(&uuid.to_string()).expect("thread id"),
            path,
            created_at,
            SessionSource::default(),
        );

        assert_eq!(builder, expected);
    }
}
@@ -9,15 +9,18 @@ pub const INTERACTIVE_SESSION_SOURCES: &[SessionSource] =

pub(crate) mod error;
pub mod list;
pub(crate) mod metadata;
pub(crate) mod policy;
pub mod recorder;
pub(crate) mod truncation;

pub use codex_protocol::protocol::SessionMeta;
pub(crate) use error::map_session_init_error;
pub use list::find_archived_thread_path_by_id_str;
pub use list::find_thread_path_by_id_str;
#[deprecated(note = "use find_thread_path_by_id_str")]
pub use list::find_thread_path_by_id_str as find_conversation_path_by_id_str;
pub use list::rollout_date_parts;
pub use recorder::RolloutRecorder;
pub use recorder::RolloutRecorderParams;

@@ -28,11 +28,14 @@ use super::list::ThreadSortKey;
use super::list::ThreadsPage;
use super::list::get_threads;
use super::list::get_threads_in_root;
use super::metadata;
use super::policy::is_persisted_response_item;
use crate::config::Config;
use crate::default_client::originator;
use crate::git_info::collect_git_info;
use crate::path_utils;
use crate::state_db;
use crate::state_db::StateDbHandle;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::ResumedHistory;
use codex_protocol::protocol::RolloutItem;
@@ -40,6 +43,7 @@ use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use codex_state::ThreadMetadataBuilder;

/// Records all [`ResponseItem`]s for a session and flushes them to disk after
/// every update.
@@ -54,6 +58,7 @@ use codex_protocol::protocol::SessionSource;
pub struct RolloutRecorder {
    tx: Sender<RolloutCmd>,
    pub(crate) rollout_path: PathBuf,
    state_db: Option<StateDbHandle>,
}

#[derive(Clone)]
@@ -111,7 +116,8 @@ impl RolloutRecorder {
        model_providers: Option<&[String]>,
        default_provider: &str,
    ) -> std::io::Result<ThreadsPage> {
-       get_threads(
+       let stage = "list_threads";
+       let page = get_threads(
            codex_home,
            page_size,
            cursor,
@@ -120,7 +126,34 @@ impl RolloutRecorder {
            model_providers,
            default_provider,
        )
        .await?;

        // TODO(jif): drop after sqlite migration phase 1
        let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
        if let Some(db_ids) = state_db::list_thread_ids_db(
            state_db_ctx.as_deref(),
            codex_home,
            page_size,
            cursor,
            sort_key,
            allowed_sources,
            model_providers,
            false,
            stage,
        )
        .await
        {
            if page.items.len() != db_ids.len() {
                state_db::record_discrepancy(stage, "bad_len");
                return Ok(page);
            }
            for (id, item) in db_ids.iter().zip(page.items.iter()) {
                if !item.path.display().to_string().contains(&id.to_string()) {
                    state_db::record_discrepancy(stage, "bad_id");
                }
            }
        }
        Ok(page)
    }

    /// List archived threads (rollout files) under the archived sessions directory.
@@ -133,8 +166,9 @@ impl RolloutRecorder {
        model_providers: Option<&[String]>,
        default_provider: &str,
    ) -> std::io::Result<ThreadsPage> {
        let stage = "list_archived_threads";
        let root = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
-       get_threads_in_root(
+       let page = get_threads_in_root(
            root,
            page_size,
            cursor,
@@ -146,7 +180,34 @@ impl RolloutRecorder {
                layout: ThreadListLayout::Flat,
            },
        )
        .await?;

        // TODO(jif): drop after sqlite migration phase 1
        let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
        if let Some(db_ids) = state_db::list_thread_ids_db(
            state_db_ctx.as_deref(),
            codex_home,
            page_size,
            cursor,
            sort_key,
            allowed_sources,
            model_providers,
            true,
            stage,
        )
        .await
        {
            if page.items.len() != db_ids.len() {
                state_db::record_discrepancy(stage, "bad_len");
                return Ok(page);
            }
            for (id, item) in db_ids.iter().zip(page.items.iter()) {
                if !item.path.display().to_string().contains(&id.to_string()) {
                    state_db::record_discrepancy(stage, "bad_id");
                }
            }
        }
        Ok(page)
    }

    /// Find the newest recorded thread path, optionally filtering to a matching cwd.
@@ -186,7 +247,12 @@ impl RolloutRecorder {
    /// Attempt to create a new [`RolloutRecorder`]. If the sessions directory
    /// cannot be created or the rollout file cannot be opened we return the
    /// error so the caller can decide whether to disable persistence.
-   pub async fn new(config: &Config, params: RolloutRecorderParams) -> std::io::Result<Self> {
+   pub async fn new(
+       config: &Config,
+       params: RolloutRecorderParams,
+       state_db_ctx: Option<StateDbHandle>,
+       state_builder: Option<ThreadMetadataBuilder>,
+   ) -> std::io::Result<Self> {
        let (file, rollout_path, meta) = match params {
            RolloutRecorderParams::Create {
                conversation_id,
@@ -246,9 +312,30 @@ impl RolloutRecorder {
        // Spawn a Tokio task that owns the file handle and performs async
        // writes. Using `tokio::fs::File` keeps everything on the async I/O
        // driver instead of blocking the runtime.
-       tokio::task::spawn(rollout_writer(file, rx, meta, cwd));
+       tokio::task::spawn(rollout_writer(
+           file,
+           rx,
+           meta,
+           cwd,
+           rollout_path.clone(),
+           state_db_ctx.clone(),
+           state_builder,
+           config.model_provider_id.clone(),
+       ));

-       Ok(Self { tx, rollout_path })
+       Ok(Self {
+           tx,
+           rollout_path,
+           state_db: state_db_ctx,
+       })
    }

    pub fn rollout_path(&self) -> &Path {
        self.rollout_path.as_path()
    }

    pub fn state_db(&self) -> Option<StateDbHandle> {
        self.state_db.clone()
    }

    pub(crate) async fn record_items(&self, items: &[RolloutItem]) -> std::io::Result<()> {
@@ -281,7 +368,9 @@ impl RolloutRecorder {
        .map_err(|e| IoError::other(format!("failed waiting for rollout flush: {e}")))
    }

-   pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
+   pub(crate) async fn load_rollout_items(
+       path: &Path,
+   ) -> std::io::Result<(Vec<RolloutItem>, Option<ThreadId>, usize)> {
        info!("Resuming rollout from {path:?}");
        let text = tokio::fs::read_to_string(path).await?;
        if text.trim().is_empty() {
@@ -290,6 +379,7 @@ impl RolloutRecorder {

        let mut items: Vec<RolloutItem> = Vec::new();
        let mut thread_id: Option<ThreadId> = None;
        let mut parse_errors = 0usize;
        for line in text.lines() {
            if line.trim().is_empty() {
                continue;
@@ -298,6 +388,7 @@ impl RolloutRecorder {
            Ok(v) => v,
            Err(e) => {
                warn!("failed to parse line as JSON: {line:?}, error: {e}");
                parse_errors = parse_errors.saturating_add(1);
                continue;
            }
        };
@@ -328,15 +419,22 @@ impl RolloutRecorder {
            },
            Err(e) => {
                warn!("failed to parse rollout line: {v:?}, error: {e}");
                parse_errors = parse_errors.saturating_add(1);
            }
        }
    }

    info!(
-       "Resumed rollout with {} items, thread ID: {:?}",
+       "Resumed rollout with {} items, thread ID: {:?}, parse errors: {}",
        items.len(),
-       thread_id
+       thread_id,
+       parse_errors,
    );
    Ok((items, thread_id, parse_errors))
}

pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
    let (items, thread_id, _parse_errors) = Self::load_rollout_items(path).await?;
    let conversation_id = thread_id
        .ok_or_else(|| IoError::other("failed to parse thread ID from rollout file"))?;

@@ -417,13 +515,21 @@ fn create_log_file(config: &Config, conversation_id: ThreadId) -> std::io::Resul
    })
}

#[allow(clippy::too_many_arguments)]
async fn rollout_writer(
    file: tokio::fs::File,
    mut rx: mpsc::Receiver<RolloutCmd>,
    mut meta: Option<SessionMeta>,
    cwd: std::path::PathBuf,
    rollout_path: PathBuf,
    state_db_ctx: Option<StateDbHandle>,
    mut state_builder: Option<ThreadMetadataBuilder>,
    default_provider: String,
) -> std::io::Result<()> {
    let mut writer = JsonlWriter { file };
    if let Some(builder) = state_builder.as_mut() {
        builder.rollout_path = rollout_path.clone();
    }

    // If we have a meta, collect git info asynchronously and write meta first
    if let Some(session_meta) = meta.take() {
@@ -432,22 +538,50 @@ async fn rollout_writer(
            meta: session_meta,
            git: git_info,
        };
        if state_db_ctx.is_some() {
            state_builder =
                metadata::builder_from_session_meta(&session_meta_line, rollout_path.as_path());
        }

        // Write the SessionMeta as the first item in the file, wrapped in a rollout line
-       writer
-           .write_rollout_item(RolloutItem::SessionMeta(session_meta_line))
-           .await?;
+       let rollout_item = RolloutItem::SessionMeta(session_meta_line);
+       writer.write_rollout_item(&rollout_item).await?;
        state_db::reconcile_rollout(
            state_db_ctx.as_deref(),
            rollout_path.as_path(),
            default_provider.as_str(),
            state_builder.as_ref(),
            std::slice::from_ref(&rollout_item),
        )
        .await;
    }

    // Process rollout commands
    while let Some(cmd) = rx.recv().await {
        match cmd {
            RolloutCmd::AddItems(items) => {
                let mut persisted_items = Vec::new();
                for item in items {
                    if is_persisted_response_item(&item) {
-                       writer.write_rollout_item(item).await?;
+                       writer.write_rollout_item(&item).await?;
                        persisted_items.push(item);
                    }
                }
                if persisted_items.is_empty() {
                    continue;
                }
                if let Some(builder) = state_builder.as_mut() {
                    builder.rollout_path = rollout_path.clone();
                }
                state_db::apply_rollout_items(
                    state_db_ctx.as_deref(),
                    rollout_path.as_path(),
                    default_provider.as_str(),
                    state_builder.as_ref(),
                    persisted_items.as_slice(),
                    "rollout_writer",
                )
                .await;
            }
            RolloutCmd::Flush { ack } => {
                // Ensure underlying file is flushed and then ack.
@@ -470,8 +604,15 @@ struct JsonlWriter {
    file: tokio::fs::File,
}

#[derive(serde::Serialize)]
struct RolloutLineRef<'a> {
    timestamp: String,
    #[serde(flatten)]
    item: &'a RolloutItem,
}
|
||||
|
||||
impl JsonlWriter {
|
||||
async fn write_rollout_item(&mut self, rollout_item: RolloutItem) -> std::io::Result<()> {
|
||||
async fn write_rollout_item(&mut self, rollout_item: &RolloutItem) -> std::io::Result<()> {
|
||||
let timestamp_format: &[FormatItem] = format_description!(
|
||||
"[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:3]Z"
|
||||
);
|
||||
@@ -479,7 +620,7 @@ impl JsonlWriter {
|
||||
.format(timestamp_format)
|
||||
.map_err(|e| IoError::other(format!("failed to format timestamp: {e}")))?;
|
||||
|
||||
let line = RolloutLine {
|
||||
let line = RolloutLineRef {
|
||||
timestamp,
|
||||
item: rollout_item,
|
||||
};
|
||||
|
||||
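The switch from an owned `RolloutLine` to the borrowed `RolloutLineRef` above lets the writer serialize an item without cloning it, so the same item can then be forwarded to the state DB. A minimal sketch of that serde pattern, with a hypothetical `Item` enum standing in for `RolloutItem`:

use serde::Serialize;

// Hypothetical stand-in for `RolloutItem`.
#[derive(Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum Item {
    Message { text: String },
}

// Borrowed wrapper: `#[serde(flatten)]` splices the item's fields into the
// same JSON object as `timestamp`, and the reference leaves the item usable.
#[derive(Serialize)]
struct LineRef<'a> {
    timestamp: String,
    #[serde(flatten)]
    item: &'a Item,
}

fn main() -> Result<(), serde_json::Error> {
    let item = Item::Message { text: "hi".into() };
    let line = LineRef {
        timestamp: "2025-03-01T09:00:00.000Z".into(),
        item: &item,
    };
    // Prints {"timestamp":"...","type":"message","text":"hi"};
    // `item` can still be pushed into a persisted-items buffer afterwards.
    println!("{}", serde_json::to_string(&line)?);
    Ok(())
}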
@@ -1,5 +1,6 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

use std::ffi::OsStr;
use std::fs::File;
use std::fs::FileTimes;
use std::fs::{self};
@@ -21,6 +22,7 @@ use crate::rollout::list::ThreadItem;
use crate::rollout::list::ThreadSortKey;
use crate::rollout::list::ThreadsPage;
use crate::rollout::list::get_threads;
use crate::rollout::rollout_date_parts;
use anyhow::Result;
use codex_protocol::ThreadId;
use codex_protocol::models::ContentItem;
@@ -43,6 +45,16 @@ fn provider_vec(providers: &[&str]) -> Vec<String> {
        .collect()
}

#[test]
fn rollout_date_parts_extracts_directory_components() {
    let file_name = OsStr::new("rollout-2025-03-01T09-00-00-123.jsonl");
    let parts = rollout_date_parts(file_name);
    assert_eq!(
        parts,
        Some(("2025".to_string(), "03".to_string(), "01".to_string()))
    );
}

fn write_session_file(
    root: &Path,
    ts_str: &str,
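The new test pins down the filename contract: `rollout-YYYY-MM-DDThh-mm-ss-mmm.jsonl` yields the `(year, month, day)` directory components. The real `rollout_date_parts` lives in `crate::rollout` and is not shown here; a sketch of a parser satisfying that contract:

use std::ffi::OsStr;

// Sketch only: implements the behavior the test asserts, not necessarily
// the actual implementation in crate::rollout.
fn rollout_date_parts(file_name: &OsStr) -> Option<(String, String, String)> {
    let name = file_name.to_str()?;
    let rest = name.strip_prefix("rollout-")?;
    // "2025-03-01T09-00-00-123.jsonl" -> "2025", "03", "01T09-00-00-123.jsonl"
    let mut parts = rest.splitn(3, '-');
    let year = parts.next()?;
    let month = parts.next()?;
    let day = parts.next()?.split('T').next()?;
    (year.len() == 4 && month.len() == 2 && day.len() == 2)
        .then(|| (year.to_string(), month.to_string(), day.to_string()))
}

fn main() {
    let parts = rollout_date_parts(OsStr::new("rollout-2025-03-01T09-00-00-123.jsonl"));
    assert_eq!(parts, Some(("2025".into(), "03".into(), "01".into())));
}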
@@ -10,45 +10,7 @@ use crate::util::resolve_path;

use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;

#[cfg(target_os = "windows")]
use std::sync::atomic::AtomicBool;
#[cfg(target_os = "windows")]
use std::sync::atomic::Ordering;

#[cfg(target_os = "windows")]
static WINDOWS_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);
#[cfg(target_os = "windows")]
static WINDOWS_ELEVATED_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);

#[cfg(target_os = "windows")]
pub fn set_windows_sandbox_enabled(enabled: bool) {
    WINDOWS_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn set_windows_sandbox_enabled(_enabled: bool) {}

#[cfg(target_os = "windows")]
pub fn set_windows_elevated_sandbox_enabled(enabled: bool) {
    WINDOWS_ELEVATED_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn set_windows_elevated_sandbox_enabled(_enabled: bool) {}

#[cfg(target_os = "windows")]
pub fn is_windows_elevated_sandbox_enabled() -> bool {
    WINDOWS_ELEVATED_SANDBOX_ENABLED.load(Ordering::Relaxed)
}

#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub fn is_windows_elevated_sandbox_enabled() -> bool {
    false
}
use codex_protocol::config_types::WindowsSandboxLevel;

#[derive(Debug, PartialEq)]
pub enum SafetyCheck {
@@ -67,6 +29,7 @@ pub fn assess_patch_safety(
    policy: AskForApproval,
    sandbox_policy: &SandboxPolicy,
    cwd: &Path,
    windows_sandbox_level: WindowsSandboxLevel,
) -> SafetyCheck {
    if action.is_empty() {
        return SafetyCheck::Reject {
@@ -104,7 +67,7 @@ pub fn assess_patch_safety(
    // Only auto‑approve when we can actually enforce a sandbox. Otherwise
    // fall back to asking the user because the patch may touch arbitrary
    // paths outside the project.
    match get_platform_sandbox() {
    match get_platform_sandbox(windows_sandbox_level != WindowsSandboxLevel::Disabled) {
        Some(sandbox_type) => SafetyCheck::AutoApprove {
            sandbox_type,
            user_explicitly_approved: false,
@@ -122,19 +85,17 @@
    }
}

pub fn get_platform_sandbox() -> Option<SandboxType> {
pub fn get_platform_sandbox(windows_sandbox_enabled: bool) -> Option<SandboxType> {
    if cfg!(target_os = "macos") {
        Some(SandboxType::MacosSeatbelt)
    } else if cfg!(target_os = "linux") {
        Some(SandboxType::LinuxSeccomp)
    } else if cfg!(target_os = "windows") {
        #[cfg(target_os = "windows")]
        {
            if WINDOWS_SANDBOX_ENABLED.load(Ordering::Relaxed) {
                return Some(SandboxType::WindowsRestrictedToken);
            }
        if windows_sandbox_enabled {
            Some(SandboxType::WindowsRestrictedToken)
        } else {
            None
        }
        None
    } else {
        None
    }
@@ -277,7 +238,13 @@ mod tests {
        };

        assert_eq!(
            assess_patch_safety(&add_inside, AskForApproval::OnRequest, &policy, &cwd),
            assess_patch_safety(
                &add_inside,
                AskForApproval::OnRequest,
                &policy,
                &cwd,
                WindowsSandboxLevel::Disabled
            ),
            SafetyCheck::AutoApprove {
                sandbox_type: SandboxType::None,
                user_explicitly_approved: false,
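The refactor deletes the process-global atomics and instead threads the Windows sandbox decision through as a plain parameter derived from `WindowsSandboxLevel`. A minimal sketch of the resulting call shape, with simplified stand-ins for the real enums:

// Sketch only: `SandboxType` and `WindowsSandboxLevel` here loosely mirror
// the real protocol types, just to show the parameter threading.
#[derive(Debug, PartialEq)]
enum SandboxType { MacosSeatbelt, LinuxSeccomp, WindowsRestrictedToken }

#[derive(PartialEq)]
enum WindowsSandboxLevel { Disabled, Enabled }

fn get_platform_sandbox(windows_sandbox_enabled: bool) -> Option<SandboxType> {
    if cfg!(target_os = "macos") {
        Some(SandboxType::MacosSeatbelt)
    } else if cfg!(target_os = "linux") {
        Some(SandboxType::LinuxSeccomp)
    } else if cfg!(target_os = "windows") && windows_sandbox_enabled {
        Some(SandboxType::WindowsRestrictedToken)
    } else {
        None
    }
}

fn main() {
    let level = WindowsSandboxLevel::Disabled;
    // Callers derive the bool at the call site instead of mutating a global,
    // which makes the dependency visible in every signature along the way.
    let sandbox = get_platform_sandbox(level != WindowsSandboxLevel::Disabled);
    println!("{sandbox:?}");
}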
@@ -21,6 +21,7 @@ use crate::seatbelt::create_seatbelt_command_args;
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
use crate::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use crate::tools::sandboxing::SandboxablePreference;
use codex_protocol::config_types::WindowsSandboxLevel;
pub use codex_protocol::models::SandboxPermissions;
use std::collections::HashMap;
use std::path::Path;
@@ -44,6 +45,7 @@ pub struct ExecEnv {
    pub env: HashMap<String, String>,
    pub expiration: ExecExpiration,
    pub sandbox: SandboxType,
    pub windows_sandbox_level: WindowsSandboxLevel,
    pub sandbox_permissions: SandboxPermissions,
    pub justification: Option<String>,
    pub arg0: Option<String>,
@@ -76,19 +78,26 @@ impl SandboxManager {
        &self,
        policy: &SandboxPolicy,
        pref: SandboxablePreference,
        windows_sandbox_level: WindowsSandboxLevel,
    ) -> SandboxType {
        match pref {
            SandboxablePreference::Forbid => SandboxType::None,
            SandboxablePreference::Require => {
                // Require a platform sandbox when available; on Windows this
                // respects the experimental_windows_sandbox feature.
                crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None)
                crate::safety::get_platform_sandbox(
                    windows_sandbox_level != WindowsSandboxLevel::Disabled,
                )
                .unwrap_or(SandboxType::None)
            }
            SandboxablePreference::Auto => match policy {
                SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
                    SandboxType::None
                }
                _ => crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None),
                _ => crate::safety::get_platform_sandbox(
                    windows_sandbox_level != WindowsSandboxLevel::Disabled,
                )
                .unwrap_or(SandboxType::None),
            },
        }
    }
@@ -100,6 +109,7 @@ impl SandboxManager {
        sandbox: SandboxType,
        sandbox_policy_cwd: &Path,
        codex_linux_sandbox_exe: Option<&PathBuf>,
        windows_sandbox_level: WindowsSandboxLevel,
    ) -> Result<ExecEnv, SandboxTransformError> {
        let mut env = spec.env;
        if !policy.has_full_network_access() {
@@ -160,6 +170,7 @@ impl SandboxManager {
            env,
            expiration: spec.expiration,
            sandbox,
            windows_sandbox_level,
            sandbox_permissions: spec.sandbox_permissions,
            justification: spec.justification,
            arg0: arg0_override,
codex-rs/core/src/session_share.rs (new file, 633 lines)
@@ -0,0 +1,633 @@
use std::ffi::OsStr;
use std::ffi::OsString;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use anyhow::Context;
use azure_core::auth::TokenCredential;
use azure_identity::AzureCliCredential;
use codex_protocol::ThreadId;
use reqwest::StatusCode;
use reqwest::Url;
use serde::Deserialize;
use serde::Serialize;
use time::OffsetDateTime;
use time::format_description::FormatItem;
use time::macros::format_description;

const SHARE_OBJECT_PREFIX: &str = "sessions";
const SHARE_OBJECT_SUFFIX: &str = ".jsonl";
const SHARE_META_SUFFIX: &str = ".meta.json";
const AZURE_STORAGE_SCOPE: &str = "https://storage.azure.com/.default";

#[derive(Debug, Clone)]
pub struct SessionShareResult {
    pub remote_id: ThreadId,
    pub object_url: Url,
}

#[derive(Debug, Clone)]
enum SessionObjectStore {
    Http(HttpObjectStore),
    Azure(AzureObjectStore),
}

#[derive(Debug, Clone)]
struct HttpObjectStore {
    base_url: Url,
    client: reqwest::Client,
}

#[derive(Debug, Clone)]
struct AzureObjectStore {
    endpoint: Url,
    container: String,
    prefix: String,
    sas_query: Option<String>,
    client: reqwest::Client,
    credential: Option<Arc<dyn TokenCredential>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct SessionShareMeta {
    owner: String,
    created_at: i64,
    updated_at: i64,
}

impl SessionObjectStore {
    pub async fn new(base_url: &str) -> anyhow::Result<Self> {
        let mut url = Url::parse(base_url)
            .with_context(|| format!("invalid session_object_storage_url: {base_url}"))?;
        match url.scheme() {
            "az" => Ok(SessionObjectStore::Azure(AzureObjectStore::new_from_az(
                &url,
            )?)),
            "http" | "https" => {
                if is_azure_blob_url(&url) {
                    Ok(SessionObjectStore::Azure(AzureObjectStore::new(&url)?))
                } else {
                    ensure_trailing_slash(&mut url);
                    Ok(SessionObjectStore::Http(HttpObjectStore {
                        base_url: url,
                        client: reqwest::Client::new(),
                    }))
                }
            }
            other => Err(anyhow::anyhow!(
                "unsupported session_object_storage_url scheme {other}"
            )),
        }
    }

    fn object_url(&self, key: &str) -> anyhow::Result<Url> {
        match self {
            SessionObjectStore::Http(store) => store.object_url(key),
            SessionObjectStore::Azure(store) => store.object_url(key),
        }
    }

    async fn object_exists(&self, key: &str) -> anyhow::Result<bool> {
        match self {
            SessionObjectStore::Http(store) => store.object_exists(key).await,
            SessionObjectStore::Azure(store) => store.object_exists(key).await,
        }
    }

    async fn put_object(&self, key: &str, data: Vec<u8>, content_type: &str) -> anyhow::Result<()> {
        match self {
            SessionObjectStore::Http(store) => store.put_object(key, data, content_type).await,
            SessionObjectStore::Azure(store) => store.put_object(key, data, content_type).await,
        }
    }

    async fn get_object_bytes(&self, key: &str) -> anyhow::Result<Option<Vec<u8>>> {
        match self {
            SessionObjectStore::Http(store) => store.get_object_bytes(key).await,
            SessionObjectStore::Azure(store) => store.get_object_bytes(key).await,
        }
    }
}

pub async fn upload_rollout_with_owner(
    base_url: &str,
    session_id: ThreadId,
    owner: &str,
    rollout_path: &Path,
) -> anyhow::Result<SessionShareResult> {
    let data = tokio::fs::read(rollout_path)
        .await
        .with_context(|| format!("failed to read rollout at {}", rollout_path.display()))?;
    let store = SessionObjectStore::new(base_url).await?;
    let key = object_key(session_id);
    let meta_key = meta_key(session_id);
    let rollout_exists = store.object_exists(&key).await?;
    let now = OffsetDateTime::now_utc().unix_timestamp();
    let meta = fetch_meta(&store, &meta_key).await?;

    match (rollout_exists, meta) {
        (true, Some(meta)) => {
            if meta.owner != owner {
                return Err(anyhow::anyhow!(
                    "remote session already exists and belongs to another user"
                ));
            }
            store
                .put_object(&key, data, "application/x-ndjson")
                .await
                .with_context(|| format!("failed to upload rollout for id {session_id}"))?;
            let updated = SessionShareMeta {
                owner: meta.owner,
                created_at: meta.created_at,
                updated_at: now,
            };
            upload_meta(&store, &meta_key, &updated).await?;
        }
        (true, None) => {
            // Recover from a previous metadata upload failure by restoring metadata
            // and overwriting the rollout blob.
            let meta = SessionShareMeta {
                owner: owner.to_string(),
                created_at: now,
                updated_at: now,
            };
            upload_meta(&store, &meta_key, &meta).await?;
            store
                .put_object(&key, data, "application/x-ndjson")
                .await
                .with_context(|| format!("failed to upload rollout for id {session_id}"))?;
        }
        (false, Some(meta)) => {
            if meta.owner != owner {
                return Err(anyhow::anyhow!(
                    "remote session metadata already exists and belongs to another user"
                ));
            }
            store
                .put_object(&key, data, "application/x-ndjson")
                .await
                .with_context(|| format!("failed to upload rollout for id {session_id}"))?;
            let updated = SessionShareMeta {
                owner: meta.owner,
                created_at: meta.created_at,
                updated_at: now,
            };
            upload_meta(&store, &meta_key, &updated).await?;
        }
        (false, None) => {
            let meta = SessionShareMeta {
                owner: owner.to_string(),
                created_at: now,
                updated_at: now,
            };
            upload_meta(&store, &meta_key, &meta).await?;
            store
                .put_object(&key, data, "application/x-ndjson")
                .await
                .with_context(|| format!("failed to upload rollout for id {session_id}"))?;
        }
    }

    let object_url = store.object_url(&key)?;
    Ok(SessionShareResult {
        remote_id: session_id,
        object_url,
    })
}

pub async fn download_rollout_if_available(
    base_url: &str,
    session_id: ThreadId,
    codex_home: &Path,
) -> anyhow::Result<Option<PathBuf>> {
    let store = SessionObjectStore::new(base_url).await?;
    let key = object_key(session_id);
    let meta_key = meta_key(session_id);
    let Some(data) = store.get_object_bytes(&key).await? else {
        return Ok(None);
    };
    let path = build_rollout_download_path(codex_home, session_id)?;
    let parent = path
        .parent()
        .ok_or_else(|| anyhow::anyhow!("failed to resolve rollout directory"))?;
    tokio::fs::create_dir_all(parent)
        .await
        .with_context(|| format!("failed to create rollout directory {}", parent.display()))?;
    tokio::fs::write(&path, data)
        .await
        .with_context(|| format!("failed to write rollout file {}", path.display()))?;
    let meta_path = share_meta_path_for_rollout_path(&path);
    match fetch_meta(&store, &meta_key).await? {
        Some(meta) => {
            let payload =
                serde_json::to_vec(&meta).with_context(|| "failed to serialize metadata")?;
            tokio::fs::write(&meta_path, payload)
                .await
                .with_context(|| {
                    format!("failed to write share metadata {}", meta_path.display())
                })?;
        }
        None => {
            let _ = tokio::fs::remove_file(&meta_path).await;
        }
    }
    Ok(Some(path))
}

fn object_key(id: ThreadId) -> String {
    format!("{SHARE_OBJECT_PREFIX}/{id}{SHARE_OBJECT_SUFFIX}")
}

fn meta_key(id: ThreadId) -> String {
    format!("{SHARE_OBJECT_PREFIX}/{id}{SHARE_META_SUFFIX}")
}

pub fn local_share_owner(rollout_path: &Path) -> anyhow::Result<Option<String>> {
    let meta_path = share_meta_path_for_rollout_path(rollout_path);
    let bytes = match std::fs::read(&meta_path) {
        Ok(bytes) => bytes,
        Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(None),
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to read share metadata {}", meta_path.display()));
        }
    };
    let meta: SessionShareMeta =
        serde_json::from_slice(&bytes).with_context(|| "failed to parse session share metadata")?;
    Ok(Some(meta.owner))
}

async fn fetch_meta(
    store: &SessionObjectStore,
    key: &str,
) -> anyhow::Result<Option<SessionShareMeta>> {
    let Some(bytes) = store.get_object_bytes(key).await? else {
        return Ok(None);
    };
    let meta: SessionShareMeta =
        serde_json::from_slice(&bytes).with_context(|| "failed to parse session share metadata")?;
    Ok(Some(meta))
}

async fn upload_meta(
    store: &SessionObjectStore,
    key: &str,
    meta: &SessionShareMeta,
) -> anyhow::Result<()> {
    let payload = serde_json::to_vec(meta).with_context(|| "failed to serialize metadata")?;
    store.put_object(key, payload, "application/json").await?;
    Ok(())
}

fn build_rollout_download_path(codex_home: &Path, session_id: ThreadId) -> anyhow::Result<PathBuf> {
    let timestamp = OffsetDateTime::now_local()
        .map_err(|e| anyhow::anyhow!("failed to get local time: {e}"))?;
    let format: &[FormatItem] =
        format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]");
    let date_str = timestamp
        .format(format)
        .map_err(|e| anyhow::anyhow!("failed to format timestamp: {e}"))?;
    let mut dir = codex_home.to_path_buf();
    dir.push(crate::rollout::SESSIONS_SUBDIR);
    dir.push(timestamp.year().to_string());
    dir.push(format!("{:02}", u8::from(timestamp.month())));
    dir.push(format!("{:02}", timestamp.day()));
    let filename = format!("rollout-{date_str}-{session_id}.jsonl");
    Ok(dir.join(filename))
}

fn share_meta_path_for_rollout_path(path: &Path) -> PathBuf {
    let file_name = path.file_name().unwrap_or_else(|| OsStr::new("session"));
    let mut name = OsString::from(file_name);
    name.push(".share-meta.json");
    path.with_file_name(name)
}

impl HttpObjectStore {
    fn object_url(&self, key: &str) -> anyhow::Result<Url> {
        let mut base = self.base_url.clone();
        let query = base.query().map(str::to_string);
        base.set_query(None);
        let mut joined = base
            .join(key)
            .with_context(|| format!("failed to build object URL for key {key}"))?;
        if let Some(query) = query {
            joined.set_query(Some(&query));
        }
        Ok(joined)
    }

    async fn object_exists(&self, key: &str) -> anyhow::Result<bool> {
        let url = self.object_url(key)?;
        let response = self.client.head(url).send().await?;
        match response.status() {
            StatusCode::NOT_FOUND => Ok(false),
            StatusCode::METHOD_NOT_ALLOWED | StatusCode::NOT_IMPLEMENTED => {
                self.object_exists_via_get(key).await
            }
            status if status.is_success() => Ok(true),
            status => Err(anyhow::anyhow!(
                "object store HEAD failed with status {status}"
            )),
        }
    }

    async fn object_exists_via_get(&self, key: &str) -> anyhow::Result<bool> {
        let url = self.object_url(key)?;
        let response = self
            .client
            .get(url)
            .header(reqwest::header::RANGE, "bytes=0-0")
            .send()
            .await?;
        match response.status() {
            StatusCode::NOT_FOUND => Ok(false),
            StatusCode::PARTIAL_CONTENT | StatusCode::OK => Ok(true),
            status => Err(anyhow::anyhow!(
                "object store GET probe failed with status {status}"
            )),
        }
    }

    async fn put_object(&self, key: &str, data: Vec<u8>, content_type: &str) -> anyhow::Result<()> {
        let url = self.object_url(key)?;
        let response = self
            .client
            .put(url)
            .header(reqwest::header::CONTENT_TYPE, content_type)
            .body(data)
            .send()
            .await?;
        if response.status().is_success() {
            Ok(())
        } else {
            Err(anyhow::anyhow!(
                "object store PUT failed with status {}",
                response.status()
            ))
        }
    }

    async fn get_object_bytes(&self, key: &str) -> anyhow::Result<Option<Vec<u8>>> {
        let url = self.object_url(key)?;
        let response = self.client.get(url).send().await?;
        match response.status() {
            StatusCode::NOT_FOUND => Ok(None),
            status if status.is_success() => {
                let bytes = response.bytes().await?;
                Ok(Some(bytes.to_vec()))
            }
            status => Err(anyhow::anyhow!(
                "object store GET failed with status {status}"
            )),
        }
    }
}

impl AzureObjectStore {
    fn new(url: &Url) -> anyhow::Result<Self> {
        let endpoint = azure_endpoint(url)?;
        let (container, prefix) = azure_container_and_prefix(url)?;
        let sas_query = url.query().map(str::to_string);
        let credential = if sas_query.is_some() {
            None
        } else {
            let credential: Arc<dyn TokenCredential> = Arc::new(AzureCliCredential::new());
            Some(credential)
        };
        Ok(Self {
            endpoint,
            container,
            prefix,
            sas_query,
            client: reqwest::Client::new(),
            credential,
        })
    }

    fn new_from_az(url: &Url) -> anyhow::Result<Self> {
        let account = url
            .host_str()
            .ok_or_else(|| anyhow::anyhow!("az url missing account name"))?;
        let endpoint = azure_endpoint_for_account(account)?;
        let (container, prefix) = azure_container_and_prefix(url)?;
        let sas_query = url.query().map(str::to_string);
        let credential = if sas_query.is_some() {
            None
        } else {
            let credential: Arc<dyn TokenCredential> = Arc::new(AzureCliCredential::new());
            Some(credential)
        };
        Ok(Self {
            endpoint,
            container,
            prefix,
            sas_query,
            client: reqwest::Client::new(),
            credential,
        })
    }

    fn object_url(&self, key: &str) -> anyhow::Result<Url> {
        let full_key = join_prefix(&self.prefix, key);
        let mut url = self.endpoint.clone();
        if full_key.is_empty() {
            url.set_path(&format!("/{}", self.container));
        } else {
            url.set_path(&format!("/{}/{}", self.container, full_key));
        }
        if let Some(query) = &self.sas_query {
            url.set_query(Some(query));
        }
        Ok(url)
    }

    async fn object_exists(&self, key: &str) -> anyhow::Result<bool> {
        let url = self.object_url(key)?;
        let response = self
            .authorized_request(self.client.head(url))
            .await?
            .send()
            .await?;
        match response.status() {
            StatusCode::NOT_FOUND => Ok(false),
            status if status.is_success() => Ok(true),
            status => Err(anyhow::anyhow!(
                "azure blob HEAD failed with status {status}{}",
                azure_response_context(response.headers())
            )),
        }
    }

    async fn put_object(&self, key: &str, data: Vec<u8>, content_type: &str) -> anyhow::Result<()> {
        let url = self.object_url(key)?;
        let response = self
            .authorized_request(
                self.client
                    .put(url)
                    .header("x-ms-blob-type", "BlockBlob")
                    .header(reqwest::header::CONTENT_TYPE, content_type)
                    .body(data),
            )
            .await?
            .send()
            .await?;
        if response.status().is_success() {
            Ok(())
        } else {
            let status = response.status();
            let headers = azure_response_context(response.headers());
            let body = response.text().await.unwrap_or_default();
            let body_snippet = azure_response_body_snippet(&body);
            Err(anyhow::anyhow!(
                "azure blob PUT failed with status {status}{headers}{body_snippet}"
            ))
        }
    }

    async fn get_object_bytes(&self, key: &str) -> anyhow::Result<Option<Vec<u8>>> {
        let url = self.object_url(key)?;
        let response = self
            .authorized_request(self.client.get(url))
            .await?
            .send()
            .await?;
        match response.status() {
            StatusCode::NOT_FOUND => Ok(None),
            status if status.is_success() => {
                let bytes = response.bytes().await?;
                Ok(Some(bytes.to_vec()))
            }
            status => Err(anyhow::anyhow!(
                "azure blob GET failed with status {status}{}",
                azure_response_context(response.headers())
            )),
        }
    }
}

fn ensure_trailing_slash(url: &mut Url) {
    let path = url.path();
    if path.ends_with('/') {
        return;
    }
    let trimmed = path.trim_end_matches('/');
    let new_path = if trimmed.is_empty() {
        "/".to_string()
    } else {
        format!("{trimmed}/")
    };
    url.set_path(&new_path);
}

fn join_prefix(prefix: &str, key: &str) -> String {
    if prefix.is_empty() {
        key.to_string()
    } else {
        format!("{prefix}/{key}")
    }
}

fn is_azure_blob_url(url: &Url) -> bool {
    let Some(host) = url.host_str() else {
        return false;
    };
    host.ends_with(".blob.core.windows.net")
}

fn azure_endpoint(url: &Url) -> anyhow::Result<Url> {
    let mut endpoint = url.clone();
    endpoint.set_path("/");
    endpoint.set_query(None);
    endpoint.set_fragment(None);
    Ok(endpoint)
}

fn azure_container_and_prefix(url: &Url) -> anyhow::Result<(String, String)> {
    let segments = url
        .path_segments()
        .map(|iter| {
            iter.filter(|segment| !segment.is_empty())
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    azure_container_and_prefix_from_segments(&segments)
}

fn azure_container_and_prefix_from_segments(segments: &[&str]) -> anyhow::Result<(String, String)> {
    if segments.is_empty() {
        return Err(anyhow::anyhow!(
            "azure blob url must include a container name"
        ));
    }
    let container = segments[0].to_string();
    let prefix = segments[1..].join("/");
    Ok((container, prefix))
}

fn azure_request(builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
    builder.header("x-ms-version", "2021-08-06")
}

fn azure_endpoint_for_account(account: &str) -> anyhow::Result<Url> {
    let endpoint = format!("https://{account}.blob.core.windows.net/");
    Url::parse(&endpoint).with_context(|| "failed to build azure blob endpoint")
}

impl AzureObjectStore {
    async fn authorized_request(
        &self,
        builder: reqwest::RequestBuilder,
    ) -> anyhow::Result<reqwest::RequestBuilder> {
        let builder = azure_request(builder);
        let Some(credential) = &self.credential else {
            return Ok(builder);
        };
        let token = credential
            .get_token(&[AZURE_STORAGE_SCOPE])
            .await
            .with_context(|| "failed to acquire azure blob access token")?;
        Ok(builder.bearer_auth(token.token.secret()))
    }
}

fn azure_response_context(headers: &reqwest::header::HeaderMap) -> String {
    let mut parts = Vec::new();
    if let Some(value) = azure_header_value(headers, "x-ms-error-code") {
        parts.push(format!("x-ms-error-code={value}"));
    }
    if let Some(value) = azure_header_value(headers, "x-ms-request-id") {
        parts.push(format!("x-ms-request-id={value}"));
    }
    if let Some(value) = azure_header_value(headers, "www-authenticate") {
        parts.push(format!("www-authenticate={value}"));
    }
    if parts.is_empty() {
        String::new()
    } else {
        format!(" ({})", parts.join(", "))
    }
}

fn azure_header_value(headers: &reqwest::header::HeaderMap, name: &str) -> Option<String> {
    headers
        .get(name)
        .and_then(|value| value.to_str().ok())
        .map(ToString::to_string)
}

fn azure_response_body_snippet(body: &str) -> String {
    let trimmed = body.trim();
    if trimmed.is_empty() {
        return String::new();
    }
    let snippet = if trimmed.len() <= 512 {
        trimmed.to_string()
    } else {
        let truncated: String = trimmed.chars().take(512).collect();
        format!("{truncated}...")
    };
    format!(" (body={snippet})")
}
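For orientation, here is a sketch of how the two public entry points above compose. The storage URL, owner string, and paths are placeholders, and `thread_id` is assumed to be obtained elsewhere; error handling is collapsed into `anyhow::Result`:

use std::path::Path;
use codex_protocol::ThreadId;

async fn share_and_restore(thread_id: ThreadId) -> anyhow::Result<()> {
    // `az://<account>/<container>/<prefix>` routes to the Azure store;
    // plain http(s) URLs fall back to the generic PUT/GET object store.
    let base_url = "az://myaccount/sessions-container/team-shares";

    // Upload claims (or re-claims) `sessions/<id>.jsonl` plus the sidecar
    // `sessions/<id>.meta.json` recording the owner and timestamps; it fails
    // if the remote session belongs to a different owner.
    let result = crate::session_share::upload_rollout_with_owner(
        base_url,
        thread_id,
        "alice@example.com",
        Path::new("/tmp/rollout.jsonl"),
    )
    .await?;
    println!("shared at {}", result.object_url);

    // Download returns Ok(None) when the remote object does not exist,
    // otherwise writes the rollout under the dated sessions directory.
    let local = crate::session_share::download_rollout_if_available(
        base_url,
        thread_id,
        Path::new("/home/alice/.codex"),
    )
    .await?;
    println!("restored to {local:?}");
    Ok(())
}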
@@ -19,6 +19,8 @@ use tokio::fs;
use tokio::process::Command;
use tokio::sync::watch;
use tokio::time::timeout;
use tracing::Instrument;
use tracing::info_span;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ShellSnapshot {
@@ -42,17 +44,21 @@ impl ShellSnapshot {

        let snapshot_shell = shell.clone();
        let snapshot_session_id = session_id;
        tokio::spawn(async move {
            let timer = otel_manager.start_timer("codex.shell_snapshot.duration_ms", &[]);
            let snapshot =
                ShellSnapshot::try_new(&codex_home, snapshot_session_id, &snapshot_shell)
                    .await
                    .map(Arc::new);
            let success = if snapshot.is_some() { "true" } else { "false" };
            let _ = timer.map(|timer| timer.record(&[("success", success)]));
            otel_manager.counter("codex.shell_snapshot", 1, &[("success", success)]);
            let _ = shell_snapshot_tx.send(snapshot);
        });
        let snapshot_span = info_span!("shell_snapshot", thread_id = %snapshot_session_id);
        tokio::spawn(
            async move {
                let timer = otel_manager.start_timer("codex.shell_snapshot.duration_ms", &[]);
                let snapshot =
                    ShellSnapshot::try_new(&codex_home, snapshot_session_id, &snapshot_shell)
                        .await
                        .map(Arc::new);
                let success = if snapshot.is_some() { "true" } else { "false" };
                let _ = timer.map(|timer| timer.record(&[("success", success)]));
                otel_manager.counter("codex.shell_snapshot", 1, &[("success", success)]);
                let _ = shell_snapshot_tx.send(snapshot);
            }
            .instrument(snapshot_span),
        );
    }

    async fn try_new(codex_home: &Path, session_id: ThreadId, shell: &Shell) -> Option<Self> {
@@ -464,8 +470,6 @@ mod tests {
    use pretty_assertions::assert_eq;
    #[cfg(unix)]
    use std::os::unix::ffi::OsStrExt;
    #[cfg(target_os = "linux")]
    use std::os::unix::fs::PermissionsExt;
    #[cfg(unix)]
    use std::process::Command;
    #[cfg(target_os = "linux")]
@@ -562,27 +566,16 @@ mod tests {
        use tokio::time::sleep;

        let dir = tempdir()?;
        let shell_path = dir.path().join("hanging-shell.sh");
        let pid_path = dir.path().join("pid");

        let script = format!(
            "#!/bin/sh\n\
             echo $$ > {}\n\
             sleep 30\n",
            pid_path.display()
        );
        fs::write(&shell_path, script).await?;
        let mut permissions = std::fs::metadata(&shell_path)?.permissions();
        permissions.set_mode(0o755);
        std::fs::set_permissions(&shell_path, permissions)?;
        let script = format!("echo $$ > \"{}\"; sleep 30", pid_path.display());

        let shell = Shell {
            shell_type: ShellType::Sh,
            shell_path,
            shell_path: PathBuf::from("/bin/sh"),
            shell_snapshot: crate::shell::empty_shell_snapshot_receiver(),
        };

        let err = run_script_with_timeout(&shell, "ignored", Duration::from_millis(500), true)
        let err = run_script_with_timeout(&shell, &script, Duration::from_secs(1), true)
            .await
            .expect_err("snapshot shell should time out");
        assert!(
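The change above wraps the spawned snapshot task in a tracing span so its events stay attributed to the thread id, which a bare `tokio::spawn` loses. A minimal sketch of the same `.instrument(...)` pattern, with a simplified `thread_id` type:

use tracing::Instrument;
use tracing::info_span;

// Sketch only: attaching the span with `Instrument::instrument` makes every
// event emitted inside the future carry the span's fields, even though the
// future runs on another task.
async fn spawn_with_span(thread_id: u64) {
    let span = info_span!("shell_snapshot", thread_id = %thread_id);
    tokio::spawn(
        async move {
            tracing::info!("snapshotting shell"); // logged under shell_snapshot{thread_id=...}
        }
        .instrument(span),
    );
}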
codex-rs/core/src/skills/env_var_dependencies.rs (new file, 162 lines)
@@ -0,0 +1,162 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::sync::Arc;

use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_protocol::request_user_input::RequestUserInputQuestion;
use codex_protocol::request_user_input::RequestUserInputResponse;
use tracing::warn;

use crate::codex::Session;
use crate::codex::TurnContext;
use crate::skills::SkillMetadata;

#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct SkillDependencyInfo {
    pub(crate) skill_name: String,
    pub(crate) name: String,
    pub(crate) description: Option<String>,
}

/// Resolve required dependency values (session cache, then env vars),
/// and prompt the UI for any missing ones.
pub(crate) async fn resolve_skill_dependencies_for_turn(
    sess: &Arc<Session>,
    turn_context: &Arc<TurnContext>,
    dependencies: &[SkillDependencyInfo],
) {
    if dependencies.is_empty() {
        return;
    }

    let existing_env = sess.dependency_env().await;
    let mut loaded_values = HashMap::new();
    let mut missing = Vec::new();
    let mut seen_names = HashSet::new();

    for dependency in dependencies {
        let name = dependency.name.clone();
        if !seen_names.insert(name.clone()) {
            continue;
        }
        if existing_env.contains_key(&name) {
            continue;
        }
        match env::var(&name) {
            Ok(value) => {
                loaded_values.insert(name.clone(), value);
                continue;
            }
            Err(env::VarError::NotPresent) => {}
            Err(err) => {
                warn!("failed to read env var {name}: {err}");
            }
        }
        missing.push(dependency.clone());
    }

    if !loaded_values.is_empty() {
        sess.set_dependency_env(loaded_values).await;
    }

    if !missing.is_empty() {
        request_skill_dependencies(sess, turn_context, &missing).await;
    }
}

pub(crate) fn collect_env_var_dependencies(
    mentioned_skills: &[SkillMetadata],
) -> Vec<SkillDependencyInfo> {
    let mut dependencies = Vec::new();
    for skill in mentioned_skills {
        let Some(skill_dependencies) = &skill.dependencies else {
            continue;
        };
        for tool in &skill_dependencies.tools {
            if tool.r#type != "env_var" {
                continue;
            }
            if tool.value.is_empty() {
                continue;
            }
            dependencies.push(SkillDependencyInfo {
                skill_name: skill.name.clone(),
                name: tool.value.clone(),
                description: tool.description.clone(),
            });
        }
    }
    dependencies
}

/// Prompt via request_user_input to gather missing env vars.
pub(crate) async fn request_skill_dependencies(
    sess: &Arc<Session>,
    turn_context: &Arc<TurnContext>,
    dependencies: &[SkillDependencyInfo],
) {
    let questions = dependencies
        .iter()
        .map(|dep| {
            let requirement = dep.description.as_ref().map_or_else(
                || format!("The skill \"{}\" requires \"{}\" to be set.", dep.skill_name, dep.name),
                |description| {
                    format!(
                        "The skill \"{}\" requires \"{}\" to be set ({}).",
                        dep.skill_name, dep.name, description
                    )
                },
            );
            let question = format!(
                "{requirement} This is an experimental internal feature. The value is stored in memory for this session only.",
            );
            RequestUserInputQuestion {
                id: dep.name.clone(),
                header: "Skill requires environment variable".to_string(),
                question,
                is_other: false,
                is_secret: true,
                options: None,
            }
        })
        .collect::<Vec<_>>();

    if questions.is_empty() {
        return;
    }

    let args = RequestUserInputArgs { questions };
    let call_id = format!("skill-deps-{}", turn_context.sub_id);
    let response = sess
        .request_user_input(turn_context, call_id, args)
        .await
        .unwrap_or_else(|| RequestUserInputResponse {
            answers: HashMap::new(),
        });

    if response.answers.is_empty() {
        return;
    }

    let mut values = HashMap::new();
    for (name, answer) in response.answers {
        let mut user_note = None;
        for entry in &answer.answers {
            if let Some(note) = entry.strip_prefix("user_note: ")
                && !note.trim().is_empty()
            {
                user_note = Some(note.trim().to_string());
            }
        }
        if let Some(value) = user_note {
            values.insert(name, value);
        }
    }

    if values.is_empty() {
        return;
    }

    sess.set_dependency_env(values).await;
}
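The module above resolves each required variable in a fixed order: the session-scoped cache wins, then the process environment, and anything still missing is queued for an interactive `request_user_input` prompt. A condensed, self-contained model of that partitioning step (names here are illustrative, not the real API):

use std::collections::HashMap;

// Sketch of the lookup order: session cache, then process env, then prompt.
fn partition_dependencies(
    required: &[&str],
    session_cache: &HashMap<String, String>,
) -> (HashMap<String, String>, Vec<String>) {
    let mut loaded = HashMap::new();
    let mut missing = Vec::new();
    for &name in required {
        if session_cache.contains_key(name) {
            continue; // already resolved earlier in the session
        }
        match std::env::var(name) {
            Ok(value) => {
                loaded.insert(name.to_string(), value);
            }
            Err(_) => missing.push(name.to_string()), // will be prompted for
        }
    }
    (loaded, missing)
}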
@@ -1,8 +1,8 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::instructions::SkillInstructions;
|
||||
use crate::skills::SkillLoadOutcome;
|
||||
use crate::skills::SkillMetadata;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
@@ -16,20 +16,9 @@ pub(crate) struct SkillInjections {
|
||||
}
|
||||
|
||||
pub(crate) async fn build_skill_injections(
|
||||
inputs: &[UserInput],
|
||||
skills: Option<&SkillLoadOutcome>,
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
otel: Option<&OtelManager>,
|
||||
) -> SkillInjections {
|
||||
if inputs.is_empty() {
|
||||
return SkillInjections::default();
|
||||
}
|
||||
|
||||
let Some(outcome) = skills else {
|
||||
return SkillInjections::default();
|
||||
};
|
||||
|
||||
let mentioned_skills =
|
||||
collect_explicit_skill_mentions(inputs, &outcome.skills, &outcome.disabled_paths);
|
||||
if mentioned_skills.is_empty() {
|
||||
return SkillInjections::default();
|
||||
}
|
||||
@@ -42,15 +31,15 @@ pub(crate) async fn build_skill_injections(
|
||||
for skill in mentioned_skills {
|
||||
match fs::read_to_string(&skill.path).await {
|
||||
Ok(contents) => {
|
||||
emit_skill_injected_metric(otel, &skill, "ok");
|
||||
emit_skill_injected_metric(otel, skill, "ok");
|
||||
result.items.push(ResponseItem::from(SkillInstructions {
|
||||
name: skill.name,
|
||||
name: skill.name.clone(),
|
||||
path: skill.path.to_string_lossy().into_owned(),
|
||||
contents,
|
||||
}));
|
||||
}
|
||||
Err(err) => {
|
||||
emit_skill_injected_metric(otel, &skill, "error");
|
||||
emit_skill_injected_metric(otel, skill, "error");
|
||||
let message = format!(
|
||||
"Failed to load skill {name} at {path}: {err:#}",
|
||||
name = skill.name,
|
||||
@@ -76,23 +65,673 @@ fn emit_skill_injected_metric(otel: Option<&OtelManager>, skill: &SkillMetadata,
|
||||
);
|
||||
}
|
||||
|
||||
fn collect_explicit_skill_mentions(
|
||||
/// Collect explicitly mentioned skills from `$name` text mentions.
|
||||
///
|
||||
/// Text inputs are scanned once to extract `$skill-name` tokens, then we iterate `skills`
|
||||
/// in their existing order to preserve prior ordering semantics. Explicit links are
|
||||
/// resolved by path and plain names are only used when the match is unambiguous.
|
||||
///
|
||||
/// Complexity: `O(S + T + N_t * S)` time, `O(S)` space, where:
|
||||
/// `S` = number of skills, `T` = total text length, `N_t` = number of text inputs.
|
||||
pub(crate) fn collect_explicit_skill_mentions(
|
||||
inputs: &[UserInput],
|
||||
skills: &[SkillMetadata],
|
||||
disabled_paths: &HashSet<PathBuf>,
|
||||
skill_name_counts: &HashMap<String, usize>,
|
||||
connector_slug_counts: &HashMap<String, usize>,
|
||||
) -> Vec<SkillMetadata> {
|
||||
let selection_context = SkillSelectionContext {
|
||||
skills,
|
||||
disabled_paths,
|
||||
skill_name_counts,
|
||||
connector_slug_counts,
|
||||
};
|
||||
let mut selected: Vec<SkillMetadata> = Vec::new();
|
||||
let mut seen: HashSet<String> = HashSet::new();
|
||||
let mut seen_names: HashSet<String> = HashSet::new();
|
||||
let mut seen_paths: HashSet<PathBuf> = HashSet::new();
|
||||
|
||||
for input in inputs {
|
||||
if let UserInput::Skill { name, path } = input
|
||||
&& seen.insert(name.clone())
|
||||
&& let Some(skill) = skills.iter().find(|s| s.name == *name && s.path == *path)
|
||||
&& !disabled_paths.contains(&skill.path)
|
||||
{
|
||||
selected.push(skill.clone());
|
||||
if let UserInput::Text { text, .. } = input {
|
||||
let mentioned_names = extract_tool_mentions(text);
|
||||
select_skills_from_mentions(
|
||||
&selection_context,
|
||||
&mentioned_names,
|
||||
&mut seen_names,
|
||||
&mut seen_paths,
|
||||
&mut selected,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
selected
|
||||
}
|
||||
|
||||
struct SkillSelectionContext<'a> {
|
||||
skills: &'a [SkillMetadata],
|
||||
disabled_paths: &'a HashSet<PathBuf>,
|
||||
skill_name_counts: &'a HashMap<String, usize>,
|
||||
connector_slug_counts: &'a HashMap<String, usize>,
|
||||
}
|
||||
|
||||
pub(crate) struct ToolMentions<'a> {
|
||||
names: HashSet<&'a str>,
|
||||
paths: HashSet<&'a str>,
|
||||
plain_names: HashSet<&'a str>,
|
||||
}
|
||||
|
||||
impl<'a> ToolMentions<'a> {
|
||||
fn is_empty(&self) -> bool {
|
||||
self.names.is_empty() && self.paths.is_empty()
|
||||
}
|
||||
|
||||
pub(crate) fn plain_names(&self) -> impl Iterator<Item = &'a str> + '_ {
|
||||
self.plain_names.iter().copied()
|
||||
}
|
||||
|
||||
pub(crate) fn paths(&self) -> impl Iterator<Item = &'a str> + '_ {
|
||||
self.paths.iter().copied()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub(crate) enum ToolMentionKind {
|
||||
App,
|
||||
Mcp,
|
||||
Skill,
|
||||
Other,
|
||||
}
|
||||
|
||||
const APP_PATH_PREFIX: &str = "app://";
|
||||
const MCP_PATH_PREFIX: &str = "mcp://";
|
||||
const SKILL_PATH_PREFIX: &str = "skill://";
|
||||
const SKILL_FILENAME: &str = "SKILL.md";
|
||||
|
||||
pub(crate) fn tool_kind_for_path(path: &str) -> ToolMentionKind {
|
||||
if path.starts_with(APP_PATH_PREFIX) {
|
||||
ToolMentionKind::App
|
||||
} else if path.starts_with(MCP_PATH_PREFIX) {
|
||||
ToolMentionKind::Mcp
|
||||
} else if path.starts_with(SKILL_PATH_PREFIX) || is_skill_filename(path) {
|
||||
ToolMentionKind::Skill
|
||||
} else {
|
||||
ToolMentionKind::Other
|
||||
}
|
||||
}
|
||||
|
||||
fn is_skill_filename(path: &str) -> bool {
|
||||
let file_name = path.rsplit(['/', '\\']).next().unwrap_or(path);
|
||||
file_name.eq_ignore_ascii_case(SKILL_FILENAME)
|
||||
}
|
||||
|
||||
pub(crate) fn app_id_from_path(path: &str) -> Option<&str> {
|
||||
path.strip_prefix(APP_PATH_PREFIX)
|
||||
.filter(|value| !value.is_empty())
|
||||
}
|
||||
|
||||
pub(crate) fn normalize_skill_path(path: &str) -> &str {
|
||||
path.strip_prefix(SKILL_PATH_PREFIX).unwrap_or(path)
|
||||
}
|
||||
|
||||
/// Extract `$tool-name` mentions from a single text input.
|
||||
///
|
||||
/// Supports explicit resource links in the form `[$tool-name](resource path)`. When a
|
||||
/// resource path is present, it is captured for exact path matching while also tracking
|
||||
/// the name for fallback matching.
|
||||
pub(crate) fn extract_tool_mentions(text: &str) -> ToolMentions<'_> {
|
||||
let text_bytes = text.as_bytes();
|
||||
let mut mentioned_names: HashSet<&str> = HashSet::new();
|
||||
let mut mentioned_paths: HashSet<&str> = HashSet::new();
|
||||
let mut plain_names: HashSet<&str> = HashSet::new();
|
||||
|
||||
let mut index = 0;
|
||||
while index < text_bytes.len() {
|
||||
let byte = text_bytes[index];
|
||||
if byte == b'['
|
||||
&& let Some((name, path, end_index)) =
|
||||
parse_linked_tool_mention(text, text_bytes, index)
|
||||
{
|
||||
if !is_common_env_var(name) {
|
||||
let kind = tool_kind_for_path(path);
|
||||
if !matches!(kind, ToolMentionKind::App | ToolMentionKind::Mcp) {
|
||||
mentioned_names.insert(name);
|
||||
}
|
||||
mentioned_paths.insert(path);
|
||||
}
|
||||
index = end_index;
|
||||
continue;
|
||||
}
|
||||
|
||||
if byte != b'$' {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let name_start = index + 1;
|
||||
let Some(first_name_byte) = text_bytes.get(name_start) else {
|
||||
index += 1;
|
||||
continue;
|
||||
};
|
||||
if !is_mention_name_char(*first_name_byte) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut name_end = name_start + 1;
|
||||
while let Some(next_byte) = text_bytes.get(name_end)
|
||||
&& is_mention_name_char(*next_byte)
|
||||
{
|
||||
name_end += 1;
|
||||
}
|
||||
|
||||
let name = &text[name_start..name_end];
|
||||
if !is_common_env_var(name) {
|
||||
mentioned_names.insert(name);
|
||||
plain_names.insert(name);
|
||||
}
|
||||
index = name_end;
|
||||
}
|
||||
|
||||
ToolMentions {
|
||||
names: mentioned_names,
|
||||
paths: mentioned_paths,
|
||||
plain_names,
|
||||
}
|
||||
}
|
||||
|
||||
/// Select mentioned skills while preserving the order of `skills`.
|
||||
fn select_skills_from_mentions(
|
||||
selection_context: &SkillSelectionContext<'_>,
|
||||
mentions: &ToolMentions<'_>,
|
||||
seen_names: &mut HashSet<String>,
|
||||
seen_paths: &mut HashSet<PathBuf>,
|
||||
selected: &mut Vec<SkillMetadata>,
|
||||
) {
|
||||
if mentions.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mention_skill_paths: HashSet<&str> = mentions
|
||||
.paths()
|
||||
.filter(|path| {
|
||||
!matches!(
|
||||
tool_kind_for_path(path),
|
||||
ToolMentionKind::App | ToolMentionKind::Mcp
|
||||
)
|
||||
})
|
||||
.map(normalize_skill_path)
|
||||
.collect();
|
||||
|
||||
for skill in selection_context.skills {
|
||||
if selection_context.disabled_paths.contains(&skill.path)
|
||||
|| seen_paths.contains(&skill.path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let path_str = skill.path.to_string_lossy();
|
||||
if mention_skill_paths.contains(path_str.as_ref()) {
|
||||
seen_paths.insert(skill.path.clone());
|
||||
seen_names.insert(skill.name.clone());
|
||||
selected.push(skill.clone());
|
||||
}
|
||||
}
|
||||
|
||||
for skill in selection_context.skills {
|
||||
if selection_context.disabled_paths.contains(&skill.path)
|
||||
|| seen_paths.contains(&skill.path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if !mentions.plain_names.contains(skill.name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let skill_count = selection_context
|
||||
.skill_name_counts
|
||||
.get(skill.name.as_str())
|
||||
.copied()
|
||||
.unwrap_or(0);
|
||||
let connector_count = selection_context
|
||||
.connector_slug_counts
|
||||
.get(&skill.name.to_ascii_lowercase())
|
||||
.copied()
|
||||
.unwrap_or(0);
|
||||
if skill_count != 1 || connector_count != 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
if seen_names.insert(skill.name.clone()) {
|
||||
seen_paths.insert(skill.path.clone());
|
||||
selected.push(skill.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_linked_tool_mention<'a>(
|
||||
text: &'a str,
|
||||
text_bytes: &[u8],
|
||||
start: usize,
|
||||
) -> Option<(&'a str, &'a str, usize)> {
|
||||
let dollar_index = start + 1;
|
||||
if text_bytes.get(dollar_index) != Some(&b'$') {
|
||||
return None;
|
||||
}
|
||||
|
||||
let name_start = dollar_index + 1;
|
||||
let first_name_byte = text_bytes.get(name_start)?;
|
||||
if !is_mention_name_char(*first_name_byte) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut name_end = name_start + 1;
|
||||
while let Some(next_byte) = text_bytes.get(name_end)
|
||||
&& is_mention_name_char(*next_byte)
|
||||
{
|
||||
name_end += 1;
|
||||
}
|
||||
|
||||
if text_bytes.get(name_end) != Some(&b']') {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut path_start = name_end + 1;
|
||||
while let Some(next_byte) = text_bytes.get(path_start)
|
||||
&& next_byte.is_ascii_whitespace()
|
||||
{
|
||||
path_start += 1;
|
||||
}
|
||||
if text_bytes.get(path_start) != Some(&b'(') {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut path_end = path_start + 1;
|
||||
while let Some(next_byte) = text_bytes.get(path_end)
|
||||
&& *next_byte != b')'
|
||||
{
|
||||
path_end += 1;
|
||||
}
|
||||
if text_bytes.get(path_end) != Some(&b')') {
|
||||
return None;
|
||||
}
|
||||
|
||||
let path = text[path_start + 1..path_end].trim();
|
||||
if path.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let name = &text[name_start..name_end];
|
||||
Some((name, path, path_end + 1))
|
||||
}
|
||||
|
||||
fn is_common_env_var(name: &str) -> bool {
|
||||
let upper = name.to_ascii_uppercase();
|
||||
matches!(
|
||||
upper.as_str(),
|
||||
"PATH"
|
||||
| "HOME"
|
||||
| "USER"
|
||||
| "SHELL"
|
||||
| "PWD"
|
||||
| "TMPDIR"
|
||||
| "TEMP"
|
||||
| "TMP"
|
||||
| "LANG"
|
||||
| "TERM"
|
||||
| "XDG_CONFIG_HOME"
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn text_mentions_skill(text: &str, skill_name: &str) -> bool {
|
||||
if skill_name.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let text_bytes = text.as_bytes();
|
||||
let skill_bytes = skill_name.as_bytes();
|
||||
|
||||
for (index, byte) in text_bytes.iter().copied().enumerate() {
|
||||
if byte != b'$' {
|
||||
continue;
|
||||
}
|
||||
|
||||
let name_start = index + 1;
|
||||
let Some(rest) = text_bytes.get(name_start..) else {
|
||||
continue;
|
||||
};
|
||||
if !rest.starts_with(skill_bytes) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let after_index = name_start + skill_bytes.len();
|
||||
let after = text_bytes.get(after_index).copied();
|
||||
if after.is_none_or(|b| !is_mention_name_char(b)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn is_mention_name_char(byte: u8) -> bool {
|
||||
matches!(byte, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'-')
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
||||
fn make_skill(name: &str, path: &str) -> SkillMetadata {
|
||||
SkillMetadata {
|
||||
name: name.to_string(),
|
||||
description: format!("{name} skill"),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: PathBuf::from(path),
|
||||
scope: codex_protocol::protocol::SkillScope::User,
|
||||
}
|
||||
}
|
||||
|
||||
fn set<'a>(items: &'a [&'a str]) -> HashSet<&'a str> {
|
||||
items.iter().copied().collect()
|
||||
}
|
||||
|
||||
fn assert_mentions(text: &str, expected_names: &[&str], expected_paths: &[&str]) {
    let mentions = extract_tool_mentions(text);
    assert_eq!(mentions.names, set(expected_names));
    assert_eq!(mentions.paths, set(expected_paths));
}

fn build_skill_name_counts(
    skills: &[SkillMetadata],
    disabled_paths: &HashSet<PathBuf>,
) -> HashMap<String, usize> {
    let mut counts = HashMap::new();
    for skill in skills {
        if disabled_paths.contains(&skill.path) {
            continue;
        }
        *counts.entry(skill.name.clone()).or_insert(0) += 1;
    }
    counts
}

fn collect_mentions(
    inputs: &[UserInput],
    skills: &[SkillMetadata],
    disabled_paths: &HashSet<PathBuf>,
    connector_slug_counts: &HashMap<String, usize>,
) -> Vec<SkillMetadata> {
    let skill_name_counts = build_skill_name_counts(skills, disabled_paths);
    collect_explicit_skill_mentions(
        inputs,
        skills,
        disabled_paths,
        &skill_name_counts,
        connector_slug_counts,
    )
}

#[test]
fn text_mentions_skill_requires_exact_boundary() {
    assert_eq!(
        true,
        text_mentions_skill("use $notion-research-doc please", "notion-research-doc")
    );
    assert_eq!(
        true,
        text_mentions_skill("($notion-research-doc)", "notion-research-doc")
    );
    assert_eq!(
        true,
        text_mentions_skill("$notion-research-doc.", "notion-research-doc")
    );
    assert_eq!(
        false,
        text_mentions_skill("$notion-research-docs", "notion-research-doc")
    );
    assert_eq!(
        false,
        text_mentions_skill("$notion-research-doc_extra", "notion-research-doc")
    );
}

#[test]
fn text_mentions_skill_handles_end_boundary_and_near_misses() {
    assert_eq!(true, text_mentions_skill("$alpha-skill", "alpha-skill"));
    assert_eq!(false, text_mentions_skill("$alpha-skillx", "alpha-skill"));
    assert_eq!(
        true,
        text_mentions_skill("$alpha-skillx and later $alpha-skill ", "alpha-skill")
    );
}

#[test]
fn text_mentions_skill_handles_many_dollars_without_looping() {
    let prefix = "$".repeat(256);
    let text = format!("{prefix} not-a-mention");
    assert_eq!(false, text_mentions_skill(&text, "alpha-skill"));
}

#[test]
fn extract_tool_mentions_handles_plain_and_linked_mentions() {
    assert_mentions(
        "use $alpha and [$beta](/tmp/beta)",
        &["alpha", "beta"],
        &["/tmp/beta"],
    );
}

#[test]
fn extract_tool_mentions_skips_common_env_vars() {
    assert_mentions("use $PATH and $alpha", &["alpha"], &[]);
    assert_mentions("use [$HOME](/tmp/skill)", &[], &[]);
    assert_mentions("use $XDG_CONFIG_HOME and $beta", &["beta"], &[]);
}

#[test]
fn extract_tool_mentions_requires_link_syntax() {
    assert_mentions("[beta](/tmp/beta)", &[], &[]);
    assert_mentions("[$beta] /tmp/beta", &["beta"], &[]);
    assert_mentions("[$beta]()", &["beta"], &[]);
}

#[test]
fn extract_tool_mentions_trims_linked_paths_and_allows_spacing() {
    assert_mentions("use [$beta] ( /tmp/beta )", &["beta"], &["/tmp/beta"]);
}

#[test]
fn extract_tool_mentions_stops_at_non_name_chars() {
    assert_mentions(
        "use $alpha.skill and $beta_extra",
        &["alpha", "beta_extra"],
        &[],
    );
}

#[test]
fn collect_explicit_skill_mentions_text_respects_skill_order() {
    let alpha = make_skill("alpha-skill", "/tmp/alpha");
    let beta = make_skill("beta-skill", "/tmp/beta");
    let skills = vec![beta.clone(), alpha.clone()];
    let inputs = vec![UserInput::Text {
        text: "first $alpha-skill then $beta-skill".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    // Text scanning should not change the previous selection ordering semantics.
    assert_eq!(selected, vec![beta, alpha]);
}

#[test]
fn collect_explicit_skill_mentions_ignores_structured_inputs() {
    let alpha = make_skill("alpha-skill", "/tmp/alpha");
    let beta = make_skill("beta-skill", "/tmp/beta");
    let skills = vec![alpha.clone(), beta];
    let inputs = vec![
        UserInput::Text {
            text: "please run $alpha-skill".to_string(),
            text_elements: Vec::new(),
        },
        UserInput::Skill {
            name: "beta-skill".to_string(),
            path: PathBuf::from("/tmp/beta"),
        },
    ];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, vec![alpha]);
}
#[test]
fn collect_explicit_skill_mentions_dedupes_by_path() {
    let alpha = make_skill("alpha-skill", "/tmp/alpha");
    let skills = vec![alpha.clone()];
    let inputs = vec![UserInput::Text {
        text: "use [$alpha-skill](/tmp/alpha) and [$alpha-skill](/tmp/alpha)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, vec![alpha]);
}

#[test]
fn collect_explicit_skill_mentions_skips_ambiguous_name() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let beta = make_skill("demo-skill", "/tmp/beta");
    let skills = vec![alpha, beta];
    let inputs = vec![UserInput::Text {
        text: "use $demo-skill and again $demo-skill".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, Vec::new());
}

#[test]
fn collect_explicit_skill_mentions_prefers_linked_path_over_name() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let beta = make_skill("demo-skill", "/tmp/beta");
    let skills = vec![alpha, beta.clone()];
    let inputs = vec![UserInput::Text {
        text: "use [$demo-skill](/tmp/beta)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, vec![beta]);
}

#[test]
fn collect_explicit_skill_mentions_skips_plain_name_when_connector_matches() {
    let alpha = make_skill("alpha-skill", "/tmp/alpha");
    let skills = vec![alpha];
    let inputs = vec![UserInput::Text {
        text: "use $alpha-skill".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::from([("alpha-skill".to_string(), 1)]);

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, Vec::new());
}

#[test]
fn collect_explicit_skill_mentions_allows_explicit_path_with_connector_conflict() {
    let alpha = make_skill("alpha-skill", "/tmp/alpha");
    let skills = vec![alpha.clone()];
    let inputs = vec![UserInput::Text {
        text: "use [$alpha-skill](/tmp/alpha)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::from([("alpha-skill".to_string(), 1)]);

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, vec![alpha]);
}

#[test]
fn collect_explicit_skill_mentions_skips_when_linked_path_disabled() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let beta = make_skill("demo-skill", "/tmp/beta");
    let skills = vec![alpha, beta];
    let inputs = vec![UserInput::Text {
        text: "use [$demo-skill](/tmp/alpha)".to_string(),
        text_elements: Vec::new(),
    }];
    let disabled = HashSet::from([PathBuf::from("/tmp/alpha")]);
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &disabled, &connector_counts);

    assert_eq!(selected, Vec::new());
}

#[test]
fn collect_explicit_skill_mentions_prefers_resource_path() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let beta = make_skill("demo-skill", "/tmp/beta");
    let skills = vec![alpha, beta.clone()];
    let inputs = vec![UserInput::Text {
        text: "use [$demo-skill](/tmp/beta/SKILL.md)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, vec![beta]);
}

#[test]
fn collect_explicit_skill_mentions_skips_missing_path_with_no_fallback() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let beta = make_skill("demo-skill", "/tmp/beta");
    let skills = vec![alpha, beta];
    let inputs = vec![UserInput::Text {
        text: "use [$demo-skill](/tmp/missing)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, Vec::new());
}

#[test]
fn collect_explicit_skill_mentions_skips_missing_path_without_fallback() {
    let alpha = make_skill("demo-skill", "/tmp/alpha");
    let skills = vec![alpha];
    let inputs = vec![UserInput::Text {
        text: "use [$demo-skill](/tmp/missing)".to_string(),
        text_elements: Vec::new(),
    }];
    let connector_counts = HashMap::new();

    let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

    assert_eq!(selected, Vec::new());
}
}

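The boundary tests above pin down a small contract for `$name` mentions. As a hedged, illustrative sketch (this is not the implementation under test; the real `text_mentions_skill` lives in the injection module and may differ in detail), the rule amounts to: a mention counts only when the name directly follows a `$` and is terminated by end of input or by a character that cannot appear in a skill name, with ASCII alphanumerics, `-`, and `_` treated as name characters.

// Illustrative sketch only, reconstructed from the tests above.
fn is_name_char(c: char) -> bool {
    c.is_ascii_alphanumeric() || c == '-' || c == '_'
}

fn text_mentions_skill_sketch(text: &str, name: &str) -> bool {
    let mut rest = text;
    while let Some(idx) = rest.find('$') {
        let candidate = &rest[idx + 1..];
        if let Some(tail) = candidate.strip_prefix(name) {
            // Exact boundary: the mention must end immediately after `name`.
            match tail.chars().next() {
                None => return true,
                Some(c) if !is_name_char(c) => return true,
                _ => {}
            }
        }
        // Always advance past this `$`, so a long run of dollars terminates.
        rest = candidate;
    }
    false
}

This matches the cases exercised above: a name followed by a space or punctuation is a hit, a near miss with a trailing name character is not, and a 256-character run of dollars is scanned in a single pass without looping.
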
@@ -1,10 +1,12 @@
use crate::config::Config;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::skills::model::SkillDependencies;
use crate::skills::model::SkillError;
use crate::skills::model::SkillInterface;
use crate::skills::model::SkillLoadOutcome;
use crate::skills::model::SkillMetadata;
use crate::skills::model::SkillToolDependency;
use crate::skills::system::system_cache_root_dir;
use codex_app_server_protocol::ConfigLayerSource;
use codex_protocol::protocol::SkillScope;
@@ -35,9 +37,11 @@ struct SkillFrontmatterMetadata {
}

#[derive(Debug, Default, Deserialize)]
struct SkillToml {
struct SkillMetadataFile {
    #[serde(default)]
    interface: Option<Interface>,
    #[serde(default)]
    dependencies: Option<Dependencies>,
}

#[derive(Debug, Default, Deserialize)]
@@ -50,13 +54,36 @@ struct Interface {
    default_prompt: Option<String>,
}

#[derive(Debug, Default, Deserialize)]
struct Dependencies {
    #[serde(default)]
    tools: Vec<DependencyTool>,
}

#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
    #[serde(rename = "type")]
    kind: Option<String>,
    value: Option<String>,
    description: Option<String>,
    transport: Option<String>,
    command: Option<String>,
    url: Option<String>,
}

const SKILLS_FILENAME: &str = "SKILL.md";
const SKILLS_TOML_FILENAME: &str = "SKILL.toml";
const SKILLS_JSON_FILENAME: &str = "SKILL.json";
const SKILLS_DIR_NAME: &str = "skills";
const MAX_NAME_LEN: usize = 64;
const MAX_DESCRIPTION_LEN: usize = 1024;
const MAX_SHORT_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEFAULT_PROMPT_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_TYPE_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_TRANSPORT_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_VALUE_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_COMMAND_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_URL_LEN: usize = MAX_DESCRIPTION_LEN;
// Traversal depth from the skills root.
const MAX_SCAN_DEPTH: usize = 6;
const MAX_SKILLS_DIRS_PER_ROOT: usize = 2000;
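The structs and length limits above define the optional SKILL.json sidecar format end to end. As a hedged, self-contained illustration of how such a file deserializes (the mirror structs copy the shapes from this diff, with `Interface` trimmed to one field for brevity; the sample JSON and the `main` wiring are invented for the example and assume `serde` with the derive feature plus `serde_json`, both of which the crate already pulls in):

// Standalone illustration, not the crate's code.
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct SkillMetadataFile {
    #[serde(default)]
    interface: Option<Interface>,
    #[serde(default)]
    dependencies: Option<Dependencies>,
}

#[derive(Debug, Default, Deserialize)]
struct Interface {
    display_name: Option<String>,
}

#[derive(Debug, Default, Deserialize)]
struct Dependencies {
    #[serde(default)]
    tools: Vec<DependencyTool>,
}

#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
    #[serde(rename = "type")]
    kind: Option<String>,
    value: Option<String>,
}

fn main() {
    let contents = r#"{
        "interface": { "display_name": "UI Skill" },
        "dependencies": { "tools": [ { "type": "env_var", "value": "GITHUB_TOKEN" } ] }
    }"#;
    let parsed: SkillMetadataFile = serde_json::from_str(contents).expect("valid SKILL.json");
    let tools = parsed.dependencies.expect("dependencies").tools;
    assert_eq!(tools[0].kind.as_deref(), Some("env_var"));
    assert_eq!(tools[0].value.as_deref(), Some("GITHUB_TOKEN"));
}

Unknown JSON keys are ignored by serde's defaults, which lines up with the loader's fail-open posture below: an unreadable or invalid SKILL.json is logged and skipped rather than blocking the SKILL.md itself.
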
@@ -345,7 +372,7 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
        .as_deref()
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty());
    let interface = load_skill_interface(path);
    let (interface, dependencies) = load_skill_metadata(path);

    validate_len(&name, MAX_NAME_LEN, "name")?;
    validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
@@ -364,41 +391,54 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
        description,
        short_description,
        interface,
        dependencies,
        path: resolved_path,
        scope,
    })
}

fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
    // Fail open: optional SKILL.toml metadata should not block loading SKILL.md.
    let skill_dir = skill_path.parent()?;
    let interface_path = skill_dir.join(SKILLS_TOML_FILENAME);
    if !interface_path.exists() {
        return None;
fn load_skill_metadata(skill_path: &Path) -> (Option<SkillInterface>, Option<SkillDependencies>) {
    // Fail open: optional metadata should not block loading SKILL.md.
    let Some(skill_dir) = skill_path.parent() else {
        return (None, None);
    };
    let metadata_path = skill_dir.join(SKILLS_JSON_FILENAME);
    if !metadata_path.exists() {
        return (None, None);
    }

    let contents = match fs::read_to_string(&interface_path) {
    let contents = match fs::read_to_string(&metadata_path) {
        Ok(contents) => contents,
        Err(error) => {
            tracing::warn!(
                "ignoring {path}: failed to read SKILL.toml: {error}",
                path = interface_path.display()
                "ignoring {path}: failed to read {label}: {error}",
                path = metadata_path.display(),
                label = SKILLS_JSON_FILENAME
            );
            return None;
            return (None, None);
        }
    };
    let parsed: SkillToml = match toml::from_str(&contents) {

    let parsed: SkillMetadataFile = match serde_json::from_str(&contents) {
        Ok(parsed) => parsed,
        Err(error) => {
            tracing::warn!(
                "ignoring {path}: invalid TOML: {error}",
                path = interface_path.display()
                "ignoring {path}: invalid {label}: {error}",
                path = metadata_path.display(),
                label = SKILLS_JSON_FILENAME
            );
            return None;
            return (None, None);
        }
    };
    let interface = parsed.interface?;

    (
        resolve_interface(parsed.interface, skill_dir),
        resolve_dependencies(parsed.dependencies),
    )
}

fn resolve_interface(interface: Option<Interface>, skill_dir: &Path) -> Option<SkillInterface> {
    let interface = interface?;
    let interface = SkillInterface {
        display_name: resolve_str(
            interface.display_name,
@@ -428,6 +468,58 @@ fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
    if has_fields { Some(interface) } else { None }
}

fn resolve_dependencies(dependencies: Option<Dependencies>) -> Option<SkillDependencies> {
    let dependencies = dependencies?;
    let tools: Vec<SkillToolDependency> = dependencies
        .tools
        .into_iter()
        .filter_map(resolve_dependency_tool)
        .collect();
    if tools.is_empty() {
        None
    } else {
        Some(SkillDependencies { tools })
    }
}

fn resolve_dependency_tool(tool: DependencyTool) -> Option<SkillToolDependency> {
    let r#type = resolve_required_str(
        tool.kind,
        MAX_DEPENDENCY_TYPE_LEN,
        "dependencies.tools.type",
    )?;
    let value = resolve_required_str(
        tool.value,
        MAX_DEPENDENCY_VALUE_LEN,
        "dependencies.tools.value",
    )?;
    let description = resolve_str(
        tool.description,
        MAX_DEPENDENCY_DESCRIPTION_LEN,
        "dependencies.tools.description",
    );
    let transport = resolve_str(
        tool.transport,
        MAX_DEPENDENCY_TRANSPORT_LEN,
        "dependencies.tools.transport",
    );
    let command = resolve_str(
        tool.command,
        MAX_DEPENDENCY_COMMAND_LEN,
        "dependencies.tools.command",
    );
    let url = resolve_str(tool.url, MAX_DEPENDENCY_URL_LEN, "dependencies.tools.url");

    Some(SkillToolDependency {
        r#type,
        value,
        description,
        transport,
        command,
        url,
    })
}

fn resolve_asset_path(
    skill_dir: &Path,
    field: &'static str,
@@ -511,6 +603,18 @@ fn resolve_str(value: Option<String>, max_len: usize, field: &'static str) -> Op
    Some(value)
}

fn resolve_required_str(
    value: Option<String>,
    max_len: usize,
    field: &'static str,
) -> Option<String> {
    let Some(value) = value else {
        tracing::warn!("ignoring {field}: value is missing");
        return None;
    };
    resolve_str(Some(value), max_len, field)
}

fn resolve_color_str(value: Option<String>, field: &'static str) -> Option<String> {
    let value = value?;
    let value = value.trim();
@@ -755,29 +859,136 @@ mod tests {
        path
    }

    fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
        let path = skill_dir.join(SKILLS_TOML_FILENAME);
    fn write_skill_metadata_at(skill_dir: &Path, filename: &str, contents: &str) -> PathBuf {
        let path = skill_dir.join(filename);
        fs::write(&path, contents).unwrap();
        path
    }

    fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
        write_skill_metadata_at(skill_dir, SKILLS_JSON_FILENAME, contents)
    }

    #[tokio::test]
    async fn loads_skill_interface_metadata_happy_path() {
    async fn loads_skill_dependencies_metadata_from_json() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
        let skill_path = write_skill(&codex_home, "demo", "dep-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");

        write_skill_metadata_at(
            skill_dir,
            SKILLS_JSON_FILENAME,
            r#"
{
  "dependencies": {
    "tools": [
      {
        "type": "env_var",
        "value": "GITHUB_TOKEN",
        "description": "GitHub API token with repo scopes"
      },
      {
        "type": "mcp",
        "value": "github",
        "description": "GitHub MCP server",
        "transport": "streamable_http",
        "url": "https://example.com/mcp"
      },
      {
        "type": "cli",
        "value": "gh",
        "description": "GitHub CLI"
      },
      {
        "type": "mcp",
        "value": "local-gh",
        "description": "Local GH MCP server",
        "transport": "stdio",
        "command": "gh-mcp"
      }
    ]
  }
}
"#,
        );

        let cfg = make_config(&codex_home).await;
        let outcome = load_skills(&cfg);

        assert!(
            outcome.errors.is_empty(),
            "unexpected errors: {:?}",
            outcome.errors
        );
        assert_eq!(
            outcome.skills,
            vec![SkillMetadata {
                name: "dep-skill".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: None,
                dependencies: Some(SkillDependencies {
                    tools: vec![
                        SkillToolDependency {
                            r#type: "env_var".to_string(),
                            value: "GITHUB_TOKEN".to_string(),
                            description: Some("GitHub API token with repo scopes".to_string()),
                            transport: None,
                            command: None,
                            url: None,
                        },
                        SkillToolDependency {
                            r#type: "mcp".to_string(),
                            value: "github".to_string(),
                            description: Some("GitHub MCP server".to_string()),
                            transport: Some("streamable_http".to_string()),
                            command: None,
                            url: Some("https://example.com/mcp".to_string()),
                        },
                        SkillToolDependency {
                            r#type: "cli".to_string(),
                            value: "gh".to_string(),
                            description: Some("GitHub CLI".to_string()),
                            transport: None,
                            command: None,
                            url: None,
                        },
                        SkillToolDependency {
                            r#type: "mcp".to_string(),
                            value: "local-gh".to_string(),
                            description: Some("Local GH MCP server".to_string()),
                            transport: Some("stdio".to_string()),
                            command: Some("gh-mcp".to_string()),
                            url: None,
                        },
                    ],
                }),
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
        );
    }

    #[tokio::test]
    async fn loads_skill_interface_metadata_from_json() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");
        let normalized_skill_dir = normalized(skill_dir);

        write_skill_interface_at(
            skill_dir,
            r##"
[interface]
display_name = "UI Skill"
short_description = " short desc "
icon_small = "./assets/small-400px.png"
icon_large = "./assets/large-logo.svg"
brand_color = "#3B82F6"
default_prompt = " default prompt "
{
  "interface": {
    "display_name": "UI Skill",
    "short_description": " short desc ",
    "icon_small": "./assets/small-400px.png",
    "icon_large": "./assets/large-logo.svg",
    "brand_color": "#3B82F6",
    "default_prompt": " default prompt "
  }
}
"##,
        );

@@ -793,7 +1004,7 @@ default_prompt = " default prompt "
            outcome.skills,
            vec![SkillMetadata {
                name: "ui-skill".to_string(),
                description: "from toml".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: Some(SkillInterface {
                    display_name: Some("UI Skill".to_string()),
@@ -803,7 +1014,8 @@ default_prompt = " default prompt "
                    brand_color: Some("#3B82F6".to_string()),
                    default_prompt: Some("default prompt".to_string()),
                }),
                path: normalized(&skill_path),
                dependencies: None,
                path: normalized(skill_path.as_path()),
                scope: SkillScope::User,
            }]
        );
@@ -812,17 +1024,20 @@ default_prompt = " default prompt "
    #[tokio::test]
    async fn accepts_icon_paths_under_assets_dir() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");
        let normalized_skill_dir = normalized(skill_dir);

        write_skill_interface_at(
            skill_dir,
            r#"
[interface]
display_name = "UI Skill"
icon_small = "assets/icon.png"
icon_large = "./assets/logo.svg"
{
  "interface": {
    "display_name": "UI Skill",
    "icon_small": "assets/icon.png",
    "icon_large": "./assets/logo.svg"
  }
}
"#,
        );

@@ -838,7 +1053,7 @@ icon_large = "./assets/logo.svg"
            outcome.skills,
            vec![SkillMetadata {
                name: "ui-skill".to_string(),
                description: "from toml".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: Some(SkillInterface {
                    display_name: Some("UI Skill".to_string()),
@@ -848,6 +1063,7 @@ icon_large = "./assets/logo.svg"
                    brand_color: None,
                    default_prompt: None,
                }),
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -857,14 +1073,17 @@ icon_large = "./assets/logo.svg"
    #[tokio::test]
    async fn ignores_invalid_brand_color() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");

        write_skill_interface_at(
            skill_dir,
            r#"
[interface]
brand_color = "blue"
{
  "interface": {
    "brand_color": "blue"
  }
}
"#,
        );

@@ -880,9 +1099,10 @@ brand_color = "blue"
            outcome.skills,
            vec![SkillMetadata {
                name: "ui-skill".to_string(),
                description: "from toml".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -892,7 +1112,7 @@ brand_color = "blue"
    #[tokio::test]
    async fn ignores_default_prompt_over_max_length() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");
        let normalized_skill_dir = normalized(skill_dir);
        let too_long = "x".repeat(MAX_DEFAULT_PROMPT_LEN + 1);
@@ -901,10 +1121,13 @@ brand_color = "blue"
            skill_dir,
            &format!(
                r##"
[interface]
display_name = "UI Skill"
icon_small = "./assets/small-400px.png"
default_prompt = "{too_long}"
{{
  "interface": {{
    "display_name": "UI Skill",
    "icon_small": "./assets/small-400px.png",
    "default_prompt": "{too_long}"
  }}
}}
"##
            ),
        );
@@ -921,7 +1144,7 @@ default_prompt = "{too_long}"
            outcome.skills,
            vec![SkillMetadata {
                name: "ui-skill".to_string(),
                description: "from toml".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: Some(SkillInterface {
                    display_name: Some("UI Skill".to_string()),
@@ -931,6 +1154,7 @@ default_prompt = "{too_long}"
                    brand_color: None,
                    default_prompt: None,
                }),
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -940,15 +1164,18 @@ default_prompt = "{too_long}"
    #[tokio::test]
    async fn drops_interface_when_icons_are_invalid() {
        let codex_home = tempfile::tempdir().expect("tempdir");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
        let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
        let skill_dir = skill_path.parent().expect("skill dir");

        write_skill_interface_at(
            skill_dir,
            r#"
[interface]
icon_small = "icon.png"
icon_large = "./assets/../logo.svg"
{
  "interface": {
    "icon_small": "icon.png",
    "icon_large": "./assets/../logo.svg"
  }
}
"#,
        );

@@ -964,9 +1191,10 @@ icon_large = "./assets/../logo.svg"
            outcome.skills,
            vec![SkillMetadata {
                name: "ui-skill".to_string(),
                description: "from toml".to_string(),
                description: "from json".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -1009,6 +1237,7 @@ icon_large = "./assets/../logo.svg"
                description: "from link".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&shared_skill_path),
                scope: SkillScope::User,
            }]
@@ -1067,6 +1296,7 @@ icon_large = "./assets/../logo.svg"
                description: "still loads".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -1101,6 +1331,7 @@ icon_large = "./assets/../logo.svg"
                description: "from link".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&shared_skill_path),
                scope: SkillScope::Admin,
            }]
@@ -1139,6 +1370,7 @@ icon_large = "./assets/../logo.svg"
                description: "from link".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&linked_skill_path),
                scope: SkillScope::Repo,
            }]
@@ -1200,6 +1432,7 @@ icon_large = "./assets/../logo.svg"
                description: "loads".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&within_depth_path),
                scope: SkillScope::User,
            }]
@@ -1225,6 +1458,7 @@ icon_large = "./assets/../logo.svg"
                description: "does things carefully".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -1254,6 +1488,7 @@ icon_large = "./assets/../logo.svg"
                description: "long description".to_string(),
                short_description: Some("short summary".to_string()),
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::User,
            }]
@@ -1364,6 +1599,7 @@ icon_large = "./assets/../logo.svg"
                description: "from repo".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::Repo,
            }]
@@ -1415,6 +1651,7 @@ icon_large = "./assets/../logo.svg"
                description: "from nested".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&nested_skill_path),
                scope: SkillScope::Repo,
            },
@@ -1423,6 +1660,7 @@ icon_large = "./assets/../logo.svg"
                description: "from root".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&root_skill_path),
                scope: SkillScope::Repo,
            },
@@ -1460,6 +1698,7 @@ icon_large = "./assets/../logo.svg"
                description: "from cwd".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::Repo,
            }]
@@ -1495,6 +1734,7 @@ icon_large = "./assets/../logo.svg"
                description: "from repo".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::Repo,
            }]
@@ -1534,6 +1774,7 @@ icon_large = "./assets/../logo.svg"
                description: "from repo".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&repo_skill_path),
                scope: SkillScope::Repo,
            },
@@ -1542,6 +1783,7 @@ icon_large = "./assets/../logo.svg"
                description: "from user".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&user_skill_path),
                scope: SkillScope::User,
            },
@@ -1604,6 +1846,7 @@ icon_large = "./assets/../logo.svg"
                description: first_description.to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: first_path,
                scope: SkillScope::Repo,
            },
@@ -1612,6 +1855,7 @@ icon_large = "./assets/../logo.svg"
                description: second_description.to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: second_path,
                scope: SkillScope::Repo,
            },
@@ -1681,6 +1925,7 @@ icon_large = "./assets/../logo.svg"
                description: "from repo".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::Repo,
            }]
@@ -1737,6 +1982,7 @@ icon_large = "./assets/../logo.svg"
                description: "from system".to_string(),
                short_description: None,
                interface: None,
                dependencies: None,
                path: normalized(&skill_path),
                scope: SkillScope::System,
            }]

@@ -1,3 +1,4 @@
mod env_var_dependencies;
pub mod injection;
pub mod loader;
pub mod manager;
@@ -5,8 +6,11 @@ pub mod model;
pub mod render;
pub mod system;

pub(crate) use env_var_dependencies::collect_env_var_dependencies;
pub(crate) use env_var_dependencies::resolve_skill_dependencies_for_turn;
pub(crate) use injection::SkillInjections;
pub(crate) use injection::build_skill_injections;
pub(crate) use injection::collect_explicit_skill_mentions;
pub use loader::load_skills;
pub use manager::SkillsManager;
pub use model::SkillError;

@@ -9,6 +9,7 @@ pub struct SkillMetadata {
    pub description: String,
    pub short_description: Option<String>,
    pub interface: Option<SkillInterface>,
    pub dependencies: Option<SkillDependencies>,
    pub path: PathBuf,
    pub scope: SkillScope,
}
@@ -23,6 +24,21 @@ pub struct SkillInterface {
    pub default_prompt: Option<String>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
    pub r#type: String,
    pub value: String,
    pub description: Option<String>,
    pub transport: Option<String>,
    pub command: Option<String>,
    pub url: Option<String>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
    pub path: PathBuf,

@@ -7,7 +7,9 @@ use crate::exec_policy::ExecPolicyManager;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::models_manager::manager::ModelsManager;
use crate::skills::SkillsManager;
use crate::state_db::StateDbHandle;
use crate::tools::sandboxing::ApprovalStore;
use crate::transport_manager::TransportManager;
use crate::unified_exec::UnifiedExecProcessManager;
use crate::user_notification::UserNotifier;
use codex_otel::OtelManager;
@@ -30,4 +32,6 @@ pub(crate) struct SessionServices {
    pub(crate) tool_approvals: Mutex<ApprovalStore>,
    pub(crate) skills_manager: Arc<SkillsManager>,
    pub(crate) agent_control: AgentControl,
    pub(crate) state_db: Option<StateDbHandle>,
    pub(crate) transport_manager: TransportManager,
}

@@ -1,6 +1,8 @@
//! Session-wide mutable state.

use codex_protocol::models::ResponseItem;
use std::collections::HashMap;
use std::collections::HashSet;

use crate::codex::SessionConfiguration;
use crate::context_manager::ContextManager;
@@ -15,6 +17,13 @@ pub(crate) struct SessionState {
    pub(crate) history: ContextManager,
    pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
    pub(crate) server_reasoning_included: bool,
    pub(crate) dependency_env: HashMap<String, String>,
    pub(crate) mcp_dependency_prompted: HashSet<String>,
    /// Whether the session's initial context has been seeded into history.
    ///
    /// TODO(owen): This is a temporary solution to avoid updating a thread's updated_at
    /// timestamp when resuming a session. Remove this once SQLite is in place.
    pub(crate) initial_context_seeded: bool,
}

impl SessionState {
@@ -26,6 +35,9 @@ impl SessionState {
            history,
            latest_rate_limits: None,
            server_reasoning_included: false,
            dependency_env: HashMap::new(),
            mcp_dependency_prompted: HashSet::new(),
            initial_context_seeded: false,
        }
    }

@@ -92,6 +104,27 @@ impl SessionState {
    pub(crate) fn server_reasoning_included(&self) -> bool {
        self.server_reasoning_included
    }

    pub(crate) fn record_mcp_dependency_prompted<I>(&mut self, names: I)
    where
        I: IntoIterator<Item = String>,
    {
        self.mcp_dependency_prompted.extend(names);
    }

    pub(crate) fn mcp_dependency_prompted(&self) -> HashSet<String> {
        self.mcp_dependency_prompted.clone()
    }

    pub(crate) fn set_dependency_env(&mut self, values: HashMap<String, String>) {
        for (key, value) in values {
            self.dependency_env.insert(key, value);
        }
    }

    pub(crate) fn dependency_env(&self) -> HashMap<String, String> {
        self.dependency_env.clone()
    }
}

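The new SessionState fields carry cross-turn bookkeeping: `set_dependency_env` merges key by key, so a later value for the same key overwrites while unrelated keys accumulate, and `record_mcp_dependency_prompted` grows a set so each MCP dependency is prompted at most once per session. A hedged standalone sketch of those semantics, using plain std collections rather than crate types:

// Illustrative only; mirrors the insert/extend behavior above.
use std::collections::{HashMap, HashSet};

fn main() {
    // Pairwise insert: overwrite on key conflict, accumulate otherwise.
    let mut dependency_env: HashMap<String, String> = HashMap::new();
    for values in [
        HashMap::from([("GITHUB_TOKEN".to_string(), "first".to_string())]),
        HashMap::from([
            ("GITHUB_TOKEN".to_string(), "second".to_string()),
            ("OTHER".to_string(), "kept".to_string()),
        ]),
    ] {
        for (key, value) in values {
            dependency_env.insert(key, value);
        }
    }
    assert_eq!(dependency_env.get("GITHUB_TOKEN").map(String::as_str), Some("second"));
    assert_eq!(dependency_env.len(), 2);

    // Prompt-once tracking: extending a HashSet dedupes repeated names.
    let mut prompted: HashSet<String> = HashSet::new();
    prompted.extend(["github".to_string()]);
    prompted.extend(["github".to_string(), "notion".to_string()]);
    assert_eq!(prompted.len(), 2);
}
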
// Sometimes new snapshots don't include credits or plan information.