mirror of
https://github.com/openai/codex.git
synced 2026-02-02 06:57:03 +00:00
Compare commits
110 Commits
remove/doc
...
collab-mod
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
181615cab0 | ||
|
|
8472942485 | ||
|
|
0666e95234 | ||
|
|
81a17bb2c1 | ||
|
|
b79bf69af6 | ||
|
|
ca9d417633 | ||
|
|
fbb3a30953 | ||
|
|
2d9ac8227a | ||
|
|
03aee7140f | ||
|
|
48f203120d | ||
|
|
bdd8a7d58b | ||
|
|
b7f26d74f0 | ||
|
|
3b1cddf001 | ||
|
|
798c4b3260 | ||
|
|
3e798c5a7d | ||
|
|
e6c4f548ab | ||
|
|
d6631fb5a9 | ||
|
|
89c5f3c4d4 | ||
|
|
b654b7a9ae | ||
|
|
2945667dcc | ||
|
|
d29129f352 | ||
|
|
4ba911d48c | ||
|
|
6a06726af2 | ||
|
|
714dc8d8bd | ||
|
|
780482da84 | ||
|
|
4d9ae3a298 | ||
|
|
e70592f85a | ||
|
|
b4b4763009 | ||
|
|
be33de3f87 | ||
|
|
8cc338aecf | ||
|
|
335713f7e9 | ||
|
|
b9cd089d1f | ||
|
|
ecc66f4f52 | ||
|
|
9757e1418d | ||
|
|
52609c6f42 | ||
|
|
ce3d764ae1 | ||
|
|
26590d7927 | ||
|
|
8497163363 | ||
|
|
83d7c44500 | ||
|
|
7b34cad1b1 | ||
|
|
ff9fa56368 | ||
|
|
fe920d7804 | ||
|
|
147e7118e0 | ||
|
|
f7699e0487 | ||
|
|
66de985e4e | ||
|
|
b7edeee8ca | ||
|
|
851617ff5a | ||
|
|
b8156706e6 | ||
|
|
35e03a0716 | ||
|
|
ad5f9e7370 | ||
|
|
96386755b6 | ||
|
|
74bd6d7178 | ||
|
|
2a624661ef | ||
|
|
231406bd04 | ||
|
|
3878c3dc7c | ||
|
|
dabafe204a | ||
|
|
71b8d937ed | ||
|
|
996e09ca24 | ||
|
|
9f79365691 | ||
|
|
fef3e36f67 | ||
|
|
3bb8e69dd3 | ||
|
|
add648df82 | ||
|
|
1609f6aa81 | ||
|
|
a90ab789c2 | ||
|
|
3f3916e595 | ||
|
|
19d8f71a98 | ||
|
|
3ae966edd8 | ||
|
|
c7c2b3cf8d | ||
|
|
337643b00a | ||
|
|
28051d18c6 | ||
|
|
2f8a44baea | ||
|
|
30eb655ad1 | ||
|
|
700a29e157 | ||
|
|
c40ad65bd8 | ||
|
|
894923ed5d | ||
|
|
fc0fd85349 | ||
|
|
877b76bb9d | ||
|
|
538e1059a3 | ||
|
|
067922a734 | ||
|
|
dd24ac6b26 | ||
|
|
ddc704d4c6 | ||
|
|
3b726d9550 | ||
|
|
74ffbbe7c1 | ||
|
|
742f086ee6 | ||
|
|
ab99df0694 | ||
|
|
509ff1c643 | ||
|
|
cabb2085cc | ||
|
|
4db6da32a3 | ||
|
|
0adcd8aa86 | ||
|
|
28bd7db14a | ||
|
|
0c72d8fd6e | ||
|
|
7c96f2e84c | ||
|
|
f45a8733bf | ||
|
|
b655a092ba | ||
|
|
b7bba3614e | ||
|
|
86adf53235 | ||
|
|
998e88b12a | ||
|
|
c900de271a | ||
|
|
a641a6427c | ||
|
|
5d13427ef4 | ||
|
|
394b967432 | ||
|
|
6a279f6d77 | ||
|
|
47aa1f3b6a | ||
|
|
73bd84dee0 | ||
|
|
32b062d0e1 | ||
|
|
f29a0defa2 | ||
|
|
2e5aa809f4 | ||
|
|
6418e65356 | ||
|
|
764712c116 | ||
|
|
5ace350186 |
@@ -1,3 +1,4 @@
|
||||
# Without this, Bazel will consider BUILD.bazel files in
|
||||
# .git/sl/origbackups (which can be populated by Sapling SCM).
|
||||
.git
|
||||
codex-rs/target
|
||||
|
||||
7
.bazelrc
7
.bazelrc
@@ -1,13 +1,19 @@
|
||||
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
|
||||
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
|
||||
# Dummy xcode config so we don't need to build xcode_locator in repo rule.
|
||||
common --xcode_version_config=//:disable_xcode
|
||||
|
||||
common --disk_cache=~/.cache/bazel-disk-cache
|
||||
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
|
||||
common --repository_cache=~/.cache/bazel-repo-cache
|
||||
common --remote_cache_compression
|
||||
startup --experimental_remote_repo_contents_cache
|
||||
|
||||
common --experimental_platform_in_output_dir
|
||||
|
||||
# Runfiles strategy rationale: codex-rs/utils/cargo-bin/README.md
|
||||
common --noenable_runfiles
|
||||
|
||||
common --enable_platform_specific_config
|
||||
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
|
||||
common:linux --host_platform=//:local
|
||||
@@ -43,4 +49,3 @@ common --jobs=30
|
||||
common:remote --extra_execution_platforms=//:rbe
|
||||
common:remote --remote_executor=grpcs://remote.buildbuddy.io
|
||||
common:remote --jobs=800
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[codespell]
|
||||
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
|
||||
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
|
||||
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
|
||||
check-hidden = true
|
||||
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
|
||||
ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
|
||||
|
||||
96
.github/workflows/rust-ci.yml
vendored
96
.github/workflows/rust-ci.yml
vendored
@@ -59,7 +59,7 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
with:
|
||||
components: rustfmt
|
||||
- name: cargo fmt
|
||||
@@ -77,7 +77,7 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-shear
|
||||
@@ -177,11 +177,31 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- name: Install UBSan runtime (musl)
|
||||
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
|
||||
fi
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
components: clippy
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Use hermetic Cargo home (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
|
||||
mkdir -p "${cargo_home}/bin"
|
||||
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
|
||||
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
|
||||
: > "${cargo_home}/config.toml"
|
||||
|
||||
- name: Compute lockfile hash
|
||||
id: lockhash
|
||||
working-directory: codex-rs
|
||||
@@ -202,6 +222,10 @@ jobs:
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
${{ github.workspace }}/.cargo-home/bin/
|
||||
${{ github.workspace }}/.cargo-home/registry/index/
|
||||
${{ github.workspace }}/.cargo-home/registry/cache/
|
||||
${{ github.workspace }}/.cargo-home/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
@@ -244,6 +268,14 @@ jobs:
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Disable sccache wrapper (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Prepare APT cache directories (musl)
|
||||
shell: bash
|
||||
@@ -277,6 +309,58 @@ jobs:
|
||||
shell: bash
|
||||
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Configure rustc UBSan wrapper (musl host)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ubsan=""
|
||||
if command -v ldconfig >/dev/null 2>&1; then
|
||||
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
|
||||
fi
|
||||
wrapper_root="${RUNNER_TEMP:-/tmp}"
|
||||
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
|
||||
cat > "${wrapper}" <<EOF
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
if [[ -n "${ubsan}" ]]; then
|
||||
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
|
||||
fi
|
||||
exec "\$1" "\${@:2}"
|
||||
EOF
|
||||
chmod +x "${wrapper}"
|
||||
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Clear sanitizer flags (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
|
||||
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
|
||||
# Override any runner-level Cargo config rustflags as well.
|
||||
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
|
||||
sanitize_flags() {
|
||||
local input="$1"
|
||||
input="${input//-fsanitize=undefined/}"
|
||||
input="${input//-fno-sanitize-recover=undefined/}"
|
||||
input="${input//-fno-sanitize-trap=undefined/}"
|
||||
echo "$input"
|
||||
}
|
||||
|
||||
cflags="$(sanitize_flags "${CFLAGS-}")"
|
||||
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
|
||||
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
|
||||
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Install cargo-chef
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
@@ -322,6 +406,10 @@ jobs:
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
${{ github.workspace }}/.cargo-home/bin/
|
||||
${{ github.workspace }}/.cargo-home/registry/index/
|
||||
${{ github.workspace }}/.cargo-home/registry/cache/
|
||||
${{ github.workspace }}/.cargo-home/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
@@ -422,7 +510,7 @@ jobs:
|
||||
- name: Install DotSlash
|
||||
uses: facebook/install-dotslash@v2
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
|
||||
82
.github/workflows/rust-release.yml
vendored
82
.github/workflows/rust-release.yml
vendored
@@ -21,7 +21,6 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
|
||||
- name: Validate tag matches Cargo.toml version
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -90,10 +89,30 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- name: Install UBSan runtime (musl)
|
||||
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
|
||||
fi
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Use hermetic Cargo home (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
|
||||
mkdir -p "${cargo_home}/bin"
|
||||
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
|
||||
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
|
||||
: > "${cargo_home}/config.toml"
|
||||
|
||||
- uses: actions/cache@v5
|
||||
with:
|
||||
path: |
|
||||
@@ -101,6 +120,10 @@ jobs:
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
${{ github.workspace }}/.cargo-home/bin/
|
||||
${{ github.workspace }}/.cargo-home/registry/index/
|
||||
${{ github.workspace }}/.cargo-home/registry/cache/
|
||||
${{ github.workspace }}/.cargo-home/git/db/
|
||||
${{ github.workspace }}/codex-rs/target/
|
||||
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
@@ -116,6 +139,58 @@ jobs:
|
||||
TARGET: ${{ matrix.target }}
|
||||
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Configure rustc UBSan wrapper (musl host)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ubsan=""
|
||||
if command -v ldconfig >/dev/null 2>&1; then
|
||||
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
|
||||
fi
|
||||
wrapper_root="${RUNNER_TEMP:-/tmp}"
|
||||
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
|
||||
cat > "${wrapper}" <<EOF
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
if [[ -n "${ubsan}" ]]; then
|
||||
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
|
||||
fi
|
||||
exec "\$1" "\${@:2}"
|
||||
EOF
|
||||
chmod +x "${wrapper}"
|
||||
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Clear sanitizer flags (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
|
||||
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
|
||||
# Override any runner-level Cargo config rustflags as well.
|
||||
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
|
||||
sanitize_flags() {
|
||||
local input="$1"
|
||||
input="${input//-fsanitize=undefined/}"
|
||||
input="${input//-fno-sanitize-recover=undefined/}"
|
||||
input="${input//-fno-sanitize-trap=undefined/}"
|
||||
echo "$input"
|
||||
}
|
||||
|
||||
cflags="$(sanitize_flags "${CFLAGS-}")"
|
||||
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
|
||||
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
|
||||
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Cargo build
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -252,6 +327,7 @@ jobs:
|
||||
# Path that contains the uncompressed binaries for the current
|
||||
# ${{ matrix.target }}
|
||||
dest="dist/${{ matrix.target }}"
|
||||
repo_root=$PWD
|
||||
|
||||
# We want to ship the raw Windows executables in the GitHub Release
|
||||
# in addition to the compressed archives. Keep the originals for
|
||||
@@ -305,7 +381,7 @@ jobs:
|
||||
cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe"
|
||||
# Use an absolute path so bundle zips land in the real dist
|
||||
# dir even when 7z runs from a temp directory.
|
||||
(cd "$bundle_dir" && 7z a "$(pwd)/$dest/${base}.zip" .)
|
||||
(cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .)
|
||||
else
|
||||
echo "warning: missing sandbox binaries; falling back to single-binary zip"
|
||||
echo "warning: expected $runner_src and $setup_src"
|
||||
|
||||
2
.github/workflows/sdk.yml
vendored
2
.github/workflows/sdk.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
node-version: 22
|
||||
cache: pnpm
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
|
||||
- name: build codex
|
||||
run: cargo build --bin codex
|
||||
|
||||
72
.github/workflows/shell-tool-mcp.yml
vendored
72
.github/workflows/shell-tool-mcp.yml
vendored
@@ -93,7 +93,17 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- name: Install UBSan runtime (musl)
|
||||
if: ${{ matrix.install_musl }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
|
||||
fi
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.93
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
@@ -109,6 +119,58 @@ jobs:
|
||||
TARGET: ${{ matrix.target }}
|
||||
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
|
||||
|
||||
- if: ${{ matrix.install_musl }}
|
||||
name: Configure rustc UBSan wrapper (musl host)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ubsan=""
|
||||
if command -v ldconfig >/dev/null 2>&1; then
|
||||
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
|
||||
fi
|
||||
wrapper_root="${RUNNER_TEMP:-/tmp}"
|
||||
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
|
||||
cat > "${wrapper}" <<EOF
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
if [[ -n "${ubsan}" ]]; then
|
||||
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
|
||||
fi
|
||||
exec "\$1" "\${@:2}"
|
||||
EOF
|
||||
chmod +x "${wrapper}"
|
||||
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.install_musl }}
|
||||
name: Clear sanitizer flags (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
|
||||
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
|
||||
# Override any runner-level Cargo config rustflags as well.
|
||||
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
|
||||
sanitize_flags() {
|
||||
local input="$1"
|
||||
input="${input//-fsanitize=undefined/}"
|
||||
input="${input//-fno-sanitize-recover=undefined/}"
|
||||
input="${input//-fno-sanitize-trap=undefined/}"
|
||||
echo "$input"
|
||||
}
|
||||
|
||||
cflags="$(sanitize_flags "${CFLAGS-}")"
|
||||
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
|
||||
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
|
||||
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Build exec server binaries
|
||||
run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
|
||||
|
||||
@@ -282,7 +344,6 @@ jobs:
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.8.1
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
@@ -375,12 +436,6 @@ jobs:
|
||||
id-token: write
|
||||
contents: read
|
||||
steps:
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.8.1
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -388,6 +443,7 @@ jobs:
|
||||
registry-url: https://registry.npmjs.org
|
||||
scope: "@openai"
|
||||
|
||||
# Trusted publishing requires npm CLI version 11.5.1 or later.
|
||||
- name: Update npm
|
||||
run: npm install -g npm@latest
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ In the codex-rs folder where the rust code lives:
|
||||
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
|
||||
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
|
||||
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
|
||||
- When possible, make `match` statements exhaustive and avoid wildcard arms.
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
load("@apple_support//xcode:xcode_config.bzl", "xcode_config")
|
||||
|
||||
xcode_config(name = "disable_xcode")
|
||||
|
||||
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
|
||||
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
|
||||
# We only enable this on linux though for sanity, and because it breaks remote execution.
|
||||
|
||||
14
MODULE.bazel
14
MODULE.bazel
@@ -27,6 +27,8 @@ register_toolchains(
|
||||
"@toolchains_llvm_bootstrapped//toolchain:all",
|
||||
)
|
||||
|
||||
# Needed to disable xcode...
|
||||
bazel_dep(name = "apple_support", version = "2.1.0")
|
||||
bazel_dep(name = "rules_cc", version = "0.2.16")
|
||||
bazel_dep(name = "rules_platform", version = "0.1.0")
|
||||
bazel_dep(name = "rules_rust", version = "0.68.1")
|
||||
@@ -53,7 +55,7 @@ rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
|
||||
rust.toolchain(
|
||||
edition = "2024",
|
||||
extra_target_triples = RUST_TRIPLES,
|
||||
versions = ["1.90.0"],
|
||||
versions = ["1.93.0"],
|
||||
)
|
||||
use_repo(rust, "rust_toolchains")
|
||||
|
||||
@@ -67,6 +69,11 @@ crate.from_cargo(
|
||||
cargo_toml = "//codex-rs:Cargo.toml",
|
||||
platform_triples = RUST_TRIPLES,
|
||||
)
|
||||
crate.annotation(
|
||||
crate = "nucleo-matcher",
|
||||
strip_prefix = "matcher",
|
||||
version = "0.3.1",
|
||||
)
|
||||
|
||||
bazel_dep(name = "openssl", version = "3.5.4.bcr.0")
|
||||
|
||||
@@ -85,6 +92,11 @@ crate.annotation(
|
||||
|
||||
inject_repo(crate, "openssl")
|
||||
|
||||
crate.annotation(
|
||||
crate = "runfiles",
|
||||
workspace_cargo_toml = "rust/runfiles/Cargo.toml",
|
||||
)
|
||||
|
||||
# Fix readme inclusions
|
||||
crate.annotation(
|
||||
crate = "windows-link",
|
||||
|
||||
119
MODULE.bazel.lock
generated
119
MODULE.bazel.lock
generated
File diff suppressed because one or more lines are too long
70
PNPM.md
70
PNPM.md
@@ -1,70 +0,0 @@
|
||||
# Migration to pnpm
|
||||
|
||||
This project has been migrated from npm to pnpm to improve dependency management and developer experience.
|
||||
|
||||
## Why pnpm?
|
||||
|
||||
- **Faster installation**: pnpm is significantly faster than npm and yarn
|
||||
- **Disk space savings**: pnpm uses a content-addressable store to avoid duplication
|
||||
- **Phantom dependency prevention**: pnpm creates a strict node_modules structure
|
||||
- **Native workspaces support**: simplified monorepo management
|
||||
|
||||
## How to use pnpm
|
||||
|
||||
### Installation
|
||||
|
||||
```bash
|
||||
# Global installation of pnpm
|
||||
npm install -g pnpm@10.8.1
|
||||
|
||||
# Or with corepack (available with Node.js 22+)
|
||||
corepack enable
|
||||
corepack prepare pnpm@10.8.1 --activate
|
||||
```
|
||||
|
||||
### Common commands
|
||||
|
||||
| npm command | pnpm equivalent |
|
||||
| --------------- | ---------------- |
|
||||
| `npm install` | `pnpm install` |
|
||||
| `npm run build` | `pnpm run build` |
|
||||
| `npm test` | `pnpm test` |
|
||||
| `npm run lint` | `pnpm run lint` |
|
||||
|
||||
### Workspace-specific commands
|
||||
|
||||
| Action | Command |
|
||||
| ------------------------------------------ | ---------------------------------------- |
|
||||
| Run a command in a specific package | `pnpm --filter @openai/codex run build` |
|
||||
| Install a dependency in a specific package | `pnpm --filter @openai/codex add lodash` |
|
||||
| Run a command in all packages | `pnpm -r run test` |
|
||||
|
||||
## Monorepo structure
|
||||
|
||||
```
|
||||
codex/
|
||||
├── pnpm-workspace.yaml # Workspace configuration
|
||||
├── .npmrc # pnpm configuration
|
||||
├── package.json # Root dependencies and scripts
|
||||
├── codex-cli/ # Main package
|
||||
│ └── package.json # codex-cli specific dependencies
|
||||
└── docs/ # Documentation (future package)
|
||||
```
|
||||
|
||||
## Configuration files
|
||||
|
||||
- **pnpm-workspace.yaml**: Defines the packages included in the monorepo
|
||||
- **.npmrc**: Configures pnpm behavior
|
||||
- **Root package.json**: Contains shared scripts and dependencies
|
||||
|
||||
## CI/CD
|
||||
|
||||
CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.8.1 or higher.
|
||||
|
||||
## Known issues
|
||||
|
||||
If you encounter issues with pnpm, try the following solutions:
|
||||
|
||||
1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
|
||||
2. Make sure you're using pnpm 10.8.1 or higher
|
||||
3. Verify that Node.js 22 or higher is installed
|
||||
18
codex-cli/package-lock.json
generated
18
codex-cli/package-lock.json
generated
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"lockfileVersion": 3,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"codex": "bin/codex.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,5 +17,6 @@
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/openai/codex.git",
|
||||
"directory": "codex-cli"
|
||||
}
|
||||
},
|
||||
"packageManager": "pnpm@10.28.2+sha512.41872f037ad22f7348e3b1debbaf7e867cfd448f2726d9cf74c08f19507c31d2c8e7a11525b983febc2df640b5438dee6023ebb1f84ed43cc2d654d2bc326264"
|
||||
}
|
||||
|
||||
604
codex-rs/Cargo.lock
generated
604
codex-rs/Cargo.lock
generated
@@ -602,6 +602,15 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atoi"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atomic-waker"
|
||||
version = "1.1.2"
|
||||
@@ -616,9 +625,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
||||
|
||||
[[package]]
|
||||
name = "axum"
|
||||
version = "0.8.4"
|
||||
version = "0.8.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
|
||||
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"bytes",
|
||||
@@ -634,8 +643,7 @@ dependencies = [
|
||||
"mime",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rustversion",
|
||||
"serde",
|
||||
"serde_core",
|
||||
"serde_json",
|
||||
"serde_path_to_error",
|
||||
"sync_wrapper",
|
||||
@@ -647,9 +655,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "axum-core"
|
||||
version = "0.5.2"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
|
||||
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
@@ -658,7 +666,6 @@ dependencies = [
|
||||
"http-body-util",
|
||||
"mime",
|
||||
"pin-project-lite",
|
||||
"rustversion",
|
||||
"sync_wrapper",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
@@ -741,6 +748,9 @@ name = "bitflags"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
@@ -1070,6 +1080,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"app_test_support",
|
||||
"async-trait",
|
||||
"axum",
|
||||
"base64",
|
||||
"chrono",
|
||||
@@ -1079,6 +1090,7 @@ dependencies = [
|
||||
"codex-chatgpt",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-execpolicy",
|
||||
"codex-feedback",
|
||||
"codex-file-search",
|
||||
"codex-login",
|
||||
@@ -1209,10 +1221,12 @@ dependencies = [
|
||||
"codex-core",
|
||||
"codex-git",
|
||||
"codex-utils-cargo-bin",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"urlencoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1365,6 +1379,7 @@ dependencies = [
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-rmcp-client",
|
||||
"codex-state",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
"codex-utils-pty",
|
||||
@@ -1390,6 +1405,7 @@ dependencies = [
|
||||
"libc",
|
||||
"maplit",
|
||||
"mcp-types",
|
||||
"multimap",
|
||||
"once_cell",
|
||||
"openssl-sys",
|
||||
"os_info",
|
||||
@@ -1559,11 +1575,13 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"crossbeam-channel",
|
||||
"ignore",
|
||||
"nucleo-matcher",
|
||||
"nucleo",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
@@ -1685,6 +1703,7 @@ dependencies = [
|
||||
"rama-http",
|
||||
"rama-http-backend",
|
||||
"rama-net",
|
||||
"rama-socks5",
|
||||
"rama-tcp",
|
||||
"rama-tls-boring",
|
||||
"rama-unix",
|
||||
@@ -1757,6 +1776,7 @@ name = "codex-protocol"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-execpolicy",
|
||||
"codex-git",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-image",
|
||||
@@ -1826,6 +1846,27 @@ dependencies = [
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-state"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"dirs",
|
||||
"owo-colors",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sqlx",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-stdio-to-uds"
|
||||
version = "0.0.0"
|
||||
@@ -1852,6 +1893,7 @@ dependencies = [
|
||||
"codex-app-server-protocol",
|
||||
"codex-arg0",
|
||||
"codex-backend-client",
|
||||
"codex-chatgpt",
|
||||
"codex-cli",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
@@ -1860,6 +1902,7 @@ dependencies = [
|
||||
"codex-login",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-state",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
"codex-utils-pty",
|
||||
@@ -1941,7 +1984,7 @@ name = "codex-utils-cargo-bin"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"assert_cmd",
|
||||
"path-absolutize",
|
||||
"runfiles",
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
@@ -2109,6 +2152,12 @@ dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "const-oid"
|
||||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
|
||||
|
||||
[[package]]
|
||||
name = "const_format"
|
||||
version = "0.2.35"
|
||||
@@ -2196,6 +2245,7 @@ dependencies = [
|
||||
"tokio-tungstenite",
|
||||
"walkdir",
|
||||
"wiremock",
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2207,6 +2257,21 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crc"
|
||||
version = "3.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
|
||||
dependencies = [
|
||||
"crc-catalog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crc-catalog"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.5.0"
|
||||
@@ -2250,6 +2315,15 @@ dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-queue"
|
||||
version = "0.3.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.21"
|
||||
@@ -2528,6 +2602,7 @@ version = "0.7.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
|
||||
dependencies = [
|
||||
"const-oid",
|
||||
"pem-rfc7468",
|
||||
"zeroize",
|
||||
]
|
||||
@@ -2626,6 +2701,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
||||
dependencies = [
|
||||
"block-buffer",
|
||||
"const-oid",
|
||||
"crypto-common",
|
||||
"subtle",
|
||||
]
|
||||
@@ -2773,6 +2849,9 @@ name = "either"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ena"
|
||||
@@ -2915,6 +2994,17 @@ version = "3.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
|
||||
|
||||
[[package]]
|
||||
name = "etcetera"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"home",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "event-listener"
|
||||
version = "5.4.0"
|
||||
@@ -3083,6 +3173,17 @@ dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "flume"
|
||||
version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"spin",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "flume"
|
||||
version = "0.12.0"
|
||||
@@ -3231,6 +3332,17 @@ dependencies = [
|
||||
"futures-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures-intrusive"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"lock_api",
|
||||
"parking_lot",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures-io"
|
||||
version = "0.3.31"
|
||||
@@ -3385,9 +3497,9 @@ checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
|
||||
|
||||
[[package]]
|
||||
name = "globset"
|
||||
version = "0.4.16"
|
||||
version = "0.4.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
|
||||
checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
@@ -3463,6 +3575,15 @@ dependencies = [
|
||||
"foldhash 0.2.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
|
||||
dependencies = [
|
||||
"hashbrown 0.15.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
@@ -3666,7 +3787,7 @@ dependencies = [
|
||||
"tokio",
|
||||
"tokio-rustls",
|
||||
"tower-service",
|
||||
"webpki-roots",
|
||||
"webpki-roots 1.0.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3716,7 +3837,7 @@ dependencies = [
|
||||
"libc",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"socket2 0.5.10",
|
||||
"socket2 0.6.1",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
@@ -4298,6 +4419,9 @@ name = "lazy_static"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
dependencies = [
|
||||
"spin",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
@@ -4324,6 +4448,12 @@ dependencies = [
|
||||
"windows-link 0.2.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.6"
|
||||
@@ -4332,6 +4462,18 @@ checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libsqlite3-sys"
|
||||
version = "0.30.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4516,6 +4658,16 @@ dependencies = [
|
||||
"wiremock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "md-5"
|
||||
version = "0.10.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "md5"
|
||||
version = "0.8.0"
|
||||
@@ -4759,11 +4911,20 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nucleo"
|
||||
version = "0.5.0"
|
||||
source = "git+https://github.com/helix-editor/nucleo.git?rev=4253de9faabb4e5c6d81d946a5e35a90f87347ee#4253de9faabb4e5c6d81d946a5e35a90f87347ee"
|
||||
dependencies = [
|
||||
"nucleo-matcher",
|
||||
"parking_lot",
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nucleo-matcher"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
|
||||
source = "git+https://github.com/helix-editor/nucleo.git?rev=4253de9faabb4e5c6d81d946a5e35a90f87347ee#4253de9faabb4e5c6d81d946a5e35a90f87347ee"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"unicode-segmentation",
|
||||
@@ -4793,6 +4954,22 @@ dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint-dig"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"libm",
|
||||
"num-integer",
|
||||
"num-iter",
|
||||
"num-traits",
|
||||
"rand 0.8.5",
|
||||
"smallvec",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-complex"
|
||||
version = "0.4.6"
|
||||
@@ -4846,6 +5023,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"libm",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5328,6 +5506,27 @@ dependencies = [
|
||||
"futures-io",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkcs1"
|
||||
version = "0.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
|
||||
dependencies = [
|
||||
"der",
|
||||
"pkcs8",
|
||||
"spki",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkcs8"
|
||||
version = "0.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
|
||||
dependencies = [
|
||||
"der",
|
||||
"spki",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkg-config"
|
||||
version = "0.3.32"
|
||||
@@ -5632,7 +5831,7 @@ dependencies = [
|
||||
"quinn-udp",
|
||||
"rustc-hash",
|
||||
"rustls",
|
||||
"socket2 0.5.10",
|
||||
"socket2 0.6.1",
|
||||
"thiserror 2.0.17",
|
||||
"tokio",
|
||||
"tracing",
|
||||
@@ -5669,7 +5868,7 @@ dependencies = [
|
||||
"cfg_aliases 0.2.1",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"socket2 0.5.10",
|
||||
"socket2 0.6.1",
|
||||
"tracing",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
@@ -5941,7 +6140,7 @@ checksum = "b28ee9e1e5d39264414b71f5c33e7fbb66b382c3fac456fe0daad39cf5509933"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"const_format",
|
||||
"flume",
|
||||
"flume 0.12.0",
|
||||
"hex",
|
||||
"ipnet",
|
||||
"itertools 0.14.0",
|
||||
@@ -5961,6 +6160,21 @@ dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rama-socks5"
|
||||
version = "0.3.0-alpha.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5468b263516daaf258de32542c1974b7cbe962363ad913dcb669f5d46db0ef3e"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"rama-core",
|
||||
"rama-net",
|
||||
"rama-tcp",
|
||||
"rama-udp",
|
||||
"rama-utils",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rama-tcp"
|
||||
version = "0.3.0-alpha.4"
|
||||
@@ -5984,7 +6198,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "def3d5d06d3ca3a2d2e4376cf93de0555cd9c7960f085bf77be9562f5c9ace8f"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"flume",
|
||||
"flume 0.12.0",
|
||||
"itertools 0.14.0",
|
||||
"moka",
|
||||
"parking_lot",
|
||||
@@ -5999,6 +6213,18 @@ dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rama-udp"
|
||||
version = "0.3.0-alpha.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "36ed05e0ecac73e084e92a3a8b1fbf16fdae8958c506f0f0eada180a2d99eef4"
|
||||
dependencies = [
|
||||
"rama-core",
|
||||
"rama-net",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rama-unix"
|
||||
version = "0.3.0-alpha.4"
|
||||
@@ -6126,6 +6352,26 @@ dependencies = [
|
||||
"ratatui",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.15"
|
||||
@@ -6264,7 +6510,7 @@ dependencies = [
|
||||
"wasm-bindgen-futures",
|
||||
"wasm-streams",
|
||||
"web-sys",
|
||||
"webpki-roots",
|
||||
"webpki-roots 1.0.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6335,6 +6581,31 @@ dependencies = [
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "0.9.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d"
|
||||
dependencies = [
|
||||
"const-oid",
|
||||
"digest",
|
||||
"num-bigint-dig",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
"pkcs1",
|
||||
"pkcs8",
|
||||
"rand_core 0.6.4",
|
||||
"signature",
|
||||
"spki",
|
||||
"subtle",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "runfiles"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/dzbarsky/rules_rust?rev=b56cbaa8465e74127f1ea216f813cd377295ad81#b56cbaa8465e74127f1ea216f813cd377295ad81"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-demangle"
|
||||
version = "0.1.25"
|
||||
@@ -7084,6 +7355,16 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "signature"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
|
||||
dependencies = [
|
||||
"digest",
|
||||
"rand_core 0.6.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simd-adler32"
|
||||
version = "0.3.7"
|
||||
@@ -7168,6 +7449,218 @@ dependencies = [
|
||||
"lock_api",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "spki"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
|
||||
dependencies = [
|
||||
"base64ct",
|
||||
"der",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
|
||||
dependencies = [
|
||||
"sqlx-core",
|
||||
"sqlx-macros",
|
||||
"sqlx-mysql",
|
||||
"sqlx-postgres",
|
||||
"sqlx-sqlite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-core"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"crc",
|
||||
"crossbeam-queue",
|
||||
"either",
|
||||
"event-listener",
|
||||
"futures-core",
|
||||
"futures-intrusive",
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"hashbrown 0.15.4",
|
||||
"hashlink",
|
||||
"indexmap 2.12.0",
|
||||
"log",
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"rustls",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"smallvec",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tracing",
|
||||
"url",
|
||||
"uuid",
|
||||
"webpki-roots 0.26.11",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-macros"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"sqlx-core",
|
||||
"sqlx-macros-core",
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-macros-core"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
|
||||
dependencies = [
|
||||
"dotenvy",
|
||||
"either",
|
||||
"heck",
|
||||
"hex",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"sqlx-core",
|
||||
"sqlx-mysql",
|
||||
"sqlx-postgres",
|
||||
"sqlx-sqlite",
|
||||
"syn 2.0.104",
|
||||
"tokio",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-mysql"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64",
|
||||
"bitflags 2.10.0",
|
||||
"byteorder",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"crc",
|
||||
"digest",
|
||||
"dotenvy",
|
||||
"either",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"generic-array",
|
||||
"hex",
|
||||
"hkdf",
|
||||
"hmac",
|
||||
"itoa",
|
||||
"log",
|
||||
"md-5",
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"rand 0.8.5",
|
||||
"rsa",
|
||||
"serde",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"smallvec",
|
||||
"sqlx-core",
|
||||
"stringprep",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tracing",
|
||||
"uuid",
|
||||
"whoami",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-postgres"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64",
|
||||
"bitflags 2.10.0",
|
||||
"byteorder",
|
||||
"chrono",
|
||||
"crc",
|
||||
"dotenvy",
|
||||
"etcetera",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"hex",
|
||||
"hkdf",
|
||||
"hmac",
|
||||
"home",
|
||||
"itoa",
|
||||
"log",
|
||||
"md-5",
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"smallvec",
|
||||
"sqlx-core",
|
||||
"stringprep",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tracing",
|
||||
"uuid",
|
||||
"whoami",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-sqlite"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"chrono",
|
||||
"flume 0.11.1",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-executor",
|
||||
"futures-intrusive",
|
||||
"futures-util",
|
||||
"libsqlite3-sys",
|
||||
"log",
|
||||
"percent-encoding",
|
||||
"serde",
|
||||
"serde_urlencoded",
|
||||
"sqlx-core",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tracing",
|
||||
"url",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sse-stream"
|
||||
version = "0.2.1"
|
||||
@@ -7301,6 +7794,17 @@ dependencies = [
|
||||
"precomputed-hash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stringprep"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
|
||||
dependencies = [
|
||||
"unicode-bidi",
|
||||
"unicode-normalization",
|
||||
"unicode-properties",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.10.0"
|
||||
@@ -7797,12 +8301,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tokio-test"
|
||||
version = "0.4.4"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
|
||||
checksum = "3f6d24790a10a7af737693a3e8f1d03faef7e6ca0cc99aae5066f533766de545"
|
||||
dependencies = [
|
||||
"async-stream",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
@@ -8001,9 +8503,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.43"
|
||||
version = "0.1.44"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
|
||||
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
@@ -8036,9 +8538,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.35"
|
||||
version = "0.1.36"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
|
||||
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
@@ -8263,6 +8765,12 @@ version = "2.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-bidi"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.18"
|
||||
@@ -8275,6 +8783,21 @@ version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
version = "0.1.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
|
||||
dependencies = [
|
||||
"tinyvec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-properties"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-segmentation"
|
||||
version = "1.12.0"
|
||||
@@ -8482,6 +9005,12 @@ dependencies = [
|
||||
"wit-bindgen-rt",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasite"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.100"
|
||||
@@ -8681,6 +9210,15 @@ dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.26.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
|
||||
dependencies = [
|
||||
"webpki-roots 1.0.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "1.0.2"
|
||||
@@ -8707,6 +9245,16 @@ dependencies = [
|
||||
"winsafe",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "whoami"
|
||||
version = "1.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
|
||||
dependencies = [
|
||||
"libredox",
|
||||
"wasite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "widestring"
|
||||
version = "1.2.1"
|
||||
|
||||
@@ -47,6 +47,7 @@ members = [
|
||||
"utils/string",
|
||||
"codex-client",
|
||||
"codex-api",
|
||||
"state",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -91,6 +92,7 @@ codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-state = { path = "state" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-utils-absolute-path = { path = "utils/absolute-path" }
|
||||
@@ -126,6 +128,7 @@ clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
crossterm = "0.28.1"
|
||||
crossbeam-channel = "0.5.15"
|
||||
ctor = "0.6.3"
|
||||
derive_more = "2"
|
||||
diffy = "0.4.2"
|
||||
@@ -159,7 +162,7 @@ maplit = "1.0.2"
|
||||
mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
notify = "8.2.0"
|
||||
nucleo-matcher = "0.3.1"
|
||||
nucleo = { git = "https://github.com/helix-editor/nucleo.git", rev = "4253de9faabb4e5c6d81d946a5e35a90f87347ee" }
|
||||
once_cell = "1.20.2"
|
||||
openssl-sys = "*"
|
||||
opentelemetry = "0.31.0"
|
||||
@@ -183,6 +186,7 @@ regex = "1.12.2"
|
||||
regex-lite = "0.1.8"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.12.0", default-features = false }
|
||||
runfiles = { git = "https://github.com/dzbarsky/rules_rust", rev = "b56cbaa8465e74127f1ea216f813cd377295ad81" }
|
||||
schemars = "0.8.22"
|
||||
seccompiler = "0.5.0"
|
||||
sentry = "0.46.0"
|
||||
@@ -198,6 +202,7 @@ semver = "1.0"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.7.0"
|
||||
socket2 = "0.6.1"
|
||||
sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
|
||||
starlark = "0.13.0"
|
||||
strum = "0.27.2"
|
||||
strum_macros = "0.27.2"
|
||||
@@ -216,7 +221,7 @@ tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-nativ
|
||||
tokio-util = "0.7.18"
|
||||
toml = "0.9.5"
|
||||
toml_edit = "0.24.0"
|
||||
tracing = "0.1.43"
|
||||
tracing = "0.1.44"
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = "0.3.22"
|
||||
tracing-test = "0.2.5"
|
||||
|
||||
@@ -28,6 +28,11 @@ impl GitSha {
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
/// [UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.
|
||||
#[serde(rename = "chatgptAuthTokens")]
|
||||
#[ts(rename = "chatgptAuthTokens")]
|
||||
#[strum(serialize = "chatgptAuthTokens")]
|
||||
ChatgptAuthTokens,
|
||||
}
|
||||
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
@@ -534,6 +539,11 @@ server_request_definitions! {
|
||||
response: v2::DynamicToolCallResponse,
|
||||
},
|
||||
|
||||
ChatgptAuthTokensRefresh => "account/chatgptAuthTokens/refresh" {
|
||||
params: v2::ChatgptAuthTokensRefreshParams,
|
||||
response: v2::ChatgptAuthTokensRefreshResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
/// Request to approve a patch.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
@@ -598,6 +608,7 @@ server_notification_definitions! {
|
||||
ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
|
||||
ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
|
||||
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
|
||||
/// Deprecated: Use `ContextCompaction` item type instead.
|
||||
ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
|
||||
DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
|
||||
ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
|
||||
@@ -752,6 +763,29 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_chatgpt_auth_tokens_refresh_request() -> Result<()> {
|
||||
let request = ServerRequest::ChatgptAuthTokensRefresh {
|
||||
request_id: RequestId::Integer(8),
|
||||
params: v2::ChatgptAuthTokensRefreshParams {
|
||||
reason: v2::ChatgptAuthTokensRefreshReason::Unauthorized,
|
||||
previous_account_id: Some("org-123".to_string()),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/chatgptAuthTokens/refresh",
|
||||
"id": 8,
|
||||
"params": {
|
||||
"reason": "unauthorized",
|
||||
"previousAccountId": "org-123"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account_rate_limits() -> Result<()> {
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
@@ -841,10 +875,34 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_chatgpt_auth_tokens() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
params: v2::LoginAccountParams::ChatgptAuthTokens {
|
||||
access_token: "access-token".to_string(),
|
||||
id_token: "id-token".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 5,
|
||||
"params": {
|
||||
"type": "chatgptAuthTokens",
|
||||
"accessToken": "access-token",
|
||||
"idToken": "id-token"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account() -> Result<()> {
|
||||
let request = ClientRequest::GetAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::GetAccountParams {
|
||||
refresh_token: false,
|
||||
},
|
||||
@@ -852,7 +910,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/read",
|
||||
"id": 5,
|
||||
"id": 6,
|
||||
"params": {
|
||||
"refreshToken": false
|
||||
}
|
||||
|
||||
@@ -27,10 +27,12 @@ use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
|
||||
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
|
||||
use codex_protocol::protocol::SessionSource as CoreSessionSource;
|
||||
use codex_protocol::protocol::SkillDependencies as CoreSkillDependencies;
|
||||
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
|
||||
use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
|
||||
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
|
||||
use codex_protocol::protocol::SkillScope as CoreSkillScope;
|
||||
use codex_protocol::protocol::SkillToolDependency as CoreSkillToolDependency;
|
||||
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
|
||||
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
|
||||
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
|
||||
@@ -84,6 +86,10 @@ macro_rules! v2_enum_from_core {
|
||||
pub enum CodexErrorInfo {
|
||||
ContextWindowExceeded,
|
||||
UsageLimitExceeded,
|
||||
ModelCap {
|
||||
model: String,
|
||||
reset_after_seconds: Option<u64>,
|
||||
},
|
||||
HttpConnectionFailed {
|
||||
#[serde(rename = "httpStatusCode")]
|
||||
#[ts(rename = "httpStatusCode")]
|
||||
@@ -120,6 +126,13 @@ impl From<CoreCodexErrorInfo> for CodexErrorInfo {
|
||||
match value {
|
||||
CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
|
||||
CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
|
||||
CoreCodexErrorInfo::ModelCap {
|
||||
model,
|
||||
reset_after_seconds,
|
||||
} => CodexErrorInfo::ModelCap {
|
||||
model,
|
||||
reset_after_seconds,
|
||||
},
|
||||
CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
|
||||
CodexErrorInfo::HttpConnectionFailed { http_status_code }
|
||||
}
|
||||
@@ -822,6 +835,24 @@ pub enum LoginAccountParams {
|
||||
#[serde(rename = "chatgpt")]
|
||||
#[ts(rename = "chatgpt")]
|
||||
Chatgpt,
|
||||
/// [UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.
|
||||
/// The access token must contain the same scopes that Codex-managed ChatGPT auth tokens have.
|
||||
#[serde(rename = "chatgptAuthTokens")]
|
||||
#[ts(rename = "chatgptAuthTokens")]
|
||||
ChatgptAuthTokens {
|
||||
/// ID token (JWT) supplied by the client.
|
||||
///
|
||||
/// This token is used for identity and account metadata (email, plan type,
|
||||
/// workspace id).
|
||||
#[serde(rename = "idToken")]
|
||||
#[ts(rename = "idToken")]
|
||||
id_token: String,
|
||||
/// Access token (JWT) supplied by the client.
|
||||
/// This token is used for backend API requests.
|
||||
#[serde(rename = "accessToken")]
|
||||
#[ts(rename = "accessToken")]
|
||||
access_token: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -841,6 +872,9 @@ pub enum LoginAccountResponse {
|
||||
/// URL the client should open in a browser to initiate the OAuth flow.
|
||||
auth_url: String,
|
||||
},
|
||||
#[serde(rename = "chatgptAuthTokens", rename_all = "camelCase")]
|
||||
#[ts(rename = "chatgptAuthTokens", rename_all = "camelCase")]
|
||||
ChatgptAuthTokens {},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -871,6 +905,37 @@ pub struct CancelLoginAccountResponse {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct LogoutAccountResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum ChatgptAuthTokensRefreshReason {
|
||||
/// Codex attempted a backend request and received `401 Unauthorized`.
|
||||
Unauthorized,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ChatgptAuthTokensRefreshParams {
|
||||
pub reason: ChatgptAuthTokensRefreshReason,
|
||||
/// Workspace/account identifier that Codex was previously using.
|
||||
///
|
||||
/// Clients that manage multiple accounts/workspaces can use this as a hint
|
||||
/// to refresh the token for the correct workspace.
|
||||
///
|
||||
/// This may be `null` when the prior ID token did not include a workspace
|
||||
/// identifier (`chatgpt_account_id`) or when the token could not be parsed.
|
||||
pub previous_account_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ChatgptAuthTokensRefreshResponse {
|
||||
pub id_token: String,
|
||||
pub access_token: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -882,6 +947,11 @@ pub struct GetAccountRateLimitsResponse {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct GetAccountParams {
|
||||
/// When `true`, requests a proactive token refresh before returning.
|
||||
///
|
||||
/// In managed auth mode this triggers the normal refresh-token flow. In
|
||||
/// external auth mode this flag is ignored. Clients should refresh tokens
|
||||
/// themselves and call `account/login/start` with `chatgptAuthTokens`.
|
||||
#[serde(default)]
|
||||
pub refresh_token: bool,
|
||||
}
|
||||
@@ -1001,6 +1071,8 @@ pub struct AppInfo {
|
||||
pub name: String,
|
||||
pub description: Option<String>,
|
||||
pub logo_url: Option<String>,
|
||||
pub logo_url_dark: Option<String>,
|
||||
pub distribution_channel: Option<String>,
|
||||
pub install_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub is_accessible: bool,
|
||||
@@ -1395,11 +1467,14 @@ pub struct SkillMetadata {
|
||||
pub description: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
/// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
|
||||
/// Legacy short_description from SKILL.md. Prefer SKILL.json interface.short_description.
|
||||
pub short_description: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub interface: Option<SkillInterface>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub dependencies: Option<SkillDependencies>,
|
||||
pub path: PathBuf,
|
||||
pub scope: SkillScope,
|
||||
pub enabled: bool,
|
||||
@@ -1423,6 +1498,35 @@ pub struct SkillInterface {
|
||||
pub default_prompt: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct SkillDependencies {
|
||||
pub tools: Vec<SkillToolDependency>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct SkillToolDependency {
|
||||
#[serde(rename = "type")]
|
||||
#[ts(rename = "type")]
|
||||
pub r#type: String,
|
||||
pub value: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub description: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub transport: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub command: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -1462,6 +1566,7 @@ impl From<CoreSkillMetadata> for SkillMetadata {
|
||||
description: value.description,
|
||||
short_description: value.short_description,
|
||||
interface: value.interface.map(SkillInterface::from),
|
||||
dependencies: value.dependencies.map(SkillDependencies::from),
|
||||
path: value.path,
|
||||
scope: value.scope.into(),
|
||||
enabled: true,
|
||||
@@ -1482,6 +1587,31 @@ impl From<CoreSkillInterface> for SkillInterface {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreSkillDependencies> for SkillDependencies {
|
||||
fn from(value: CoreSkillDependencies) -> Self {
|
||||
Self {
|
||||
tools: value
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(SkillToolDependency::from)
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreSkillToolDependency> for SkillToolDependency {
|
||||
fn from(value: CoreSkillToolDependency) -> Self {
|
||||
Self {
|
||||
r#type: value.r#type,
|
||||
value: value.value,
|
||||
description: value.description,
|
||||
transport: value.transport,
|
||||
command: value.command,
|
||||
url: value.url,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CoreSkillScope> for SkillScope {
|
||||
fn from(value: CoreSkillScope) -> Self {
|
||||
match value {
|
||||
@@ -1837,6 +1967,10 @@ pub enum UserInput {
|
||||
name: String,
|
||||
path: PathBuf,
|
||||
},
|
||||
Mention {
|
||||
name: String,
|
||||
path: String,
|
||||
},
|
||||
}
|
||||
|
||||
impl UserInput {
|
||||
@@ -1852,6 +1986,7 @@ impl UserInput {
|
||||
UserInput::Image { url } => CoreUserInput::Image { image_url: url },
|
||||
UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
|
||||
UserInput::Skill { name, path } => CoreUserInput::Skill { name, path },
|
||||
UserInput::Mention { name, path } => CoreUserInput::Mention { name, path },
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1869,6 +2004,7 @@ impl From<CoreUserInput> for UserInput {
|
||||
CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
|
||||
CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
|
||||
CoreUserInput::Skill { name, path } => UserInput::Skill { name, path },
|
||||
CoreUserInput::Mention { name, path } => UserInput::Mention { name, path },
|
||||
_ => unreachable!("unsupported user input variant"),
|
||||
}
|
||||
}
|
||||
@@ -1969,6 +2105,9 @@ pub enum ThreadItem {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
ExitedReviewMode { id: String, review: String },
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
ContextCompaction { id: String },
|
||||
}
|
||||
|
||||
impl From<CoreTurnItem> for ThreadItem {
|
||||
@@ -1997,6 +2136,9 @@ impl From<CoreTurnItem> for ThreadItem {
|
||||
id: search.id,
|
||||
query: search.query,
|
||||
},
|
||||
CoreTurnItem::ContextCompaction(compaction) => {
|
||||
ThreadItem::ContextCompaction { id: compaction.id }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2359,6 +2501,7 @@ pub struct WindowsWorldWritableWarningNotification {
|
||||
pub failed_scan: bool,
|
||||
}
|
||||
|
||||
/// Deprecated: Use `ContextCompaction` item type instead.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -2457,6 +2600,8 @@ pub struct ToolRequestUserInputQuestion {
|
||||
pub question: String,
|
||||
#[serde(default)]
|
||||
pub is_other: bool,
|
||||
#[serde(default)]
|
||||
pub is_secret: bool,
|
||||
pub options: Option<Vec<ToolRequestUserInputOption>>,
|
||||
}
|
||||
|
||||
@@ -2621,6 +2766,7 @@ mod tests {
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::items::UserMessageItem;
|
||||
use codex_protocol::items::WebSearchItem;
|
||||
use codex_protocol::models::WebSearchAction;
|
||||
use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -2664,6 +2810,10 @@ mod tests {
|
||||
name: "skill-creator".to_string(),
|
||||
path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
|
||||
},
|
||||
CoreUserInput::Mention {
|
||||
name: "Demo App".to_string(),
|
||||
path: "app://demo-app".to_string(),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
@@ -2686,6 +2836,10 @@ mod tests {
|
||||
name: "skill-creator".to_string(),
|
||||
path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
|
||||
},
|
||||
UserInput::Mention {
|
||||
name: "Demo App".to_string(),
|
||||
path: "app://demo-app".to_string(),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
@@ -2728,6 +2882,9 @@ mod tests {
|
||||
let search_item = TurnItem::WebSearch(WebSearchItem {
|
||||
id: "search-1".to_string(),
|
||||
query: "docs".to_string(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("docs".to_string()),
|
||||
},
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
|
||||
@@ -17,6 +17,7 @@ workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
codex-arg0 = { workspace = true }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
@@ -56,6 +57,7 @@ axum = { workspace = true, default-features = false, features = [
|
||||
"tokio",
|
||||
] }
|
||||
base64 = { workspace = true }
|
||||
codex-execpolicy = { workspace = true }
|
||||
core_test_support = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
os_info = { workspace = true }
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
- [Events](#events)
|
||||
- [Approvals](#approvals)
|
||||
- [Skills](#skills)
|
||||
- [Apps](#apps)
|
||||
- [Auth endpoints](#auth-endpoints)
|
||||
|
||||
## Protocol
|
||||
@@ -296,6 +297,26 @@ Invoke a skill explicitly by including `$<skill-name>` in the text input and add
|
||||
} } }
|
||||
```
|
||||
|
||||
### Example: Start a turn (invoke an app)
|
||||
|
||||
Invoke an app by including `$<app-slug>` in the text input and adding a `mention` input item with the app id in `app://<connector-id>` form.
|
||||
|
||||
```json
|
||||
{ "method": "turn/start", "id": 34, "params": {
|
||||
"threadId": "thr_123",
|
||||
"input": [
|
||||
{ "type": "text", "text": "$demo-app Summarize the latest updates." },
|
||||
{ "type": "mention", "name": "Demo App", "path": "app://demo-app" }
|
||||
]
|
||||
} }
|
||||
{ "id": 34, "result": { "turn": {
|
||||
"id": "turn_458",
|
||||
"status": "inProgress",
|
||||
"items": [],
|
||||
"error": null
|
||||
} } }
|
||||
```
|
||||
|
||||
### Example: Interrupt an active turn
|
||||
|
||||
You can cancel a running Turn with `turn/interrupt`.
|
||||
@@ -431,7 +452,8 @@ Today both notifications carry an empty `items` array even when item events were
|
||||
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
|
||||
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
|
||||
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
|
||||
- `compacted` - `{threadId, turnId}` when codex compacts the conversation history. This can happen automatically.
|
||||
- `contextCompaction` — `{id}` emitted when codex compacts the conversation history. This can happen automatically.
|
||||
- `compacted` - `{threadId, turnId}` when codex compacts the conversation history. This can happen automatically. **Deprecated:** Use `contextCompaction` instead.
|
||||
|
||||
All items emit two shared lifecycle events:
|
||||
|
||||
@@ -582,14 +604,72 @@ To enable or disable a skill by path:
|
||||
}
|
||||
```
|
||||
|
||||
## Apps
|
||||
|
||||
Use `app/list` to fetch available apps (connectors). Each entry includes metadata like the app `id`, display `name`, `installUrl`, and whether it is currently accessible.
|
||||
|
||||
```json
|
||||
{ "method": "app/list", "id": 50, "params": {
|
||||
"cursor": null,
|
||||
"limit": 50
|
||||
} }
|
||||
{ "id": 50, "result": {
|
||||
"data": [
|
||||
{
|
||||
"id": "demo-app",
|
||||
"name": "Demo App",
|
||||
"description": "Example connector for documentation.",
|
||||
"logoUrl": "https://example.com/demo-app.png",
|
||||
"logoUrlDark": null,
|
||||
"distributionChannel": null,
|
||||
"installUrl": "https://chatgpt.com/apps/demo-app/demo-app",
|
||||
"isAccessible": true
|
||||
}
|
||||
],
|
||||
"nextCursor": null
|
||||
} }
|
||||
```
|
||||
|
||||
Invoke an app by inserting `$<app-slug>` in the text input. The slug is derived from the app name and lowercased with non-alphanumeric characters replaced by `-` (for example, "Demo App" becomes `$demo-app`). Add a `mention` input item (recommended) so the server uses the exact `app://<connector-id>` path rather than guessing by name.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
$demo-app Pull the latest updates from the team.
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "turn/start",
|
||||
"id": 51,
|
||||
"params": {
|
||||
"threadId": "thread-1",
|
||||
"input": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "$demo-app Pull the latest updates from the team."
|
||||
},
|
||||
{ "type": "mention", "name": "Demo App", "path": "app://demo-app" }
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Auth endpoints
|
||||
|
||||
The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.
|
||||
|
||||
### Authentication modes
|
||||
|
||||
Codex supports these authentication modes. The current mode is surfaced in `account/updated` (`authMode`) and can be inferred from `account/read`.
|
||||
|
||||
- **API key (`apiKey`)**: Caller supplies an OpenAI API key via `account/login/start` with `type: "apiKey"`. The API key is saved and used for API requests.
|
||||
- **ChatGPT managed (`chatgpt`)** (recommended): Codex owns the ChatGPT OAuth flow and refresh tokens. Start via `account/login/start` with `type: "chatgpt"`; Codex persists tokens to disk and refreshes them automatically.
|
||||
|
||||
### API Overview
|
||||
|
||||
- `account/read` — fetch current account info; optionally refresh tokens.
|
||||
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
|
||||
- `account/login/start` — begin login (`apiKey`, `chatgpt`).
|
||||
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
|
||||
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
|
||||
- `account/logout` — sign out; triggers `account/updated`.
|
||||
|
||||
@@ -279,6 +279,7 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
header: question.header,
|
||||
question: question.question,
|
||||
is_other: question.is_other,
|
||||
is_secret: question.is_secret,
|
||||
options: question.options.map(|options| {
|
||||
options
|
||||
.into_iter()
|
||||
|
||||
@@ -13,7 +13,6 @@ use codex_app_server_protocol::AccountLoginCompletedNotification;
|
||||
use codex_app_server_protocol::AccountUpdatedNotification;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::AppInfo as ApiAppInfo;
|
||||
use codex_app_server_protocol::AppsListParams;
|
||||
use codex_app_server_protocol::AppsListResponse;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
@@ -58,6 +57,7 @@ use codex_app_server_protocol::ListConversationsResponse;
|
||||
use codex_app_server_protocol::ListMcpServerStatusParams;
|
||||
use codex_app_server_protocol::ListMcpServerStatusResponse;
|
||||
use codex_app_server_protocol::LoginAccountParams;
|
||||
use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::LoginApiKeyResponse;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
@@ -142,6 +142,7 @@ use codex_core::ThreadManager;
|
||||
use codex_core::ThreadSortKey as CoreThreadSortKey;
|
||||
use codex_core::auth::CLIENT_ID;
|
||||
use codex_core::auth::login_with_api_key;
|
||||
use codex_core::auth::login_with_chatgpt_auth_tokens;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::ConfigService;
|
||||
@@ -169,6 +170,9 @@ use codex_core::read_head_for_summary;
|
||||
use codex_core::read_session_meta_line;
|
||||
use codex_core::rollout_date_parts;
|
||||
use codex_core::sandboxing::SandboxPermissions;
|
||||
use codex_core::state_db::get_state_db;
|
||||
use codex_core::token_data::parse_id_token;
|
||||
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_login::ServerOptions as LoginServerOptions;
|
||||
use codex_login::ShutdownHandle;
|
||||
@@ -176,6 +180,7 @@ use codex_login::run_login_server;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::Personality;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
use codex_protocol::dynamic_tools::DynamicToolSpec as CoreDynamicToolSpec;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
@@ -605,6 +610,22 @@ impl CodexMessageProcessor {
|
||||
LoginAccountParams::Chatgpt => {
|
||||
self.login_chatgpt_v2(request_id).await;
|
||||
}
|
||||
LoginAccountParams::ChatgptAuthTokens {
|
||||
id_token,
|
||||
access_token,
|
||||
} => {
|
||||
self.login_chatgpt_auth_tokens(request_id, id_token, access_token)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn external_auth_active_error(&self) -> JSONRPCErrorError {
|
||||
JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "External auth is active. Use account/login/start (chatgptAuthTokens) to update it or account/logout to clear it."
|
||||
.to_string(),
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -612,6 +633,10 @@ impl CodexMessageProcessor {
|
||||
&mut self,
|
||||
params: &LoginApiKeyParams,
|
||||
) -> std::result::Result<(), JSONRPCErrorError> {
|
||||
if self.auth_manager.is_external_auth_active() {
|
||||
return Err(self.external_auth_active_error());
|
||||
}
|
||||
|
||||
if matches!(
|
||||
self.config.forced_login_method,
|
||||
Some(ForcedLoginMethod::Chatgpt)
|
||||
@@ -704,6 +729,10 @@ impl CodexMessageProcessor {
|
||||
) -> std::result::Result<LoginServerOptions, JSONRPCErrorError> {
|
||||
let config = self.config.as_ref();
|
||||
|
||||
if self.auth_manager.is_external_auth_active() {
|
||||
return Err(self.external_auth_active_error());
|
||||
}
|
||||
|
||||
if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
|
||||
return Err(JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
@@ -962,6 +991,98 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
async fn login_chatgpt_auth_tokens(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
id_token: String,
|
||||
access_token: String,
|
||||
) {
|
||||
if matches!(
|
||||
self.config.forced_login_method,
|
||||
Some(ForcedLoginMethod::Api)
|
||||
) {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "External ChatGPT auth is disabled. Use API key login instead."
|
||||
.to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
|
||||
// Cancel any active login attempt to avoid persisting managed auth state.
|
||||
{
|
||||
let mut guard = self.active_login.lock().await;
|
||||
if let Some(active) = guard.take() {
|
||||
drop(active);
|
||||
}
|
||||
}
|
||||
|
||||
let id_token_info = match parse_id_token(&id_token) {
|
||||
Ok(info) => info,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("invalid id token: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(expected_workspace) = self.config.forced_chatgpt_workspace_id.as_deref()
|
||||
&& id_token_info.chatgpt_account_id.as_deref() != Some(expected_workspace)
|
||||
{
|
||||
let account_id = id_token_info.chatgpt_account_id;
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"External auth must use workspace {expected_workspace}, but received {account_id:?}."
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) =
|
||||
login_with_chatgpt_auth_tokens(&self.config.codex_home, &id_token, &access_token)
|
||||
{
|
||||
let error = JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("failed to set external auth: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
self.auth_manager.reload();
|
||||
|
||||
self.outgoing
|
||||
.send_response(request_id, LoginAccountResponse::ChatgptAuthTokens {})
|
||||
.await;
|
||||
|
||||
let payload_login_completed = AccountLoginCompletedNotification {
|
||||
login_id: None,
|
||||
success: true,
|
||||
error: None,
|
||||
};
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::AccountLoginCompleted(
|
||||
payload_login_completed,
|
||||
))
|
||||
.await;
|
||||
|
||||
let payload_v2 = AccountUpdatedNotification {
|
||||
auth_mode: self.auth_manager.get_auth_mode(),
|
||||
};
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::AccountUpdated(payload_v2))
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn logout_common(&mut self) -> std::result::Result<Option<AuthMode>, JSONRPCErrorError> {
|
||||
// Cancel any active login attempt.
|
||||
{
|
||||
@@ -1024,6 +1145,9 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
async fn refresh_token_if_requested(&self, do_refresh: bool) {
|
||||
if self.auth_manager.is_external_auth_active() {
|
||||
return;
|
||||
}
|
||||
if do_refresh && let Err(err) = self.auth_manager.refresh_token().await {
|
||||
tracing::warn!("failed to refresh token while getting account: {err}");
|
||||
}
|
||||
@@ -1098,7 +1222,7 @@ impl CodexMessageProcessor {
|
||||
let account = match self.auth_manager.auth_cached() {
|
||||
Some(auth) => Some(match auth.mode {
|
||||
AuthMode::ApiKey => Account::ApiKey {},
|
||||
AuthMode::ChatGPT => {
|
||||
AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens => {
|
||||
let email = auth.get_account_email();
|
||||
let plan_type = auth.account_plan_type();
|
||||
|
||||
@@ -1157,7 +1281,7 @@ impl CodexMessageProcessor {
|
||||
});
|
||||
};
|
||||
|
||||
if auth.mode != AuthMode::ChatGPT {
|
||||
if !matches!(auth.mode, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens) {
|
||||
return Err(JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "chatgpt authentication required to read rate limits".to_string(),
|
||||
@@ -1259,12 +1383,14 @@ impl CodexMessageProcessor {
|
||||
let timeout_ms = params
|
||||
.timeout_ms
|
||||
.and_then(|timeout_ms| u64::try_from(timeout_ms).ok());
|
||||
let windows_sandbox_level = WindowsSandboxLevel::from_config(&self.config);
|
||||
let exec_params = ExecParams {
|
||||
command: params.command,
|
||||
cwd,
|
||||
expiration: timeout_ms.into(),
|
||||
env,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
windows_sandbox_level,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
@@ -1605,6 +1731,7 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
async fn thread_archive(&mut self, request_id: RequestId, params: ThreadArchiveParams) {
|
||||
// TODO(jif) mostly rewrite this using sqlite after phase 1
|
||||
let thread_id = match ThreadId::from_string(¶ms.thread_id) {
|
||||
Ok(id) => id,
|
||||
Err(err) => {
|
||||
@@ -1654,6 +1781,7 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
async fn thread_unarchive(&mut self, request_id: RequestId, params: ThreadUnarchiveParams) {
|
||||
// TODO(jif) mostly rewrite this using sqlite after phase 1
|
||||
let thread_id = match ThreadId::from_string(¶ms.thread_id) {
|
||||
Ok(id) => id,
|
||||
Err(err) => {
|
||||
@@ -1696,6 +1824,7 @@ impl CodexMessageProcessor {
|
||||
|
||||
let rollout_path_display = archived_path.display().to_string();
|
||||
let fallback_provider = self.config.model_provider_id.clone();
|
||||
let state_db_ctx = get_state_db(&self.config, None).await;
|
||||
let archived_folder = self
|
||||
.config
|
||||
.codex_home
|
||||
@@ -1774,6 +1903,11 @@ impl CodexMessageProcessor {
|
||||
message: format!("failed to unarchive thread: {err}"),
|
||||
data: None,
|
||||
})?;
|
||||
if let Some(ctx) = state_db_ctx {
|
||||
let _ = ctx
|
||||
.mark_unarchived(thread_id, restored_path.as_path())
|
||||
.await;
|
||||
}
|
||||
let summary =
|
||||
read_summary_from_rollout(restored_path.as_path(), fallback_provider.as_str())
|
||||
.await
|
||||
@@ -2503,7 +2637,6 @@ impl CodexMessageProcessor {
|
||||
};
|
||||
|
||||
let fallback_provider = self.config.model_provider_id.as_str();
|
||||
|
||||
match read_summary_from_rollout(&path, fallback_provider).await {
|
||||
Ok(summary) => {
|
||||
let response = GetConversationSummaryResponse { summary };
|
||||
@@ -3526,8 +3659,13 @@ impl CodexMessageProcessor {
|
||||
});
|
||||
}
|
||||
|
||||
let mut state_db_ctx = None;
|
||||
|
||||
// If the thread is active, request shutdown and wait briefly.
|
||||
if let Some(conversation) = self.thread_manager.remove_thread(&thread_id).await {
|
||||
if let Some(ctx) = conversation.state_db() {
|
||||
state_db_ctx = Some(ctx);
|
||||
}
|
||||
info!("thread {thread_id} was active; shutting down");
|
||||
// Request shutdown.
|
||||
match conversation.submit(Op::Shutdown).await {
|
||||
@@ -3554,14 +3692,24 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
if state_db_ctx.is_none() {
|
||||
state_db_ctx = get_state_db(&self.config, None).await;
|
||||
}
|
||||
|
||||
// Move the rollout file to archived.
|
||||
let result: std::io::Result<()> = async {
|
||||
let result: std::io::Result<()> = async move {
|
||||
let archive_folder = self
|
||||
.config
|
||||
.codex_home
|
||||
.join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
|
||||
tokio::fs::create_dir_all(&archive_folder).await?;
|
||||
tokio::fs::rename(&canonical_rollout_path, &archive_folder.join(&file_name)).await?;
|
||||
let archived_path = archive_folder.join(&file_name);
|
||||
tokio::fs::rename(&canonical_rollout_path, &archived_path).await?;
|
||||
if let Some(ctx) = state_db_ctx {
|
||||
let _ = ctx
|
||||
.mark_archived(thread_id, archived_path.as_path(), Utc::now())
|
||||
.await;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
.await;
|
||||
@@ -3685,7 +3833,7 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
};
|
||||
|
||||
if !config.features.enabled(Feature::Connectors) {
|
||||
if !config.features.enabled(Feature::Apps) {
|
||||
self.outgoing
|
||||
.send_response(
|
||||
request_id,
|
||||
@@ -3748,18 +3896,7 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
let end = start.saturating_add(effective_limit).min(total);
|
||||
let data = connectors[start..end]
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|connector| ApiAppInfo {
|
||||
id: connector.connector_id,
|
||||
name: connector.connector_name,
|
||||
description: connector.connector_description,
|
||||
logo_url: connector.logo_url,
|
||||
install_url: connector.install_url,
|
||||
is_accessible: connector.is_accessible,
|
||||
})
|
||||
.collect();
|
||||
let data = connectors[start..end].to_vec();
|
||||
|
||||
let next_cursor = if end < total {
|
||||
Some(end.to_string())
|
||||
@@ -3887,6 +4024,7 @@ impl CodexMessageProcessor {
|
||||
cwd: params.cwd,
|
||||
approval_policy: params.approval_policy.map(AskForApproval::to_core),
|
||||
sandbox_policy: params.sandbox_policy.map(|p| p.to_core()),
|
||||
windows_sandbox_level: None,
|
||||
model: params.model,
|
||||
effort: params.effort.map(Some),
|
||||
summary: params.summary,
|
||||
@@ -4517,6 +4655,22 @@ fn skills_to_info(
|
||||
default_prompt: interface.default_prompt,
|
||||
}
|
||||
}),
|
||||
dependencies: skill.dependencies.clone().map(|dependencies| {
|
||||
codex_app_server_protocol::SkillDependencies {
|
||||
tools: dependencies
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| codex_app_server_protocol::SkillToolDependency {
|
||||
r#type: tool.r#type,
|
||||
value: tool.value,
|
||||
description: tool.description,
|
||||
transport: tool.transport,
|
||||
command: tool.command,
|
||||
url: tool.url,
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}),
|
||||
path: skill.path.clone(),
|
||||
scope: skill.scope.into(),
|
||||
enabled,
|
||||
|
||||
@@ -312,7 +312,7 @@ pub async fn run_main(
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e).await,
|
||||
}
|
||||
}
|
||||
created = thread_created_rx.recv(), if listen_for_threads => {
|
||||
|
||||
@@ -5,6 +5,10 @@ use crate::codex_message_processor::CodexMessageProcessor;
|
||||
use crate::config_api::ConfigApi;
|
||||
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use async_trait::async_trait;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshParams;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshReason;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
@@ -19,8 +23,13 @@ use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::auth::ExternalAuthRefreshContext;
|
||||
use codex_core::auth::ExternalAuthRefreshReason;
|
||||
use codex_core::auth::ExternalAuthRefresher;
|
||||
use codex_core::auth::ExternalAuthTokens;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::default_client::SetOriginatorError;
|
||||
@@ -31,8 +40,64 @@ use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use tokio::sync::broadcast;
|
||||
use tokio::time::Duration;
|
||||
use tokio::time::timeout;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ExternalAuthRefreshBridge {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
}
|
||||
|
||||
impl ExternalAuthRefreshBridge {
|
||||
fn map_reason(reason: ExternalAuthRefreshReason) -> ChatgptAuthTokensRefreshReason {
|
||||
match reason {
|
||||
ExternalAuthRefreshReason::Unauthorized => ChatgptAuthTokensRefreshReason::Unauthorized,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ExternalAuthRefresher for ExternalAuthRefreshBridge {
|
||||
async fn refresh(
|
||||
&self,
|
||||
context: ExternalAuthRefreshContext,
|
||||
) -> std::io::Result<ExternalAuthTokens> {
|
||||
let params = ChatgptAuthTokensRefreshParams {
|
||||
reason: Self::map_reason(context.reason),
|
||||
previous_account_id: context.previous_account_id,
|
||||
};
|
||||
|
||||
let (request_id, rx) = self
|
||||
.outgoing
|
||||
.send_request_with_id(ServerRequestPayload::ChatgptAuthTokensRefresh(params))
|
||||
.await;
|
||||
|
||||
let result = match timeout(EXTERNAL_AUTH_REFRESH_TIMEOUT, rx).await {
|
||||
Ok(result) => result.map_err(|err| {
|
||||
std::io::Error::other(format!("auth refresh request canceled: {err}"))
|
||||
})?,
|
||||
Err(_) => {
|
||||
let _canceled = self.outgoing.cancel_request(&request_id).await;
|
||||
return Err(std::io::Error::other(format!(
|
||||
"auth refresh request timed out after {}s",
|
||||
EXTERNAL_AUTH_REFRESH_TIMEOUT.as_secs()
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
let response: ChatgptAuthTokensRefreshResponse =
|
||||
serde_json::from_value(result).map_err(std::io::Error::other)?;
|
||||
|
||||
Ok(ExternalAuthTokens {
|
||||
access_token: response.access_token,
|
||||
id_token: response.id_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct MessageProcessor {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
codex_message_processor: CodexMessageProcessor,
|
||||
@@ -59,6 +124,10 @@ impl MessageProcessor {
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
|
||||
auth_manager.set_external_auth_refresher(Arc::new(ExternalAuthRefreshBridge {
|
||||
outgoing: outgoing.clone(),
|
||||
}));
|
||||
let thread_manager = Arc::new(ThreadManager::new(
|
||||
config.codex_home.clone(),
|
||||
auth_manager.clone(),
|
||||
@@ -236,8 +305,9 @@ impl MessageProcessor {
|
||||
}
|
||||
|
||||
/// Handle an error object received from the peer.
|
||||
pub(crate) fn process_error(&mut self, err: JSONRPCError) {
|
||||
pub(crate) async fn process_error(&mut self, err: JSONRPCError) {
|
||||
tracing::error!("<- error: {:?}", err);
|
||||
self.outgoing.notify_client_error(err.id, err.error).await;
|
||||
}
|
||||
|
||||
async fn handle_config_read(&self, request_id: RequestId, params: ConfigReadParams) {
|
||||
|
||||
@@ -39,6 +39,14 @@ impl OutgoingMessageSender {
|
||||
&self,
|
||||
request: ServerRequestPayload,
|
||||
) -> oneshot::Receiver<Result> {
|
||||
let (_id, rx) = self.send_request_with_id(request).await;
|
||||
rx
|
||||
}
|
||||
|
||||
pub(crate) async fn send_request_with_id(
|
||||
&self,
|
||||
request: ServerRequestPayload,
|
||||
) -> (RequestId, oneshot::Receiver<Result>) {
|
||||
let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
|
||||
let outgoing_message_id = id.clone();
|
||||
let (tx_approve, rx_approve) = oneshot::channel();
|
||||
@@ -54,7 +62,7 @@ impl OutgoingMessageSender {
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove(&outgoing_message_id);
|
||||
}
|
||||
rx_approve
|
||||
(outgoing_message_id, rx_approve)
|
||||
}
|
||||
|
||||
pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
|
||||
@@ -75,6 +83,30 @@ impl OutgoingMessageSender {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn notify_client_error(&self, id: RequestId, error: JSONRPCErrorError) {
|
||||
let entry = {
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove_entry(&id)
|
||||
};
|
||||
|
||||
match entry {
|
||||
Some((id, _sender)) => {
|
||||
warn!("client responded with error for {id:?}: {error:?}");
|
||||
}
|
||||
None => {
|
||||
warn!("could not find callback for {id:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn cancel_request(&self, id: &RequestId) -> bool {
|
||||
let entry = {
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove_entry(id)
|
||||
};
|
||||
entry.is_some()
|
||||
}
|
||||
|
||||
pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) {
|
||||
match serde_json::to_value(response) {
|
||||
Ok(result) => {
|
||||
|
||||
@@ -6,6 +6,7 @@ use base64::Engine;
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::auth::AuthDotJson;
|
||||
use codex_core::auth::save_auth;
|
||||
@@ -158,6 +159,7 @@ pub fn write_chatgpt_auth(
|
||||
let last_refresh = fixture.last_refresh.unwrap_or_else(|| Some(Utc::now()));
|
||||
|
||||
let auth = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ChatGPT),
|
||||
openai_api_key: None,
|
||||
tokens: Some(tokens),
|
||||
last_refresh,
|
||||
|
||||
72
codex-rs/app-server/tests/common/config.rs
Normal file
72
codex-rs/app-server/tests/common/config.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use codex_core::features::FEATURES;
|
||||
use codex_core::features::Feature;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn write_mock_responses_config_toml(
|
||||
codex_home: &Path,
|
||||
server_uri: &str,
|
||||
feature_flags: &BTreeMap<Feature, bool>,
|
||||
auto_compact_limit: i64,
|
||||
requires_openai_auth: Option<bool>,
|
||||
model_provider_id: &str,
|
||||
compact_prompt: &str,
|
||||
) -> std::io::Result<()> {
|
||||
// Phase 1: build the features block for config.toml.
|
||||
let mut features = BTreeMap::from([(Feature::RemoteModels, false)]);
|
||||
for (feature, enabled) in feature_flags {
|
||||
features.insert(*feature, *enabled);
|
||||
}
|
||||
let feature_entries = features
|
||||
.into_iter()
|
||||
.map(|(feature, enabled)| {
|
||||
let key = FEATURES
|
||||
.iter()
|
||||
.find(|spec| spec.id == feature)
|
||||
.map(|spec| spec.key)
|
||||
.unwrap_or_else(|| panic!("missing feature key for {feature:?}"));
|
||||
format!("{key} = {enabled}")
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
// Phase 2: build provider-specific config bits.
|
||||
let requires_line = match requires_openai_auth {
|
||||
Some(true) => "requires_openai_auth = true\n".to_string(),
|
||||
Some(false) | None => String::new(),
|
||||
};
|
||||
let provider_block = if model_provider_id == "openai" {
|
||||
String::new()
|
||||
} else {
|
||||
format!(
|
||||
r#"
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
"#
|
||||
)
|
||||
};
|
||||
// Phase 3: write the final config file.
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
compact_prompt = "{compact_prompt}"
|
||||
model_auto_compact_token_limit = {auto_compact_limit}
|
||||
|
||||
model_provider = "{model_provider_id}"
|
||||
|
||||
[features]
|
||||
{feature_entries}
|
||||
{provider_block}
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
mod auth_fixtures;
|
||||
mod config;
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod models_cache;
|
||||
@@ -10,6 +11,7 @@ pub use auth_fixtures::ChatGptIdTokenClaims;
|
||||
pub use auth_fixtures::encode_id_token;
|
||||
pub use auth_fixtures::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use config::write_mock_responses_config_toml;
|
||||
pub use core_test_support::format_with_current_shell;
|
||||
pub use core_test_support::format_with_current_shell_display;
|
||||
pub use core_test_support::format_with_current_shell_display_non_login;
|
||||
|
||||
@@ -29,11 +29,13 @@ use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginAccountParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
@@ -298,6 +300,20 @@ impl McpProcess {
|
||||
self.send_request("account/read", params).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/start` JSON-RPC request with ChatGPT auth tokens.
|
||||
pub async fn send_chatgpt_auth_tokens_login_request(
|
||||
&mut self,
|
||||
id_token: String,
|
||||
access_token: String,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = LoginAccountParams::ChatgptAuthTokens {
|
||||
id_token,
|
||||
access_token,
|
||||
};
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/login/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `feedback/upload` JSON-RPC request.
|
||||
pub async fn send_feedback_upload_request(
|
||||
&mut self,
|
||||
@@ -608,6 +624,15 @@ impl McpProcess {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_error(
|
||||
&mut self,
|
||||
id: RequestId,
|
||||
error: JSONRPCErrorError,
|
||||
) -> anyhow::Result<()> {
|
||||
self.send_jsonrpc_message(JSONRPCMessage::Error(JSONRPCError { id, error }))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_notification(
|
||||
&mut self,
|
||||
notification: ClientNotification,
|
||||
|
||||
@@ -48,8 +48,7 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
|
||||
.await??;
|
||||
|
||||
let value = resp.result;
|
||||
// The path separator on Windows affects the score.
|
||||
let expected_score = if cfg!(windows) { 69 } else { 72 };
|
||||
let expected_score = 72;
|
||||
|
||||
assert_eq!(
|
||||
value,
|
||||
@@ -59,16 +58,9 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
|
||||
"root": root_path.clone(),
|
||||
"path": "abexy",
|
||||
"file_name": "abexy",
|
||||
"score": 88,
|
||||
"score": 84,
|
||||
"indices": [0, 1, 2],
|
||||
},
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": "abcde",
|
||||
"file_name": "abcde",
|
||||
"score": 74,
|
||||
"indices": [0, 1, 4],
|
||||
},
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": sub_abce_rel,
|
||||
@@ -76,6 +68,13 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
|
||||
"score": expected_score,
|
||||
"indices": [4, 5, 7],
|
||||
},
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": "abcde",
|
||||
"file_name": "abcde",
|
||||
"score": 71,
|
||||
"indices": [0, 1, 4],
|
||||
},
|
||||
]
|
||||
})
|
||||
);
|
||||
|
||||
@@ -11,6 +11,7 @@ use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_execpolicy::Policy;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::DeveloperInstructions;
|
||||
@@ -358,6 +359,8 @@ fn assert_permissions_message(item: &ResponseItem) {
|
||||
let expected = DeveloperInstructions::from_policy(
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
AskForApproval::Never,
|
||||
&Policy::empty(),
|
||||
false,
|
||||
&PathBuf::from("/tmp"),
|
||||
)
|
||||
.into_text();
|
||||
|
||||
@@ -4,28 +4,43 @@ use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::ChatGptIdTokenClaims;
|
||||
use app_test_support::encode_id_token;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use app_test_support::write_models_cache;
|
||||
use codex_app_server_protocol::Account;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountResponse;
|
||||
use codex_app_server_protocol::CancelLoginAccountStatus;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshReason;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAccountResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LogoutAccountResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::TurnCompletedNotification;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_login::login_with_api_key;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
use core_test_support::responses;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use serial_test::serial;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
@@ -35,10 +50,14 @@ struct CreateConfigTomlParams {
|
||||
forced_method: Option<String>,
|
||||
forced_workspace_id: Option<String>,
|
||||
requires_openai_auth: Option<bool>,
|
||||
base_url: Option<String>,
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, params: CreateConfigTomlParams) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
let base_url = params
|
||||
.base_url
|
||||
.unwrap_or_else(|| "http://127.0.0.1:0/v1".to_string());
|
||||
let forced_line = if let Some(method) = params.forced_method {
|
||||
format!("forced_login_method = \"{method}\"\n")
|
||||
} else {
|
||||
@@ -66,7 +85,7 @@ model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "http://127.0.0.1:0/v1"
|
||||
base_url = "{base_url}"
|
||||
wire_api = "responses"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
@@ -133,6 +152,627 @@ async fn logout_account_removes_auth_and_notifies() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn set_auth_token_updates_account_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mock_server = MockServer::start().await;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
base_url: Some(format!("{}/v1", mock_server.uri())),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("embedded@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-embedded"),
|
||||
)?;
|
||||
let access_token = "access-embedded".to_string();
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(id_token.clone(), access_token)
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
assert_eq!(payload.auth_mode, Some(AuthMode::ChatgptAuthTokens));
|
||||
|
||||
let get_id = mcp
|
||||
.send_get_account_request(GetAccountParams {
|
||||
refresh_token: false,
|
||||
})
|
||||
.await?;
|
||||
let get_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(get_resp)?;
|
||||
assert_eq!(
|
||||
account,
|
||||
GetAccountResponse {
|
||||
account: Some(Account::Chatgpt {
|
||||
email: "embedded@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
}),
|
||||
requires_openai_auth: true,
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn account_read_refresh_token_is_noop_in_external_mode() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("embedded@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-embedded"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(id_token, "access-embedded".to_string())
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let get_id = mcp
|
||||
.send_get_account_request(GetAccountParams {
|
||||
refresh_token: true,
|
||||
})
|
||||
.await?;
|
||||
let get_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(get_resp)?;
|
||||
assert_eq!(
|
||||
account,
|
||||
GetAccountResponse {
|
||||
account: Some(Account::Chatgpt {
|
||||
email: "embedded@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
}),
|
||||
requires_openai_auth: true,
|
||||
}
|
||||
);
|
||||
|
||||
let refresh_request = timeout(
|
||||
Duration::from_millis(250),
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
refresh_request.is_err(),
|
||||
"external mode should not emit account/chatgptAuthTokens/refresh for refreshToken=true"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn respond_to_refresh_request(
|
||||
mcp: &mut McpProcess,
|
||||
access_token: &str,
|
||||
id_token: &str,
|
||||
) -> Result<()> {
|
||||
let refresh_req: ServerRequest = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ChatgptAuthTokensRefresh { request_id, params } = refresh_req else {
|
||||
bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
|
||||
};
|
||||
assert_eq!(params.reason, ChatgptAuthTokensRefreshReason::Unauthorized);
|
||||
let response = ChatgptAuthTokensRefreshResponse {
|
||||
access_token: access_token.to_string(),
|
||||
id_token: id_token.to_string(),
|
||||
};
|
||||
mcp.send_response(request_id, serde_json::to_value(response)?)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// 401 response triggers account/chatgptAuthTokens/refresh and retries with new tokens.
|
||||
async fn external_auth_refreshes_on_unauthorized() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mock_server = MockServer::start().await;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
base_url: Some(format!("{}/v1", mock_server.uri())),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let success_sse = responses::sse(vec![
|
||||
responses::ev_response_created("resp-turn"),
|
||||
responses::ev_assistant_message("msg-turn", "turn ok"),
|
||||
responses::ev_completed("resp-turn"),
|
||||
]);
|
||||
let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
|
||||
"error": { "message": "unauthorized" }
|
||||
}));
|
||||
let responses_mock = responses::mount_response_sequence(
|
||||
&mock_server,
|
||||
vec![unauthorized, responses::sse_response(success_sse)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let initial_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("initial@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-initial"),
|
||||
)?;
|
||||
let refreshed_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("refreshed@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-refreshed"),
|
||||
)?;
|
||||
let initial_access_token = "access-initial".to_string();
|
||||
let refreshed_access_token = "access-refreshed".to_string();
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(
|
||||
initial_id_token.clone(),
|
||||
initial_access_token.clone(),
|
||||
)
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(codex_app_server_protocol::TurnStartParams {
|
||||
thread_id: thread.thread.id,
|
||||
input: vec![codex_app_server_protocol::UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
respond_to_refresh_request(&mut mcp, &refreshed_access_token, &refreshed_id_token).await?;
|
||||
let _turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let _turn_completed = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let requests = responses_mock.requests();
|
||||
assert_eq!(requests.len(), 2);
|
||||
assert_eq!(
|
||||
requests[0].header("authorization"),
|
||||
Some(format!("Bearer {initial_access_token}"))
|
||||
);
|
||||
assert_eq!(
|
||||
requests[1].header("authorization"),
|
||||
Some(format!("Bearer {refreshed_access_token}"))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Client returns JSON-RPC error to refresh; turn fails.
|
||||
async fn external_auth_refresh_error_fails_turn() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mock_server = MockServer::start().await;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
base_url: Some(format!("{}/v1", mock_server.uri())),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
|
||||
"error": { "message": "unauthorized" }
|
||||
}));
|
||||
let _responses_mock =
|
||||
responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;
|
||||
|
||||
let initial_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("initial@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-initial"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(codex_app_server_protocol::TurnStartParams {
|
||||
thread_id: thread.thread.id.clone(),
|
||||
input: vec![codex_app_server_protocol::UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
let refresh_req: ServerRequest = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
|
||||
bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
|
||||
};
|
||||
|
||||
mcp.send_error(
|
||||
request_id,
|
||||
JSONRPCErrorError {
|
||||
code: -32_000,
|
||||
message: "refresh failed".to_string(),
|
||||
data: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let completed_notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: TurnCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.expect("turn/completed params must be present"),
|
||||
)?;
|
||||
assert_eq!(completed.turn.status, TurnStatus::Failed);
|
||||
assert!(completed.turn.error.is_some());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Refresh returns tokens for the wrong workspace; turn fails.
|
||||
async fn external_auth_refresh_mismatched_workspace_fails_turn() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mock_server = MockServer::start().await;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_workspace_id: Some("org-expected".to_string()),
|
||||
requires_openai_auth: Some(true),
|
||||
base_url: Some(format!("{}/v1", mock_server.uri())),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
|
||||
"error": { "message": "unauthorized" }
|
||||
}));
|
||||
let _responses_mock =
|
||||
responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;
|
||||
|
||||
let initial_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("initial@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-expected"),
|
||||
)?;
|
||||
let refreshed_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("refreshed@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-other"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(codex_app_server_protocol::TurnStartParams {
|
||||
thread_id: thread.thread.id.clone(),
|
||||
input: vec![codex_app_server_protocol::UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
let refresh_req: ServerRequest = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
|
||||
bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
|
||||
};
|
||||
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::to_value(ChatgptAuthTokensRefreshResponse {
|
||||
access_token: "access-refreshed".to_string(),
|
||||
id_token: refreshed_id_token,
|
||||
})?,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let completed_notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: TurnCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.expect("turn/completed params must be present"),
|
||||
)?;
|
||||
assert_eq!(completed.turn.status, TurnStatus::Failed);
|
||||
assert!(completed.turn.error.is_some());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Refresh returns a malformed id_token; turn fails.
|
||||
async fn external_auth_refresh_invalid_id_token_fails_turn() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mock_server = MockServer::start().await;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
base_url: Some(format!("{}/v1", mock_server.uri())),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_models_cache(codex_home.path())?;
|
||||
|
||||
let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
|
||||
"error": { "message": "unauthorized" }
|
||||
}));
|
||||
let _responses_mock =
|
||||
responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;
|
||||
|
||||
let initial_id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("initial@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-initial"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(codex_app_server_protocol::TurnStartParams {
|
||||
thread_id: thread.thread.id.clone(),
|
||||
input: vec![codex_app_server_protocol::UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
let refresh_req: ServerRequest = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
|
||||
bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
|
||||
};
|
||||
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::to_value(ChatgptAuthTokensRefreshResponse {
|
||||
access_token: "access-refreshed".to_string(),
|
||||
id_token: "not-a-jwt".to_string(),
|
||||
})?,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let completed_notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: TurnCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.expect("turn/completed params must be present"),
|
||||
)?;
|
||||
assert_eq!(completed.turn.status, TurnStatus::Failed);
|
||||
assert!(completed.turn.error.is_some());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -304,6 +944,71 @@ async fn login_account_chatgpt_start_can_be_cancelled() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn set_auth_token_cancels_active_chatgpt_login() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Initiate the ChatGPT login flow
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { login_id, .. } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
|
||||
let id_token = encode_id_token(
|
||||
&ChatGptIdTokenClaims::new()
|
||||
.email("embedded@example.com")
|
||||
.plan_type("pro")
|
||||
.chatgpt_account_id("org-embedded"),
|
||||
)?;
|
||||
// Set an external auth token instead of completing the ChatGPT login flow.
|
||||
// This should cancel the active login attempt.
|
||||
let set_id = mcp
|
||||
.send_chatgpt_auth_tokens_login_request(id_token, "access-embedded".to_string())
|
||||
.await?;
|
||||
let set_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
|
||||
)
|
||||
.await??;
|
||||
let response: LoginAccountResponse = to_response(set_resp)?;
|
||||
assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
|
||||
let _updated = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Verify that the active login attempt was cancelled.
|
||||
// We check this by trying to cancel it and expecting a not found error.
|
||||
let cancel_id = mcp
|
||||
.send_cancel_login_account_request(CancelLoginAccountParams {
|
||||
login_id: login_id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let cancel_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
|
||||
)
|
||||
.await??;
|
||||
let cancel: CancelLoginAccountResponse = to_response(cancel_resp)?;
|
||||
assert_eq!(cancel.status, CancelLoginAccountStatus::NotFound);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
|
||||
@@ -13,14 +13,13 @@ use axum::extract::State;
|
||||
use axum::http::HeaderMap;
|
||||
use axum::http::StatusCode;
|
||||
use axum::http::header::AUTHORIZATION;
|
||||
use axum::routing::post;
|
||||
use axum::routing::get;
|
||||
use codex_app_server_protocol::AppInfo;
|
||||
use codex_app_server_protocol::AppsListParams;
|
||||
use codex_app_server_protocol::AppsListResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::connectors::ConnectorInfo;
|
||||
use pretty_assertions::assert_eq;
|
||||
use rmcp::handler::server::ServerHandler;
|
||||
use rmcp::model::JsonObject;
|
||||
@@ -71,19 +70,23 @@ async fn list_apps_returns_empty_when_connectors_disabled() -> Result<()> {
|
||||
#[tokio::test]
|
||||
async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
|
||||
let connectors = vec![
|
||||
ConnectorInfo {
|
||||
connector_id: "alpha".to_string(),
|
||||
connector_name: "Alpha".to_string(),
|
||||
connector_description: Some("Alpha connector".to_string()),
|
||||
AppInfo {
|
||||
id: "alpha".to_string(),
|
||||
name: "Alpha".to_string(),
|
||||
description: Some("Alpha connector".to_string()),
|
||||
logo_url: Some("https://example.com/alpha.png".to_string()),
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
},
|
||||
ConnectorInfo {
|
||||
connector_id: "beta".to_string(),
|
||||
connector_name: "beta".to_string(),
|
||||
connector_description: None,
|
||||
AppInfo {
|
||||
id: "beta".to_string(),
|
||||
name: "beta".to_string(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
},
|
||||
@@ -127,6 +130,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
|
||||
name: "Beta App".to_string(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
|
||||
is_accessible: true,
|
||||
},
|
||||
@@ -135,6 +140,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
|
||||
name: "Alpha".to_string(),
|
||||
description: Some("Alpha connector".to_string()),
|
||||
logo_url: Some("https://example.com/alpha.png".to_string()),
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
|
||||
is_accessible: false,
|
||||
},
|
||||
@@ -150,19 +157,23 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
|
||||
#[tokio::test]
|
||||
async fn list_apps_paginates_results() -> Result<()> {
|
||||
let connectors = vec![
|
||||
ConnectorInfo {
|
||||
connector_id: "alpha".to_string(),
|
||||
connector_name: "Alpha".to_string(),
|
||||
connector_description: Some("Alpha connector".to_string()),
|
||||
AppInfo {
|
||||
id: "alpha".to_string(),
|
||||
name: "Alpha".to_string(),
|
||||
description: Some("Alpha connector".to_string()),
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
},
|
||||
ConnectorInfo {
|
||||
connector_id: "beta".to_string(),
|
||||
connector_name: "beta".to_string(),
|
||||
connector_description: None,
|
||||
AppInfo {
|
||||
id: "beta".to_string(),
|
||||
name: "beta".to_string(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
},
|
||||
@@ -206,6 +217,8 @@ async fn list_apps_paginates_results() -> Result<()> {
|
||||
name: "Beta App".to_string(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
|
||||
is_accessible: true,
|
||||
}];
|
||||
@@ -234,6 +247,8 @@ async fn list_apps_paginates_results() -> Result<()> {
|
||||
name: "Alpha".to_string(),
|
||||
description: Some("Alpha connector".to_string()),
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
|
||||
is_accessible: false,
|
||||
}];
|
||||
@@ -289,13 +304,13 @@ impl ServerHandler for AppListMcpServer {
|
||||
}
|
||||
|
||||
async fn start_apps_server(
|
||||
connectors: Vec<ConnectorInfo>,
|
||||
connectors: Vec<AppInfo>,
|
||||
tools: Vec<Tool>,
|
||||
) -> Result<(String, JoinHandle<()>)> {
|
||||
let state = AppsServerState {
|
||||
expected_bearer: "Bearer chatgpt-token".to_string(),
|
||||
expected_account_id: "account-123".to_string(),
|
||||
response: json!({ "connectors": connectors }),
|
||||
response: json!({ "apps": connectors, "next_token": null }),
|
||||
};
|
||||
let state = Arc::new(state);
|
||||
let tools = Arc::new(tools);
|
||||
@@ -313,7 +328,11 @@ async fn start_apps_server(
|
||||
);
|
||||
|
||||
let router = Router::new()
|
||||
.route("/aip/connectors/list_accessible", post(list_connectors))
|
||||
.route("/connectors/directory/list", get(list_directory_connectors))
|
||||
.route(
|
||||
"/connectors/directory/list_workspace",
|
||||
get(list_directory_connectors),
|
||||
)
|
||||
.with_state(state)
|
||||
.nest_service("/api/codex/apps", mcp_service);
|
||||
|
||||
@@ -324,7 +343,7 @@ async fn start_apps_server(
|
||||
Ok((format!("http://{addr}"), handle))
|
||||
}
|
||||
|
||||
async fn list_connectors(
|
||||
async fn list_directory_connectors(
|
||||
State(state): State<Arc<AppsServerState>>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<impl axum::response::IntoResponse, StatusCode> {
|
||||
|
||||
282
codex-rs/app-server/tests/suite/v2/compaction.rs
Normal file
282
codex-rs/app-server/tests/suite/v2/compaction.rs
Normal file
@@ -0,0 +1,282 @@
|
||||
//! End-to-end compaction flow tests.
|
||||
//!
|
||||
//! Phases:
|
||||
//! 1) Arrange: mock responses/compact endpoints + config.
|
||||
//! 2) Act: start a thread and submit multiple turns to trigger auto-compaction.
|
||||
//! 3) Assert: verify item/started + item/completed notifications for context compaction.
|
||||
|
||||
#![expect(clippy::expect_used)]
|
||||
|
||||
use anyhow::Result;
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use app_test_support::write_mock_responses_config_toml;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnCompletedNotification;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::features::Feature;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::BTreeMap;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const AUTO_COMPACT_LIMIT: i64 = 1_000;
|
||||
const COMPACT_PROMPT: &str = "Summarize the conversation.";
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn auto_compaction_local_emits_started_and_completed_items() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let sse1 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m1", "FIRST_REPLY"),
|
||||
responses::ev_completed_with_tokens("r1", 70_000),
|
||||
]);
|
||||
let sse2 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m2", "SECOND_REPLY"),
|
||||
responses::ev_completed_with_tokens("r2", 330_000),
|
||||
]);
|
||||
let sse3 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m3", "LOCAL_SUMMARY"),
|
||||
responses::ev_completed_with_tokens("r3", 200),
|
||||
]);
|
||||
let sse4 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m4", "FINAL_REPLY"),
|
||||
responses::ev_completed_with_tokens("r4", 120),
|
||||
]);
|
||||
responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3, sse4]).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
write_mock_responses_config_toml(
|
||||
codex_home.path(),
|
||||
&server.uri(),
|
||||
&BTreeMap::default(),
|
||||
AUTO_COMPACT_LIMIT,
|
||||
None,
|
||||
"mock_provider",
|
||||
COMPACT_PROMPT,
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_id = start_thread(&mut mcp).await?;
|
||||
for message in ["first", "second", "third"] {
|
||||
send_turn_and_wait(&mut mcp, &thread_id, message).await?;
|
||||
}
|
||||
|
||||
let started = wait_for_context_compaction_started(&mut mcp).await?;
|
||||
let completed = wait_for_context_compaction_completed(&mut mcp).await?;
|
||||
|
||||
let ThreadItem::ContextCompaction { id: started_id } = started.item else {
|
||||
unreachable!("started item should be context compaction");
|
||||
};
|
||||
let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
|
||||
unreachable!("completed item should be context compaction");
|
||||
};
|
||||
|
||||
assert_eq!(started.thread_id, thread_id);
|
||||
assert_eq!(completed.thread_id, thread_id);
|
||||
assert_eq!(started_id, completed_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = responses::start_mock_server().await;
|
||||
let sse1 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m1", "FIRST_REPLY"),
|
||||
responses::ev_completed_with_tokens("r1", 70_000),
|
||||
]);
|
||||
let sse2 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m2", "SECOND_REPLY"),
|
||||
responses::ev_completed_with_tokens("r2", 330_000),
|
||||
]);
|
||||
let sse3 = responses::sse(vec![
|
||||
responses::ev_assistant_message("m3", "FINAL_REPLY"),
|
||||
responses::ev_completed_with_tokens("r3", 120),
|
||||
]);
|
||||
let responses_log = responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3]).await;
|
||||
|
||||
let compacted_history = vec![
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "REMOTE_COMPACT_SUMMARY".to_string(),
|
||||
}],
|
||||
end_turn: None,
|
||||
},
|
||||
ResponseItem::Compaction {
|
||||
encrypted_content: "ENCRYPTED_COMPACTION_SUMMARY".to_string(),
|
||||
},
|
||||
];
|
||||
let compact_mock = responses::mount_compact_json_once(
|
||||
&server,
|
||||
serde_json::json!({ "output": compacted_history }),
|
||||
)
|
||||
.await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut features = BTreeMap::default();
|
||||
features.insert(Feature::RemoteCompaction, true);
|
||||
write_mock_responses_config_toml(
|
||||
codex_home.path(),
|
||||
&server.uri(),
|
||||
&features,
|
||||
AUTO_COMPACT_LIMIT,
|
||||
Some(true),
|
||||
"openai",
|
||||
COMPACT_PROMPT,
|
||||
)?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("access-chatgpt").plan_type("pro"),
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let server_base_url = format!("{}/v1", server.uri());
|
||||
let mut mcp = McpProcess::new_with_env(
|
||||
codex_home.path(),
|
||||
&[
|
||||
("OPENAI_BASE_URL", Some(server_base_url.as_str())),
|
||||
("OPENAI_API_KEY", None),
|
||||
],
|
||||
)
|
||||
.await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_id = start_thread(&mut mcp).await?;
|
||||
for message in ["first", "second", "third"] {
|
||||
send_turn_and_wait(&mut mcp, &thread_id, message).await?;
|
||||
}
|
||||
|
||||
let started = wait_for_context_compaction_started(&mut mcp).await?;
|
||||
let completed = wait_for_context_compaction_completed(&mut mcp).await?;
|
||||
|
||||
let ThreadItem::ContextCompaction { id: started_id } = started.item else {
|
||||
unreachable!("started item should be context compaction");
|
||||
};
|
||||
let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
|
||||
unreachable!("completed item should be context compaction");
|
||||
};
|
||||
|
||||
assert_eq!(started.thread_id, thread_id);
|
||||
assert_eq!(completed.thread_id, thread_id);
|
||||
assert_eq!(started_id, completed_id);
|
||||
|
||||
let compact_requests = compact_mock.requests();
|
||||
assert_eq!(compact_requests.len(), 1);
|
||||
assert_eq!(compact_requests[0].path(), "/v1/responses/compact");
|
||||
|
||||
let response_requests = responses_log.requests();
|
||||
assert_eq!(response_requests.len(), 3);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn start_thread(mcp: &mut McpProcess) -> Result<String> {
|
||||
let thread_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
Ok(thread.id)
|
||||
}
|
||||
|
||||
async fn send_turn_and_wait(mcp: &mut McpProcess, thread_id: &str, text: &str) -> Result<String> {
|
||||
let turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread_id.to_string(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: text.to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
wait_for_turn_completed(mcp, &turn.id).await?;
|
||||
Ok(turn.id)
|
||||
}
|
||||
|
||||
async fn wait_for_turn_completed(mcp: &mut McpProcess, turn_id: &str) -> Result<()> {
|
||||
loop {
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: TurnCompletedNotification =
|
||||
serde_json::from_value(notification.params.clone().expect("turn/completed params"))?;
|
||||
if completed.turn.id == turn_id {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn wait_for_context_compaction_started(
|
||||
mcp: &mut McpProcess,
|
||||
) -> Result<ItemStartedNotification> {
|
||||
loop {
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("item/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: ItemStartedNotification =
|
||||
serde_json::from_value(notification.params.clone().expect("item/started params"))?;
|
||||
if let ThreadItem::ContextCompaction { .. } = started.item {
|
||||
return Ok(started);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn wait_for_context_compaction_completed(
|
||||
mcp: &mut McpProcess,
|
||||
) -> Result<ItemCompletedNotification> {
|
||||
loop {
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("item/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: ItemCompletedNotification =
|
||||
serde_json::from_value(notification.params.clone().expect("item/completed params"))?;
|
||||
if let ThreadItem::ContextCompaction { .. } = completed.item {
|
||||
return Ok(completed);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@ mod account;
|
||||
mod analytics;
|
||||
mod app_list;
|
||||
mod collaboration_mode_list;
|
||||
mod compaction;
|
||||
mod config_rpc;
|
||||
mod dynamic_tools;
|
||||
mod initialize;
|
||||
|
||||
@@ -2,7 +2,9 @@ use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout_with_text_elements;
|
||||
use app_test_support::create_mock_responses_server_repeating_assistant;
|
||||
use app_test_support::rollout_path;
|
||||
use app_test_support::to_response;
|
||||
use chrono::Utc;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SessionSource;
|
||||
@@ -22,6 +24,8 @@ use codex_protocol::user_input::TextElement;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs::FileTimes;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
@@ -147,6 +151,116 @@ async fn thread_resume_returns_rollout_history() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_without_overrides_does_not_change_updated_at_or_mtime() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;
|
||||
let thread_id = rollout.conversation_id.clone();
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let resume_id = mcp
|
||||
.send_thread_resume_request(ThreadResumeParams {
|
||||
thread_id: thread_id.clone(),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;
|
||||
|
||||
assert_eq!(thread.updated_at, rollout.expected_updated_at);
|
||||
|
||||
let after_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
|
||||
assert_eq!(after_modified, rollout.before_modified);
|
||||
|
||||
let turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id,
|
||||
input: vec![UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
|
||||
assert!(after_turn_modified > rollout.before_modified);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
let codex_home = TempDir::new()?;
|
||||
let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let resume_id = mcp
|
||||
.send_thread_resume_request(ThreadResumeParams {
|
||||
thread_id: rollout.conversation_id.clone(),
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;
|
||||
|
||||
assert_eq!(thread.updated_at, rollout.expected_updated_at);
|
||||
|
||||
let after_resume_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
|
||||
assert_eq!(after_resume_modified, rollout.before_modified);
|
||||
|
||||
let turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: rollout.conversation_id,
|
||||
input: vec![UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
|
||||
assert!(after_turn_modified > rollout.before_modified);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
|
||||
let server = create_mock_responses_server_repeating_assistant("Done").await;
|
||||
@@ -364,3 +478,51 @@ stream_max_retries = 0
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
|
||||
let parsed = chrono::DateTime::parse_from_rfc3339(updated_at_rfc3339)?.with_timezone(&Utc);
|
||||
let times = FileTimes::new().set_modified(parsed.into());
|
||||
std::fs::OpenOptions::new()
|
||||
.append(true)
|
||||
.open(path)?
|
||||
.set_times(times)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct RolloutFixture {
|
||||
conversation_id: String,
|
||||
rollout_file_path: PathBuf,
|
||||
before_modified: std::time::SystemTime,
|
||||
expected_updated_at: i64,
|
||||
}
|
||||
|
||||
fn setup_rollout_fixture(codex_home: &Path, server_uri: &str) -> Result<RolloutFixture> {
|
||||
create_config_toml(codex_home, server_uri)?;
|
||||
|
||||
let preview = "Saved user message";
|
||||
let filename_ts = "2025-01-05T12-00-00";
|
||||
let meta_rfc3339 = "2025-01-05T12:00:00Z";
|
||||
let expected_updated_at_rfc3339 = "2025-01-07T00:00:00Z";
|
||||
let conversation_id = create_fake_rollout_with_text_elements(
|
||||
codex_home,
|
||||
filename_ts,
|
||||
meta_rfc3339,
|
||||
preview,
|
||||
Vec::new(),
|
||||
Some("mock_provider"),
|
||||
None,
|
||||
)?;
|
||||
let rollout_file_path = rollout_path(codex_home, filename_ts, &conversation_id);
|
||||
set_rollout_mtime(rollout_file_path.as_path(), expected_updated_at_rfc3339)?;
|
||||
let before_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
|
||||
let expected_updated_at = chrono::DateTime::parse_from_rfc3339(expected_updated_at_rfc3339)?
|
||||
.with_timezone(&Utc)
|
||||
.timestamp();
|
||||
|
||||
Ok(RolloutFixture {
|
||||
conversation_id,
|
||||
rollout_file_path,
|
||||
before_modified,
|
||||
expected_updated_at,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::types::CodeTaskDetailsResponse;
|
||||
use crate::types::ConfigFileResponse;
|
||||
use crate::types::CreditStatusDetails;
|
||||
use crate::types::PaginatedListTaskListItem;
|
||||
use crate::types::RateLimitStatusPayload;
|
||||
@@ -244,6 +245,20 @@ impl Client {
|
||||
self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
|
||||
}
|
||||
|
||||
/// Fetch the managed requirements file from codex-backend.
|
||||
///
|
||||
/// `GET /api/codex/config/requirements` (Codex API style) or
|
||||
/// `GET /wham/config/requirements` (ChatGPT backend-api style).
|
||||
pub async fn get_config_requirements_file(&self) -> Result<ConfigFileResponse> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!("{}/api/codex/config/requirements", self.base_url),
|
||||
PathStyle::ChatGptApi => format!("{}/wham/config/requirements", self.base_url),
|
||||
};
|
||||
let req = self.http.get(&url).headers(self.headers());
|
||||
let (body, ct) = self.exec_request(req, "GET", &url).await?;
|
||||
self.decode_json::<ConfigFileResponse>(&url, &ct, &body)
|
||||
}
|
||||
|
||||
/// Create a new task (user turn) by POSTing to the appropriate backend path
|
||||
/// based on `path_style`. Returns the created task id.
|
||||
pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {
|
||||
|
||||
@@ -4,6 +4,7 @@ pub mod types;
|
||||
pub use client::Client;
|
||||
pub use types::CodeTaskDetailsResponse;
|
||||
pub use types::CodeTaskDetailsResponseExt;
|
||||
pub use types::ConfigFileResponse;
|
||||
pub use types::PaginatedListTaskListItem;
|
||||
pub use types::TaskListItem;
|
||||
pub use types::TurnAttemptsSiblingTurnsResponse;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
pub use codex_backend_openapi_models::models::ConfigFileResponse;
|
||||
pub use codex_backend_openapi_models::models::CreditStatusDetails;
|
||||
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
|
||||
pub use codex_backend_openapi_models::models::PlanType;
|
||||
|
||||
@@ -17,6 +17,8 @@ serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
codex-git = { workspace = true }
|
||||
urlencoding = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -5,13 +5,21 @@ use crate::chatgpt_token::get_chatgpt_token_data;
|
||||
use crate::chatgpt_token::init_chatgpt_token_from_auth;
|
||||
|
||||
use anyhow::Context;
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Make a GET request to the ChatGPT backend API.
|
||||
pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
|
||||
config: &Config,
|
||||
path: String,
|
||||
) -> anyhow::Result<T> {
|
||||
chatgpt_get_request_with_timeout(config, path, None).await
|
||||
}
|
||||
|
||||
pub(crate) async fn chatgpt_get_request_with_timeout<T: DeserializeOwned>(
|
||||
config: &Config,
|
||||
path: String,
|
||||
timeout: Option<Duration>,
|
||||
) -> anyhow::Result<T> {
|
||||
let chatgpt_base_url = &config.chatgpt_base_url;
|
||||
init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
|
||||
@@ -28,48 +36,17 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
|
||||
anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
|
||||
});
|
||||
|
||||
let response = client
|
||||
let mut request = client
|
||||
.get(&url)
|
||||
.bearer_auth(&token.access_token)
|
||||
.header("chatgpt-account-id", account_id?)
|
||||
.header("Content-Type", "application/json")
|
||||
.send()
|
||||
.await
|
||||
.context("Failed to send request")?;
|
||||
|
||||
if response.status().is_success() {
|
||||
let result: T = response
|
||||
.json()
|
||||
.await
|
||||
.context("Failed to parse JSON response")?;
|
||||
Ok(result)
|
||||
} else {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
anyhow::bail!("Request failed with status {status}: {body}")
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn chatgpt_post_request<T: DeserializeOwned, P: Serialize>(
|
||||
config: &Config,
|
||||
access_token: &str,
|
||||
account_id: &str,
|
||||
path: &str,
|
||||
payload: &P,
|
||||
) -> anyhow::Result<T> {
|
||||
let chatgpt_base_url = &config.chatgpt_base_url;
|
||||
let client = create_client();
|
||||
let url = format!("{chatgpt_base_url}{path}");
|
||||
|
||||
let response = client
|
||||
.post(&url)
|
||||
.bearer_auth(access_token)
|
||||
.header("chatgpt-account-id", account_id)
|
||||
.header("Content-Type", "application/json")
|
||||
.json(payload)
|
||||
.send()
|
||||
.await
|
||||
.context("Failed to send request")?;
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
if let Some(timeout) = timeout {
|
||||
request = request.timeout(timeout);
|
||||
}
|
||||
|
||||
let response = request.send().await.context("Failed to send request")?;
|
||||
|
||||
if response.status().is_success() {
|
||||
let result: T = response
|
||||
|
||||
@@ -1,43 +1,45 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::features::Feature;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::chatgpt_client::chatgpt_post_request;
|
||||
use crate::chatgpt_client::chatgpt_get_request_with_timeout;
|
||||
use crate::chatgpt_token::get_chatgpt_token_data;
|
||||
use crate::chatgpt_token::init_chatgpt_token_from_auth;
|
||||
|
||||
pub use codex_core::connectors::ConnectorInfo;
|
||||
pub use codex_core::connectors::AppInfo;
|
||||
pub use codex_core::connectors::connector_display_label;
|
||||
use codex_core::connectors::connector_install_url;
|
||||
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools;
|
||||
use codex_core::connectors::merge_connectors;
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ListConnectorsRequest {
|
||||
principals: Vec<Principal>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Principal {
|
||||
#[serde(rename = "type")]
|
||||
principal_type: PrincipalType,
|
||||
id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||
enum PrincipalType {
|
||||
User,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ListConnectorsResponse {
|
||||
connectors: Vec<ConnectorInfo>,
|
||||
struct DirectoryListResponse {
|
||||
apps: Vec<DirectoryApp>,
|
||||
#[serde(alias = "nextToken")]
|
||||
next_token: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
|
||||
if !config.features.enabled(Feature::Connectors) {
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
struct DirectoryApp {
|
||||
id: String,
|
||||
name: String,
|
||||
description: Option<String>,
|
||||
#[serde(alias = "logoUrl")]
|
||||
logo_url: Option<String>,
|
||||
#[serde(alias = "logoUrlDark")]
|
||||
logo_url_dark: Option<String>,
|
||||
#[serde(alias = "distributionChannel")]
|
||||
distribution_channel: Option<String>,
|
||||
visibility: Option<String>,
|
||||
}
|
||||
|
||||
const DIRECTORY_CONNECTORS_TIMEOUT: Duration = Duration::from_secs(60);
|
||||
|
||||
pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
|
||||
if !config.features.enabled(Feature::Apps) {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
let (connectors_result, accessible_result) = tokio::join!(
|
||||
@@ -46,11 +48,12 @@ pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInf
|
||||
);
|
||||
let connectors = connectors_result?;
|
||||
let accessible = accessible_result?;
|
||||
Ok(merge_connectors(connectors, accessible))
|
||||
let merged = merge_connectors(connectors, accessible);
|
||||
Ok(filter_disallowed_connectors(merged))
|
||||
}
|
||||
|
||||
pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
|
||||
if !config.features.enabled(Feature::Connectors) {
|
||||
pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
|
||||
if !config.features.enabled(Feature::Apps) {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
|
||||
@@ -58,56 +61,149 @@ pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<Connecto
|
||||
|
||||
let token_data =
|
||||
get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;
|
||||
let user_id = token_data
|
||||
.id_token
|
||||
.chatgpt_user_id
|
||||
.as_deref()
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("ChatGPT user ID not available, please re-run `codex login`")
|
||||
})?;
|
||||
let account_id = token_data
|
||||
.id_token
|
||||
.chatgpt_account_id
|
||||
.as_deref()
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
|
||||
})?;
|
||||
let principal_id = format!("{user_id}__{account_id}");
|
||||
let request = ListConnectorsRequest {
|
||||
principals: vec![Principal {
|
||||
principal_type: PrincipalType::User,
|
||||
id: principal_id,
|
||||
}],
|
||||
};
|
||||
let response: ListConnectorsResponse = chatgpt_post_request(
|
||||
config,
|
||||
token_data.access_token.as_str(),
|
||||
account_id,
|
||||
"/aip/connectors/list_accessible?skip_actions=true&external_logos=true",
|
||||
&request,
|
||||
)
|
||||
.await?;
|
||||
let mut connectors = response.connectors;
|
||||
let mut apps = list_directory_connectors(config).await?;
|
||||
if token_data.id_token.is_workspace_account() {
|
||||
apps.extend(list_workspace_connectors(config).await?);
|
||||
}
|
||||
let mut connectors = merge_directory_apps(apps)
|
||||
.into_iter()
|
||||
.map(directory_app_to_app_info)
|
||||
.collect::<Vec<_>>();
|
||||
for connector in &mut connectors {
|
||||
let install_url = match connector.install_url.take() {
|
||||
Some(install_url) => install_url,
|
||||
None => connector_install_url(&connector.connector_name, &connector.connector_id),
|
||||
None => connector_install_url(&connector.name, &connector.id),
|
||||
};
|
||||
connector.connector_name =
|
||||
normalize_connector_name(&connector.connector_name, &connector.connector_id);
|
||||
connector.connector_description =
|
||||
normalize_connector_value(connector.connector_description.as_deref());
|
||||
connector.name = normalize_connector_name(&connector.name, &connector.id);
|
||||
connector.description = normalize_connector_value(connector.description.as_deref());
|
||||
connector.install_url = Some(install_url);
|
||||
connector.is_accessible = false;
|
||||
}
|
||||
connectors.sort_by(|left, right| {
|
||||
left.connector_name
|
||||
.cmp(&right.connector_name)
|
||||
.then_with(|| left.connector_id.cmp(&right.connector_id))
|
||||
left.name
|
||||
.cmp(&right.name)
|
||||
.then_with(|| left.id.cmp(&right.id))
|
||||
});
|
||||
Ok(connectors)
|
||||
}
|
||||
|
||||
async fn list_directory_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
|
||||
let mut apps = Vec::new();
|
||||
let mut next_token: Option<String> = None;
|
||||
loop {
|
||||
let path = match next_token.as_deref() {
|
||||
Some(token) => {
|
||||
let encoded_token = urlencoding::encode(token);
|
||||
format!("/connectors/directory/list?tier=categorized&token={encoded_token}")
|
||||
}
|
||||
None => "/connectors/directory/list?tier=categorized".to_string(),
|
||||
};
|
||||
let response: DirectoryListResponse =
|
||||
chatgpt_get_request_with_timeout(config, path, Some(DIRECTORY_CONNECTORS_TIMEOUT))
|
||||
.await?;
|
||||
apps.extend(
|
||||
response
|
||||
.apps
|
||||
.into_iter()
|
||||
.filter(|app| !is_hidden_directory_app(app)),
|
||||
);
|
||||
next_token = response
|
||||
.next_token
|
||||
.map(|token| token.trim().to_string())
|
||||
.filter(|token| !token.is_empty());
|
||||
if next_token.is_none() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(apps)
|
||||
}
|
||||
|
||||
async fn list_workspace_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
|
||||
let response: anyhow::Result<DirectoryListResponse> = chatgpt_get_request_with_timeout(
|
||||
config,
|
||||
"/connectors/directory/list_workspace".to_string(),
|
||||
Some(DIRECTORY_CONNECTORS_TIMEOUT),
|
||||
)
|
||||
.await;
|
||||
match response {
|
||||
Ok(response) => Ok(response
|
||||
.apps
|
||||
.into_iter()
|
||||
.filter(|app| !is_hidden_directory_app(app))
|
||||
.collect()),
|
||||
Err(_) => Ok(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
fn merge_directory_apps(apps: Vec<DirectoryApp>) -> Vec<DirectoryApp> {
|
||||
let mut merged: HashMap<String, DirectoryApp> = HashMap::new();
|
||||
for app in apps {
|
||||
if let Some(existing) = merged.get_mut(&app.id) {
|
||||
merge_directory_app(existing, app);
|
||||
} else {
|
||||
merged.insert(app.id.clone(), app);
|
||||
}
|
||||
}
|
||||
merged.into_values().collect()
|
||||
}
|
||||
|
||||
fn merge_directory_app(existing: &mut DirectoryApp, incoming: DirectoryApp) {
|
||||
let DirectoryApp {
|
||||
id: _,
|
||||
name,
|
||||
description,
|
||||
logo_url,
|
||||
logo_url_dark,
|
||||
distribution_channel,
|
||||
visibility: _,
|
||||
} = incoming;
|
||||
|
||||
let incoming_name_is_empty = name.trim().is_empty();
|
||||
if existing.name.trim().is_empty() && !incoming_name_is_empty {
|
||||
existing.name = name;
|
||||
}
|
||||
|
||||
let incoming_description_present = description
|
||||
.as_deref()
|
||||
.map(|value| !value.trim().is_empty())
|
||||
.unwrap_or(false);
|
||||
let existing_description_present = existing
|
||||
.description
|
||||
.as_deref()
|
||||
.map(|value| !value.trim().is_empty())
|
||||
.unwrap_or(false);
|
||||
if !existing_description_present && incoming_description_present {
|
||||
existing.description = description;
|
||||
}
|
||||
|
||||
if existing.logo_url.is_none() && logo_url.is_some() {
|
||||
existing.logo_url = logo_url;
|
||||
}
|
||||
if existing.logo_url_dark.is_none() && logo_url_dark.is_some() {
|
||||
existing.logo_url_dark = logo_url_dark;
|
||||
}
|
||||
if existing.distribution_channel.is_none() && distribution_channel.is_some() {
|
||||
existing.distribution_channel = distribution_channel;
|
||||
}
|
||||
}
|
||||
|
||||
fn is_hidden_directory_app(app: &DirectoryApp) -> bool {
|
||||
matches!(app.visibility.as_deref(), Some("HIDDEN"))
|
||||
}
|
||||
|
||||
fn directory_app_to_app_info(app: DirectoryApp) -> AppInfo {
|
||||
AppInfo {
|
||||
id: app.id,
|
||||
name: app.name,
|
||||
description: app.description,
|
||||
logo_url: app.logo_url,
|
||||
logo_url_dark: app.logo_url_dark,
|
||||
distribution_channel: app.distribution_channel,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_connector_name(name: &str, connector_id: &str) -> String {
|
||||
let trimmed = name.trim();
|
||||
if trimmed.is_empty() {
|
||||
@@ -123,3 +219,87 @@ fn normalize_connector_value(value: Option<&str>) -> Option<String> {
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
const ALLOWED_APPS_SDK_APPS: &[&str] = &["asdk_app_69781557cc1481919cf5e9824fa2e792"];
|
||||
const DISALLOWED_CONNECTOR_IDS: &[&str] = &["asdk_app_6938a94a61d881918ef32cb999ff937c"];
|
||||
const DISALLOWED_CONNECTOR_PREFIX: &str = "connector_openai_";
|
||||
|
||||
fn filter_disallowed_connectors(connectors: Vec<AppInfo>) -> Vec<AppInfo> {
|
||||
// TODO: Support Apps SDK connectors.
|
||||
connectors
|
||||
.into_iter()
|
||||
.filter(is_connector_allowed)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn is_connector_allowed(connector: &AppInfo) -> bool {
|
||||
let connector_id = connector.id.as_str();
|
||||
if connector_id.starts_with(DISALLOWED_CONNECTOR_PREFIX)
|
||||
|| DISALLOWED_CONNECTOR_IDS.contains(&connector_id)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if connector_id.starts_with("asdk_app_") {
|
||||
return ALLOWED_APPS_SDK_APPS.contains(&connector_id);
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn app(id: &str) -> AppInfo {
|
||||
AppInfo {
|
||||
id: id.to_string(),
|
||||
name: id.to_string(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: None,
|
||||
is_accessible: false,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filters_internal_asdk_connectors() {
|
||||
let filtered = filter_disallowed_connectors(vec![app("asdk_app_hidden"), app("alpha")]);
|
||||
assert_eq!(filtered, vec![app("alpha")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allows_whitelisted_asdk_connectors() {
|
||||
let filtered = filter_disallowed_connectors(vec![
|
||||
app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
|
||||
app("beta"),
|
||||
]);
|
||||
assert_eq!(
|
||||
filtered,
|
||||
vec![
|
||||
app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
|
||||
app("beta")
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filters_openai_connectors() {
|
||||
let filtered = filter_disallowed_connectors(vec![
|
||||
app("connector_openai_foo"),
|
||||
app("connector_openai_bar"),
|
||||
app("gamma"),
|
||||
]);
|
||||
assert_eq!(filtered, vec![app("gamma")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filters_disallowed_connector_ids() {
|
||||
let filtered = filter_disallowed_connectors(vec![
|
||||
app("asdk_app_6938a94a61d881918ef32cb999ff937c"),
|
||||
app("delta"),
|
||||
]);
|
||||
assert_eq!(filtered, vec![app("delta")]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,7 +136,8 @@ async fn run_command_under_sandbox(
|
||||
if let SandboxType::Windows = sandbox_type {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture_elevated;
|
||||
|
||||
@@ -147,8 +148,10 @@ async fn run_command_under_sandbox(
|
||||
let env_map = env.clone();
|
||||
let command_vec = command.clone();
|
||||
let base_dir = config.codex_home.clone();
|
||||
let use_elevated = config.features.enabled(Feature::WindowsSandbox)
|
||||
&& config.features.enabled(Feature::WindowsSandboxElevated);
|
||||
let use_elevated = matches!(
|
||||
WindowsSandboxLevel::from_config(&config),
|
||||
WindowsSandboxLevel::Elevated
|
||||
);
|
||||
|
||||
// Preflight audit is invoked elsewhere at the appropriate times.
|
||||
let res = tokio::task::spawn_blocking(move || {
|
||||
|
||||
@@ -240,6 +240,10 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
eprintln!("Logged in using ChatGPT");
|
||||
std::process::exit(0);
|
||||
}
|
||||
AuthMode::ChatgptAuthTokens => {
|
||||
eprintln!("Logged in using ChatGPT (external tokens)");
|
||||
std::process::exit(0);
|
||||
}
|
||||
},
|
||||
Ok(None) => {
|
||||
eprintln!("Not logged in");
|
||||
|
||||
@@ -39,6 +39,9 @@ use crate::mcp_cmd::McpCli;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::edit::ConfigEditsBuilder;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::features::Stage;
|
||||
use codex_core::features::is_known_feature_key;
|
||||
use codex_core::terminal::TerminalName;
|
||||
|
||||
@@ -147,7 +150,7 @@ struct ResumeCommand {
|
||||
session_id: Option<String>,
|
||||
|
||||
/// Continue the most recent session without showing the picker.
|
||||
#[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
|
||||
#[arg(long = "last", default_value_t = false)]
|
||||
last: bool,
|
||||
|
||||
/// Show all sessions (disables cwd filtering and shows CWD column).
|
||||
@@ -448,6 +451,16 @@ struct FeaturesCli {
|
||||
enum FeaturesSubcommand {
|
||||
/// List known features with their stage and effective state.
|
||||
List,
|
||||
/// Enable a feature in config.toml.
|
||||
Enable(FeatureSetArgs),
|
||||
/// Disable a feature in config.toml.
|
||||
Disable(FeatureSetArgs),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct FeatureSetArgs {
|
||||
/// Feature key to update (for example: unified_exec).
|
||||
feature: String,
|
||||
}
|
||||
|
||||
fn stage_str(stage: codex_core::features::Stage) -> &'static str {
|
||||
@@ -711,12 +724,69 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
println!("{name:<name_width$} {stage:<stage_width$} {enabled}");
|
||||
}
|
||||
}
|
||||
FeaturesSubcommand::Enable(FeatureSetArgs { feature }) => {
|
||||
enable_feature_in_config(&interactive, &feature).await?;
|
||||
}
|
||||
FeaturesSubcommand::Disable(FeatureSetArgs { feature }) => {
|
||||
disable_feature_in_config(&interactive, &feature).await?;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn enable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
|
||||
FeatureToggles::validate_feature(feature)?;
|
||||
let codex_home = find_codex_home()?;
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.with_profile(interactive.config_profile.as_deref())
|
||||
.set_feature_enabled(feature, true)
|
||||
.apply()
|
||||
.await?;
|
||||
println!("Enabled feature `{feature}` in config.toml.");
|
||||
maybe_print_under_development_feature_warning(&codex_home, interactive, feature);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn disable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
|
||||
FeatureToggles::validate_feature(feature)?;
|
||||
let codex_home = find_codex_home()?;
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.with_profile(interactive.config_profile.as_deref())
|
||||
.set_feature_enabled(feature, false)
|
||||
.apply()
|
||||
.await?;
|
||||
println!("Disabled feature `{feature}` in config.toml.");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn maybe_print_under_development_feature_warning(
|
||||
codex_home: &std::path::Path,
|
||||
interactive: &TuiCli,
|
||||
feature: &str,
|
||||
) {
|
||||
if interactive.config_profile.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(spec) = codex_core::features::FEATURES
|
||||
.iter()
|
||||
.find(|spec| spec.key == feature)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
if !matches!(spec.stage, Stage::UnderDevelopment) {
|
||||
return;
|
||||
}
|
||||
|
||||
let config_path = codex_home.join(codex_core::config::CONFIG_TOML_FILE);
|
||||
eprintln!(
|
||||
"Under-development features enabled: {feature}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {}.",
|
||||
config_path.display()
|
||||
);
|
||||
}
|
||||
|
||||
/// Prepend root-level overrides so they have lower precedence than
|
||||
/// CLI-specific ones specified after the subcommand (if any).
|
||||
fn prepend_config_flags(
|
||||
@@ -932,6 +1002,24 @@ mod tests {
|
||||
finalize_fork_interactive(interactive, root_overrides, session_id, last, all, fork_cli)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exec_resume_last_accepts_prompt_positional() {
|
||||
let cli =
|
||||
MultitoolCli::try_parse_from(["codex", "exec", "--json", "resume", "--last", "2+2"])
|
||||
.expect("parse should succeed");
|
||||
|
||||
let Some(Subcommand::Exec(exec)) = cli.subcommand else {
|
||||
panic!("expected exec subcommand");
|
||||
};
|
||||
let Some(codex_exec::Command::Resume(args)) = exec.command else {
|
||||
panic!("expected exec resume");
|
||||
};
|
||||
|
||||
assert!(args.last);
|
||||
assert_eq!(args.session_id, None);
|
||||
assert_eq!(args.prompt.as_deref(), Some("2+2"));
|
||||
}
|
||||
|
||||
fn app_server_from_args(args: &[&str]) -> AppServerCommand {
|
||||
let cli = MultitoolCli::try_parse_from(args).expect("parse");
|
||||
let Subcommand::AppServer(app_server) = cli.subcommand.expect("app-server present") else {
|
||||
@@ -1153,6 +1241,32 @@ mod tests {
|
||||
assert!(app_server.analytics_default_enabled);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn features_enable_parses_feature_name() {
|
||||
let cli = MultitoolCli::try_parse_from(["codex", "features", "enable", "unified_exec"])
|
||||
.expect("parse should succeed");
|
||||
let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
|
||||
panic!("expected features subcommand");
|
||||
};
|
||||
let FeaturesSubcommand::Enable(FeatureSetArgs { feature }) = sub else {
|
||||
panic!("expected features enable");
|
||||
};
|
||||
assert_eq!(feature, "unified_exec");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn features_disable_parses_feature_name() {
|
||||
let cli = MultitoolCli::try_parse_from(["codex", "features", "disable", "shell_tool"])
|
||||
.expect("parse should succeed");
|
||||
let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
|
||||
panic!("expected features subcommand");
|
||||
};
|
||||
let FeaturesSubcommand::Disable(FeatureSetArgs { feature }) = sub else {
|
||||
panic!("expected features disable");
|
||||
};
|
||||
assert_eq!(feature, "shell_tool");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn feature_toggles_known_features_generate_overrides() {
|
||||
let toggles = FeatureToggles {
|
||||
|
||||
@@ -13,11 +13,12 @@ use codex_core::config::find_codex_home;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::types::McpServerConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use codex_core::mcp::auth::McpOAuthLoginSupport;
|
||||
use codex_core::mcp::auth::compute_auth_statuses;
|
||||
use codex_core::mcp::auth::oauth_login_support;
|
||||
use codex_core::protocol::McpAuthStatus;
|
||||
use codex_rmcp_client::delete_oauth_tokens;
|
||||
use codex_rmcp_client::perform_oauth_login;
|
||||
use codex_rmcp_client::supports_oauth_login;
|
||||
|
||||
/// Subcommands:
|
||||
/// - `list` — list configured servers (with `--json`)
|
||||
@@ -260,33 +261,25 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
|
||||
println!("Added global MCP server '{name}'.");
|
||||
|
||||
if let McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var: None,
|
||||
http_headers,
|
||||
env_http_headers,
|
||||
} = transport
|
||||
{
|
||||
match supports_oauth_login(&url).await {
|
||||
Ok(true) => {
|
||||
println!("Detected OAuth support. Starting OAuth flow…");
|
||||
perform_oauth_login(
|
||||
&name,
|
||||
&url,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
http_headers.clone(),
|
||||
env_http_headers.clone(),
|
||||
&Vec::new(),
|
||||
config.mcp_oauth_callback_port,
|
||||
)
|
||||
.await?;
|
||||
println!("Successfully logged in.");
|
||||
}
|
||||
Ok(false) => {}
|
||||
Err(_) => println!(
|
||||
"MCP server may or may not require login. Run `codex mcp login {name}` to login."
|
||||
),
|
||||
match oauth_login_support(&transport).await {
|
||||
McpOAuthLoginSupport::Supported(oauth_config) => {
|
||||
println!("Detected OAuth support. Starting OAuth flow…");
|
||||
perform_oauth_login(
|
||||
&name,
|
||||
&oauth_config.url,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
oauth_config.http_headers,
|
||||
oauth_config.env_http_headers,
|
||||
&Vec::new(),
|
||||
config.mcp_oauth_callback_port,
|
||||
)
|
||||
.await?;
|
||||
println!("Successfully logged in.");
|
||||
}
|
||||
McpOAuthLoginSupport::Unsupported => {}
|
||||
McpOAuthLoginSupport::Unknown(_) => println!(
|
||||
"MCP server may or may not require login. Run `codex mcp login {name}` to login."
|
||||
),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
58
codex-rs/cli/tests/features.rs
Normal file
58
codex-rs/cli/tests/features.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
|
||||
let mut cmd = assert_cmd::Command::new(codex_utils_cargo_bin::cargo_bin("codex")?);
|
||||
cmd.env("CODEX_HOME", codex_home);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn features_enable_writes_feature_flag_to_config() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut cmd = codex_command(codex_home.path())?;
|
||||
cmd.args(["features", "enable", "unified_exec"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Enabled feature `unified_exec` in config.toml."));
|
||||
|
||||
let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
|
||||
assert!(config.contains("[features]"));
|
||||
assert!(config.contains("unified_exec = true"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn features_disable_writes_feature_flag_to_config() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut cmd = codex_command(codex_home.path())?;
|
||||
cmd.args(["features", "disable", "shell_tool"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Disabled feature `shell_tool` in config.toml."));
|
||||
|
||||
let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
|
||||
assert!(config.contains("[features]"));
|
||||
assert!(config.contains("shell_tool = false"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn features_enable_under_development_feature_prints_warning() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut cmd = codex_command(codex_home.path())?;
|
||||
cmd.args(["features", "enable", "sqlite"])
|
||||
.assert()
|
||||
.success()
|
||||
.stderr(contains("Under-development features enabled: sqlite."));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -291,7 +291,7 @@ pub fn process_responses_event(
|
||||
if let Ok(item) = serde_json::from_value::<ResponseItem>(item_val) {
|
||||
return Ok(Some(ResponseEvent::OutputItemAdded(item)));
|
||||
}
|
||||
debug!("failed to parse ResponseItem from output_item.done");
|
||||
debug!("failed to parse ResponseItem from output_item.added");
|
||||
}
|
||||
}
|
||||
"response.reasoning_summary_part.added" => {
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* codex-backend
|
||||
*
|
||||
* codex-backend
|
||||
*
|
||||
* The version of the OpenAPI document: 0.0.1
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ConfigFileResponse {
|
||||
#[serde(rename = "contents", skip_serializing_if = "Option::is_none")]
|
||||
pub contents: Option<String>,
|
||||
#[serde(rename = "sha256", skip_serializing_if = "Option::is_none")]
|
||||
pub sha256: Option<String>,
|
||||
#[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")]
|
||||
pub updated_at: Option<String>,
|
||||
#[serde(rename = "updated_by_user_id", skip_serializing_if = "Option::is_none")]
|
||||
pub updated_by_user_id: Option<String>,
|
||||
}
|
||||
|
||||
impl ConfigFileResponse {
|
||||
pub fn new(
|
||||
contents: Option<String>,
|
||||
sha256: Option<String>,
|
||||
updated_at: Option<String>,
|
||||
updated_by_user_id: Option<String>,
|
||||
) -> ConfigFileResponse {
|
||||
ConfigFileResponse {
|
||||
contents,
|
||||
sha256,
|
||||
updated_at,
|
||||
updated_by_user_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,10 @@
|
||||
// Currently export only the types referenced by the workspace
|
||||
// The process for this will change
|
||||
|
||||
// Config
|
||||
pub mod config_file_response;
|
||||
pub use self::config_file_response::ConfigFileResponse;
|
||||
|
||||
// Cloud Tasks
|
||||
pub mod code_task_details_response;
|
||||
pub use self::code_task_details_response::CodeTaskDetailsResponse;
|
||||
|
||||
@@ -37,6 +37,7 @@ codex-keyring-store = { workspace = true }
|
||||
codex-otel = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-rmcp-client = { workspace = true }
|
||||
codex-state = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
codex-utils-pty = { workspace = true }
|
||||
codex-utils-readiness = { workspace = true }
|
||||
@@ -55,6 +56,7 @@ indoc = { workspace = true }
|
||||
keyring = { workspace = true, features = ["crypto-rust"] }
|
||||
libc = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
multimap = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
os_info = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
|
||||
@@ -111,6 +111,13 @@
|
||||
"auto"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Store credentials in memory only for the current process.",
|
||||
"enum": [
|
||||
"ephemeral"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -144,6 +151,9 @@
|
||||
"apply_patch_freeform": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"apps": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"child_agents_md": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -180,6 +190,9 @@
|
||||
"include_apply_patch_tool": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"personality": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"powershell_utf8": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -189,6 +202,9 @@
|
||||
"remote_models": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"request_rule": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"responses_websockets": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -198,6 +214,15 @@
|
||||
"shell_tool": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"skill_env_var_dependency_prompt": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"skill_mcp_dependency_install": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"sqlite": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"steer": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -425,6 +450,11 @@
|
||||
"minimum": 0.0,
|
||||
"type": "integer"
|
||||
},
|
||||
"supports_websockets": {
|
||||
"default": false,
|
||||
"description": "Whether this provider supports the Responses API WebSocket transport.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"wire_api": {
|
||||
"allOf": [
|
||||
{
|
||||
@@ -441,7 +471,6 @@
|
||||
"type": "object"
|
||||
},
|
||||
"Notice": {
|
||||
"additionalProperties": false,
|
||||
"description": "Settings for notices we display to users via the tui and app-server clients (primarily the Codex IDE extension). NOTE: these are different from notifications - notices are warnings, NUX screens, acknowledgements, etc.",
|
||||
"properties": {
|
||||
"hide_full_access_warning": {
|
||||
@@ -475,6 +504,14 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"NotificationMethod": {
|
||||
"enum": [
|
||||
"auto",
|
||||
"osc9",
|
||||
"bel"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"Notifications": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -983,6 +1020,15 @@
|
||||
"default": null,
|
||||
"description": "Start the TUI in the specified collaboration mode (plan/execute/etc.). Defaults to unset."
|
||||
},
|
||||
"notification_method": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/NotificationMethod"
|
||||
}
|
||||
],
|
||||
"default": "auto",
|
||||
"description": "Notification method to use for unfocused terminal notifications. Defaults to `auto`."
|
||||
},
|
||||
"notifications": {
|
||||
"allOf": [
|
||||
{
|
||||
@@ -1047,13 +1093,6 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Experimental: Responses API over WebSocket transport.",
|
||||
"enum": [
|
||||
"responses_websocket"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Regular Chat Completions compatible with `/v1/chat/completions`.",
|
||||
"enum": [
|
||||
@@ -1137,6 +1176,9 @@
|
||||
"apply_patch_freeform": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"apps": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"child_agents_md": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1173,6 +1215,9 @@
|
||||
"include_apply_patch_tool": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"personality": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"powershell_utf8": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1182,6 +1227,9 @@
|
||||
"remote_models": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"request_rule": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"responses_websockets": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1191,6 +1239,15 @@
|
||||
"shell_tool": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"skill_env_var_dependency_prompt": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"skill_mcp_dependency_install": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"sqlite": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"steer": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1465,6 +1522,10 @@
|
||||
],
|
||||
"description": "User-level skill config entries keyed by SKILL.md path."
|
||||
},
|
||||
"suppress_unstable_features_warning": {
|
||||
"description": "Suppress warnings about unstable (under development) features.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"tool_output_token_limit": {
|
||||
"description": "Token budget applied when storing tool/function outputs in the context manager.",
|
||||
"format": "uint",
|
||||
|
||||
@@ -44,6 +44,8 @@ pub struct AgentProfile {
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
/// Whether to force a read-only sandbox policy.
|
||||
pub read_only: bool,
|
||||
/// Description to include in the tool specs.
|
||||
pub description: &'static str,
|
||||
}
|
||||
|
||||
impl AgentRole {
|
||||
@@ -51,7 +53,19 @@ impl AgentRole {
|
||||
pub fn enum_values() -> Vec<String> {
|
||||
ALL_ROLES
|
||||
.iter()
|
||||
.filter_map(|role| serde_json::to_string(role).ok())
|
||||
.filter_map(|role| {
|
||||
let description = role.profile().description;
|
||||
serde_json::to_string(role)
|
||||
.map(|role| {
|
||||
let description = if !description.is_empty() {
|
||||
format!(r#", "description": {description}"#)
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
format!(r#"{{ "name": {role}{description}}}"#)
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -66,11 +80,33 @@ impl AgentRole {
|
||||
AgentRole::Worker => AgentProfile {
|
||||
// base_instructions: Some(WORKER_PROMPT),
|
||||
// model: Some(WORKER_MODEL),
|
||||
description: r#"Use for execution and production work.
|
||||
Typical tasks:
|
||||
- Implement part of a feature
|
||||
- Fix tests or bugs
|
||||
- Split large refactors into independent chunks
|
||||
Rules:
|
||||
- Explicitly assign **ownership** of the task (files / responsibility).
|
||||
- Always tell workers they are **not alone in the codebase**, and they should ignore edits made by others without touching them"#,
|
||||
..Default::default()
|
||||
},
|
||||
AgentRole::Explorer => AgentProfile {
|
||||
model: Some(EXPLORER_MODEL),
|
||||
reasoning_effort: Some(ReasoningEffort::Low),
|
||||
description: r#"Use for fast codebase understanding and information gathering.
|
||||
`explorer` are extremely fast agents so use them as much as you can to speed up the resolution of the global task.
|
||||
Typical tasks:
|
||||
- Locate usages of a symbol or concept
|
||||
- Understand how X is handled in Y
|
||||
- Review a section of code for issues
|
||||
- Assess impact of a potential change
|
||||
Rules:
|
||||
- Be explicit in what you are looking for. A good usage of `explorer` would mean that don't need to read the same code after the explorer send you the result.
|
||||
- **Always** prefer asking explorers rather than exploring the codebase yourself.
|
||||
- Spawn multiple explorers in parallel when useful and wait for all results.
|
||||
- You can ask the `explorer` to return file name, lines, entire code snippets, ...
|
||||
- Reuse the same explorer when it is relevant. If later in your process you have more questions on some code an explorer already covered, reuse this same explorer to be more efficient.
|
||||
"#,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ use serde::Deserialize;
|
||||
|
||||
use crate::auth::CodexAuth;
|
||||
use crate::error::CodexErr;
|
||||
use crate::error::ModelCapError;
|
||||
use crate::error::RetryLimitReachedError;
|
||||
use crate::error::UnexpectedResponseError;
|
||||
use crate::error::UsageLimitReachedError;
|
||||
@@ -49,6 +50,23 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
|
||||
} else if status == http::StatusCode::INTERNAL_SERVER_ERROR {
|
||||
CodexErr::InternalServerError
|
||||
} else if status == http::StatusCode::TOO_MANY_REQUESTS {
|
||||
if let Some(model) = headers
|
||||
.as_ref()
|
||||
.and_then(|map| map.get(MODEL_CAP_MODEL_HEADER))
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.map(str::to_string)
|
||||
{
|
||||
let reset_after_seconds = headers
|
||||
.as_ref()
|
||||
.and_then(|map| map.get(MODEL_CAP_RESET_AFTER_HEADER))
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.and_then(|value| value.parse::<u64>().ok());
|
||||
return CodexErr::ModelCap(ModelCapError {
|
||||
model,
|
||||
reset_after_seconds,
|
||||
});
|
||||
}
|
||||
|
||||
if let Ok(err) = serde_json::from_str::<UsageErrorResponse>(&body_text) {
|
||||
if err.error.error_type.as_deref() == Some("usage_limit_reached") {
|
||||
let rate_limits = headers.as_ref().and_then(parse_rate_limit);
|
||||
@@ -92,6 +110,42 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
|
||||
}
|
||||
}
|
||||
|
||||
const MODEL_CAP_MODEL_HEADER: &str = "x-codex-model-cap-model";
|
||||
const MODEL_CAP_RESET_AFTER_HEADER: &str = "x-codex-model-cap-reset-after-seconds";
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_api::TransportError;
|
||||
use http::HeaderMap;
|
||||
use http::StatusCode;
|
||||
|
||||
#[test]
|
||||
fn map_api_error_maps_model_cap_headers() {
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
MODEL_CAP_MODEL_HEADER,
|
||||
http::HeaderValue::from_static("boomslang"),
|
||||
);
|
||||
headers.insert(
|
||||
MODEL_CAP_RESET_AFTER_HEADER,
|
||||
http::HeaderValue::from_static("120"),
|
||||
);
|
||||
let err = map_api_error(ApiError::Transport(TransportError::Http {
|
||||
status: StatusCode::TOO_MANY_REQUESTS,
|
||||
url: Some("http://example.com/v1/responses".to_string()),
|
||||
headers: Some(headers),
|
||||
body: Some(String::new()),
|
||||
}));
|
||||
|
||||
let CodexErr::ModelCap(model_cap) = err else {
|
||||
panic!("expected CodexErr::ModelCap, got {err:?}");
|
||||
};
|
||||
assert_eq!(model_cap.model, "boomslang");
|
||||
assert_eq!(model_cap.reset_after_seconds, Some(120));
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_request_id(headers: Option<&HeaderMap>) -> Option<String> {
|
||||
headers.and_then(|map| {
|
||||
["cf-ray", "x-request-id", "x-oai-request-id"]
|
||||
|
||||
@@ -42,6 +42,7 @@ pub(crate) async fn apply_patch(
|
||||
turn_context.approval_policy,
|
||||
&turn_context.sandbox_policy,
|
||||
&turn_context.cwd,
|
||||
turn_context.windows_sandbox_level,
|
||||
) {
|
||||
SafetyCheck::AutoApprove {
|
||||
user_explicitly_approved,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
mod storage;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use reqwest::StatusCode;
|
||||
use serde::Deserialize;
|
||||
@@ -23,6 +24,7 @@ use crate::auth::storage::create_auth_storage;
|
||||
use crate::config::Config;
|
||||
use crate::error::RefreshTokenFailedError;
|
||||
use crate::error::RefreshTokenFailedReason;
|
||||
use crate::token_data::IdTokenInfo;
|
||||
use crate::token_data::KnownPlan as InternalKnownPlan;
|
||||
use crate::token_data::PlanType as InternalPlanType;
|
||||
use crate::token_data::TokenData;
|
||||
@@ -68,6 +70,31 @@ pub enum RefreshTokenError {
|
||||
Transient(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExternalAuthTokens {
|
||||
pub access_token: String,
|
||||
pub id_token: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum ExternalAuthRefreshReason {
|
||||
Unauthorized,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExternalAuthRefreshContext {
|
||||
pub reason: ExternalAuthRefreshReason,
|
||||
pub previous_account_id: Option<String>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait ExternalAuthRefresher: Send + Sync {
|
||||
async fn refresh(
|
||||
&self,
|
||||
context: ExternalAuthRefreshContext,
|
||||
) -> std::io::Result<ExternalAuthTokens>;
|
||||
}
|
||||
|
||||
impl RefreshTokenError {
|
||||
pub fn failed_reason(&self) -> Option<RefreshTokenFailedReason> {
|
||||
match self {
|
||||
@@ -110,9 +137,9 @@ impl CodexAuth {
|
||||
pub fn get_token(&self) -> Result<String, std::io::Error> {
|
||||
match self.mode {
|
||||
AuthMode::ApiKey => Ok(self.api_key.clone().unwrap_or_default()),
|
||||
AuthMode::ChatGPT => {
|
||||
let id_token = self.get_token_data()?.access_token;
|
||||
Ok(id_token)
|
||||
AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens => {
|
||||
let access_token = self.get_token_data()?.access_token;
|
||||
Ok(access_token)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -160,6 +187,7 @@ impl CodexAuth {
|
||||
/// Consider this private to integration tests.
|
||||
pub fn create_dummy_chatgpt_auth_for_testing() -> Self {
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ChatGPT),
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token: Default::default(),
|
||||
@@ -229,6 +257,7 @@ pub fn login_with_api_key(
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<()> {
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some(api_key.to_string()),
|
||||
tokens: None,
|
||||
last_refresh: None,
|
||||
@@ -236,6 +265,20 @@ pub fn login_with_api_key(
|
||||
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
|
||||
}
|
||||
|
||||
/// Writes an in-memory auth payload for externally managed ChatGPT tokens.
|
||||
pub fn login_with_chatgpt_auth_tokens(
|
||||
codex_home: &Path,
|
||||
id_token: &str,
|
||||
access_token: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let auth_dot_json = AuthDotJson::from_external_token_strings(id_token, access_token)?;
|
||||
save_auth(
|
||||
codex_home,
|
||||
&auth_dot_json,
|
||||
AuthCredentialsStoreMode::Ephemeral,
|
||||
)
|
||||
}
|
||||
|
||||
/// Persist the provided auth payload using the specified backend.
|
||||
pub fn save_auth(
|
||||
codex_home: &Path,
|
||||
@@ -272,8 +315,8 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
|
||||
if let Some(required_method) = config.forced_login_method {
|
||||
let method_violation = match (required_method, auth.mode) {
|
||||
(ForcedLoginMethod::Api, AuthMode::ApiKey) => None,
|
||||
(ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT) => None,
|
||||
(ForcedLoginMethod::Api, AuthMode::ChatGPT) => Some(
|
||||
(ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens) => None,
|
||||
(ForcedLoginMethod::Api, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens) => Some(
|
||||
"API key login is required, but ChatGPT is currently being used. Logging out."
|
||||
.to_string(),
|
||||
),
|
||||
@@ -293,7 +336,7 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
|
||||
}
|
||||
|
||||
if let Some(expected_account_id) = config.forced_chatgpt_workspace_id.as_deref() {
|
||||
if auth.mode != AuthMode::ChatGPT {
|
||||
if !matches!(auth.mode, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@@ -337,12 +380,26 @@ fn logout_with_message(
|
||||
message: String,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<()> {
|
||||
match logout(codex_home, auth_credentials_store_mode) {
|
||||
Ok(_) => Err(std::io::Error::other(message)),
|
||||
Err(err) => Err(std::io::Error::other(format!(
|
||||
"{message}. Failed to remove auth.json: {err}"
|
||||
))),
|
||||
// External auth tokens live in the ephemeral store, but persistent auth may still exist
|
||||
// from earlier logins. Clear both so a forced logout truly removes all active auth.
|
||||
let removal_result = logout_all_stores(codex_home, auth_credentials_store_mode);
|
||||
let error_message = match removal_result {
|
||||
Ok(_) => message,
|
||||
Err(err) => format!("{message}. Failed to remove auth.json: {err}"),
|
||||
};
|
||||
Err(std::io::Error::other(error_message))
|
||||
}
|
||||
|
||||
fn logout_all_stores(
|
||||
codex_home: &Path,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<bool> {
|
||||
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
|
||||
return logout(codex_home, AuthCredentialsStoreMode::Ephemeral);
|
||||
}
|
||||
let removed_ephemeral = logout(codex_home, AuthCredentialsStoreMode::Ephemeral)?;
|
||||
let removed_managed = logout(codex_home, auth_credentials_store_mode)?;
|
||||
Ok(removed_ephemeral || removed_managed)
|
||||
}
|
||||
|
||||
fn load_auth(
|
||||
@@ -350,6 +407,12 @@ fn load_auth(
|
||||
enable_codex_api_key_env: bool,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> std::io::Result<Option<CodexAuth>> {
|
||||
let build_auth = |auth_dot_json: AuthDotJson, storage_mode| {
|
||||
let client = crate::default_client::create_client();
|
||||
CodexAuth::from_auth_dot_json(codex_home, auth_dot_json, storage_mode, client)
|
||||
};
|
||||
|
||||
// API key via env var takes precedence over any other auth method.
|
||||
if enable_codex_api_key_env && let Some(api_key) = read_codex_api_key_from_env() {
|
||||
let client = crate::default_client::create_client();
|
||||
return Ok(Some(CodexAuth::from_api_key_with_client(
|
||||
@@ -358,39 +421,34 @@ fn load_auth(
|
||||
)));
|
||||
}
|
||||
|
||||
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
|
||||
// External ChatGPT auth tokens live in the in-memory (ephemeral) store. Always check this
|
||||
// first so external auth takes precedence over any persisted credentials.
|
||||
let ephemeral_storage = create_auth_storage(
|
||||
codex_home.to_path_buf(),
|
||||
AuthCredentialsStoreMode::Ephemeral,
|
||||
);
|
||||
if let Some(auth_dot_json) = ephemeral_storage.load()? {
|
||||
let auth = build_auth(auth_dot_json, AuthCredentialsStoreMode::Ephemeral)?;
|
||||
return Ok(Some(auth));
|
||||
}
|
||||
|
||||
let client = crate::default_client::create_client();
|
||||
// If the caller explicitly requested ephemeral auth, there is no persisted fallback.
|
||||
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Fall back to the configured persistent store (file/keyring/auto) for managed auth.
|
||||
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
|
||||
let auth_dot_json = match storage.load()? {
|
||||
Some(auth) => auth,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let AuthDotJson {
|
||||
openai_api_key: auth_json_api_key,
|
||||
tokens,
|
||||
last_refresh,
|
||||
} = auth_dot_json;
|
||||
|
||||
// Prefer AuthMode.ApiKey if it's set in the auth.json.
|
||||
if let Some(api_key) = &auth_json_api_key {
|
||||
return Ok(Some(CodexAuth::from_api_key_with_client(api_key, client)));
|
||||
}
|
||||
|
||||
Ok(Some(CodexAuth {
|
||||
api_key: None,
|
||||
mode: AuthMode::ChatGPT,
|
||||
storage: storage.clone(),
|
||||
auth_dot_json: Arc::new(Mutex::new(Some(AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens,
|
||||
last_refresh,
|
||||
}))),
|
||||
client,
|
||||
}))
|
||||
let auth = build_auth(auth_dot_json, auth_credentials_store_mode)?;
|
||||
Ok(Some(auth))
|
||||
}
|
||||
|
||||
async fn update_tokens(
|
||||
fn update_tokens(
|
||||
storage: &Arc<dyn AuthStorageBackend>,
|
||||
id_token: Option<String>,
|
||||
access_token: Option<String>,
|
||||
@@ -537,17 +595,108 @@ fn refresh_token_endpoint() -> String {
|
||||
.unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
|
||||
}
|
||||
|
||||
impl AuthDotJson {
|
||||
fn from_external_tokens(external: &ExternalAuthTokens, id_token: IdTokenInfo) -> Self {
|
||||
let account_id = id_token.chatgpt_account_id.clone();
|
||||
let tokens = TokenData {
|
||||
id_token,
|
||||
access_token: external.access_token.clone(),
|
||||
refresh_token: String::new(),
|
||||
account_id,
|
||||
};
|
||||
|
||||
Self {
|
||||
auth_mode: Some(AuthMode::ChatgptAuthTokens),
|
||||
openai_api_key: None,
|
||||
tokens: Some(tokens),
|
||||
last_refresh: Some(Utc::now()),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_external_token_strings(id_token: &str, access_token: &str) -> std::io::Result<Self> {
|
||||
let id_token_info = parse_id_token(id_token).map_err(std::io::Error::other)?;
|
||||
let external = ExternalAuthTokens {
|
||||
access_token: access_token.to_string(),
|
||||
id_token: id_token.to_string(),
|
||||
};
|
||||
Ok(Self::from_external_tokens(&external, id_token_info))
|
||||
}
|
||||
|
||||
fn resolved_mode(&self) -> AuthMode {
|
||||
if let Some(mode) = self.auth_mode {
|
||||
return mode;
|
||||
}
|
||||
if self.openai_api_key.is_some() {
|
||||
return AuthMode::ApiKey;
|
||||
}
|
||||
AuthMode::ChatGPT
|
||||
}
|
||||
|
||||
fn storage_mode(
|
||||
&self,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> AuthCredentialsStoreMode {
|
||||
if self.resolved_mode() == AuthMode::ChatgptAuthTokens {
|
||||
AuthCredentialsStoreMode::Ephemeral
|
||||
} else {
|
||||
auth_credentials_store_mode
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CodexAuth {
|
||||
fn from_auth_dot_json(
|
||||
codex_home: &Path,
|
||||
auth_dot_json: AuthDotJson,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
client: CodexHttpClient,
|
||||
) -> std::io::Result<Self> {
|
||||
let auth_mode = auth_dot_json.resolved_mode();
|
||||
if auth_mode == AuthMode::ApiKey {
|
||||
let Some(api_key) = auth_dot_json.openai_api_key.as_deref() else {
|
||||
return Err(std::io::Error::other("API key auth is missing a key."));
|
||||
};
|
||||
return Ok(CodexAuth::from_api_key_with_client(api_key, client));
|
||||
}
|
||||
|
||||
let storage_mode = auth_dot_json.storage_mode(auth_credentials_store_mode);
|
||||
let storage = create_auth_storage(codex_home.to_path_buf(), storage_mode);
|
||||
Ok(Self {
|
||||
api_key: None,
|
||||
mode: auth_mode,
|
||||
storage,
|
||||
auth_dot_json: Arc::new(Mutex::new(Some(auth_dot_json))),
|
||||
client,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
use std::sync::RwLock;
|
||||
|
||||
/// Internal cached auth state.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
struct CachedAuth {
|
||||
auth: Option<CodexAuth>,
|
||||
/// Callback used to refresh external auth by asking the parent app for new tokens.
|
||||
external_refresher: Option<Arc<dyn ExternalAuthRefresher>>,
|
||||
}
|
||||
|
||||
impl Debug for CachedAuth {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("CachedAuth")
|
||||
.field("auth_mode", &self.auth.as_ref().map(|auth| auth.mode))
|
||||
.field(
|
||||
"external_refresher",
|
||||
&self.external_refresher.as_ref().map(|_| "present"),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
enum UnauthorizedRecoveryStep {
|
||||
Reload,
|
||||
RefreshToken,
|
||||
ExternalRefresh,
|
||||
Done,
|
||||
}
|
||||
|
||||
@@ -556,38 +705,63 @@ enum ReloadOutcome {
|
||||
Skipped,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
enum UnauthorizedRecoveryMode {
|
||||
Managed,
|
||||
External,
|
||||
}
|
||||
|
||||
// UnauthorizedRecovery is a state machine that handles an attempt to refresh the authentication when requests
|
||||
// to API fail with 401 status code.
|
||||
// The client calls next() every time it encounters a 401 error, one time per retry.
|
||||
// For API key based authentication, we don't do anything and let the error bubble to the user.
|
||||
//
|
||||
// For ChatGPT based authentication, we:
|
||||
// 1. Attempt to reload the auth data from disk. We only reload if the account id matches the one the current process is running as.
|
||||
// 2. Attempt to refresh the token using OAuth token refresh flow.
|
||||
// If after both steps the server still responds with 401 we let the error bubble to the user.
|
||||
//
|
||||
// For external ChatGPT auth tokens (chatgptAuthTokens), UnauthorizedRecovery does not touch disk or refresh
|
||||
// tokens locally. Instead it calls the ExternalAuthRefresher (account/chatgptAuthTokens/refresh) to ask the
|
||||
// parent app for new tokens, stores them in the ephemeral auth store, and retries once.
|
||||
pub struct UnauthorizedRecovery {
|
||||
manager: Arc<AuthManager>,
|
||||
step: UnauthorizedRecoveryStep,
|
||||
expected_account_id: Option<String>,
|
||||
mode: UnauthorizedRecoveryMode,
|
||||
}
|
||||
|
||||
impl UnauthorizedRecovery {
|
||||
fn new(manager: Arc<AuthManager>) -> Self {
|
||||
let expected_account_id = manager
|
||||
.auth_cached()
|
||||
.as_ref()
|
||||
.and_then(CodexAuth::get_account_id);
|
||||
let cached_auth = manager.auth_cached();
|
||||
let expected_account_id = cached_auth.as_ref().and_then(CodexAuth::get_account_id);
|
||||
let mode = match cached_auth {
|
||||
Some(auth) if auth.mode == AuthMode::ChatgptAuthTokens => {
|
||||
UnauthorizedRecoveryMode::External
|
||||
}
|
||||
_ => UnauthorizedRecoveryMode::Managed,
|
||||
};
|
||||
let step = match mode {
|
||||
UnauthorizedRecoveryMode::Managed => UnauthorizedRecoveryStep::Reload,
|
||||
UnauthorizedRecoveryMode::External => UnauthorizedRecoveryStep::ExternalRefresh,
|
||||
};
|
||||
Self {
|
||||
manager,
|
||||
step: UnauthorizedRecoveryStep::Reload,
|
||||
step,
|
||||
expected_account_id,
|
||||
mode,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_next(&self) -> bool {
|
||||
if !self
|
||||
.manager
|
||||
.auth_cached()
|
||||
.is_some_and(|auth| auth.mode == AuthMode::ChatGPT)
|
||||
if !self.manager.auth_cached().is_some_and(|auth| {
|
||||
matches!(auth.mode, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens)
|
||||
}) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if self.mode == UnauthorizedRecoveryMode::External
|
||||
&& !self.manager.has_external_auth_refresher()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -622,6 +796,12 @@ impl UnauthorizedRecovery {
|
||||
self.manager.refresh_token().await?;
|
||||
self.step = UnauthorizedRecoveryStep::Done;
|
||||
}
|
||||
UnauthorizedRecoveryStep::ExternalRefresh => {
|
||||
self.manager
|
||||
.refresh_external_auth(ExternalAuthRefreshReason::Unauthorized)
|
||||
.await?;
|
||||
self.step = UnauthorizedRecoveryStep::Done;
|
||||
}
|
||||
UnauthorizedRecoveryStep::Done => {}
|
||||
}
|
||||
Ok(())
|
||||
@@ -642,6 +822,7 @@ pub struct AuthManager {
|
||||
inner: RwLock<CachedAuth>,
|
||||
enable_codex_api_key_env: bool,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
forced_chatgpt_workspace_id: RwLock<Option<String>>,
|
||||
}
|
||||
|
||||
impl AuthManager {
|
||||
@@ -654,7 +835,7 @@ impl AuthManager {
|
||||
enable_codex_api_key_env: bool,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> Self {
|
||||
let auth = load_auth(
|
||||
let managed_auth = load_auth(
|
||||
&codex_home,
|
||||
enable_codex_api_key_env,
|
||||
auth_credentials_store_mode,
|
||||
@@ -663,34 +844,46 @@ impl AuthManager {
|
||||
.flatten();
|
||||
Self {
|
||||
codex_home,
|
||||
inner: RwLock::new(CachedAuth { auth }),
|
||||
inner: RwLock::new(CachedAuth {
|
||||
auth: managed_auth,
|
||||
external_refresher: None,
|
||||
}),
|
||||
enable_codex_api_key_env,
|
||||
auth_credentials_store_mode,
|
||||
forced_chatgpt_workspace_id: RwLock::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
/// Create an AuthManager with a specific CodexAuth, for testing only.
|
||||
pub fn from_auth_for_testing(auth: CodexAuth) -> Arc<Self> {
|
||||
let cached = CachedAuth { auth: Some(auth) };
|
||||
let cached = CachedAuth {
|
||||
auth: Some(auth),
|
||||
external_refresher: None,
|
||||
};
|
||||
|
||||
Arc::new(Self {
|
||||
codex_home: PathBuf::from("non-existent"),
|
||||
inner: RwLock::new(cached),
|
||||
enable_codex_api_key_env: false,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode::File,
|
||||
forced_chatgpt_workspace_id: RwLock::new(None),
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
/// Create an AuthManager with a specific CodexAuth and codex home, for testing only.
|
||||
pub fn from_auth_for_testing_with_home(auth: CodexAuth, codex_home: PathBuf) -> Arc<Self> {
|
||||
let cached = CachedAuth { auth: Some(auth) };
|
||||
let cached = CachedAuth {
|
||||
auth: Some(auth),
|
||||
external_refresher: None,
|
||||
};
|
||||
Arc::new(Self {
|
||||
codex_home,
|
||||
inner: RwLock::new(cached),
|
||||
enable_codex_api_key_env: false,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode::File,
|
||||
forced_chatgpt_workspace_id: RwLock::new(None),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -715,7 +908,7 @@ impl AuthManager {
|
||||
pub fn reload(&self) -> bool {
|
||||
tracing::info!("Reloading auth");
|
||||
let new_auth = self.load_auth_from_storage();
|
||||
self.set_auth(new_auth)
|
||||
self.set_cached_auth(new_auth)
|
||||
}
|
||||
|
||||
fn reload_if_account_id_matches(&self, expected_account_id: Option<&str>) -> ReloadOutcome {
|
||||
@@ -739,11 +932,11 @@ impl AuthManager {
|
||||
}
|
||||
|
||||
tracing::info!("Reloading auth for account {expected_account_id}");
|
||||
self.set_auth(new_auth);
|
||||
self.set_cached_auth(new_auth);
|
||||
ReloadOutcome::Reloaded
|
||||
}
|
||||
|
||||
fn auths_equal(a: &Option<CodexAuth>, b: &Option<CodexAuth>) -> bool {
|
||||
fn auths_equal(a: Option<&CodexAuth>, b: Option<&CodexAuth>) -> bool {
|
||||
match (a, b) {
|
||||
(None, None) => true,
|
||||
(Some(a), Some(b)) => a == b,
|
||||
@@ -761,9 +954,10 @@ impl AuthManager {
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn set_auth(&self, new_auth: Option<CodexAuth>) -> bool {
|
||||
fn set_cached_auth(&self, new_auth: Option<CodexAuth>) -> bool {
|
||||
if let Ok(mut guard) = self.inner.write() {
|
||||
let changed = !AuthManager::auths_equal(&guard.auth, &new_auth);
|
||||
let previous = guard.auth.as_ref();
|
||||
let changed = !AuthManager::auths_equal(previous, new_auth.as_ref());
|
||||
tracing::info!("Reloaded auth, changed: {changed}");
|
||||
guard.auth = new_auth;
|
||||
changed
|
||||
@@ -772,6 +966,38 @@ impl AuthManager {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_external_auth_refresher(&self, refresher: Arc<dyn ExternalAuthRefresher>) {
|
||||
if let Ok(mut guard) = self.inner.write() {
|
||||
guard.external_refresher = Some(refresher);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_forced_chatgpt_workspace_id(&self, workspace_id: Option<String>) {
|
||||
if let Ok(mut guard) = self.forced_chatgpt_workspace_id.write() {
|
||||
*guard = workspace_id;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn forced_chatgpt_workspace_id(&self) -> Option<String> {
|
||||
self.forced_chatgpt_workspace_id
|
||||
.read()
|
||||
.ok()
|
||||
.and_then(|guard| guard.clone())
|
||||
}
|
||||
|
||||
pub fn has_external_auth_refresher(&self) -> bool {
|
||||
self.inner
|
||||
.read()
|
||||
.ok()
|
||||
.map(|guard| guard.external_refresher.is_some())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn is_external_auth_active(&self) -> bool {
|
||||
self.auth_cached()
|
||||
.is_some_and(|auth| auth.mode == AuthMode::ChatgptAuthTokens)
|
||||
}
|
||||
|
||||
/// Convenience constructor returning an `Arc` wrapper.
|
||||
pub fn shared(
|
||||
codex_home: PathBuf,
|
||||
@@ -799,6 +1025,11 @@ impl AuthManager {
|
||||
Some(auth) => auth,
|
||||
None => return Ok(()),
|
||||
};
|
||||
if auth.mode == AuthMode::ChatgptAuthTokens {
|
||||
return self
|
||||
.refresh_external_auth(ExternalAuthRefreshReason::Unauthorized)
|
||||
.await;
|
||||
}
|
||||
let token_data = auth.get_current_token_data().ok_or_else(|| {
|
||||
RefreshTokenError::Transient(std::io::Error::other("Token data is not available."))
|
||||
})?;
|
||||
@@ -813,7 +1044,7 @@ impl AuthManager {
|
||||
/// reloads the in‑memory auth cache so callers immediately observe the
|
||||
/// unauthenticated state.
|
||||
pub fn logout(&self) -> std::io::Result<bool> {
|
||||
let removed = super::auth::logout(&self.codex_home, self.auth_credentials_store_mode)?;
|
||||
let removed = logout_all_stores(&self.codex_home, self.auth_credentials_store_mode)?;
|
||||
// Always reload to clear any cached auth (even if file absent).
|
||||
self.reload();
|
||||
Ok(removed)
|
||||
@@ -848,6 +1079,59 @@ impl AuthManager {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
async fn refresh_external_auth(
|
||||
&self,
|
||||
reason: ExternalAuthRefreshReason,
|
||||
) -> Result<(), RefreshTokenError> {
|
||||
let forced_chatgpt_workspace_id = self.forced_chatgpt_workspace_id();
|
||||
let refresher = match self.inner.read() {
|
||||
Ok(guard) => guard.external_refresher.clone(),
|
||||
Err(_) => {
|
||||
return Err(RefreshTokenError::Transient(std::io::Error::other(
|
||||
"failed to read external auth state",
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
let Some(refresher) = refresher else {
|
||||
return Err(RefreshTokenError::Transient(std::io::Error::other(
|
||||
"external auth refresher is not configured",
|
||||
)));
|
||||
};
|
||||
|
||||
let previous_account_id = self
|
||||
.auth_cached()
|
||||
.as_ref()
|
||||
.and_then(CodexAuth::get_account_id);
|
||||
let context = ExternalAuthRefreshContext {
|
||||
reason,
|
||||
previous_account_id,
|
||||
};
|
||||
|
||||
let refreshed = refresher.refresh(context).await?;
|
||||
let id_token = parse_id_token(&refreshed.id_token)
|
||||
.map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;
|
||||
if let Some(expected_workspace_id) = forced_chatgpt_workspace_id.as_deref() {
|
||||
let actual_workspace_id = id_token.chatgpt_account_id.as_deref();
|
||||
if actual_workspace_id != Some(expected_workspace_id) {
|
||||
return Err(RefreshTokenError::Transient(std::io::Error::other(
|
||||
format!(
|
||||
"external auth refresh returned workspace {actual_workspace_id:?}, expected {expected_workspace_id:?}",
|
||||
),
|
||||
)));
|
||||
}
|
||||
}
|
||||
let auth_dot_json = AuthDotJson::from_external_tokens(&refreshed, id_token);
|
||||
save_auth(
|
||||
&self.codex_home,
|
||||
&auth_dot_json,
|
||||
AuthCredentialsStoreMode::Ephemeral,
|
||||
)
|
||||
.map_err(RefreshTokenError::Transient)?;
|
||||
self.reload();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn refresh_tokens(
|
||||
&self,
|
||||
auth: &CodexAuth,
|
||||
@@ -861,7 +1145,6 @@ impl AuthManager {
|
||||
refresh_response.access_token,
|
||||
refresh_response.refresh_token,
|
||||
)
|
||||
.await
|
||||
.map_err(RefreshTokenError::from)?;
|
||||
|
||||
Ok(())
|
||||
@@ -910,7 +1193,6 @@ mod tests {
|
||||
Some("new-access-token".to_string()),
|
||||
Some("new-refresh-token".to_string()),
|
||||
)
|
||||
.await
|
||||
.expect("update_tokens should succeed");
|
||||
|
||||
let tokens = updated.tokens.expect("tokens should exist");
|
||||
@@ -991,6 +1273,7 @@ mod tests {
|
||||
|
||||
assert_eq!(
|
||||
&AuthDotJson {
|
||||
auth_mode: None,
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token: IdTokenInfo {
|
||||
@@ -1034,6 +1317,7 @@ mod tests {
|
||||
fn logout_removes_auth_file() -> Result<(), std::io::Error> {
|
||||
let dir = tempdir()?;
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("sk-test-key".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: None,
|
||||
|
||||
@@ -5,6 +5,7 @@ use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Debug;
|
||||
use std::fs::File;
|
||||
use std::fs::OpenOptions;
|
||||
@@ -15,11 +16,14 @@ use std::os::unix::fs::OpenOptionsExt;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::token_data::TokenData;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_keyring_store::DefaultKeyringStore;
|
||||
use codex_keyring_store::KeyringStore;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
/// Determine where Codex should store CLI auth credentials.
|
||||
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -32,11 +36,16 @@ pub enum AuthCredentialsStoreMode {
|
||||
Keyring,
|
||||
/// Use keyring when available; otherwise, fall back to a file in CODEX_HOME.
|
||||
Auto,
|
||||
/// Store credentials in memory only for the current process.
|
||||
Ephemeral,
|
||||
}
|
||||
|
||||
/// Expected structure for $CODEX_HOME/auth.json.
|
||||
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
|
||||
pub struct AuthDotJson {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub auth_mode: Option<AuthMode>,
|
||||
|
||||
#[serde(rename = "OPENAI_API_KEY")]
|
||||
pub openai_api_key: Option<String>,
|
||||
|
||||
@@ -76,8 +85,8 @@ impl FileAuthStorage {
|
||||
Self { codex_home }
|
||||
}
|
||||
|
||||
/// Attempt to read and refresh the `auth.json` file in the given `CODEX_HOME` directory.
|
||||
/// Returns the full AuthDotJson structure after refreshing if necessary.
|
||||
/// Attempt to read and parse the `auth.json` file in the given `CODEX_HOME` directory.
|
||||
/// Returns the full AuthDotJson structure.
|
||||
pub(super) fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
|
||||
let mut file = File::open(auth_file)?;
|
||||
let mut contents = String::new();
|
||||
@@ -256,6 +265,49 @@ impl AuthStorageBackend for AutoAuthStorage {
|
||||
}
|
||||
}
|
||||
|
||||
// A global in-memory store for mapping codex_home -> AuthDotJson.
|
||||
static EPHEMERAL_AUTH_STORE: Lazy<Mutex<HashMap<String, AuthDotJson>>> =
|
||||
Lazy::new(|| Mutex::new(HashMap::new()));
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct EphemeralAuthStorage {
|
||||
codex_home: PathBuf,
|
||||
}
|
||||
|
||||
impl EphemeralAuthStorage {
|
||||
fn new(codex_home: PathBuf) -> Self {
|
||||
Self { codex_home }
|
||||
}
|
||||
|
||||
fn with_store<F, T>(&self, action: F) -> std::io::Result<T>
|
||||
where
|
||||
F: FnOnce(&mut HashMap<String, AuthDotJson>, String) -> std::io::Result<T>,
|
||||
{
|
||||
let key = compute_store_key(&self.codex_home)?;
|
||||
let mut store = EPHEMERAL_AUTH_STORE
|
||||
.lock()
|
||||
.map_err(|_| std::io::Error::other("failed to lock ephemeral auth storage"))?;
|
||||
action(&mut store, key)
|
||||
}
|
||||
}
|
||||
|
||||
impl AuthStorageBackend for EphemeralAuthStorage {
|
||||
fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
|
||||
self.with_store(|store, key| Ok(store.get(&key).cloned()))
|
||||
}
|
||||
|
||||
fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
|
||||
self.with_store(|store, key| {
|
||||
store.insert(key, auth.clone());
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn delete(&self) -> std::io::Result<bool> {
|
||||
self.with_store(|store, key| Ok(store.remove(&key).is_some()))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn create_auth_storage(
|
||||
codex_home: PathBuf,
|
||||
mode: AuthCredentialsStoreMode,
|
||||
@@ -275,6 +327,7 @@ fn create_auth_storage_with_keyring_store(
|
||||
Arc::new(KeyringAuthStorage::new(codex_home, keyring_store))
|
||||
}
|
||||
AuthCredentialsStoreMode::Auto => Arc::new(AutoAuthStorage::new(codex_home, keyring_store)),
|
||||
AuthCredentialsStoreMode::Ephemeral => Arc::new(EphemeralAuthStorage::new(codex_home)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -296,6 +349,7 @@ mod tests {
|
||||
let codex_home = tempdir()?;
|
||||
let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("test-key".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: Some(Utc::now()),
|
||||
@@ -315,6 +369,7 @@ mod tests {
|
||||
let codex_home = tempdir()?;
|
||||
let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("test-key".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: Some(Utc::now()),
|
||||
@@ -336,6 +391,7 @@ mod tests {
|
||||
fn file_storage_delete_removes_auth_file() -> anyhow::Result<()> {
|
||||
let dir = tempdir()?;
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("sk-test-key".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: None,
|
||||
@@ -350,6 +406,32 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ephemeral_storage_save_load_delete_is_in_memory_only() -> anyhow::Result<()> {
|
||||
let dir = tempdir()?;
|
||||
let storage = create_auth_storage(
|
||||
dir.path().to_path_buf(),
|
||||
AuthCredentialsStoreMode::Ephemeral,
|
||||
);
|
||||
let auth_dot_json = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("sk-ephemeral".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: Some(Utc::now()),
|
||||
};
|
||||
|
||||
storage.save(&auth_dot_json)?;
|
||||
let loaded = storage.load()?;
|
||||
assert_eq!(Some(auth_dot_json), loaded);
|
||||
|
||||
let removed = storage.delete()?;
|
||||
assert!(removed);
|
||||
let loaded = storage.load()?;
|
||||
assert_eq!(None, loaded);
|
||||
assert!(!get_auth_file(dir.path()).exists());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn seed_keyring_and_fallback_auth_file_for_delete<F>(
|
||||
mock_keyring: &MockKeyringStore,
|
||||
codex_home: &Path,
|
||||
@@ -425,6 +507,7 @@ mod tests {
|
||||
|
||||
fn auth_with_prefix(prefix: &str) -> AuthDotJson {
|
||||
AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some(format!("{prefix}-api-key")),
|
||||
tokens: Some(TokenData {
|
||||
id_token: id_token_with_prefix(prefix),
|
||||
@@ -445,6 +528,7 @@ mod tests {
|
||||
Arc::new(mock_keyring.clone()),
|
||||
);
|
||||
let expected = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
openai_api_key: Some("sk-test".to_string()),
|
||||
tokens: None,
|
||||
last_refresh: None,
|
||||
@@ -481,6 +565,7 @@ mod tests {
|
||||
let auth_file = get_auth_file(codex_home.path());
|
||||
std::fs::write(&auth_file, "stale")?;
|
||||
let auth = AuthDotJson {
|
||||
auth_mode: Some(AuthMode::ChatGPT),
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token: Default::default(),
|
||||
|
||||
@@ -65,6 +65,7 @@ use crate::model_provider_info::ModelProviderInfo;
|
||||
use crate::model_provider_info::WireApi;
|
||||
use crate::tools::spec::create_tools_json_for_chat_completions_api;
|
||||
use crate::tools::spec::create_tools_json_for_responses_api;
|
||||
use crate::transport_manager::TransportManager;
|
||||
|
||||
pub const WEB_SEARCH_ELIGIBLE_HEADER: &str = "x-oai-web-search-eligible";
|
||||
pub const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
|
||||
@@ -80,6 +81,7 @@ struct ModelClientState {
|
||||
effort: Option<ReasoningEffortConfig>,
|
||||
summary: ReasoningSummaryConfig,
|
||||
session_source: SessionSource,
|
||||
transport_manager: TransportManager,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -91,6 +93,7 @@ pub struct ModelClientSession {
|
||||
state: Arc<ModelClientState>,
|
||||
connection: Option<ApiWebSocketConnection>,
|
||||
websocket_last_items: Vec<ResponseItem>,
|
||||
transport_manager: TransportManager,
|
||||
/// Turn state for sticky routing.
|
||||
///
|
||||
/// This is an `OnceLock` that stores the turn state value received from the server
|
||||
@@ -116,6 +119,7 @@ impl ModelClient {
|
||||
summary: ReasoningSummaryConfig,
|
||||
conversation_id: ThreadId,
|
||||
session_source: SessionSource,
|
||||
transport_manager: TransportManager,
|
||||
) -> Self {
|
||||
Self {
|
||||
state: Arc::new(ModelClientState {
|
||||
@@ -128,6 +132,7 @@ impl ModelClient {
|
||||
effort,
|
||||
summary,
|
||||
session_source,
|
||||
transport_manager,
|
||||
}),
|
||||
}
|
||||
}
|
||||
@@ -137,6 +142,7 @@ impl ModelClient {
|
||||
state: Arc::clone(&self.state),
|
||||
connection: None,
|
||||
websocket_last_items: Vec::new(),
|
||||
transport_manager: self.state.transport_manager.clone(),
|
||||
turn_state: Arc::new(OnceLock::new()),
|
||||
}
|
||||
}
|
||||
@@ -171,6 +177,10 @@ impl ModelClient {
|
||||
self.state.session_source.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn transport_manager(&self) -> TransportManager {
|
||||
self.state.transport_manager.clone()
|
||||
}
|
||||
|
||||
/// Returns the currently configured model slug.
|
||||
pub fn get_model(&self) -> String {
|
||||
self.state.model_info.slug.clone()
|
||||
@@ -250,9 +260,18 @@ impl ModelClientSession {
|
||||
/// For Chat providers, the underlying stream is optionally aggregated
|
||||
/// based on the `show_raw_agent_reasoning` flag in the config.
|
||||
pub async fn stream(&mut self, prompt: &Prompt) -> Result<ResponseStream> {
|
||||
match self.state.provider.wire_api {
|
||||
WireApi::Responses => self.stream_responses_api(prompt).await,
|
||||
WireApi::ResponsesWebsocket => self.stream_responses_websocket(prompt).await,
|
||||
let wire_api = self.state.provider.wire_api;
|
||||
match wire_api {
|
||||
WireApi::Responses => {
|
||||
let websocket_enabled = self.responses_websocket_enabled()
|
||||
&& !self.transport_manager.disable_websockets();
|
||||
|
||||
if websocket_enabled {
|
||||
self.stream_responses_websocket(prompt).await
|
||||
} else {
|
||||
self.stream_responses_api(prompt).await
|
||||
}
|
||||
}
|
||||
WireApi::Chat => {
|
||||
let api_stream = self.stream_chat_completions(prompt).await?;
|
||||
|
||||
@@ -271,6 +290,34 @@ impl ModelClientSession {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn try_switch_fallback_transport(&mut self) -> bool {
|
||||
let websocket_enabled = self.responses_websocket_enabled();
|
||||
let activated = self
|
||||
.transport_manager
|
||||
.activate_http_fallback(websocket_enabled);
|
||||
if activated {
|
||||
warn!("falling back to HTTP");
|
||||
self.state.otel_manager.counter(
|
||||
"codex.transport.fallback_to_http",
|
||||
1,
|
||||
&[("from_wire_api", "responses_websocket")],
|
||||
);
|
||||
|
||||
self.connection = None;
|
||||
self.websocket_last_items.clear();
|
||||
}
|
||||
activated
|
||||
}
|
||||
|
||||
fn responses_websocket_enabled(&self) -> bool {
|
||||
self.state.provider.supports_websockets
|
||||
&& self
|
||||
.state
|
||||
.config
|
||||
.features
|
||||
.enabled(Feature::ResponsesWebsockets)
|
||||
}
|
||||
|
||||
fn build_responses_request(&self, prompt: &Prompt) -> Result<ApiPrompt> {
|
||||
let instructions = prompt.base_instructions.text.clone();
|
||||
let tools_json: Vec<Value> = create_tools_json_for_responses_api(&prompt.tools)?;
|
||||
@@ -422,7 +469,9 @@ impl ModelClientSession {
|
||||
.config
|
||||
.features
|
||||
.enabled(Feature::EnableRequestCompression)
|
||||
&& auth.is_some_and(|auth| auth.mode == AuthMode::ChatGPT)
|
||||
&& auth.is_some_and(|auth| {
|
||||
matches!(auth.mode, AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens)
|
||||
})
|
||||
&& self.state.provider.is_openai()
|
||||
{
|
||||
Compression::Zstd
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -12,6 +12,8 @@ use codex_protocol::protocol::SessionSource;
|
||||
use std::path::PathBuf;
|
||||
use tokio::sync::watch;
|
||||
|
||||
use crate::state_db::StateDbHandle;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ThreadConfigSnapshot {
|
||||
pub model: String,
|
||||
@@ -64,6 +66,10 @@ impl CodexThread {
|
||||
self.rollout_path.clone()
|
||||
}
|
||||
|
||||
pub fn state_db(&self) -> Option<StateDbHandle> {
|
||||
self.codex.state_db()
|
||||
}
|
||||
|
||||
pub async fn config_snapshot(&self) -> ThreadConfigSnapshot {
|
||||
self.codex.thread_config_snapshot().await
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ use crate::error::CodexErr;
|
||||
use crate::error::Result as CodexResult;
|
||||
use crate::features::Feature;
|
||||
use crate::protocol::CompactedItem;
|
||||
use crate::protocol::ContextCompactedEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::TurnContextItem;
|
||||
use crate::protocol::TurnStartedEvent;
|
||||
@@ -20,6 +19,7 @@ use crate::truncate::TruncationPolicy;
|
||||
use crate::truncate::approx_token_count;
|
||||
use crate::truncate::truncate_text;
|
||||
use crate::util::backoff;
|
||||
use codex_protocol::items::ContextCompactionItem;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
@@ -71,6 +71,9 @@ async fn run_compact_task_inner(
|
||||
turn_context: Arc<TurnContext>,
|
||||
input: Vec<UserInput>,
|
||||
) {
|
||||
let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
|
||||
sess.emit_turn_item_started(&turn_context, &compaction_item)
|
||||
.await;
|
||||
let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
|
||||
|
||||
let mut history = sess.clone_history().await;
|
||||
@@ -193,9 +196,8 @@ async fn run_compact_task_inner(
|
||||
});
|
||||
sess.persist_rollout_items(&[rollout_item]).await;
|
||||
|
||||
let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
|
||||
sess.send_event(&turn_context, event).await;
|
||||
|
||||
sess.emit_turn_item_completed(&turn_context, compaction_item)
|
||||
.await;
|
||||
let warning = EventMsg::Warning(WarningEvent {
|
||||
message: "Heads up: Long threads and multiple compactions can cause the model to be less accurate. Start a new thread when possible to keep threads small and targeted.".to_string(),
|
||||
});
|
||||
|
||||
@@ -5,10 +5,11 @@ use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::error::Result as CodexResult;
|
||||
use crate::protocol::CompactedItem;
|
||||
use crate::protocol::ContextCompactedEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::RolloutItem;
|
||||
use crate::protocol::TurnStartedEvent;
|
||||
use codex_protocol::items::ContextCompactionItem;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
pub(crate) async fn run_inline_remote_auto_compact_task(
|
||||
@@ -40,6 +41,9 @@ async fn run_remote_compact_task_inner_impl(
|
||||
sess: &Arc<Session>,
|
||||
turn_context: &Arc<TurnContext>,
|
||||
) -> CodexResult<()> {
|
||||
let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
|
||||
sess.emit_turn_item_started(turn_context, &compaction_item)
|
||||
.await;
|
||||
let history = sess.clone_history().await;
|
||||
|
||||
// Required to keep `/undo` available after compaction
|
||||
@@ -77,8 +81,7 @@ async fn run_remote_compact_task_inner_impl(
|
||||
sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
|
||||
.await;
|
||||
|
||||
let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
|
||||
sess.send_event(turn_context, event).await;
|
||||
|
||||
sess.emit_turn_item_completed(turn_context, compaction_item)
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ use crate::config::types::McpServerConfig;
|
||||
use crate::config::types::McpServerDisabledReason;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use crate::config::types::Notice;
|
||||
use crate::config::types::NotificationMethod;
|
||||
use crate::config::types::Notifications;
|
||||
use crate::config::types::OtelConfig;
|
||||
use crate::config::types::OtelConfigToml;
|
||||
@@ -38,6 +39,7 @@ use crate::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
|
||||
use crate::project_doc::LOCAL_PROJECT_DOC_FILENAME;
|
||||
use crate::protocol::AskForApproval;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::windows_sandbox::WindowsSandboxLevelExt;
|
||||
use codex_app_server_protocol::Tools;
|
||||
use codex_app_server_protocol::UserSavedConfig;
|
||||
use codex_protocol::config_types::AltScreenMode;
|
||||
@@ -49,6 +51,7 @@ use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::TrustLevel;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::config_types::WebSearchMode;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
use codex_rmcp_client::OAuthCredentialsStoreMode;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
@@ -190,10 +193,13 @@ pub struct Config {
|
||||
/// If unset the feature is disabled.
|
||||
pub notify: Option<Vec<String>>,
|
||||
|
||||
/// TUI notifications preference. When set, the TUI will send OSC 9 notifications on approvals
|
||||
/// and turn completions when not focused.
|
||||
/// TUI notifications preference. When set, the TUI will send terminal notifications on
|
||||
/// approvals and turn completions when not focused.
|
||||
pub tui_notifications: Notifications,
|
||||
|
||||
/// Notification method for terminal notifications (osc9 or bel).
|
||||
pub tui_notification_method: NotificationMethod,
|
||||
|
||||
/// Enable ASCII animations and shimmer effects in the TUI.
|
||||
pub animations: bool,
|
||||
|
||||
@@ -316,6 +322,9 @@ pub struct Config {
|
||||
/// Centralized feature flags; source of truth for feature gating.
|
||||
pub features: Features,
|
||||
|
||||
/// When `true`, suppress warnings about unstable (under development) features.
|
||||
pub suppress_unstable_features_warning: bool,
|
||||
|
||||
/// The active profile name used to derive this `Config` (if any).
|
||||
pub active_profile: Option<String>,
|
||||
|
||||
@@ -906,6 +915,9 @@ pub struct ConfigToml {
|
||||
#[schemars(schema_with = "crate::config::schema::features_schema")]
|
||||
pub features: Option<FeaturesToml>,
|
||||
|
||||
/// Suppress warnings about unstable (under development) features.
|
||||
pub suppress_unstable_features_warning: Option<bool>,
|
||||
|
||||
/// Settings for ghost snapshots (used for undo).
|
||||
#[serde(default)]
|
||||
pub ghost_snapshot: Option<GhostSnapshotToml>,
|
||||
@@ -1050,6 +1062,7 @@ impl ConfigToml {
|
||||
&self,
|
||||
sandbox_mode_override: Option<SandboxMode>,
|
||||
profile_sandbox_mode: Option<SandboxMode>,
|
||||
windows_sandbox_level: WindowsSandboxLevel,
|
||||
resolved_cwd: &Path,
|
||||
) -> SandboxPolicyResolution {
|
||||
let resolved_sandbox_mode = sandbox_mode_override
|
||||
@@ -1088,7 +1101,7 @@ impl ConfigToml {
|
||||
if cfg!(target_os = "windows")
|
||||
&& matches!(resolved_sandbox_mode, SandboxMode::WorkspaceWrite)
|
||||
// If the experimental Windows sandbox is enabled, do not force a downgrade.
|
||||
&& crate::safety::get_platform_sandbox().is_none()
|
||||
&& windows_sandbox_level == codex_protocol::config_types::WindowsSandboxLevel::Disabled
|
||||
{
|
||||
sandbox_policy = SandboxPolicy::new_read_only_policy();
|
||||
forced_auto_mode_downgraded_on_windows = true;
|
||||
@@ -1212,6 +1225,20 @@ fn resolve_web_search_mode(
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_web_search_mode_for_turn(
|
||||
explicit_mode: Option<WebSearchMode>,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> WebSearchMode {
|
||||
if let Some(mode) = explicit_mode {
|
||||
return mode;
|
||||
}
|
||||
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
|
||||
WebSearchMode::Live
|
||||
} else {
|
||||
WebSearchMode::Cached
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
#[cfg(test)]
|
||||
fn load_from_base_config_with_overrides(
|
||||
@@ -1278,17 +1305,6 @@ impl Config {
|
||||
};
|
||||
|
||||
let features = Features::from_config(&cfg, &config_profile, feature_overrides);
|
||||
let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Base flag controls sandbox on/off; elevated only applies when base is enabled.
|
||||
let sandbox_enabled = features.enabled(Feature::WindowsSandbox);
|
||||
crate::safety::set_windows_sandbox_enabled(sandbox_enabled);
|
||||
let elevated_enabled =
|
||||
sandbox_enabled && features.enabled(Feature::WindowsSandboxElevated);
|
||||
crate::safety::set_windows_elevated_sandbox_enabled(elevated_enabled);
|
||||
}
|
||||
|
||||
let resolved_cwd = {
|
||||
use std::env;
|
||||
|
||||
@@ -1315,10 +1331,16 @@ impl Config {
|
||||
.get_active_project(&resolved_cwd)
|
||||
.unwrap_or(ProjectConfig { trust_level: None });
|
||||
|
||||
let windows_sandbox_level = WindowsSandboxLevel::from_features(&features);
|
||||
let SandboxPolicyResolution {
|
||||
policy: mut sandbox_policy,
|
||||
forced_auto_mode_downgraded_on_windows,
|
||||
} = cfg.derive_sandbox_policy(sandbox_mode, config_profile.sandbox_mode, &resolved_cwd);
|
||||
} = cfg.derive_sandbox_policy(
|
||||
sandbox_mode,
|
||||
config_profile.sandbox_mode,
|
||||
windows_sandbox_level,
|
||||
&resolved_cwd,
|
||||
);
|
||||
if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = &mut sandbox_policy {
|
||||
for path in additional_writable_roots {
|
||||
if !writable_roots.iter().any(|existing| existing == &path) {
|
||||
@@ -1338,6 +1360,7 @@ impl Config {
|
||||
AskForApproval::default()
|
||||
}
|
||||
});
|
||||
let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
|
||||
// TODO(dylan): We should be able to leverage ConfigLayerStack so that
|
||||
// we can reliably check this at every config level.
|
||||
let did_user_set_custom_approval_policy_or_sandbox_mode = approval_policy_override
|
||||
@@ -1349,12 +1372,6 @@ impl Config {
|
||||
|| cfg.sandbox_mode.is_some();
|
||||
|
||||
let mut model_providers = built_in_model_providers();
|
||||
if features.enabled(Feature::ResponsesWebsockets)
|
||||
&& let Some(provider) = model_providers.get_mut("openai")
|
||||
&& provider.is_openai()
|
||||
{
|
||||
provider.wire_api = crate::model_provider_info::WireApi::ResponsesWebsocket;
|
||||
}
|
||||
// Merge user-defined providers into the built-in list.
|
||||
for (key, provider) in cfg.model_providers.into_iter() {
|
||||
model_providers.entry(key).or_insert(provider);
|
||||
@@ -1564,6 +1581,9 @@ impl Config {
|
||||
use_experimental_unified_exec_tool,
|
||||
ghost_snapshot,
|
||||
features,
|
||||
suppress_unstable_features_warning: cfg
|
||||
.suppress_unstable_features_warning
|
||||
.unwrap_or(false),
|
||||
active_profile: active_profile_name,
|
||||
active_project,
|
||||
windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
|
||||
@@ -1585,6 +1605,11 @@ impl Config {
|
||||
.as_ref()
|
||||
.map(|t| t.notifications.clone())
|
||||
.unwrap_or_default(),
|
||||
tui_notification_method: cfg
|
||||
.tui
|
||||
.as_ref()
|
||||
.map(|t| t.notification_method)
|
||||
.unwrap_or_default(),
|
||||
animations: cfg.tui.as_ref().map(|t| t.animations).unwrap_or(true),
|
||||
show_tooltips: cfg.tui.as_ref().map(|t| t.show_tooltips).unwrap_or(true),
|
||||
experimental_mode: cfg.tui.as_ref().and_then(|t| t.experimental_mode),
|
||||
@@ -1657,20 +1682,19 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_windows_sandbox_globally(&mut self, value: bool) {
|
||||
crate::safety::set_windows_sandbox_enabled(value);
|
||||
pub fn set_windows_sandbox_enabled(&mut self, value: bool) {
|
||||
if value {
|
||||
self.features.enable(Feature::WindowsSandbox);
|
||||
self.forced_auto_mode_downgraded_on_windows = false;
|
||||
} else {
|
||||
self.features.disable(Feature::WindowsSandbox);
|
||||
}
|
||||
self.forced_auto_mode_downgraded_on_windows = !value;
|
||||
}
|
||||
|
||||
pub fn set_windows_elevated_sandbox_globally(&mut self, value: bool) {
|
||||
crate::safety::set_windows_elevated_sandbox_enabled(value);
|
||||
pub fn set_windows_elevated_sandbox_enabled(&mut self, value: bool) {
|
||||
if value {
|
||||
self.features.enable(Feature::WindowsSandboxElevated);
|
||||
self.forced_auto_mode_downgraded_on_windows = false;
|
||||
} else {
|
||||
self.features.disable(Feature::WindowsSandboxElevated);
|
||||
}
|
||||
@@ -1744,6 +1768,7 @@ mod tests {
|
||||
use crate::config::types::FeedbackConfigToml;
|
||||
use crate::config::types::HistoryPersistence;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use crate::config::types::NotificationMethod;
|
||||
use crate::config::types::Notifications;
|
||||
use crate::config_loader::RequirementSource;
|
||||
use crate::features::Feature;
|
||||
@@ -1840,6 +1865,7 @@ persistence = "none"
|
||||
tui,
|
||||
Tui {
|
||||
notifications: Notifications::Enabled(true),
|
||||
notification_method: NotificationMethod::Auto,
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
experimental_mode: None,
|
||||
@@ -1862,6 +1888,7 @@ network_access = false # This should be ignored.
|
||||
let resolution = sandbox_full_access_cfg.derive_sandbox_policy(
|
||||
sandbox_mode_override,
|
||||
None,
|
||||
WindowsSandboxLevel::Disabled,
|
||||
&PathBuf::from("/tmp/test"),
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -1885,6 +1912,7 @@ network_access = true # This should be ignored.
|
||||
let resolution = sandbox_read_only_cfg.derive_sandbox_policy(
|
||||
sandbox_mode_override,
|
||||
None,
|
||||
WindowsSandboxLevel::Disabled,
|
||||
&PathBuf::from("/tmp/test"),
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -1916,6 +1944,7 @@ exclude_slash_tmp = true
|
||||
let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
|
||||
sandbox_mode_override,
|
||||
None,
|
||||
WindowsSandboxLevel::Disabled,
|
||||
&PathBuf::from("/tmp/test"),
|
||||
);
|
||||
if cfg!(target_os = "windows") {
|
||||
@@ -1964,6 +1993,7 @@ trust_level = "trusted"
|
||||
let resolution = sandbox_workspace_write_cfg.derive_sandbox_policy(
|
||||
sandbox_mode_override,
|
||||
None,
|
||||
WindowsSandboxLevel::Disabled,
|
||||
&PathBuf::from("/tmp/test"),
|
||||
);
|
||||
if cfg!(target_os = "windows") {
|
||||
@@ -2255,7 +2285,7 @@ trust_level = "trusted"
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_uses_none_if_unset() {
|
||||
fn web_search_mode_defaults_to_none_if_unset() {
|
||||
let cfg = ConfigToml::default();
|
||||
let profile = ConfigProfile::default();
|
||||
let features = Features::with_defaults();
|
||||
@@ -2295,6 +2325,30 @@ trust_level = "trusted"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::ReadOnly);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Cached);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::DangerFullAccess);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Live);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_prefers_explicit_value() {
|
||||
let mode = resolve_web_search_mode_for_turn(
|
||||
Some(WebSearchMode::Cached),
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Cached);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -2495,7 +2549,7 @@ profile = "project"
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn responses_websockets_feature_updates_openai_provider() -> std::io::Result<()> {
|
||||
fn responses_websockets_feature_does_not_change_wire_api() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut entries = BTreeMap::new();
|
||||
entries.insert("responses_websockets".to_string(), true);
|
||||
@@ -2512,7 +2566,7 @@ profile = "project"
|
||||
|
||||
assert_eq!(
|
||||
config.model_provider.wire_api,
|
||||
crate::model_provider_info::WireApi::ResponsesWebsocket
|
||||
crate::model_provider_info::WireApi::Responses
|
||||
);
|
||||
|
||||
Ok(())
|
||||
@@ -3632,6 +3686,7 @@ model_verbosity = "high"
|
||||
stream_max_retries: Some(10),
|
||||
stream_idle_timeout_ms: Some(300_000),
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let model_provider_map = {
|
||||
let mut model_provider_map = built_in_model_providers();
|
||||
@@ -3732,6 +3787,7 @@ model_verbosity = "high"
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("o3".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -3739,6 +3795,7 @@ model_verbosity = "high"
|
||||
check_for_update_on_startup: true,
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
tui_notification_method: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
experimental_mode: None,
|
||||
@@ -3814,6 +3871,7 @@ model_verbosity = "high"
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("gpt3".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -3821,6 +3879,7 @@ model_verbosity = "high"
|
||||
check_for_update_on_startup: true,
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
tui_notification_method: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
experimental_mode: None,
|
||||
@@ -3911,6 +3970,7 @@ model_verbosity = "high"
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("zdr".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -3918,6 +3978,7 @@ model_verbosity = "high"
|
||||
check_for_update_on_startup: true,
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
tui_notification_method: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
experimental_mode: None,
|
||||
@@ -3994,6 +4055,7 @@ model_verbosity = "high"
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("gpt5".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -4001,6 +4063,7 @@ model_verbosity = "high"
|
||||
check_for_update_on_startup: true,
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
tui_notification_method: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
experimental_mode: None,
|
||||
@@ -4174,7 +4237,12 @@ trust_level = "untrusted"
|
||||
let cfg = toml::from_str::<ConfigToml>(config_with_untrusted)
|
||||
.expect("TOML deserialization should succeed");
|
||||
|
||||
let resolution = cfg.derive_sandbox_policy(None, None, &PathBuf::from("/tmp/test"));
|
||||
let resolution = cfg.derive_sandbox_policy(
|
||||
None,
|
||||
None,
|
||||
WindowsSandboxLevel::Disabled,
|
||||
&PathBuf::from("/tmp/test"),
|
||||
);
|
||||
|
||||
// Verify that untrusted projects get WorkspaceWrite (or ReadOnly on Windows due to downgrade)
|
||||
if cfg!(target_os = "windows") {
|
||||
@@ -4353,13 +4421,17 @@ mcp_oauth_callback_port = 5678
|
||||
|
||||
#[cfg(test)]
|
||||
mod notifications_tests {
|
||||
use crate::config::types::NotificationMethod;
|
||||
use crate::config::types::Notifications;
|
||||
use assert_matches::assert_matches;
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize, Debug, PartialEq)]
|
||||
struct TuiTomlTest {
|
||||
#[serde(default)]
|
||||
notifications: Notifications,
|
||||
#[serde(default)]
|
||||
notification_method: NotificationMethod,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, PartialEq)]
|
||||
@@ -4390,4 +4462,15 @@ mod notifications_tests {
|
||||
Notifications::Custom(ref v) if v == &vec!["foo".to_string()]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tui_notification_method() {
|
||||
let toml = r#"
|
||||
[tui]
|
||||
notification_method = "bel"
|
||||
"#;
|
||||
let parsed: RootTomlTest =
|
||||
toml::from_str(toml).expect("deserialize notification_method=\"bel\"");
|
||||
assert_eq!(parsed.tui.notification_method, NotificationMethod::Bel);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -428,6 +428,25 @@ impl Default for Notifications {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum NotificationMethod {
|
||||
#[default]
|
||||
Auto,
|
||||
Osc9,
|
||||
Bel,
|
||||
}
|
||||
|
||||
impl fmt::Display for NotificationMethod {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
NotificationMethod::Auto => write!(f, "auto"),
|
||||
NotificationMethod::Osc9 => write!(f, "osc9"),
|
||||
NotificationMethod::Bel => write!(f, "bel"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collection of settings that are specific to the TUI.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
@@ -437,6 +456,11 @@ pub struct Tui {
|
||||
#[serde(default)]
|
||||
pub notifications: Notifications,
|
||||
|
||||
/// Notification method to use for unfocused terminal notifications.
|
||||
/// Defaults to `auto`.
|
||||
#[serde(default)]
|
||||
pub notification_method: NotificationMethod,
|
||||
|
||||
/// Enable animations (welcome screen, shimmer effects, spinners).
|
||||
/// Defaults to `true`.
|
||||
#[serde(default = "default_true")]
|
||||
@@ -472,7 +496,6 @@ const fn default_true() -> bool {
|
||||
/// (primarily the Codex IDE extension). NOTE: these are different from
|
||||
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct Notice {
|
||||
/// Tracks whether the user has acknowledged the full access warning prompt.
|
||||
pub hide_full_access_warning: Option<bool>,
|
||||
|
||||
@@ -6,6 +6,8 @@ mod layer_io;
|
||||
mod macos;
|
||||
mod merge;
|
||||
mod overrides;
|
||||
#[cfg(test)]
|
||||
mod requirements_exec_policy;
|
||||
mod state;
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
188
codex-rs/core/src/config_loader/requirements_exec_policy.rs
Normal file
188
codex-rs/core/src/config_loader/requirements_exec_policy.rs
Normal file
@@ -0,0 +1,188 @@
|
||||
use codex_execpolicy::Decision;
|
||||
use codex_execpolicy::Policy;
|
||||
use codex_execpolicy::rule::PatternToken;
|
||||
use codex_execpolicy::rule::PrefixPattern;
|
||||
use codex_execpolicy::rule::PrefixRule;
|
||||
use codex_execpolicy::rule::RuleRef;
|
||||
use multimap::MultiMap;
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
use thiserror::Error;
|
||||
|
||||
/// TOML types for expressing exec policy requirements.
|
||||
///
|
||||
/// These types are kept separate from `ConfigRequirementsToml` and are
|
||||
/// converted into `codex-execpolicy` rules.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
|
||||
pub struct RequirementsExecPolicyTomlRoot {
|
||||
pub exec_policy: RequirementsExecPolicyToml,
|
||||
}
|
||||
|
||||
/// TOML representation of `[exec_policy]` within `requirements.toml`.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
|
||||
pub struct RequirementsExecPolicyToml {
|
||||
pub prefix_rules: Vec<RequirementsExecPolicyPrefixRuleToml>,
|
||||
}
|
||||
|
||||
/// A TOML representation of the `prefix_rule(...)` Starlark builtin.
|
||||
///
|
||||
/// This mirrors the builtin defined in `execpolicy/src/parser.rs`.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
|
||||
pub struct RequirementsExecPolicyPrefixRuleToml {
|
||||
pub pattern: Vec<RequirementsExecPolicyPatternTokenToml>,
|
||||
pub decision: Option<RequirementsExecPolicyDecisionToml>,
|
||||
pub justification: Option<String>,
|
||||
}
|
||||
|
||||
/// TOML-friendly representation of a pattern token.
|
||||
///
|
||||
/// Starlark supports either a string token or a list of alternative tokens at
|
||||
/// each position, but TOML arrays cannot mix strings and arrays. Using an
|
||||
/// array of tables sidesteps that restriction.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
|
||||
pub struct RequirementsExecPolicyPatternTokenToml {
|
||||
pub token: Option<String>,
|
||||
pub any_of: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum RequirementsExecPolicyDecisionToml {
|
||||
Allow,
|
||||
Prompt,
|
||||
Forbidden,
|
||||
}
|
||||
|
||||
impl RequirementsExecPolicyDecisionToml {
|
||||
fn as_decision(self) -> Decision {
|
||||
match self {
|
||||
Self::Allow => Decision::Allow,
|
||||
Self::Prompt => Decision::Prompt,
|
||||
Self::Forbidden => Decision::Forbidden,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum RequirementsExecPolicyParseError {
|
||||
#[error("exec policy prefix_rules cannot be empty")]
|
||||
EmptyPrefixRules,
|
||||
|
||||
#[error("exec policy prefix_rule at index {rule_index} has an empty pattern")]
|
||||
EmptyPattern { rule_index: usize },
|
||||
|
||||
#[error(
|
||||
"exec policy prefix_rule at index {rule_index} has an invalid pattern token at index {token_index}: {reason}"
|
||||
)]
|
||||
InvalidPatternToken {
|
||||
rule_index: usize,
|
||||
token_index: usize,
|
||||
reason: String,
|
||||
},
|
||||
|
||||
#[error("exec policy prefix_rule at index {rule_index} has an empty justification")]
|
||||
EmptyJustification { rule_index: usize },
|
||||
}
|
||||
|
||||
impl RequirementsExecPolicyToml {
|
||||
/// Convert requirements TOML exec policy rules into the internal `.rules`
|
||||
/// representation used by `codex-execpolicy`.
|
||||
pub fn to_policy(&self) -> Result<Policy, RequirementsExecPolicyParseError> {
|
||||
if self.prefix_rules.is_empty() {
|
||||
return Err(RequirementsExecPolicyParseError::EmptyPrefixRules);
|
||||
}
|
||||
|
||||
let mut rules_by_program: MultiMap<String, RuleRef> = MultiMap::new();
|
||||
|
||||
for (rule_index, rule) in self.prefix_rules.iter().enumerate() {
|
||||
if let Some(justification) = &rule.justification
|
||||
&& justification.trim().is_empty()
|
||||
{
|
||||
return Err(RequirementsExecPolicyParseError::EmptyJustification { rule_index });
|
||||
}
|
||||
|
||||
if rule.pattern.is_empty() {
|
||||
return Err(RequirementsExecPolicyParseError::EmptyPattern { rule_index });
|
||||
}
|
||||
|
||||
let pattern_tokens = rule
|
||||
.pattern
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(token_index, token)| parse_pattern_token(token, rule_index, token_index))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let decision = rule
|
||||
.decision
|
||||
.map(RequirementsExecPolicyDecisionToml::as_decision)
|
||||
.unwrap_or(Decision::Allow);
|
||||
let justification = rule.justification.clone();
|
||||
|
||||
let (first_token, remaining_tokens) = pattern_tokens
|
||||
.split_first()
|
||||
.ok_or(RequirementsExecPolicyParseError::EmptyPattern { rule_index })?;
|
||||
|
||||
let rest: Arc<[PatternToken]> = remaining_tokens.to_vec().into();
|
||||
|
||||
for head in first_token.alternatives() {
|
||||
let rule: RuleRef = Arc::new(PrefixRule {
|
||||
pattern: PrefixPattern {
|
||||
first: Arc::from(head.as_str()),
|
||||
rest: rest.clone(),
|
||||
},
|
||||
decision,
|
||||
justification: justification.clone(),
|
||||
});
|
||||
rules_by_program.insert(head.clone(), rule);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Policy::new(rules_by_program))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_pattern_token(
|
||||
token: &RequirementsExecPolicyPatternTokenToml,
|
||||
rule_index: usize,
|
||||
token_index: usize,
|
||||
) -> Result<PatternToken, RequirementsExecPolicyParseError> {
|
||||
match (&token.token, &token.any_of) {
|
||||
(Some(single), None) => {
|
||||
if single.trim().is_empty() {
|
||||
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
|
||||
rule_index,
|
||||
token_index,
|
||||
reason: "token cannot be empty".to_string(),
|
||||
});
|
||||
}
|
||||
Ok(PatternToken::Single(single.clone()))
|
||||
}
|
||||
(None, Some(alternatives)) => {
|
||||
if alternatives.is_empty() {
|
||||
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
|
||||
rule_index,
|
||||
token_index,
|
||||
reason: "any_of cannot be empty".to_string(),
|
||||
});
|
||||
}
|
||||
if alternatives.iter().any(|alt| alt.trim().is_empty()) {
|
||||
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
|
||||
rule_index,
|
||||
token_index,
|
||||
reason: "any_of cannot include empty tokens".to_string(),
|
||||
});
|
||||
}
|
||||
Ok(PatternToken::Alts(alternatives.clone()))
|
||||
}
|
||||
(Some(_), Some(_)) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
|
||||
rule_index,
|
||||
token_index,
|
||||
reason: "set either token or any_of, not both".to_string(),
|
||||
}),
|
||||
(None, None) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
|
||||
rule_index,
|
||||
token_index,
|
||||
reason: "set either token or any_of".to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
@@ -911,3 +911,165 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
mod requirements_exec_policy_tests {
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyDecisionToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyPatternTokenToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyPrefixRuleToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyToml;
|
||||
use super::super::requirements_exec_policy::RequirementsExecPolicyTomlRoot;
|
||||
use codex_execpolicy::Decision;
|
||||
use codex_execpolicy::Evaluation;
|
||||
use codex_execpolicy::RuleMatch;
|
||||
use pretty_assertions::assert_eq;
|
||||
use toml::from_str;
|
||||
|
||||
fn tokens(cmd: &[&str]) -> Vec<String> {
|
||||
cmd.iter().map(std::string::ToString::to_string).collect()
|
||||
}
|
||||
|
||||
fn allow_all(_: &[String]) -> Decision {
|
||||
Decision::Allow
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_single_prefix_rule_from_raw_toml() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
|
||||
assert_eq!(
|
||||
parsed,
|
||||
RequirementsExecPolicyTomlRoot {
|
||||
exec_policy: RequirementsExecPolicyToml {
|
||||
prefix_rules: vec![RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("rm".to_string()),
|
||||
any_of: None,
|
||||
}],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
|
||||
justification: None,
|
||||
}],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_multiple_prefix_rules_from_raw_toml() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
{ pattern = [{ token = "git" }, { any_of = ["push", "commit"] }], decision = "prompt", justification = "review changes before push or commit" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
|
||||
assert_eq!(
|
||||
parsed,
|
||||
RequirementsExecPolicyTomlRoot {
|
||||
exec_policy: RequirementsExecPolicyToml {
|
||||
prefix_rules: vec![
|
||||
RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("rm".to_string()),
|
||||
any_of: None,
|
||||
}],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
|
||||
justification: None,
|
||||
},
|
||||
RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("git".to_string()),
|
||||
any_of: None,
|
||||
},
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: None,
|
||||
any_of: Some(vec!["push".to_string(), "commit".to_string()]),
|
||||
},
|
||||
],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Prompt),
|
||||
justification: Some("review changes before push or commit".to_string()),
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn converts_rules_toml_into_internal_policy_representation() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
let policy = parsed.exec_policy.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["rm", "-rf", "/tmp"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Forbidden,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["rm"]),
|
||||
decision: Decision::Forbidden,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn head_any_of_expands_into_multiple_program_rules() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
[exec_policy]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ any_of = ["git", "hg"] }, { token = "status" }], decision = "prompt" },
|
||||
]
|
||||
"#;
|
||||
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
|
||||
let policy = parsed.exec_policy.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["git", "status"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["git", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["hg", "status"]), &allow_all),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["hg", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,9 +3,8 @@ use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use async_channel::unbounded;
|
||||
pub use codex_app_server_protocol::AppInfo;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::AuthManager;
|
||||
@@ -15,28 +14,13 @@ use crate::features::Feature;
|
||||
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
|
||||
use crate::mcp::auth::compute_auth_statuses;
|
||||
use crate::mcp::with_codex_apps_mcp;
|
||||
use crate::mcp_connection_manager::DEFAULT_STARTUP_TIMEOUT;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ConnectorInfo {
|
||||
#[serde(rename = "id")]
|
||||
pub connector_id: String,
|
||||
#[serde(rename = "name")]
|
||||
pub connector_name: String,
|
||||
#[serde(default, rename = "description")]
|
||||
pub connector_description: Option<String>,
|
||||
#[serde(default, rename = "logo_url")]
|
||||
pub logo_url: Option<String>,
|
||||
#[serde(default, rename = "install_url")]
|
||||
pub install_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub is_accessible: bool,
|
||||
}
|
||||
|
||||
pub async fn list_accessible_connectors_from_mcp_tools(
|
||||
config: &Config,
|
||||
) -> anyhow::Result<Vec<ConnectorInfo>> {
|
||||
if !config.features.enabled(Feature::Connectors) {
|
||||
) -> anyhow::Result<Vec<AppInfo>> {
|
||||
if !config.features.enabled(Feature::Apps) {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
@@ -72,6 +56,13 @@ pub async fn list_accessible_connectors_from_mcp_tools(
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Some(cfg) = mcp_servers.get(CODEX_APPS_MCP_SERVER_NAME) {
|
||||
let timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
|
||||
mcp_connection_manager
|
||||
.wait_for_server_ready(CODEX_APPS_MCP_SERVER_NAME, timeout)
|
||||
.await;
|
||||
}
|
||||
|
||||
let tools = mcp_connection_manager.list_all_tools().await;
|
||||
cancel_token.cancel();
|
||||
|
||||
@@ -86,13 +77,17 @@ fn auth_manager_from_config(config: &Config) -> std::sync::Arc<AuthManager> {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn connector_display_label(connector: &ConnectorInfo) -> String {
|
||||
format_connector_label(&connector.connector_name, &connector.connector_id)
|
||||
pub fn connector_display_label(connector: &AppInfo) -> String {
|
||||
format_connector_label(&connector.name, &connector.id)
|
||||
}
|
||||
|
||||
pub fn connector_mention_slug(connector: &AppInfo) -> String {
|
||||
connector_name_slug(&connector_display_label(connector))
|
||||
}
|
||||
|
||||
pub(crate) fn accessible_connectors_from_mcp_tools(
|
||||
mcp_tools: &HashMap<String, crate::mcp_connection_manager::ToolInfo>,
|
||||
) -> Vec<ConnectorInfo> {
|
||||
) -> Vec<AppInfo> {
|
||||
let tools = mcp_tools.values().filter_map(|tool| {
|
||||
if tool.server_name != CODEX_APPS_MCP_SERVER_NAME {
|
||||
return None;
|
||||
@@ -105,34 +100,37 @@ pub(crate) fn accessible_connectors_from_mcp_tools(
|
||||
}
|
||||
|
||||
pub fn merge_connectors(
|
||||
connectors: Vec<ConnectorInfo>,
|
||||
accessible_connectors: Vec<ConnectorInfo>,
|
||||
) -> Vec<ConnectorInfo> {
|
||||
let mut merged: HashMap<String, ConnectorInfo> = connectors
|
||||
connectors: Vec<AppInfo>,
|
||||
accessible_connectors: Vec<AppInfo>,
|
||||
) -> Vec<AppInfo> {
|
||||
let mut merged: HashMap<String, AppInfo> = connectors
|
||||
.into_iter()
|
||||
.map(|mut connector| {
|
||||
connector.is_accessible = false;
|
||||
(connector.connector_id.clone(), connector)
|
||||
(connector.id.clone(), connector)
|
||||
})
|
||||
.collect();
|
||||
|
||||
for mut connector in accessible_connectors {
|
||||
connector.is_accessible = true;
|
||||
let connector_id = connector.connector_id.clone();
|
||||
let connector_id = connector.id.clone();
|
||||
if let Some(existing) = merged.get_mut(&connector_id) {
|
||||
existing.is_accessible = true;
|
||||
if existing.connector_name == existing.connector_id
|
||||
&& connector.connector_name != connector.connector_id
|
||||
{
|
||||
existing.connector_name = connector.connector_name;
|
||||
if existing.name == existing.id && connector.name != connector.id {
|
||||
existing.name = connector.name;
|
||||
}
|
||||
if existing.connector_description.is_none() && connector.connector_description.is_some()
|
||||
{
|
||||
existing.connector_description = connector.connector_description;
|
||||
if existing.description.is_none() && connector.description.is_some() {
|
||||
existing.description = connector.description;
|
||||
}
|
||||
if existing.logo_url.is_none() && connector.logo_url.is_some() {
|
||||
existing.logo_url = connector.logo_url;
|
||||
}
|
||||
if existing.logo_url_dark.is_none() && connector.logo_url_dark.is_some() {
|
||||
existing.logo_url_dark = connector.logo_url_dark;
|
||||
}
|
||||
if existing.distribution_channel.is_none() && connector.distribution_channel.is_some() {
|
||||
existing.distribution_channel = connector.distribution_channel;
|
||||
}
|
||||
} else {
|
||||
merged.insert(connector_id, connector);
|
||||
}
|
||||
@@ -141,23 +139,20 @@ pub fn merge_connectors(
|
||||
let mut merged = merged.into_values().collect::<Vec<_>>();
|
||||
for connector in &mut merged {
|
||||
if connector.install_url.is_none() {
|
||||
connector.install_url = Some(connector_install_url(
|
||||
&connector.connector_name,
|
||||
&connector.connector_id,
|
||||
));
|
||||
connector.install_url = Some(connector_install_url(&connector.name, &connector.id));
|
||||
}
|
||||
}
|
||||
merged.sort_by(|left, right| {
|
||||
right
|
||||
.is_accessible
|
||||
.cmp(&left.is_accessible)
|
||||
.then_with(|| left.connector_name.cmp(&right.connector_name))
|
||||
.then_with(|| left.connector_id.cmp(&right.connector_id))
|
||||
.then_with(|| left.name.cmp(&right.name))
|
||||
.then_with(|| left.id.cmp(&right.id))
|
||||
});
|
||||
merged
|
||||
}
|
||||
|
||||
fn collect_accessible_connectors<I>(tools: I) -> Vec<ConnectorInfo>
|
||||
fn collect_accessible_connectors<I>(tools: I) -> Vec<AppInfo>
|
||||
where
|
||||
I: IntoIterator<Item = (String, Option<String>)>,
|
||||
{
|
||||
@@ -172,14 +167,16 @@ where
|
||||
connectors.insert(connector_id, connector_name);
|
||||
}
|
||||
}
|
||||
let mut accessible: Vec<ConnectorInfo> = connectors
|
||||
let mut accessible: Vec<AppInfo> = connectors
|
||||
.into_iter()
|
||||
.map(|(connector_id, connector_name)| ConnectorInfo {
|
||||
install_url: Some(connector_install_url(&connector_name, &connector_id)),
|
||||
connector_id,
|
||||
connector_name,
|
||||
connector_description: None,
|
||||
.map(|(connector_id, connector_name)| AppInfo {
|
||||
id: connector_id.clone(),
|
||||
name: connector_name.clone(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some(connector_install_url(&connector_name, &connector_id)),
|
||||
is_accessible: true,
|
||||
})
|
||||
.collect();
|
||||
@@ -187,8 +184,8 @@ where
|
||||
right
|
||||
.is_accessible
|
||||
.cmp(&left.is_accessible)
|
||||
.then_with(|| left.connector_name.cmp(&right.connector_name))
|
||||
.then_with(|| left.connector_id.cmp(&right.connector_id))
|
||||
.then_with(|| left.name.cmp(&right.name))
|
||||
.then_with(|| left.id.cmp(&right.id))
|
||||
});
|
||||
accessible
|
||||
}
|
||||
@@ -205,7 +202,7 @@ pub fn connector_install_url(name: &str, connector_id: &str) -> String {
|
||||
format!("https://chatgpt.com/apps/{slug}/{connector_id}")
|
||||
}
|
||||
|
||||
fn connector_name_slug(name: &str) -> String {
|
||||
pub fn connector_name_slug(name: &str) -> String {
|
||||
let mut normalized = String::with_capacity(name.len());
|
||||
for character in name.chars() {
|
||||
if character.is_ascii_alphanumeric() {
|
||||
|
||||
@@ -95,6 +95,12 @@ pub fn originator() -> Originator {
|
||||
get_originator_value(None)
|
||||
}
|
||||
|
||||
pub fn is_first_party_originator(originator_value: &str) -> bool {
|
||||
originator_value == DEFAULT_ORIGINATOR
|
||||
|| originator_value == "codex_vscode"
|
||||
|| originator_value.starts_with("Codex ")
|
||||
}
|
||||
|
||||
pub fn get_codex_user_agent() -> String {
|
||||
let build_version = env!("CARGO_PKG_VERSION");
|
||||
let os_info = os_info::get();
|
||||
@@ -185,6 +191,7 @@ fn is_sandboxed() -> bool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_get_codex_user_agent() {
|
||||
@@ -194,6 +201,15 @@ mod tests {
|
||||
assert!(user_agent.starts_with(&prefix));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_first_party_originator_matches_known_values() {
|
||||
assert_eq!(is_first_party_originator(DEFAULT_ORIGINATOR), true);
|
||||
assert_eq!(is_first_party_originator("codex_vscode"), true);
|
||||
assert_eq!(is_first_party_originator("Codex Something Else"), true);
|
||||
assert_eq!(is_first_party_originator("codex_cli"), false);
|
||||
assert_eq!(is_first_party_originator("Other"), false);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_create_client_sets_default_headers() {
|
||||
skip_if_no_network!();
|
||||
|
||||
@@ -113,6 +113,9 @@ pub enum CodexErr {
|
||||
#[error("{0}")]
|
||||
UsageLimitReached(UsageLimitReachedError),
|
||||
|
||||
#[error("{0}")]
|
||||
ModelCap(ModelCapError),
|
||||
|
||||
#[error("{0}")]
|
||||
ResponseStreamFailed(ResponseStreamFailed),
|
||||
|
||||
@@ -205,7 +208,8 @@ impl CodexErr {
|
||||
| CodexErr::AgentLimitReached { .. }
|
||||
| CodexErr::Spawn
|
||||
| CodexErr::SessionConfiguredNotFirstEvent
|
||||
| CodexErr::UsageLimitReached(_) => false,
|
||||
| CodexErr::UsageLimitReached(_)
|
||||
| CodexErr::ModelCap(_) => false,
|
||||
CodexErr::Stream(..)
|
||||
| CodexErr::Timeout
|
||||
| CodexErr::UnexpectedStatus(_)
|
||||
@@ -394,6 +398,30 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ModelCapError {
|
||||
pub(crate) model: String,
|
||||
pub(crate) reset_after_seconds: Option<u64>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ModelCapError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut message = format!(
|
||||
"Model {} is at capacity. Please try a different model.",
|
||||
self.model
|
||||
);
|
||||
if let Some(seconds) = self.reset_after_seconds {
|
||||
message.push_str(&format!(
|
||||
" Try again in {}.",
|
||||
format_duration_short(seconds)
|
||||
));
|
||||
} else {
|
||||
message.push_str(" Try again later.");
|
||||
}
|
||||
write!(f, "{message}")
|
||||
}
|
||||
}
|
||||
|
||||
fn retry_suffix(resets_at: Option<&DateTime<Utc>>) -> String {
|
||||
if let Some(resets_at) = resets_at {
|
||||
let formatted = format_retry_timestamp(resets_at);
|
||||
@@ -425,6 +453,18 @@ fn format_retry_timestamp(resets_at: &DateTime<Utc>) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn format_duration_short(seconds: u64) -> String {
|
||||
if seconds < 60 {
|
||||
"less than a minute".to_string()
|
||||
} else if seconds < 3600 {
|
||||
format!("{}m", seconds / 60)
|
||||
} else if seconds < 86_400 {
|
||||
format!("{}h", seconds / 3600)
|
||||
} else {
|
||||
format!("{}d", seconds / 86_400)
|
||||
}
|
||||
}
|
||||
|
||||
fn day_suffix(day: u32) -> &'static str {
|
||||
match day {
|
||||
11..=13 => "th",
|
||||
@@ -488,6 +528,10 @@ impl CodexErr {
|
||||
CodexErr::UsageLimitReached(_)
|
||||
| CodexErr::QuotaExceeded
|
||||
| CodexErr::UsageNotIncluded => CodexErrorInfo::UsageLimitExceeded,
|
||||
CodexErr::ModelCap(err) => CodexErrorInfo::ModelCap {
|
||||
model: err.model.clone(),
|
||||
reset_after_seconds: err.reset_after_seconds,
|
||||
},
|
||||
CodexErr::RetryLimit(_) => CodexErrorInfo::ResponseTooManyFailedAttempts {
|
||||
http_status_code: self.http_status_code_value(),
|
||||
},
|
||||
@@ -631,6 +675,45 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn model_cap_error_formats_message() {
|
||||
let err = ModelCapError {
|
||||
model: "boomslang".to_string(),
|
||||
reset_after_seconds: Some(120),
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"Model boomslang is at capacity. Please try a different model. Try again in 2m."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn model_cap_error_formats_message_without_reset() {
|
||||
let err = ModelCapError {
|
||||
model: "boomslang".to_string(),
|
||||
reset_after_seconds: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"Model boomslang is at capacity. Please try a different model. Try again later."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn model_cap_error_maps_to_protocol() {
|
||||
let err = CodexErr::ModelCap(ModelCapError {
|
||||
model: "boomslang".to_string(),
|
||||
reset_after_seconds: Some(30),
|
||||
});
|
||||
assert_eq!(
|
||||
err.to_codex_protocol_error(),
|
||||
CodexErrorInfo::ModelCap {
|
||||
model: "boomslang".to_string(),
|
||||
reset_after_seconds: Some(30),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sandbox_denied_uses_aggregated_output_when_stderr_empty() {
|
||||
let output = ExecToolCallOutput {
|
||||
|
||||
@@ -21,6 +21,7 @@ use crate::instructions::SkillInstructions;
|
||||
use crate::instructions::UserInstructions;
|
||||
use crate::session_prefix::is_session_prefix;
|
||||
use crate::user_shell_command::is_user_shell_command_text;
|
||||
use crate::web_search::web_search_action_detail;
|
||||
|
||||
fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
if UserInstructions::is_user_instructions(message)
|
||||
@@ -127,14 +128,17 @@ pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
|
||||
raw_content,
|
||||
}))
|
||||
}
|
||||
ResponseItem::WebSearchCall {
|
||||
id,
|
||||
action: WebSearchAction::Search { query },
|
||||
..
|
||||
} => Some(TurnItem::WebSearch(WebSearchItem {
|
||||
id: id.clone().unwrap_or_default(),
|
||||
query: query.clone().unwrap_or_default(),
|
||||
})),
|
||||
ResponseItem::WebSearchCall { id, action, .. } => {
|
||||
let (action, query) = match action {
|
||||
Some(action) => (action.clone(), web_search_action_detail(action)),
|
||||
None => (WebSearchAction::Other, String::new()),
|
||||
};
|
||||
Some(TurnItem::WebSearch(WebSearchItem {
|
||||
id: id.clone().unwrap_or_default(),
|
||||
query,
|
||||
action,
|
||||
}))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -144,6 +148,7 @@ mod tests {
|
||||
use super::parse_turn_item;
|
||||
use codex_protocol::items::AgentMessageContent;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::items::WebSearchItem;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ReasoningItemContent;
|
||||
use codex_protocol::models::ReasoningItemReasoningSummary;
|
||||
@@ -419,18 +424,102 @@ mod tests {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_1".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: WebSearchAction::Search {
|
||||
action: Some(WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => {
|
||||
assert_eq!(search.id, "ws_1");
|
||||
assert_eq!(search.query, "weather");
|
||||
}
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_1".to_string(),
|
||||
query: "weather".to_string(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_web_search_open_page_call() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_open".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::OpenPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
}),
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_open".to_string(),
|
||||
query: "https://example.com".to_string(),
|
||||
action: WebSearchAction::OpenPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_web_search_find_in_page_call() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_find".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::FindInPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
pattern: Some("needle".to_string()),
|
||||
}),
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_find".to_string(),
|
||||
query: "'needle' in https://example.com".to_string(),
|
||||
action: WebSearchAction::FindInPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
pattern: Some("needle".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_partial_web_search_call_without_action_as_other() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_partial".to_string()),
|
||||
status: Some("in_progress".to_string()),
|
||||
action: None,
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_partial".to_string(),
|
||||
query: String::new(),
|
||||
action: WebSearchAction::Other,
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -64,6 +64,7 @@ pub struct ExecParams {
|
||||
pub expiration: ExecExpiration,
|
||||
pub env: HashMap<String, String>,
|
||||
pub sandbox_permissions: SandboxPermissions,
|
||||
pub windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
|
||||
pub justification: Option<String>,
|
||||
pub arg0: Option<String>,
|
||||
}
|
||||
@@ -141,11 +142,15 @@ pub async fn process_exec_tool_call(
|
||||
codex_linux_sandbox_exe: &Option<PathBuf>,
|
||||
stdout_stream: Option<StdoutStream>,
|
||||
) -> Result<ExecToolCallOutput> {
|
||||
let windows_sandbox_level = params.windows_sandbox_level;
|
||||
let sandbox_type = match &sandbox_policy {
|
||||
SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
|
||||
SandboxType::None
|
||||
}
|
||||
_ => get_platform_sandbox().unwrap_or(SandboxType::None),
|
||||
_ => get_platform_sandbox(
|
||||
windows_sandbox_level != codex_protocol::config_types::WindowsSandboxLevel::Disabled,
|
||||
)
|
||||
.unwrap_or(SandboxType::None),
|
||||
};
|
||||
tracing::debug!("Sandbox type: {sandbox_type:?}");
|
||||
|
||||
@@ -155,6 +160,7 @@ pub async fn process_exec_tool_call(
|
||||
expiration,
|
||||
env,
|
||||
sandbox_permissions,
|
||||
windows_sandbox_level,
|
||||
justification,
|
||||
arg0: _,
|
||||
} = params;
|
||||
@@ -184,6 +190,7 @@ pub async fn process_exec_tool_call(
|
||||
sandbox_type,
|
||||
sandbox_cwd,
|
||||
codex_linux_sandbox_exe.as_ref(),
|
||||
windows_sandbox_level,
|
||||
)
|
||||
.map_err(CodexErr::from)?;
|
||||
|
||||
@@ -202,6 +209,7 @@ pub(crate) async fn execute_exec_env(
|
||||
env,
|
||||
expiration,
|
||||
sandbox,
|
||||
windows_sandbox_level,
|
||||
sandbox_permissions,
|
||||
justification,
|
||||
arg0,
|
||||
@@ -213,6 +221,7 @@ pub(crate) async fn execute_exec_env(
|
||||
expiration,
|
||||
env,
|
||||
sandbox_permissions,
|
||||
windows_sandbox_level,
|
||||
justification,
|
||||
arg0,
|
||||
};
|
||||
@@ -223,13 +232,79 @@ pub(crate) async fn execute_exec_env(
|
||||
finalize_exec_result(raw_output_result, sandbox, duration)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn extract_create_process_as_user_error_code(err: &str) -> Option<String> {
|
||||
let marker = "CreateProcessAsUserW failed: ";
|
||||
let start = err.find(marker)? + marker.len();
|
||||
let tail = &err[start..];
|
||||
let digits: String = tail.chars().take_while(char::is_ascii_digit).collect();
|
||||
if digits.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(digits)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn windowsapps_path_kind(path: &str) -> &'static str {
|
||||
let lower = path.to_ascii_lowercase();
|
||||
if lower.contains("\\program files\\windowsapps\\") {
|
||||
return "windowsapps_package";
|
||||
}
|
||||
if lower.contains("\\appdata\\local\\microsoft\\windowsapps\\") {
|
||||
return "windowsapps_alias";
|
||||
}
|
||||
if lower.contains("\\windowsapps\\") {
|
||||
return "windowsapps_other";
|
||||
}
|
||||
"other"
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn record_windows_sandbox_spawn_failure(
|
||||
command_path: Option<&str>,
|
||||
windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
|
||||
err: &str,
|
||||
) {
|
||||
let Some(error_code) = extract_create_process_as_user_error_code(err) else {
|
||||
return;
|
||||
};
|
||||
let path = command_path.unwrap_or("unknown");
|
||||
let exe = Path::new(path)
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_ascii_lowercase();
|
||||
let path_kind = windowsapps_path_kind(path);
|
||||
let level = if matches!(
|
||||
windows_sandbox_level,
|
||||
codex_protocol::config_types::WindowsSandboxLevel::Elevated
|
||||
) {
|
||||
"elevated"
|
||||
} else {
|
||||
"legacy"
|
||||
};
|
||||
if let Some(metrics) = codex_otel::metrics::global() {
|
||||
let _ = metrics.counter(
|
||||
"codex.windows_sandbox.createprocessasuserw_failed",
|
||||
1,
|
||||
&[
|
||||
("error_code", error_code.as_str()),
|
||||
("path_kind", path_kind),
|
||||
("exe", exe.as_str()),
|
||||
("level", level),
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn exec_windows_sandbox(
|
||||
params: ExecParams,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
) -> Result<RawExecToolCallOutput> {
|
||||
use crate::config::find_codex_home;
|
||||
use crate::safety::is_windows_elevated_sandbox_enabled;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture_elevated;
|
||||
|
||||
@@ -238,6 +313,7 @@ async fn exec_windows_sandbox(
|
||||
cwd,
|
||||
env,
|
||||
expiration,
|
||||
windows_sandbox_level,
|
||||
..
|
||||
} = params;
|
||||
// TODO(iceweasel-oai): run_windows_sandbox_capture should support all
|
||||
@@ -255,7 +331,9 @@ async fn exec_windows_sandbox(
|
||||
"windows sandbox: failed to resolve codex_home: {err}"
|
||||
)))
|
||||
})?;
|
||||
let use_elevated = is_windows_elevated_sandbox_enabled();
|
||||
let command_path = command.first().cloned();
|
||||
let sandbox_level = windows_sandbox_level;
|
||||
let use_elevated = matches!(sandbox_level, WindowsSandboxLevel::Elevated);
|
||||
let spawn_res = tokio::task::spawn_blocking(move || {
|
||||
if use_elevated {
|
||||
run_windows_sandbox_capture_elevated(
|
||||
@@ -284,6 +362,11 @@ async fn exec_windows_sandbox(
|
||||
let capture = match spawn_res {
|
||||
Ok(Ok(v)) => v,
|
||||
Ok(Err(err)) => {
|
||||
record_windows_sandbox_spawn_failure(
|
||||
command_path.as_deref(),
|
||||
sandbox_level,
|
||||
&err.to_string(),
|
||||
);
|
||||
return Err(CodexErr::Io(io::Error::other(format!(
|
||||
"windows sandbox: {err}"
|
||||
))));
|
||||
@@ -312,20 +395,7 @@ async fn exec_windows_sandbox(
|
||||
text: stderr_text,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
// Best-effort aggregate: stdout then stderr (capped).
|
||||
let mut aggregated = Vec::with_capacity(
|
||||
stdout
|
||||
.text
|
||||
.len()
|
||||
.saturating_add(stderr.text.len())
|
||||
.min(EXEC_OUTPUT_MAX_BYTES),
|
||||
);
|
||||
append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
let aggregated_output = StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let aggregated_output = aggregate_output(&stdout, &stderr);
|
||||
|
||||
Ok(RawExecToolCallOutput {
|
||||
exit_status,
|
||||
@@ -519,6 +589,39 @@ fn append_capped(dst: &mut Vec<u8>, src: &[u8], max_bytes: usize) {
|
||||
dst.extend_from_slice(&src[..take]);
|
||||
}
|
||||
|
||||
fn aggregate_output(
|
||||
stdout: &StreamOutput<Vec<u8>>,
|
||||
stderr: &StreamOutput<Vec<u8>>,
|
||||
) -> StreamOutput<Vec<u8>> {
|
||||
let total_len = stdout.text.len().saturating_add(stderr.text.len());
|
||||
let max_bytes = EXEC_OUTPUT_MAX_BYTES;
|
||||
let mut aggregated = Vec::with_capacity(total_len.min(max_bytes));
|
||||
|
||||
if total_len <= max_bytes {
|
||||
aggregated.extend_from_slice(&stdout.text);
|
||||
aggregated.extend_from_slice(&stderr.text);
|
||||
return StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
}
|
||||
|
||||
// Under contention, reserve 1/3 for stdout and 2/3 for stderr; rebalance unused stderr to stdout.
|
||||
let want_stdout = stdout.text.len().min(max_bytes / 3);
|
||||
let want_stderr = stderr.text.len();
|
||||
let stderr_take = want_stderr.min(max_bytes.saturating_sub(want_stdout));
|
||||
let remaining = max_bytes.saturating_sub(want_stdout + stderr_take);
|
||||
let stdout_take = want_stdout + remaining.min(stdout.text.len().saturating_sub(want_stdout));
|
||||
|
||||
aggregated.extend_from_slice(&stdout.text[..stdout_take]);
|
||||
aggregated.extend_from_slice(&stderr.text[..stderr_take]);
|
||||
|
||||
StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ExecToolCallOutput {
|
||||
pub exit_code: i32,
|
||||
@@ -564,6 +667,7 @@ async fn exec(
|
||||
env,
|
||||
arg0,
|
||||
expiration,
|
||||
windows_sandbox_level: _,
|
||||
..
|
||||
} = params;
|
||||
|
||||
@@ -683,20 +787,7 @@ async fn consume_truncated_output(
|
||||
Duration::from_millis(IO_DRAIN_TIMEOUT_MS),
|
||||
)
|
||||
.await?;
|
||||
// Best-effort aggregate: stdout then stderr (capped).
|
||||
let mut aggregated = Vec::with_capacity(
|
||||
stdout
|
||||
.text
|
||||
.len()
|
||||
.saturating_add(stderr.text.len())
|
||||
.min(EXEC_OUTPUT_MAX_BYTES),
|
||||
);
|
||||
append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES * 2);
|
||||
let aggregated_output = StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let aggregated_output = aggregate_output(&stdout, &stderr);
|
||||
|
||||
Ok(RawExecToolCallOutput {
|
||||
exit_status,
|
||||
@@ -771,6 +862,7 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::time::Duration;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
@@ -846,6 +938,85 @@ mod tests {
|
||||
assert_eq!(out.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_prefers_stderr_on_contention() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stdout_cap = EXEC_OUTPUT_MAX_BYTES / 3;
|
||||
let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_cap);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_cap], vec![b'a'; stdout_cap]);
|
||||
assert_eq!(aggregated.text[stdout_cap..], vec![b'b'; stderr_cap]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_fills_remaining_capacity_with_stderr() {
|
||||
let stdout_len = EXEC_OUTPUT_MAX_BYTES / 10;
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; stdout_len],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_len);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
|
||||
assert_eq!(aggregated.text[stdout_len..], vec![b'b'; stderr_cap]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_rebalances_when_stderr_is_small() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; 1],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stdout_len = EXEC_OUTPUT_MAX_BYTES.saturating_sub(1);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
|
||||
assert_eq!(aggregated.text[stdout_len..], vec![b'b'; 1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_keeps_stdout_then_stderr_when_under_cap() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; 4],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; 3],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let mut expected = Vec::new();
|
||||
expected.extend_from_slice(&stdout.text);
|
||||
expected.extend_from_slice(&stderr.text);
|
||||
|
||||
assert_eq!(aggregated.text, expected);
|
||||
assert_eq!(aggregated.truncated_after_lines, None);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[test]
|
||||
fn sandbox_detection_flags_sigsys_exit_code() {
|
||||
@@ -878,6 +1049,7 @@ mod tests {
|
||||
expiration: 500.into(),
|
||||
env,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
@@ -923,6 +1095,7 @@ mod tests {
|
||||
expiration: ExecExpiration::Cancellation(cancel_token),
|
||||
env,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel::Disabled,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
};
|
||||
|
||||
@@ -87,6 +87,15 @@ pub(crate) struct ExecPolicyManager {
|
||||
policy: ArcSwap<Policy>,
|
||||
}
|
||||
|
||||
pub(crate) struct ExecApprovalRequest<'a> {
|
||||
pub(crate) features: &'a Features,
|
||||
pub(crate) command: &'a [String],
|
||||
pub(crate) approval_policy: AskForApproval,
|
||||
pub(crate) sandbox_policy: &'a SandboxPolicy,
|
||||
pub(crate) sandbox_permissions: SandboxPermissions,
|
||||
pub(crate) prefix_rule: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl ExecPolicyManager {
|
||||
pub(crate) fn new(policy: Arc<Policy>) -> Self {
|
||||
Self {
|
||||
@@ -112,12 +121,16 @@ impl ExecPolicyManager {
|
||||
|
||||
pub(crate) async fn create_exec_approval_requirement_for_command(
|
||||
&self,
|
||||
features: &Features,
|
||||
command: &[String],
|
||||
approval_policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
sandbox_permissions: SandboxPermissions,
|
||||
req: ExecApprovalRequest<'_>,
|
||||
) -> ExecApprovalRequirement {
|
||||
let ExecApprovalRequest {
|
||||
features,
|
||||
command,
|
||||
approval_policy,
|
||||
sandbox_policy,
|
||||
sandbox_permissions,
|
||||
prefix_rule,
|
||||
} = req;
|
||||
let exec_policy = self.current();
|
||||
let commands =
|
||||
parse_shell_lc_plain_commands(command).unwrap_or_else(|| vec![command.to_vec()]);
|
||||
@@ -131,6 +144,12 @@ impl ExecPolicyManager {
|
||||
};
|
||||
let evaluation = exec_policy.check_multiple(commands.iter(), &exec_policy_fallback);
|
||||
|
||||
let requested_amendment = derive_requested_execpolicy_amendment(
|
||||
features,
|
||||
prefix_rule.as_ref(),
|
||||
&evaluation.matched_rules,
|
||||
);
|
||||
|
||||
match evaluation.decision {
|
||||
Decision::Forbidden => ExecApprovalRequirement::Forbidden {
|
||||
reason: derive_forbidden_reason(command, &evaluation),
|
||||
@@ -144,9 +163,11 @@ impl ExecPolicyManager {
|
||||
ExecApprovalRequirement::NeedsApproval {
|
||||
reason: derive_prompt_reason(command, &evaluation),
|
||||
proposed_execpolicy_amendment: if features.enabled(Feature::ExecPolicy) {
|
||||
try_derive_execpolicy_amendment_for_prompt_rules(
|
||||
&evaluation.matched_rules,
|
||||
)
|
||||
requested_amendment.or_else(|| {
|
||||
try_derive_execpolicy_amendment_for_prompt_rules(
|
||||
&evaluation.matched_rules,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
},
|
||||
@@ -382,6 +403,30 @@ fn try_derive_execpolicy_amendment_for_allow_rules(
|
||||
})
|
||||
}
|
||||
|
||||
fn derive_requested_execpolicy_amendment(
|
||||
features: &Features,
|
||||
prefix_rule: Option<&Vec<String>>,
|
||||
matched_rules: &[RuleMatch],
|
||||
) -> Option<ExecPolicyAmendment> {
|
||||
if !features.enabled(Feature::ExecPolicy) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let prefix_rule = prefix_rule?;
|
||||
if prefix_rule.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if matched_rules
|
||||
.iter()
|
||||
.any(|rule_match| is_policy_match(rule_match) && rule_match.decision() == Decision::Prompt)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(ExecPolicyAmendment::new(prefix_rule.clone()))
|
||||
}
|
||||
|
||||
/// Only return a reason when a policy rule drove the prompt decision.
|
||||
fn derive_prompt_reason(command_args: &[String], evaluation: &Evaluation) -> Option<String> {
|
||||
let command = render_shlex_command(command_args);
|
||||
@@ -756,13 +801,14 @@ prefix_rule(pattern=["rm"], decision="forbidden")
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&forbidden_script,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &forbidden_script,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -790,17 +836,18 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&[
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &[
|
||||
"rm".to_string(),
|
||||
"-rf".to_string(),
|
||||
"/some/important/folder".to_string(),
|
||||
],
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -823,13 +870,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -853,13 +901,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::Never,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -876,13 +925,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -894,6 +944,40 @@ prefix_rule(
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn request_rule_uses_prefix_rule() {
|
||||
let command = vec![
|
||||
"cargo".to_string(),
|
||||
"install".to_string(),
|
||||
"cargo-insta".to_string(),
|
||||
];
|
||||
let manager = ExecPolicyManager::default();
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::RequestRule);
|
||||
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::RequireEscalated,
|
||||
prefix_rule: Some(vec!["cargo".to_string(), "install".to_string()]),
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
requirement,
|
||||
ExecApprovalRequirement::NeedsApproval {
|
||||
reason: None,
|
||||
proposed_execpolicy_amendment: Some(ExecPolicyAmendment::new(vec![
|
||||
"cargo".to_string(),
|
||||
"install".to_string(),
|
||||
])),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn heuristics_apply_when_other_commands_match_policy() {
|
||||
let policy_src = r#"prefix_rule(pattern=["apple"], decision="allow")"#;
|
||||
@@ -910,13 +994,14 @@ prefix_rule(
|
||||
|
||||
assert_eq!(
|
||||
ExecPolicyManager::new(policy)
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
ExecApprovalRequirement::NeedsApproval {
|
||||
reason: None,
|
||||
@@ -984,13 +1069,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1011,13 +1097,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1041,13 +1128,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1068,13 +1156,14 @@ prefix_rule(
|
||||
];
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1106,13 +1195,14 @@ prefix_rule(
|
||||
|
||||
assert_eq!(
|
||||
ExecPolicyManager::new(policy)
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
ExecApprovalRequirement::NeedsApproval {
|
||||
reason: None,
|
||||
@@ -1129,13 +1219,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1159,13 +1250,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1226,13 +1318,14 @@ prefix_rule(
|
||||
assert_eq!(
|
||||
expected_req,
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&sneaky_command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &sneaky_command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
"{pwsh_approval_reason}"
|
||||
);
|
||||
@@ -1249,13 +1342,14 @@ prefix_rule(
|
||||
]))),
|
||||
},
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&dangerous_command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &dangerous_command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
r#"On all platforms, a forbidden command should require approval
|
||||
(unless AskForApproval::Never is specified)."#
|
||||
@@ -1268,13 +1362,14 @@ prefix_rule(
|
||||
reason: "`rm -rf /important/data` rejected: blocked by policy".to_string(),
|
||||
},
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&dangerous_command,
|
||||
AskForApproval::Never,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &dangerous_command,
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
r#"On all platforms, a forbidden command should require approval
|
||||
(unless AskForApproval::Never is specified)."#
|
||||
|
||||
@@ -5,14 +5,20 @@
|
||||
//! booleans through multiple types, call sites consult a single `Features`
|
||||
//! container attached to `Config`.
|
||||
|
||||
use crate::config::CONFIG_TOML_FILE;
|
||||
use crate::config::Config;
|
||||
use crate::config::ConfigToml;
|
||||
use crate::config::profile::ConfigProfile;
|
||||
use crate::protocol::Event;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::WarningEvent;
|
||||
use codex_otel::OtelManager;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::BTreeSet;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
mod legacy;
|
||||
pub(crate) use legacy::LegacyFeatureToggles;
|
||||
@@ -83,6 +89,8 @@ pub enum Feature {
|
||||
WebSearchCached,
|
||||
/// Gate the execpolicy enforcement for shell/unified exec.
|
||||
ExecPolicy,
|
||||
/// Allow the model to request approval and propose exec rules.
|
||||
RequestRule,
|
||||
/// Enable Windows sandbox (restricted token) on Windows.
|
||||
WindowsSandbox,
|
||||
/// Use the elevated Windows sandbox pipeline (setup + runner).
|
||||
@@ -93,6 +101,8 @@ pub enum Feature {
|
||||
RemoteModels,
|
||||
/// Experimental shell snapshotting.
|
||||
ShellSnapshot,
|
||||
/// Persist rollout metadata to a local SQLite database.
|
||||
Sqlite,
|
||||
/// Append additional AGENTS.md guidance to user instructions.
|
||||
ChildAgentsMd,
|
||||
/// Enforce UTF8 output in Powershell.
|
||||
@@ -101,12 +111,18 @@ pub enum Feature {
|
||||
EnableRequestCompression,
|
||||
/// Enable collab tools.
|
||||
Collab,
|
||||
/// Enable connectors (apps).
|
||||
Connectors,
|
||||
/// Enable apps.
|
||||
Apps,
|
||||
/// Allow prompting and installing missing MCP dependencies.
|
||||
SkillMcpDependencyInstall,
|
||||
/// Prompt for missing skill env var dependencies.
|
||||
SkillEnvVarDependencyPrompt,
|
||||
/// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
|
||||
Steer,
|
||||
/// Enable collaboration modes (Plan, Code, Pair Programming, Execute).
|
||||
CollaborationModes,
|
||||
/// Enable personality selection in the TUI.
|
||||
Personality,
|
||||
/// Use the Responses API WebSocket transport for OpenAI by default.
|
||||
ResponsesWebsockets,
|
||||
}
|
||||
@@ -136,6 +152,8 @@ impl Feature {
|
||||
pub struct LegacyFeatureUsage {
|
||||
pub alias: String,
|
||||
pub feature: Feature,
|
||||
pub summary: String,
|
||||
pub details: Option<String>,
|
||||
}
|
||||
|
||||
/// Holds the effective set of enabled features.
|
||||
@@ -192,9 +210,12 @@ impl Features {
|
||||
}
|
||||
|
||||
pub fn record_legacy_usage_force(&mut self, alias: &str, feature: Feature) {
|
||||
let (summary, details) = legacy_usage_notice(alias, feature);
|
||||
self.legacy_usages.insert(LegacyFeatureUsage {
|
||||
alias: alias.to_string(),
|
||||
feature,
|
||||
summary,
|
||||
details,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -205,10 +226,8 @@ impl Features {
|
||||
self.record_legacy_usage_force(alias, feature);
|
||||
}
|
||||
|
||||
pub fn legacy_feature_usages(&self) -> impl Iterator<Item = (&str, Feature)> + '_ {
|
||||
self.legacy_usages
|
||||
.iter()
|
||||
.map(|usage| (usage.alias.as_str(), usage.feature))
|
||||
pub fn legacy_feature_usages(&self) -> impl Iterator<Item = &LegacyFeatureUsage> + '_ {
|
||||
self.legacy_usages.iter()
|
||||
}
|
||||
|
||||
pub fn emit_metrics(&self, otel: &OtelManager) {
|
||||
@@ -229,6 +248,21 @@ impl Features {
|
||||
/// Apply a table of key -> bool toggles (e.g. from TOML).
|
||||
pub fn apply_map(&mut self, m: &BTreeMap<String, bool>) {
|
||||
for (k, v) in m {
|
||||
match k.as_str() {
|
||||
"web_search_request" => {
|
||||
self.record_legacy_usage_force(
|
||||
"features.web_search_request",
|
||||
Feature::WebSearchRequest,
|
||||
);
|
||||
}
|
||||
"web_search_cached" => {
|
||||
self.record_legacy_usage_force(
|
||||
"features.web_search_cached",
|
||||
Feature::WebSearchCached,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
match feature_for_key(k) {
|
||||
Some(feat) => {
|
||||
if k != feat.key() {
|
||||
@@ -289,6 +323,42 @@ impl Features {
|
||||
}
|
||||
}
|
||||
|
||||
fn legacy_usage_notice(alias: &str, feature: Feature) -> (String, Option<String>) {
|
||||
let canonical = feature.key();
|
||||
match feature {
|
||||
Feature::WebSearchRequest | Feature::WebSearchCached => {
|
||||
let label = match alias {
|
||||
"web_search" => "[features].web_search",
|
||||
"tools.web_search" => "[tools].web_search",
|
||||
"features.web_search_request" | "web_search_request" => {
|
||||
"[features].web_search_request"
|
||||
}
|
||||
"features.web_search_cached" | "web_search_cached" => {
|
||||
"[features].web_search_cached"
|
||||
}
|
||||
_ => alias,
|
||||
};
|
||||
let summary = format!("`{label}` is deprecated. Use `web_search` instead.");
|
||||
(summary, Some(web_search_details().to_string()))
|
||||
}
|
||||
_ => {
|
||||
let summary = format!("`{alias}` is deprecated. Use `[features].{canonical}` instead.");
|
||||
let details = if alias == canonical {
|
||||
None
|
||||
} else {
|
||||
Some(format!(
|
||||
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
))
|
||||
};
|
||||
(summary, details)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn web_search_details() -> &'static str {
|
||||
"Set `web_search` to `\"live\"`, `\"cached\"`, or `\"disabled\"` in config.toml."
|
||||
}
|
||||
|
||||
/// Keys accepted in `[features]` tables.
|
||||
fn feature_for_key(key: &str) -> Option<Feature> {
|
||||
for spec in FEATURES {
|
||||
@@ -337,13 +407,13 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::WebSearchRequest,
|
||||
key: "web_search_request",
|
||||
stage: Stage::Stable,
|
||||
stage: Stage::Deprecated,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WebSearchCached,
|
||||
key: "web_search_cached",
|
||||
stage: Stage::UnderDevelopment,
|
||||
stage: Stage::Deprecated,
|
||||
default_enabled: false,
|
||||
},
|
||||
// Experimental program. Rendered in the `/experimental` menu for users.
|
||||
@@ -367,6 +437,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Sqlite,
|
||||
key: "sqlite",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ChildAgentsMd,
|
||||
key: "child_agents_md",
|
||||
@@ -385,6 +461,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::RequestRule,
|
||||
key: "request_rule",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WindowsSandbox,
|
||||
key: "experimental_windows_sandbox",
|
||||
@@ -428,8 +510,8 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::EnableRequestCompression,
|
||||
key: "enable_request_compression",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
stage: Stage::Stable,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Collab,
|
||||
@@ -438,8 +520,24 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Connectors,
|
||||
key: "connectors",
|
||||
id: Feature::Apps,
|
||||
key: "apps",
|
||||
stage: Stage::Experimental {
|
||||
name: "Apps",
|
||||
menu_description: "Use a connected ChatGPT App using \"$\". Install Apps via /apps command. Restart Codex after enabling.",
|
||||
announcement: "NEW: Use ChatGPT Apps (Connectors) in Codex via $ mentions. Enable in /experimental and restart Codex!",
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::SkillMcpDependencyInstall,
|
||||
key: "skill_mcp_dependency_install",
|
||||
stage: Stage::Stable,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::SkillEnvVarDependencyPrompt,
|
||||
key: "skill_env_var_dependency_prompt",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
@@ -459,6 +557,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Personality,
|
||||
key: "personality",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ResponsesWebsockets,
|
||||
key: "responses_websockets",
|
||||
@@ -466,3 +570,54 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
default_enabled: false,
|
||||
},
|
||||
];
|
||||
|
||||
/// Push a warning event if any under-development features are enabled.
|
||||
pub fn maybe_push_unstable_features_warning(
|
||||
config: &Config,
|
||||
post_session_configured_events: &mut Vec<Event>,
|
||||
) {
|
||||
if config.suppress_unstable_features_warning {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut under_development_feature_keys = Vec::new();
|
||||
if let Some(table) = config
|
||||
.config_layer_stack
|
||||
.effective_config()
|
||||
.get("features")
|
||||
.and_then(TomlValue::as_table)
|
||||
{
|
||||
for (key, value) in table {
|
||||
if value.as_bool() != Some(true) {
|
||||
continue;
|
||||
}
|
||||
let Some(spec) = FEATURES.iter().find(|spec| spec.key == key.as_str()) else {
|
||||
continue;
|
||||
};
|
||||
if !config.features.enabled(spec.id) {
|
||||
continue;
|
||||
}
|
||||
if matches!(spec.stage, Stage::UnderDevelopment) {
|
||||
under_development_feature_keys.push(spec.key.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if under_development_feature_keys.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let under_development_feature_keys = under_development_feature_keys.join(", ");
|
||||
let config_path = config
|
||||
.codex_home
|
||||
.join(CONFIG_TOML_FILE)
|
||||
.display()
|
||||
.to_string();
|
||||
let message = format!(
|
||||
"Under-development features enabled: {under_development_feature_keys}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {config_path}."
|
||||
);
|
||||
post_session_configured_events.push(Event {
|
||||
id: "".to_owned(),
|
||||
msg: EventMsg::Warning(WarningEvent { message }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9,6 +9,10 @@ struct Alias {
|
||||
}
|
||||
|
||||
const ALIASES: &[Alias] = &[
|
||||
Alias {
|
||||
legacy_key: "connectors",
|
||||
feature: Feature::Apps,
|
||||
},
|
||||
Alias {
|
||||
legacy_key: "enable_experimental_windows_sandbox",
|
||||
feature: Feature::WindowsSandbox,
|
||||
|
||||
@@ -38,10 +38,12 @@ pub mod landlock;
|
||||
pub mod mcp;
|
||||
mod mcp_connection_manager;
|
||||
pub mod models_manager;
|
||||
mod transport_manager;
|
||||
pub use mcp_connection_manager::MCP_SANDBOX_STATE_CAPABILITY;
|
||||
pub use mcp_connection_manager::MCP_SANDBOX_STATE_METHOD;
|
||||
pub use mcp_connection_manager::SandboxState;
|
||||
mod mcp_tool_call;
|
||||
mod mentions;
|
||||
mod message_history;
|
||||
mod model_provider_info;
|
||||
pub mod parse_command;
|
||||
@@ -69,6 +71,7 @@ mod event_mapping;
|
||||
pub mod review_format;
|
||||
pub mod review_prompts;
|
||||
mod thread_manager;
|
||||
pub mod web_search;
|
||||
pub use codex_protocol::protocol::InitialHistory;
|
||||
pub use thread_manager::NewThread;
|
||||
pub use thread_manager::ThreadManager;
|
||||
@@ -90,6 +93,7 @@ pub mod shell;
|
||||
pub mod shell_snapshot;
|
||||
pub mod skills;
|
||||
pub mod spawn;
|
||||
pub mod state_db;
|
||||
pub mod terminal;
|
||||
mod tools;
|
||||
pub mod turn_diff_tracker;
|
||||
@@ -110,6 +114,7 @@ pub use rollout::list::parse_cursor;
|
||||
pub use rollout::list::read_head_for_summary;
|
||||
pub use rollout::list::read_session_meta_line;
|
||||
pub use rollout::rollout_date_parts;
|
||||
pub use transport_manager::TransportManager;
|
||||
mod function_tool;
|
||||
mod state;
|
||||
mod tasks;
|
||||
@@ -125,9 +130,6 @@ pub use exec_policy::ExecPolicyError;
|
||||
pub use exec_policy::check_execpolicy_for_warnings;
|
||||
pub use exec_policy::load_exec_policy;
|
||||
pub use safety::get_platform_sandbox;
|
||||
pub use safety::is_windows_elevated_sandbox_enabled;
|
||||
pub use safety::set_windows_elevated_sandbox_enabled;
|
||||
pub use safety::set_windows_sandbox_enabled;
|
||||
pub use tools::spec::parse_tool_input_schema;
|
||||
// Re-export the protocol types from the standalone `codex-protocol` crate so existing
|
||||
// `codex_core::protocol::...` references continue to work across the workspace.
|
||||
|
||||
@@ -4,12 +4,53 @@ use anyhow::Result;
|
||||
use codex_protocol::protocol::McpAuthStatus;
|
||||
use codex_rmcp_client::OAuthCredentialsStoreMode;
|
||||
use codex_rmcp_client::determine_streamable_http_auth_status;
|
||||
use codex_rmcp_client::supports_oauth_login;
|
||||
use futures::future::join_all;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::config::types::McpServerConfig;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct McpOAuthLoginConfig {
|
||||
pub url: String,
|
||||
pub http_headers: Option<HashMap<String, String>>,
|
||||
pub env_http_headers: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum McpOAuthLoginSupport {
|
||||
Supported(McpOAuthLoginConfig),
|
||||
Unsupported,
|
||||
Unknown(anyhow::Error),
|
||||
}
|
||||
|
||||
pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
|
||||
let McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
http_headers,
|
||||
env_http_headers,
|
||||
} = transport
|
||||
else {
|
||||
return McpOAuthLoginSupport::Unsupported;
|
||||
};
|
||||
|
||||
if bearer_token_env_var.is_some() {
|
||||
return McpOAuthLoginSupport::Unsupported;
|
||||
}
|
||||
|
||||
match supports_oauth_login(url).await {
|
||||
Ok(true) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
|
||||
url: url.clone(),
|
||||
http_headers: http_headers.clone(),
|
||||
env_http_headers: env_http_headers.clone(),
|
||||
}),
|
||||
Ok(false) => McpOAuthLoginSupport::Unsupported,
|
||||
Err(err) => McpOAuthLoginSupport::Unknown(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct McpAuthStatusEntry {
|
||||
pub config: McpServerConfig,
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
pub mod auth;
|
||||
mod skill_dependencies;
|
||||
|
||||
pub(crate) use skill_dependencies::maybe_prompt_and_install_mcp_dependencies;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
|
||||
use async_channel::unbounded;
|
||||
use codex_protocol::protocol::McpListToolsResponseEvent;
|
||||
@@ -21,7 +26,7 @@ use crate::mcp_connection_manager::SandboxState;
|
||||
|
||||
const MCP_TOOL_NAME_PREFIX: &str = "mcp";
|
||||
const MCP_TOOL_NAME_DELIMITER: &str = "__";
|
||||
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps_mcp";
|
||||
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
|
||||
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";
|
||||
|
||||
fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
|
||||
@@ -93,7 +98,7 @@ fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> Mc
|
||||
},
|
||||
enabled: true,
|
||||
disabled_reason: None,
|
||||
startup_timeout_sec: None,
|
||||
startup_timeout_sec: Some(Duration::from_secs(30)),
|
||||
tool_timeout_sec: None,
|
||||
enabled_tools: None,
|
||||
disabled_tools: None,
|
||||
@@ -124,7 +129,7 @@ pub(crate) fn effective_mcp_servers(
|
||||
) -> HashMap<String, McpServerConfig> {
|
||||
with_codex_apps_mcp(
|
||||
config.mcp_servers.get().clone(),
|
||||
config.features.enabled(Feature::Connectors),
|
||||
config.features.enabled(Feature::Apps),
|
||||
auth,
|
||||
config,
|
||||
)
|
||||
|
||||
519
codex-rs/core/src/mcp/skill_dependencies.rs
Normal file
519
codex-rs/core/src/mcp/skill_dependencies.rs
Normal file
@@ -0,0 +1,519 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::request_user_input::RequestUserInputArgs;
|
||||
use codex_protocol::request_user_input::RequestUserInputQuestion;
|
||||
use codex_protocol::request_user_input::RequestUserInputQuestionOption;
|
||||
use codex_protocol::request_user_input::RequestUserInputResponse;
|
||||
use codex_rmcp_client::perform_oauth_login;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::warn;
|
||||
|
||||
use super::auth::McpOAuthLoginSupport;
|
||||
use super::auth::oauth_login_support;
|
||||
use super::effective_mcp_servers;
|
||||
use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::config::Config;
|
||||
use crate::config::edit::ConfigEditsBuilder;
|
||||
use crate::config::load_global_mcp_servers;
|
||||
use crate::config::types::McpServerConfig;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use crate::default_client::is_first_party_originator;
|
||||
use crate::default_client::originator;
|
||||
use crate::features::Feature;
|
||||
use crate::skills::SkillMetadata;
|
||||
use crate::skills::model::SkillToolDependency;
|
||||
|
||||
const SKILL_MCP_DEPENDENCY_PROMPT_ID: &str = "skill_mcp_dependency_install";
|
||||
const MCP_DEPENDENCY_OPTION_INSTALL: &str = "Install";
|
||||
const MCP_DEPENDENCY_OPTION_SKIP: &str = "Continue anyway";
|
||||
|
||||
fn is_full_access_mode(turn_context: &TurnContext) -> bool {
|
||||
matches!(turn_context.approval_policy, AskForApproval::Never)
|
||||
&& matches!(
|
||||
turn_context.sandbox_policy,
|
||||
SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. }
|
||||
)
|
||||
}
|
||||
|
||||
fn format_missing_mcp_dependencies(missing: &HashMap<String, McpServerConfig>) -> String {
|
||||
let mut names = missing.keys().cloned().collect::<Vec<_>>();
|
||||
names.sort();
|
||||
names.join(", ")
|
||||
}
|
||||
|
||||
async fn filter_prompted_mcp_dependencies(
|
||||
sess: &Session,
|
||||
missing: &HashMap<String, McpServerConfig>,
|
||||
) -> HashMap<String, McpServerConfig> {
|
||||
let prompted = sess.mcp_dependency_prompted().await;
|
||||
if prompted.is_empty() {
|
||||
return missing.clone();
|
||||
}
|
||||
|
||||
missing
|
||||
.iter()
|
||||
.filter(|(name, config)| !prompted.contains(&canonical_mcp_server_key(name, config)))
|
||||
.map(|(name, config)| (name.clone(), config.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
async fn should_install_mcp_dependencies(
|
||||
sess: &Session,
|
||||
turn_context: &TurnContext,
|
||||
missing: &HashMap<String, McpServerConfig>,
|
||||
cancellation_token: &CancellationToken,
|
||||
) -> bool {
|
||||
if is_full_access_mode(turn_context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let server_list = format_missing_mcp_dependencies(missing);
|
||||
let question = RequestUserInputQuestion {
|
||||
id: SKILL_MCP_DEPENDENCY_PROMPT_ID.to_string(),
|
||||
header: "Install MCP servers?".to_string(),
|
||||
question: format!(
|
||||
"The following MCP servers are required by the selected skills but are not installed yet: {server_list}. Install them now?"
|
||||
),
|
||||
is_other: false,
|
||||
is_secret: false,
|
||||
options: Some(vec![
|
||||
RequestUserInputQuestionOption {
|
||||
label: MCP_DEPENDENCY_OPTION_INSTALL.to_string(),
|
||||
description:
|
||||
"Install and enable the missing MCP servers in your global config."
|
||||
.to_string(),
|
||||
},
|
||||
RequestUserInputQuestionOption {
|
||||
label: MCP_DEPENDENCY_OPTION_SKIP.to_string(),
|
||||
description: "Skip installation for now and do not show again for these MCP servers in this session."
|
||||
.to_string(),
|
||||
},
|
||||
]),
|
||||
};
|
||||
let args = RequestUserInputArgs {
|
||||
questions: vec![question],
|
||||
};
|
||||
let sub_id = &turn_context.sub_id;
|
||||
let call_id = format!("mcp-deps-{sub_id}");
|
||||
let response_fut = sess.request_user_input(turn_context, call_id, args);
|
||||
let response = tokio::select! {
|
||||
biased;
|
||||
_ = cancellation_token.cancelled() => {
|
||||
let empty = RequestUserInputResponse {
|
||||
answers: HashMap::new(),
|
||||
};
|
||||
sess.notify_user_input_response(sub_id, empty.clone()).await;
|
||||
empty
|
||||
}
|
||||
response = response_fut => response.unwrap_or_else(|| RequestUserInputResponse {
|
||||
answers: HashMap::new(),
|
||||
}),
|
||||
};
|
||||
|
||||
let install = response
|
||||
.answers
|
||||
.get(SKILL_MCP_DEPENDENCY_PROMPT_ID)
|
||||
.is_some_and(|answer| {
|
||||
answer
|
||||
.answers
|
||||
.iter()
|
||||
.any(|entry| entry == MCP_DEPENDENCY_OPTION_INSTALL)
|
||||
});
|
||||
|
||||
let prompted_keys = missing
|
||||
.iter()
|
||||
.map(|(name, config)| canonical_mcp_server_key(name, config));
|
||||
sess.record_mcp_dependency_prompted(prompted_keys).await;
|
||||
|
||||
install
|
||||
}
|
||||
|
||||
pub(crate) async fn maybe_prompt_and_install_mcp_dependencies(
|
||||
sess: &Session,
|
||||
turn_context: &TurnContext,
|
||||
cancellation_token: &CancellationToken,
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
) {
|
||||
let originator_value = originator().value;
|
||||
if !is_first_party_originator(originator_value.as_str()) {
|
||||
// Only support first-party clients for now.
|
||||
return;
|
||||
}
|
||||
|
||||
let config = turn_context.client.config();
|
||||
if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
|
||||
return;
|
||||
}
|
||||
|
||||
let installed = config.mcp_servers.get().clone();
|
||||
let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
|
||||
if missing.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let unprompted_missing = filter_prompted_mcp_dependencies(sess, &missing).await;
|
||||
if unprompted_missing.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
if should_install_mcp_dependencies(sess, turn_context, &unprompted_missing, cancellation_token)
|
||||
.await
|
||||
{
|
||||
maybe_install_mcp_dependencies(sess, turn_context, config.as_ref(), mentioned_skills).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn maybe_install_mcp_dependencies(
|
||||
sess: &Session,
|
||||
turn_context: &TurnContext,
|
||||
config: &Config,
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
) {
|
||||
if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
|
||||
return;
|
||||
}
|
||||
|
||||
let codex_home = config.codex_home.clone();
|
||||
let installed = config.mcp_servers.get().clone();
|
||||
let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
|
||||
if missing.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut servers = match load_global_mcp_servers(&codex_home).await {
|
||||
Ok(servers) => servers,
|
||||
Err(err) => {
|
||||
warn!("failed to load MCP servers while installing skill dependencies: {err}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut updated = false;
|
||||
let mut added = Vec::new();
|
||||
for (name, config) in missing {
|
||||
if servers.contains_key(&name) {
|
||||
continue;
|
||||
}
|
||||
servers.insert(name.clone(), config.clone());
|
||||
added.push((name, config));
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if !updated {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) = ConfigEditsBuilder::new(&codex_home)
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply()
|
||||
.await
|
||||
{
|
||||
warn!("failed to persist MCP dependencies for mentioned skills: {err}");
|
||||
return;
|
||||
}
|
||||
|
||||
for (name, server_config) in added {
|
||||
let oauth_config = match oauth_login_support(&server_config.transport).await {
|
||||
McpOAuthLoginSupport::Supported(config) => config,
|
||||
McpOAuthLoginSupport::Unsupported => continue,
|
||||
McpOAuthLoginSupport::Unknown(err) => {
|
||||
warn!("MCP server may or may not require login for dependency {name}: {err}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
sess.notify_background_event(
|
||||
turn_context,
|
||||
format!(
|
||||
"Authenticating MCP {name}... Follow instructions in your browser if prompted."
|
||||
),
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Err(err) = perform_oauth_login(
|
||||
&name,
|
||||
&oauth_config.url,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
oauth_config.http_headers,
|
||||
oauth_config.env_http_headers,
|
||||
&[],
|
||||
config.mcp_oauth_callback_port,
|
||||
)
|
||||
.await
|
||||
{
|
||||
warn!("failed to login to MCP dependency {name}: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
// Refresh from the effective merged MCP map (global + repo + managed) and
|
||||
// overlay the updated global servers so we don't drop repo-scoped servers.
|
||||
let auth = sess.services.auth_manager.auth().await;
|
||||
let mut refresh_servers = effective_mcp_servers(config, auth.as_ref());
|
||||
for (name, server_config) in &servers {
|
||||
refresh_servers
|
||||
.entry(name.clone())
|
||||
.or_insert_with(|| server_config.clone());
|
||||
}
|
||||
sess.refresh_mcp_servers_now(
|
||||
turn_context,
|
||||
refresh_servers,
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
fn canonical_mcp_key(transport: &str, identifier: &str, fallback: &str) -> String {
|
||||
let identifier = identifier.trim();
|
||||
if identifier.is_empty() {
|
||||
fallback.to_string()
|
||||
} else {
|
||||
format!("mcp__{transport}__{identifier}")
|
||||
}
|
||||
}
|
||||
|
||||
fn canonical_mcp_server_key(name: &str, config: &McpServerConfig) -> String {
|
||||
match &config.transport {
|
||||
McpServerTransportConfig::Stdio { command, .. } => {
|
||||
canonical_mcp_key("stdio", command, name)
|
||||
}
|
||||
McpServerTransportConfig::StreamableHttp { url, .. } => {
|
||||
canonical_mcp_key("streamable_http", url, name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn canonical_mcp_dependency_key(dependency: &SkillToolDependency) -> Result<String, String> {
|
||||
let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
|
||||
if transport.eq_ignore_ascii_case("streamable_http") {
|
||||
let url = dependency
|
||||
.url
|
||||
.as_ref()
|
||||
.ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
|
||||
return Ok(canonical_mcp_key("streamable_http", url, &dependency.value));
|
||||
}
|
||||
if transport.eq_ignore_ascii_case("stdio") {
|
||||
let command = dependency
|
||||
.command
|
||||
.as_ref()
|
||||
.ok_or_else(|| "missing command for stdio dependency".to_string())?;
|
||||
return Ok(canonical_mcp_key("stdio", command, &dependency.value));
|
||||
}
|
||||
Err(format!("unsupported transport {transport}"))
|
||||
}
|
||||
|
||||
pub(crate) fn collect_missing_mcp_dependencies(
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
installed: &HashMap<String, McpServerConfig>,
|
||||
) -> HashMap<String, McpServerConfig> {
|
||||
let mut missing = HashMap::new();
|
||||
let installed_keys: HashSet<String> = installed
|
||||
.iter()
|
||||
.map(|(name, config)| canonical_mcp_server_key(name, config))
|
||||
.collect();
|
||||
let mut seen_canonical_keys = HashSet::new();
|
||||
|
||||
for skill in mentioned_skills {
|
||||
let Some(dependencies) = skill.dependencies.as_ref() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for tool in &dependencies.tools {
|
||||
if !tool.r#type.eq_ignore_ascii_case("mcp") {
|
||||
continue;
|
||||
}
|
||||
let dependency_key = match canonical_mcp_dependency_key(tool) {
|
||||
Ok(key) => key,
|
||||
Err(err) => {
|
||||
let dependency = tool.value.as_str();
|
||||
let skill_name = skill.name.as_str();
|
||||
warn!(
|
||||
"unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if installed_keys.contains(&dependency_key)
|
||||
|| seen_canonical_keys.contains(&dependency_key)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let config = match mcp_dependency_to_server_config(tool) {
|
||||
Ok(config) => config,
|
||||
Err(err) => {
|
||||
let dependency = dependency_key.as_str();
|
||||
let skill_name = skill.name.as_str();
|
||||
warn!(
|
||||
"unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
missing.insert(tool.value.clone(), config);
|
||||
seen_canonical_keys.insert(dependency_key);
|
||||
}
|
||||
}
|
||||
|
||||
missing
|
||||
}
|
||||
|
||||
fn mcp_dependency_to_server_config(
|
||||
dependency: &SkillToolDependency,
|
||||
) -> Result<McpServerConfig, String> {
|
||||
let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
|
||||
if transport.eq_ignore_ascii_case("streamable_http") {
|
||||
let url = dependency
|
||||
.url
|
||||
.as_ref()
|
||||
.ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
|
||||
return Ok(McpServerConfig {
|
||||
transport: McpServerTransportConfig::StreamableHttp {
|
||||
url: url.clone(),
|
||||
bearer_token_env_var: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
},
|
||||
enabled: true,
|
||||
disabled_reason: None,
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
enabled_tools: None,
|
||||
disabled_tools: None,
|
||||
scopes: None,
|
||||
});
|
||||
}
|
||||
|
||||
if transport.eq_ignore_ascii_case("stdio") {
|
||||
let command = dependency
|
||||
.command
|
||||
.as_ref()
|
||||
.ok_or_else(|| "missing command for stdio dependency".to_string())?;
|
||||
return Ok(McpServerConfig {
|
||||
transport: McpServerTransportConfig::Stdio {
|
||||
command: command.clone(),
|
||||
args: Vec::new(),
|
||||
env: None,
|
||||
env_vars: Vec::new(),
|
||||
cwd: None,
|
||||
},
|
||||
enabled: true,
|
||||
disabled_reason: None,
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
enabled_tools: None,
|
||||
disabled_tools: None,
|
||||
scopes: None,
|
||||
});
|
||||
}
|
||||
|
||||
Err(format!("unsupported transport {transport}"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::skills::model::SkillDependencies;
    use codex_protocol::protocol::SkillScope;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    // Minimal skill fixture whose only interesting content is its declared
    // tool dependencies.
    fn skill_with_tools(tools: Vec<SkillToolDependency>) -> SkillMetadata {
        SkillMetadata {
            name: "skill".to_string(),
            description: "skill".to_string(),
            short_description: None,
            interface: None,
            dependencies: Some(SkillDependencies { tools }),
            path: PathBuf::from("skill"),
            scope: SkillScope::User,
        }
    }

    // A dependency is considered installed when an existing server shares its
    // canonical key (same transport + URL), even under a different alias.
    #[test]
    fn collect_missing_respects_canonical_installed_key() {
        let url = "https://example.com/mcp".to_string();
        let skills = vec![skill_with_tools(vec![SkillToolDependency {
            r#type: "mcp".to_string(),
            value: "github".to_string(),
            description: None,
            transport: Some("streamable_http".to_string()),
            command: None,
            url: Some(url.clone()),
        }])];
        // Installed under the name "alias", but pointing at the same URL as
        // the dependency named "github".
        let installed = HashMap::from([(
            "alias".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        // Nothing is missing: the canonical key matches the installed server.
        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &installed),
            HashMap::new()
        );
    }

    // Two dependencies with different names but the same canonical key (same
    // URL) collapse to a single entry keyed by the first name encountered.
    #[test]
    fn collect_missing_dedupes_by_canonical_key_but_preserves_original_name() {
        let url = "https://example.com/one".to_string();
        let skills = vec![skill_with_tools(vec![
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-one".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-two".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
        ])];

        // Only "alias-one" (the first occurrence) should be reported missing.
        let expected = HashMap::from([(
            "alias-one".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &HashMap::new()),
            expected
        );
    }
}
|
||||
@@ -14,6 +14,7 @@ use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
|
||||
use crate::mcp::auth::McpAuthStatusEntry;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
@@ -436,13 +437,33 @@ impl McpConnectionManager {
|
||||
.await
|
||||
}
|
||||
|
||||
pub(crate) async fn wait_for_server_ready(&self, server_name: &str, timeout: Duration) -> bool {
|
||||
let Some(async_managed_client) = self.clients.get(server_name) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
match tokio::time::timeout(timeout, async_managed_client.client()).await {
|
||||
Ok(Ok(_)) => true,
|
||||
Ok(Err(_)) | Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a single map that contains all tools. Each key is the
|
||||
/// fully-qualified name for the tool.
|
||||
#[instrument(level = "trace", skip_all)]
|
||||
pub async fn list_all_tools(&self) -> HashMap<String, ToolInfo> {
|
||||
let mut tools = HashMap::new();
|
||||
for managed_client in self.clients.values() {
|
||||
if let Ok(client) = managed_client.client().await {
|
||||
for (server_name, managed_client) in &self.clients {
|
||||
let client = if server_name == CODEX_APPS_MCP_SERVER_NAME {
|
||||
// Avoid blocking on codex_apps_mcp startup; use tools only when ready.
|
||||
match managed_client.client.clone().now_or_never() {
|
||||
Some(Ok(client)) => Some(client),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
managed_client.client().await.ok()
|
||||
};
|
||||
if let Some(client) = client {
|
||||
tools.extend(qualify_tools(filter_tools(
|
||||
client.tools,
|
||||
client.tool_filter,
|
||||
|
||||
64
codex-rs/core/src/mentions.rs
Normal file
64
codex-rs/core/src/mentions.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_protocol::user_input::UserInput;
|
||||
|
||||
use crate::connectors;
|
||||
use crate::skills::SkillMetadata;
|
||||
use crate::skills::injection::extract_tool_mentions;
|
||||
|
||||
/// Tool mentions gathered from user message text, split by how the tool was
/// referenced.
pub(crate) struct CollectedToolMentions {
    /// Mentions referenced by bare tool name (deduplicated).
    pub(crate) plain_names: HashSet<String>,
    /// Mentions referenced by path (deduplicated).
    pub(crate) paths: HashSet<String>,
}
|
||||
|
||||
pub(crate) fn collect_tool_mentions_from_messages(messages: &[String]) -> CollectedToolMentions {
|
||||
let mut plain_names = HashSet::new();
|
||||
let mut paths = HashSet::new();
|
||||
for message in messages {
|
||||
let mentions = extract_tool_mentions(message);
|
||||
plain_names.extend(mentions.plain_names().map(str::to_string));
|
||||
paths.extend(mentions.paths().map(str::to_string));
|
||||
}
|
||||
CollectedToolMentions { plain_names, paths }
|
||||
}
|
||||
|
||||
pub(crate) fn collect_explicit_app_paths(input: &[UserInput]) -> Vec<String> {
|
||||
input
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
UserInput::Mention { path, .. } => Some(path.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(crate) fn build_skill_name_counts(
|
||||
skills: &[SkillMetadata],
|
||||
disabled_paths: &HashSet<PathBuf>,
|
||||
) -> (HashMap<String, usize>, HashMap<String, usize>) {
|
||||
let mut exact_counts: HashMap<String, usize> = HashMap::new();
|
||||
let mut lower_counts: HashMap<String, usize> = HashMap::new();
|
||||
for skill in skills {
|
||||
if disabled_paths.contains(&skill.path) {
|
||||
continue;
|
||||
}
|
||||
*exact_counts.entry(skill.name.clone()).or_insert(0) += 1;
|
||||
*lower_counts
|
||||
.entry(skill.name.to_ascii_lowercase())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
(exact_counts, lower_counts)
|
||||
}
|
||||
|
||||
pub(crate) fn build_connector_slug_counts(
|
||||
connectors: &[connectors::AppInfo],
|
||||
) -> HashMap<String, usize> {
|
||||
let mut counts: HashMap<String, usize> = HashMap::new();
|
||||
for connector in connectors {
|
||||
let slug = connectors::connector_mention_slug(connector);
|
||||
*counts.entry(slug).or_insert(0) += 1;
|
||||
}
|
||||
counts
|
||||
}
|
||||
@@ -43,10 +43,6 @@ pub enum WireApi {
|
||||
/// The Responses API exposed by OpenAI at `/v1/responses`.
|
||||
Responses,
|
||||
|
||||
/// Experimental: Responses API over WebSocket transport.
|
||||
#[serde(rename = "responses_websocket")]
|
||||
ResponsesWebsocket,
|
||||
|
||||
/// Regular Chat Completions compatible with `/v1/chat/completions`.
|
||||
#[default]
|
||||
Chat,
|
||||
@@ -105,6 +101,10 @@ pub struct ModelProviderInfo {
|
||||
/// and API key (if needed) comes from the "env_key" environment variable.
|
||||
#[serde(default)]
|
||||
pub requires_openai_auth: bool,
|
||||
|
||||
/// Whether this provider supports the Responses API WebSocket transport.
|
||||
#[serde(default)]
|
||||
pub supports_websockets: bool,
|
||||
}
|
||||
|
||||
impl ModelProviderInfo {
|
||||
@@ -137,7 +137,10 @@ impl ModelProviderInfo {
|
||||
&self,
|
||||
auth_mode: Option<AuthMode>,
|
||||
) -> crate::error::Result<ApiProvider> {
|
||||
let default_base_url = if matches!(auth_mode, Some(AuthMode::ChatGPT)) {
|
||||
let default_base_url = if matches!(
|
||||
auth_mode,
|
||||
Some(AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens)
|
||||
) {
|
||||
"https://chatgpt.com/backend-api/codex"
|
||||
} else {
|
||||
"https://api.openai.com/v1"
|
||||
@@ -162,7 +165,6 @@ impl ModelProviderInfo {
|
||||
query_params: self.query_params.clone(),
|
||||
wire: match self.wire_api {
|
||||
WireApi::Responses => ApiWireApi::Responses,
|
||||
WireApi::ResponsesWebsocket => ApiWireApi::Responses,
|
||||
WireApi::Chat => ApiWireApi::Chat,
|
||||
},
|
||||
headers,
|
||||
@@ -254,6 +256,7 @@ impl ModelProviderInfo {
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: true,
|
||||
supports_websockets: true,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -332,6 +335,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> M
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -360,6 +364,7 @@ base_url = "http://localhost:11434/v1"
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
|
||||
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
|
||||
@@ -390,6 +395,7 @@ query_params = { api-version = "2025-04-01-preview" }
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
|
||||
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
|
||||
@@ -423,6 +429,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
|
||||
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
|
||||
@@ -454,6 +461,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let api = provider.to_api_provider(None).expect("api provider");
|
||||
assert!(
|
||||
@@ -476,6 +484,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let named_api = named_provider.to_api_provider(None).expect("api provider");
|
||||
assert!(named_api.is_azure_responses_endpoint());
|
||||
@@ -500,6 +509,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let api = provider.to_api_provider(None).expect("api provider");
|
||||
assert!(
|
||||
|
||||
@@ -28,7 +28,7 @@ fn plan_preset() -> CollaborationModeMask {
|
||||
name: "Plan".to_string(),
|
||||
mode: Some(ModeKind::Plan),
|
||||
model: None,
|
||||
reasoning_effort: Some(Some(ReasoningEffort::High)),
|
||||
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
|
||||
developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -204,7 +204,8 @@ impl ModelsManager {
|
||||
let _timer =
|
||||
codex_otel::start_global_timer("codex.remote_models.fetch_update.duration_ms", &[]);
|
||||
let auth = self.auth_manager.auth().await;
|
||||
let api_provider = self.provider.to_api_provider(Some(AuthMode::ChatGPT))?;
|
||||
let auth_mode = self.auth_manager.get_auth_mode();
|
||||
let api_provider = self.provider.to_api_provider(auth_mode)?;
|
||||
let api_auth = auth_provider_from_auth(auth.clone(), &self.provider)?;
|
||||
let transport = ReqwestTransport::new(build_reqwest_client());
|
||||
let client = ModelsClient::new(transport, api_provider, api_auth);
|
||||
@@ -271,7 +272,10 @@ impl ModelsManager {
|
||||
let remote_presets: Vec<ModelPreset> = remote_models.into_iter().map(Into::into).collect();
|
||||
let existing_presets = self.local_models.clone();
|
||||
let mut merged_presets = ModelPreset::merge(remote_presets, existing_presets);
|
||||
let chatgpt_mode = self.auth_manager.get_auth_mode() == Some(AuthMode::ChatGPT);
|
||||
let chatgpt_mode = matches!(
|
||||
self.auth_manager.get_auth_mode(),
|
||||
Some(AuthMode::ChatGPT | AuthMode::ChatgptAuthTokens)
|
||||
);
|
||||
merged_presets = ModelPreset::filter_by_auth(merged_presets, chatgpt_mode);
|
||||
|
||||
for preset in &mut merged_presets {
|
||||
@@ -432,6 +436,7 @@ mod tests {
|
||||
stream_max_retries: Some(0),
|
||||
stream_idle_timeout_ms: Some(5_000),
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -178,6 +178,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
|
||||
Personality::Pragmatic,
|
||||
PERSONALITY_PRAGMATIC.to_string(),
|
||||
)]))),
|
||||
collaboration_modes_messages: None,
|
||||
}),
|
||||
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
|
||||
shell_type: ConfigShellToolType::ShellCommand,
|
||||
@@ -213,6 +214,17 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
|
||||
truncation_policy: TruncationPolicyConfig::tokens(10_000),
|
||||
context_window: Some(CONTEXT_WINDOW_272K),
|
||||
supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
|
||||
model_instructions_template: Some(ModelInstructionsTemplate {
|
||||
template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
|
||||
personality_messages: Some(PersonalityMessages(BTreeMap::from([(
|
||||
Personality::Friendly,
|
||||
PERSONALITY_FRIENDLY.to_string(),
|
||||
), (
|
||||
Personality::Pragmatic,
|
||||
PERSONALITY_PRAGMATIC.to_string(),
|
||||
)]))),
|
||||
collaboration_modes_messages: None,
|
||||
}),
|
||||
)
|
||||
} else if slug.starts_with("gpt-5.1-codex-max") {
|
||||
model_info!(
|
||||
@@ -259,9 +271,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
|
||||
truncation_policy: TruncationPolicyConfig::tokens(10_000),
|
||||
context_window: Some(CONTEXT_WINDOW_272K),
|
||||
)
|
||||
} else if (slug.starts_with("gpt-5.2") || slug.starts_with("boomslang"))
|
||||
&& !slug.contains("codex")
|
||||
{
|
||||
} else if slug.starts_with("gpt-5.2") || slug.starts_with("boomslang") {
|
||||
model_info!(
|
||||
slug,
|
||||
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
|
||||
@@ -276,7 +286,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
|
||||
context_window: Some(CONTEXT_WINDOW_272K),
|
||||
supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh_non_codex(),
|
||||
)
|
||||
} else if slug.starts_with("gpt-5.1") && !slug.contains("codex") {
|
||||
} else if slug.starts_with("gpt-5.1") {
|
||||
model_info!(
|
||||
slug,
|
||||
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use async_trait::async_trait;
|
||||
use std::cmp::Reverse;
|
||||
use std::ffi::OsStr;
|
||||
use std::io::{self};
|
||||
@@ -7,8 +8,6 @@ use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use time::OffsetDateTime;
|
||||
use time::PrimitiveDateTime;
|
||||
use time::format_description::FormatItem;
|
||||
@@ -19,7 +18,9 @@ use uuid::Uuid;
|
||||
use super::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use super::SESSIONS_SUBDIR;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::state_db;
|
||||
use codex_file_search as file_search;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::RolloutLine;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
@@ -794,7 +795,7 @@ async fn collect_rollout_day_files(
|
||||
Ok(day_files)
|
||||
}
|
||||
|
||||
fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
|
||||
pub(crate) fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
|
||||
// Expected: rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl
|
||||
let core = name.strip_prefix("rollout-")?.strip_suffix(".jsonl")?;
|
||||
|
||||
@@ -1093,11 +1094,39 @@ async fn find_thread_path_by_id_str_in_subdir(
|
||||
)
|
||||
.map_err(|e| io::Error::other(format!("file search failed: {e}")))?;
|
||||
|
||||
Ok(results
|
||||
let found = results
|
||||
.matches
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|m| root.join(m.path)))
|
||||
.map(|m| root.join(m.path));
|
||||
|
||||
// Checking if DB is at parity.
|
||||
// TODO(jif): sqlite migration phase 1
|
||||
let archived_only = match subdir {
|
||||
SESSIONS_SUBDIR => Some(false),
|
||||
ARCHIVED_SESSIONS_SUBDIR => Some(true),
|
||||
_ => None,
|
||||
};
|
||||
let state_db_ctx = state_db::open_if_present(codex_home, "").await;
|
||||
if let Some(state_db_ctx) = state_db_ctx.as_deref()
|
||||
&& let Ok(thread_id) = ThreadId::from_string(id_str)
|
||||
{
|
||||
let db_path = state_db::find_rollout_path_by_id(
|
||||
Some(state_db_ctx),
|
||||
thread_id,
|
||||
archived_only,
|
||||
"find_path_query",
|
||||
)
|
||||
.await;
|
||||
let canonical_path = found.as_deref();
|
||||
if db_path.as_deref() != canonical_path {
|
||||
tracing::warn!(
|
||||
"state db path mismatch for thread {thread_id:?}: canonical={canonical_path:?} db={db_path:?}"
|
||||
);
|
||||
state_db::record_discrepancy("find_thread_path_by_id_str_in_subdir", "path_mismatch");
|
||||
}
|
||||
}
|
||||
Ok(found)
|
||||
}
|
||||
|
||||
/// Locate a recorded thread rollout file by its UUID string using the existing
|
||||
|
||||
360
codex-rs/core/src/rollout/metadata.rs
Normal file
360
codex-rs/core/src/rollout/metadata.rs
Normal file
@@ -0,0 +1,360 @@
|
||||
use crate::config::Config;
|
||||
use crate::rollout;
|
||||
use crate::rollout::list::parse_timestamp_uuid_from_filename;
|
||||
use crate::rollout::recorder::RolloutRecorder;
|
||||
use chrono::DateTime;
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::Timelike;
|
||||
use chrono::Utc;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::BackfillStats;
|
||||
use codex_state::DB_ERROR_METRIC;
|
||||
use codex_state::DB_METRIC_BACKFILL;
|
||||
use codex_state::ExtractionOutcome;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
use codex_state::apply_rollout_item;
|
||||
use std::cmp::Reverse;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
|
||||
const ROLLOUT_PREFIX: &str = "rollout-";
|
||||
const ROLLOUT_SUFFIX: &str = ".jsonl";
|
||||
|
||||
pub(crate) fn builder_from_session_meta(
|
||||
session_meta: &SessionMetaLine,
|
||||
rollout_path: &Path,
|
||||
) -> Option<ThreadMetadataBuilder> {
|
||||
let created_at = parse_timestamp_to_utc(session_meta.meta.timestamp.as_str())?;
|
||||
let mut builder = ThreadMetadataBuilder::new(
|
||||
session_meta.meta.id,
|
||||
rollout_path.to_path_buf(),
|
||||
created_at,
|
||||
session_meta.meta.source.clone(),
|
||||
);
|
||||
builder.model_provider = session_meta.meta.model_provider.clone();
|
||||
builder.cwd = session_meta.meta.cwd.clone();
|
||||
builder.sandbox_policy = SandboxPolicy::ReadOnly;
|
||||
builder.approval_mode = AskForApproval::OnRequest;
|
||||
if let Some(git) = session_meta.git.as_ref() {
|
||||
builder.git_sha = git.commit_hash.clone();
|
||||
builder.git_branch = git.branch.clone();
|
||||
builder.git_origin_url = git.repository_url.clone();
|
||||
}
|
||||
Some(builder)
|
||||
}
|
||||
|
||||
pub(crate) fn builder_from_items(
|
||||
items: &[RolloutItem],
|
||||
rollout_path: &Path,
|
||||
) -> Option<ThreadMetadataBuilder> {
|
||||
if let Some(session_meta) = items.iter().find_map(|item| match item {
|
||||
RolloutItem::SessionMeta(meta_line) => Some(meta_line),
|
||||
RolloutItem::ResponseItem(_)
|
||||
| RolloutItem::Compacted(_)
|
||||
| RolloutItem::TurnContext(_)
|
||||
| RolloutItem::EventMsg(_) => None,
|
||||
}) && let Some(builder) = builder_from_session_meta(session_meta, rollout_path)
|
||||
{
|
||||
return Some(builder);
|
||||
}
|
||||
|
||||
let file_name = rollout_path.file_name()?.to_str()?;
|
||||
if !file_name.starts_with(ROLLOUT_PREFIX) || !file_name.ends_with(ROLLOUT_SUFFIX) {
|
||||
return None;
|
||||
}
|
||||
let (created_ts, uuid) = parse_timestamp_uuid_from_filename(file_name)?;
|
||||
let created_at =
|
||||
DateTime::<Utc>::from_timestamp(created_ts.unix_timestamp(), 0)?.with_nanosecond(0)?;
|
||||
let id = ThreadId::from_string(&uuid.to_string()).ok()?;
|
||||
Some(ThreadMetadataBuilder::new(
|
||||
id,
|
||||
rollout_path.to_path_buf(),
|
||||
created_at,
|
||||
SessionSource::default(),
|
||||
))
|
||||
}
|
||||
|
||||
pub(crate) async fn extract_metadata_from_rollout(
|
||||
rollout_path: &Path,
|
||||
default_provider: &str,
|
||||
otel: Option<&OtelManager>,
|
||||
) -> anyhow::Result<ExtractionOutcome> {
|
||||
let (items, _thread_id, parse_errors) =
|
||||
RolloutRecorder::load_rollout_items(rollout_path).await?;
|
||||
if items.is_empty() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"empty session file: {}",
|
||||
rollout_path.display()
|
||||
));
|
||||
}
|
||||
let builder = builder_from_items(items.as_slice(), rollout_path).ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"rollout missing metadata builder: {}",
|
||||
rollout_path.display()
|
||||
)
|
||||
})?;
|
||||
let mut metadata = builder.build(default_provider);
|
||||
for item in &items {
|
||||
apply_rollout_item(&mut metadata, item, default_provider);
|
||||
}
|
||||
if let Some(updated_at) = file_modified_time_utc(rollout_path).await {
|
||||
metadata.updated_at = updated_at;
|
||||
}
|
||||
if parse_errors > 0
|
||||
&& let Some(otel) = otel
|
||||
{
|
||||
otel.counter(
|
||||
DB_ERROR_METRIC,
|
||||
parse_errors as i64,
|
||||
&[("stage", "extract_metadata_from_rollout")],
|
||||
);
|
||||
}
|
||||
Ok(ExtractionOutcome {
|
||||
metadata,
|
||||
parse_errors,
|
||||
})
|
||||
}
|
||||
|
||||
/// Scans every rollout file under the sessions and archived-sessions
/// directories and upserts the extracted thread metadata into the state DB.
///
/// Best-effort: unreadable roots and unparseable files are logged and
/// skipped rather than aborting the backfill. A summary line plus OTel
/// counters (when a manager is provided) report scanned/upserted/failed
/// totals.
pub(crate) async fn backfill_sessions(
    runtime: &codex_state::StateRuntime,
    config: &Config,
    otel: Option<&OtelManager>,
) {
    let sessions_root = config.codex_home.join(rollout::SESSIONS_SUBDIR);
    let archived_root = config.codex_home.join(rollout::ARCHIVED_SESSIONS_SUBDIR);
    let mut rollout_paths: Vec<(PathBuf, bool)> = Vec::new();
    for (root, archived) in [(sessions_root, false), (archived_root, true)] {
        if !tokio::fs::try_exists(&root).await.unwrap_or(false) {
            continue;
        }
        match collect_rollout_paths(&root).await {
            Ok(paths) => {
                rollout_paths.extend(paths.into_iter().map(|path| (path, archived)));
            }
            Err(err) => {
                warn!(
                    "failed to collect rollout paths under {}: {err}",
                    root.display()
                );
            }
        }
    }
    // Process newest first: sort by (timestamp, uuid) parsed from the
    // filename, descending; unparseable names sort last.
    rollout_paths.sort_by_key(|(path, _archived)| {
        let parsed = path
            .file_name()
            .and_then(|name| name.to_str())
            .and_then(parse_timestamp_uuid_from_filename)
            .unwrap_or((time::OffsetDateTime::UNIX_EPOCH, uuid::Uuid::nil()));
        (Reverse(parsed.0), Reverse(parsed.1))
    });
    let mut stats = BackfillStats {
        scanned: 0,
        upserted: 0,
        failed: 0,
    };
    for (path, archived) in rollout_paths {
        stats.scanned = stats.scanned.saturating_add(1);
        match extract_metadata_from_rollout(&path, config.model_provider_id.as_str(), otel).await {
            Ok(outcome) => {
                if outcome.parse_errors > 0
                    && let Some(otel) = otel
                {
                    otel.counter(
                        DB_ERROR_METRIC,
                        outcome.parse_errors as i64,
                        &[("stage", "backfill_sessions")],
                    );
                }
                let mut metadata = outcome.metadata;
                // Archived rollouts without a recorded archive time fall back
                // to the file mtime, then to the metadata's updated_at.
                if archived && metadata.archived_at.is_none() {
                    let fallback_archived_at = metadata.updated_at;
                    metadata.archived_at = file_modified_time_utc(&path)
                        .await
                        .or(Some(fallback_archived_at));
                }
                if let Err(err) = runtime.upsert_thread(&metadata).await {
                    stats.failed = stats.failed.saturating_add(1);
                    warn!("failed to upsert rollout {}: {err}", path.display());
                } else {
                    stats.upserted = stats.upserted.saturating_add(1);
                }
            }
            Err(err) => {
                stats.failed = stats.failed.saturating_add(1);
                warn!("failed to extract rollout {}: {err}", path.display());
            }
        }
    }

    info!(
        "state db backfill scanned={}, upserted={}, failed={}",
        stats.scanned, stats.upserted, stats.failed
    );
    if let Some(otel) = otel {
        otel.counter(
            DB_METRIC_BACKFILL,
            stats.upserted as i64,
            &[("status", "upserted")],
        );
        otel.counter(
            DB_METRIC_BACKFILL,
            stats.failed as i64,
            &[("status", "failed")],
        );
    }
}
|
||||
|
||||
async fn file_modified_time_utc(path: &Path) -> Option<DateTime<Utc>> {
|
||||
let modified = tokio::fs::metadata(path).await.ok()?.modified().ok()?;
|
||||
let updated_at: DateTime<Utc> = modified.into();
|
||||
updated_at.with_nanosecond(0)
|
||||
}
|
||||
|
||||
fn parse_timestamp_to_utc(ts: &str) -> Option<DateTime<Utc>> {
|
||||
const FILENAME_TS_FORMAT: &str = "%Y-%m-%dT%H-%M-%S";
|
||||
if let Ok(naive) = NaiveDateTime::parse_from_str(ts, FILENAME_TS_FORMAT) {
|
||||
let dt = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
|
||||
return dt.with_nanosecond(0);
|
||||
}
|
||||
if let Ok(dt) = DateTime::parse_from_rfc3339(ts) {
|
||||
return dt.with_timezone(&Utc).with_nanosecond(0);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
async fn collect_rollout_paths(root: &Path) -> std::io::Result<Vec<PathBuf>> {
|
||||
let mut stack = vec![root.to_path_buf()];
|
||||
let mut paths = Vec::new();
|
||||
while let Some(dir) = stack.pop() {
|
||||
let mut read_dir = match tokio::fs::read_dir(&dir).await {
|
||||
Ok(read_dir) => read_dir,
|
||||
Err(err) => {
|
||||
warn!("failed to read directory {}: {err}", dir.display());
|
||||
continue;
|
||||
}
|
||||
};
|
||||
while let Some(entry) = read_dir.next_entry().await? {
|
||||
let path = entry.path();
|
||||
let file_type = entry.file_type().await?;
|
||||
if file_type.is_dir() {
|
||||
stack.push(path);
|
||||
continue;
|
||||
}
|
||||
if !file_type.is_file() {
|
||||
continue;
|
||||
}
|
||||
let file_name = entry.file_name();
|
||||
let Some(name) = file_name.to_str() else {
|
||||
continue;
|
||||
};
|
||||
if name.starts_with(ROLLOUT_PREFIX) && name.ends_with(ROLLOUT_SUFFIX) {
|
||||
paths.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(paths)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::DateTime;
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::Timelike;
|
||||
use chrono::Utc;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::CompactedItem;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::RolloutLine;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use tempfile::tempdir;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[tokio::test]
|
||||
async fn extract_metadata_from_rollout_uses_session_meta() {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let uuid = Uuid::new_v4();
|
||||
let id = ThreadId::from_string(&uuid.to_string()).expect("thread id");
|
||||
let path = dir
|
||||
.path()
|
||||
.join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));
|
||||
|
||||
let session_meta = SessionMeta {
|
||||
id,
|
||||
forked_from_id: None,
|
||||
timestamp: "2026-01-27T12:34:56Z".to_string(),
|
||||
cwd: dir.path().to_path_buf(),
|
||||
originator: "cli".to_string(),
|
||||
cli_version: "0.0.0".to_string(),
|
||||
source: SessionSource::default(),
|
||||
model_provider: Some("openai".to_string()),
|
||||
base_instructions: None,
|
||||
};
|
||||
let session_meta_line = SessionMetaLine {
|
||||
meta: session_meta,
|
||||
git: None,
|
||||
};
|
||||
let rollout_line = RolloutLine {
|
||||
timestamp: "2026-01-27T12:34:56Z".to_string(),
|
||||
item: RolloutItem::SessionMeta(session_meta_line.clone()),
|
||||
};
|
||||
let json = serde_json::to_string(&rollout_line).expect("rollout json");
|
||||
let mut file = File::create(&path).expect("create rollout");
|
||||
writeln!(file, "{json}").expect("write rollout");
|
||||
|
||||
let outcome = extract_metadata_from_rollout(&path, "openai", None)
|
||||
.await
|
||||
.expect("extract");
|
||||
|
||||
let builder =
|
||||
builder_from_session_meta(&session_meta_line, path.as_path()).expect("builder");
|
||||
let mut expected = builder.build("openai");
|
||||
apply_rollout_item(&mut expected, &rollout_line.item, "openai");
|
||||
expected.updated_at = file_modified_time_utc(&path).await.expect("mtime");
|
||||
|
||||
assert_eq!(outcome.metadata, expected);
|
||||
assert_eq!(outcome.parse_errors, 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn builder_from_items_falls_back_to_filename() {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let uuid = Uuid::new_v4();
|
||||
let path = dir
|
||||
.path()
|
||||
.join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));
|
||||
let items = vec![RolloutItem::Compacted(CompactedItem {
|
||||
message: "noop".to_string(),
|
||||
replacement_history: None,
|
||||
})];
|
||||
|
||||
let builder = builder_from_items(items.as_slice(), path.as_path()).expect("builder");
|
||||
let naive = NaiveDateTime::parse_from_str("2026-01-27T12-34-56", "%Y-%m-%dT%H-%M-%S")
|
||||
.expect("timestamp");
|
||||
let created_at = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
|
||||
.with_nanosecond(0)
|
||||
.expect("nanosecond");
|
||||
let expected = ThreadMetadataBuilder::new(
|
||||
ThreadId::from_string(&uuid.to_string()).expect("thread id"),
|
||||
path,
|
||||
created_at,
|
||||
SessionSource::default(),
|
||||
);
|
||||
|
||||
assert_eq!(builder, expected);
|
||||
}
|
||||
}
|
||||
@@ -9,6 +9,7 @@ pub const INTERACTIVE_SESSION_SOURCES: &[SessionSource] =
|
||||
|
||||
pub(crate) mod error;
|
||||
pub mod list;
|
||||
pub(crate) mod metadata;
|
||||
pub(crate) mod policy;
|
||||
pub mod recorder;
|
||||
pub(crate) mod truncation;
|
||||
|
||||
@@ -28,11 +28,14 @@ use super::list::ThreadSortKey;
|
||||
use super::list::ThreadsPage;
|
||||
use super::list::get_threads;
|
||||
use super::list::get_threads_in_root;
|
||||
use super::metadata;
|
||||
use super::policy::is_persisted_response_item;
|
||||
use crate::config::Config;
|
||||
use crate::default_client::originator;
|
||||
use crate::git_info::collect_git_info;
|
||||
use crate::path_utils;
|
||||
use crate::state_db;
|
||||
use crate::state_db::StateDbHandle;
|
||||
use codex_protocol::protocol::InitialHistory;
|
||||
use codex_protocol::protocol::ResumedHistory;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
@@ -40,6 +43,7 @@ use codex_protocol::protocol::RolloutLine;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
|
||||
/// Records all [`ResponseItem`]s for a session and flushes them to disk after
|
||||
/// every update.
|
||||
@@ -54,6 +58,7 @@ use codex_protocol::protocol::SessionSource;
|
||||
pub struct RolloutRecorder {
|
||||
tx: Sender<RolloutCmd>,
|
||||
pub(crate) rollout_path: PathBuf,
|
||||
state_db: Option<StateDbHandle>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -111,7 +116,8 @@ impl RolloutRecorder {
|
||||
model_providers: Option<&[String]>,
|
||||
default_provider: &str,
|
||||
) -> std::io::Result<ThreadsPage> {
|
||||
get_threads(
|
||||
let stage = "list_threads";
|
||||
let page = get_threads(
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
@@ -120,7 +126,34 @@ impl RolloutRecorder {
|
||||
model_providers,
|
||||
default_provider,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// TODO(jif): drop after sqlite migration phase 1
|
||||
let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
|
||||
if let Some(db_ids) = state_db::list_thread_ids_db(
|
||||
state_db_ctx.as_deref(),
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
sort_key,
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
false,
|
||||
stage,
|
||||
)
|
||||
.await
|
||||
{
|
||||
if page.items.len() != db_ids.len() {
|
||||
state_db::record_discrepancy(stage, "bad_len");
|
||||
return Ok(page);
|
||||
}
|
||||
for (id, item) in db_ids.iter().zip(page.items.iter()) {
|
||||
if !item.path.display().to_string().contains(&id.to_string()) {
|
||||
state_db::record_discrepancy(stage, "bad_id");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(page)
|
||||
}
|
||||
|
||||
/// List archived threads (rollout files) under the archived sessions directory.
|
||||
@@ -133,8 +166,9 @@ impl RolloutRecorder {
|
||||
model_providers: Option<&[String]>,
|
||||
default_provider: &str,
|
||||
) -> std::io::Result<ThreadsPage> {
|
||||
let stage = "list_archived_threads";
|
||||
let root = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
get_threads_in_root(
|
||||
let page = get_threads_in_root(
|
||||
root,
|
||||
page_size,
|
||||
cursor,
|
||||
@@ -146,7 +180,34 @@ impl RolloutRecorder {
|
||||
layout: ThreadListLayout::Flat,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
// TODO(jif): drop after sqlite migration phase 1
|
||||
let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
|
||||
if let Some(db_ids) = state_db::list_thread_ids_db(
|
||||
state_db_ctx.as_deref(),
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
sort_key,
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
true,
|
||||
stage,
|
||||
)
|
||||
.await
|
||||
{
|
||||
if page.items.len() != db_ids.len() {
|
||||
state_db::record_discrepancy(stage, "bad_len");
|
||||
return Ok(page);
|
||||
}
|
||||
for (id, item) in db_ids.iter().zip(page.items.iter()) {
|
||||
if !item.path.display().to_string().contains(&id.to_string()) {
|
||||
state_db::record_discrepancy(stage, "bad_id");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(page)
|
||||
}
|
||||
|
||||
/// Find the newest recorded thread path, optionally filtering to a matching cwd.
|
||||
@@ -186,7 +247,12 @@ impl RolloutRecorder {
|
||||
/// Attempt to create a new [`RolloutRecorder`]. If the sessions directory
|
||||
/// cannot be created or the rollout file cannot be opened we return the
|
||||
/// error so the caller can decide whether to disable persistence.
|
||||
pub async fn new(config: &Config, params: RolloutRecorderParams) -> std::io::Result<Self> {
|
||||
pub async fn new(
|
||||
config: &Config,
|
||||
params: RolloutRecorderParams,
|
||||
state_db_ctx: Option<StateDbHandle>,
|
||||
state_builder: Option<ThreadMetadataBuilder>,
|
||||
) -> std::io::Result<Self> {
|
||||
let (file, rollout_path, meta) = match params {
|
||||
RolloutRecorderParams::Create {
|
||||
conversation_id,
|
||||
@@ -246,9 +312,30 @@ impl RolloutRecorder {
|
||||
// Spawn a Tokio task that owns the file handle and performs async
|
||||
// writes. Using `tokio::fs::File` keeps everything on the async I/O
|
||||
// driver instead of blocking the runtime.
|
||||
tokio::task::spawn(rollout_writer(file, rx, meta, cwd));
|
||||
tokio::task::spawn(rollout_writer(
|
||||
file,
|
||||
rx,
|
||||
meta,
|
||||
cwd,
|
||||
rollout_path.clone(),
|
||||
state_db_ctx.clone(),
|
||||
state_builder,
|
||||
config.model_provider_id.clone(),
|
||||
));
|
||||
|
||||
Ok(Self { tx, rollout_path })
|
||||
Ok(Self {
|
||||
tx,
|
||||
rollout_path,
|
||||
state_db: state_db_ctx,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn rollout_path(&self) -> &Path {
|
||||
self.rollout_path.as_path()
|
||||
}
|
||||
|
||||
pub fn state_db(&self) -> Option<StateDbHandle> {
|
||||
self.state_db.clone()
|
||||
}
|
||||
|
||||
pub(crate) async fn record_items(&self, items: &[RolloutItem]) -> std::io::Result<()> {
|
||||
@@ -281,7 +368,9 @@ impl RolloutRecorder {
|
||||
.map_err(|e| IoError::other(format!("failed waiting for rollout flush: {e}")))
|
||||
}
|
||||
|
||||
pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
|
||||
pub(crate) async fn load_rollout_items(
|
||||
path: &Path,
|
||||
) -> std::io::Result<(Vec<RolloutItem>, Option<ThreadId>, usize)> {
|
||||
info!("Resuming rollout from {path:?}");
|
||||
let text = tokio::fs::read_to_string(path).await?;
|
||||
if text.trim().is_empty() {
|
||||
@@ -290,6 +379,7 @@ impl RolloutRecorder {
|
||||
|
||||
let mut items: Vec<RolloutItem> = Vec::new();
|
||||
let mut thread_id: Option<ThreadId> = None;
|
||||
let mut parse_errors = 0usize;
|
||||
for line in text.lines() {
|
||||
if line.trim().is_empty() {
|
||||
continue;
|
||||
@@ -298,6 +388,7 @@ impl RolloutRecorder {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
warn!("failed to parse line as JSON: {line:?}, error: {e}");
|
||||
parse_errors = parse_errors.saturating_add(1);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
@@ -328,15 +419,22 @@ impl RolloutRecorder {
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("failed to parse rollout line: {v:?}, error: {e}");
|
||||
parse_errors = parse_errors.saturating_add(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Resumed rollout with {} items, thread ID: {:?}",
|
||||
"Resumed rollout with {} items, thread ID: {:?}, parse errors: {}",
|
||||
items.len(),
|
||||
thread_id
|
||||
thread_id,
|
||||
parse_errors,
|
||||
);
|
||||
Ok((items, thread_id, parse_errors))
|
||||
}
|
||||
|
||||
pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
|
||||
let (items, thread_id, _parse_errors) = Self::load_rollout_items(path).await?;
|
||||
let conversation_id = thread_id
|
||||
.ok_or_else(|| IoError::other("failed to parse thread ID from rollout file"))?;
|
||||
|
||||
@@ -417,13 +515,21 @@ fn create_log_file(config: &Config, conversation_id: ThreadId) -> std::io::Resul
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn rollout_writer(
|
||||
file: tokio::fs::File,
|
||||
mut rx: mpsc::Receiver<RolloutCmd>,
|
||||
mut meta: Option<SessionMeta>,
|
||||
cwd: std::path::PathBuf,
|
||||
rollout_path: PathBuf,
|
||||
state_db_ctx: Option<StateDbHandle>,
|
||||
mut state_builder: Option<ThreadMetadataBuilder>,
|
||||
default_provider: String,
|
||||
) -> std::io::Result<()> {
|
||||
let mut writer = JsonlWriter { file };
|
||||
if let Some(builder) = state_builder.as_mut() {
|
||||
builder.rollout_path = rollout_path.clone();
|
||||
}
|
||||
|
||||
// If we have a meta, collect git info asynchronously and write meta first
|
||||
if let Some(session_meta) = meta.take() {
|
||||
@@ -432,22 +538,50 @@ async fn rollout_writer(
|
||||
meta: session_meta,
|
||||
git: git_info,
|
||||
};
|
||||
if state_db_ctx.is_some() {
|
||||
state_builder =
|
||||
metadata::builder_from_session_meta(&session_meta_line, rollout_path.as_path());
|
||||
}
|
||||
|
||||
// Write the SessionMeta as the first item in the file, wrapped in a rollout line
|
||||
writer
|
||||
.write_rollout_item(RolloutItem::SessionMeta(session_meta_line))
|
||||
.await?;
|
||||
let rollout_item = RolloutItem::SessionMeta(session_meta_line);
|
||||
writer.write_rollout_item(&rollout_item).await?;
|
||||
state_db::reconcile_rollout(
|
||||
state_db_ctx.as_deref(),
|
||||
rollout_path.as_path(),
|
||||
default_provider.as_str(),
|
||||
state_builder.as_ref(),
|
||||
std::slice::from_ref(&rollout_item),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
// Process rollout commands
|
||||
while let Some(cmd) = rx.recv().await {
|
||||
match cmd {
|
||||
RolloutCmd::AddItems(items) => {
|
||||
let mut persisted_items = Vec::new();
|
||||
for item in items {
|
||||
if is_persisted_response_item(&item) {
|
||||
writer.write_rollout_item(item).await?;
|
||||
writer.write_rollout_item(&item).await?;
|
||||
persisted_items.push(item);
|
||||
}
|
||||
}
|
||||
if persisted_items.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if let Some(builder) = state_builder.as_mut() {
|
||||
builder.rollout_path = rollout_path.clone();
|
||||
}
|
||||
state_db::apply_rollout_items(
|
||||
state_db_ctx.as_deref(),
|
||||
rollout_path.as_path(),
|
||||
default_provider.as_str(),
|
||||
state_builder.as_ref(),
|
||||
persisted_items.as_slice(),
|
||||
"rollout_writer",
|
||||
)
|
||||
.await;
|
||||
}
|
||||
RolloutCmd::Flush { ack } => {
|
||||
// Ensure underlying file is flushed and then ack.
|
||||
@@ -470,8 +604,15 @@ struct JsonlWriter {
|
||||
file: tokio::fs::File,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct RolloutLineRef<'a> {
|
||||
timestamp: String,
|
||||
#[serde(flatten)]
|
||||
item: &'a RolloutItem,
|
||||
}
|
||||
|
||||
impl JsonlWriter {
|
||||
async fn write_rollout_item(&mut self, rollout_item: RolloutItem) -> std::io::Result<()> {
|
||||
async fn write_rollout_item(&mut self, rollout_item: &RolloutItem) -> std::io::Result<()> {
|
||||
let timestamp_format: &[FormatItem] = format_description!(
|
||||
"[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:3]Z"
|
||||
);
|
||||
@@ -479,7 +620,7 @@ impl JsonlWriter {
|
||||
.format(timestamp_format)
|
||||
.map_err(|e| IoError::other(format!("failed to format timestamp: {e}")))?;
|
||||
|
||||
let line = RolloutLine {
|
||||
let line = RolloutLineRef {
|
||||
timestamp,
|
||||
item: rollout_item,
|
||||
};
|
||||
|
||||
@@ -10,45 +10,7 @@ use crate::util::resolve_path;
|
||||
|
||||
use crate::protocol::AskForApproval;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::sync::atomic::AtomicBool;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
static WINDOWS_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);
|
||||
#[cfg(target_os = "windows")]
|
||||
static WINDOWS_ELEVATED_SANDBOX_ENABLED: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn set_windows_sandbox_enabled(enabled: bool) {
|
||||
WINDOWS_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
#[allow(dead_code)]
|
||||
pub fn set_windows_sandbox_enabled(_enabled: bool) {}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn set_windows_elevated_sandbox_enabled(enabled: bool) {
|
||||
WINDOWS_ELEVATED_SANDBOX_ENABLED.store(enabled, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
#[allow(dead_code)]
|
||||
pub fn set_windows_elevated_sandbox_enabled(_enabled: bool) {}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn is_windows_elevated_sandbox_enabled() -> bool {
|
||||
WINDOWS_ELEVATED_SANDBOX_ENABLED.load(Ordering::Relaxed)
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
#[allow(dead_code)]
|
||||
pub fn is_windows_elevated_sandbox_enabled() -> bool {
|
||||
false
|
||||
}
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum SafetyCheck {
|
||||
@@ -67,6 +29,7 @@ pub fn assess_patch_safety(
|
||||
policy: AskForApproval,
|
||||
sandbox_policy: &SandboxPolicy,
|
||||
cwd: &Path,
|
||||
windows_sandbox_level: WindowsSandboxLevel,
|
||||
) -> SafetyCheck {
|
||||
if action.is_empty() {
|
||||
return SafetyCheck::Reject {
|
||||
@@ -104,7 +67,7 @@ pub fn assess_patch_safety(
|
||||
// Only auto‑approve when we can actually enforce a sandbox. Otherwise
|
||||
// fall back to asking the user because the patch may touch arbitrary
|
||||
// paths outside the project.
|
||||
match get_platform_sandbox() {
|
||||
match get_platform_sandbox(windows_sandbox_level != WindowsSandboxLevel::Disabled) {
|
||||
Some(sandbox_type) => SafetyCheck::AutoApprove {
|
||||
sandbox_type,
|
||||
user_explicitly_approved: false,
|
||||
@@ -122,19 +85,17 @@ pub fn assess_patch_safety(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_platform_sandbox() -> Option<SandboxType> {
|
||||
pub fn get_platform_sandbox(windows_sandbox_enabled: bool) -> Option<SandboxType> {
|
||||
if cfg!(target_os = "macos") {
|
||||
Some(SandboxType::MacosSeatbelt)
|
||||
} else if cfg!(target_os = "linux") {
|
||||
Some(SandboxType::LinuxSeccomp)
|
||||
} else if cfg!(target_os = "windows") {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if WINDOWS_SANDBOX_ENABLED.load(Ordering::Relaxed) {
|
||||
return Some(SandboxType::WindowsRestrictedToken);
|
||||
}
|
||||
if windows_sandbox_enabled {
|
||||
Some(SandboxType::WindowsRestrictedToken)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -277,7 +238,13 @@ mod tests {
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
assess_patch_safety(&add_inside, AskForApproval::OnRequest, &policy, &cwd),
|
||||
assess_patch_safety(
|
||||
&add_inside,
|
||||
AskForApproval::OnRequest,
|
||||
&policy,
|
||||
&cwd,
|
||||
WindowsSandboxLevel::Disabled
|
||||
),
|
||||
SafetyCheck::AutoApprove {
|
||||
sandbox_type: SandboxType::None,
|
||||
user_explicitly_approved: false,
|
||||
|
||||
@@ -21,6 +21,7 @@ use crate::seatbelt::create_seatbelt_command_args;
|
||||
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
|
||||
use crate::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use crate::tools::sandboxing::SandboxablePreference;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
pub use codex_protocol::models::SandboxPermissions;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
@@ -44,6 +45,7 @@ pub struct ExecEnv {
|
||||
pub env: HashMap<String, String>,
|
||||
pub expiration: ExecExpiration,
|
||||
pub sandbox: SandboxType,
|
||||
pub windows_sandbox_level: WindowsSandboxLevel,
|
||||
pub sandbox_permissions: SandboxPermissions,
|
||||
pub justification: Option<String>,
|
||||
pub arg0: Option<String>,
|
||||
@@ -76,19 +78,26 @@ impl SandboxManager {
|
||||
&self,
|
||||
policy: &SandboxPolicy,
|
||||
pref: SandboxablePreference,
|
||||
windows_sandbox_level: WindowsSandboxLevel,
|
||||
) -> SandboxType {
|
||||
match pref {
|
||||
SandboxablePreference::Forbid => SandboxType::None,
|
||||
SandboxablePreference::Require => {
|
||||
// Require a platform sandbox when available; on Windows this
|
||||
// respects the experimental_windows_sandbox feature.
|
||||
crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None)
|
||||
crate::safety::get_platform_sandbox(
|
||||
windows_sandbox_level != WindowsSandboxLevel::Disabled,
|
||||
)
|
||||
.unwrap_or(SandboxType::None)
|
||||
}
|
||||
SandboxablePreference::Auto => match policy {
|
||||
SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. } => {
|
||||
SandboxType::None
|
||||
}
|
||||
_ => crate::safety::get_platform_sandbox().unwrap_or(SandboxType::None),
|
||||
_ => crate::safety::get_platform_sandbox(
|
||||
windows_sandbox_level != WindowsSandboxLevel::Disabled,
|
||||
)
|
||||
.unwrap_or(SandboxType::None),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -100,6 +109,7 @@ impl SandboxManager {
|
||||
sandbox: SandboxType,
|
||||
sandbox_policy_cwd: &Path,
|
||||
codex_linux_sandbox_exe: Option<&PathBuf>,
|
||||
windows_sandbox_level: WindowsSandboxLevel,
|
||||
) -> Result<ExecEnv, SandboxTransformError> {
|
||||
let mut env = spec.env;
|
||||
if !policy.has_full_network_access() {
|
||||
@@ -160,6 +170,7 @@ impl SandboxManager {
|
||||
env,
|
||||
expiration: spec.expiration,
|
||||
sandbox,
|
||||
windows_sandbox_level,
|
||||
sandbox_permissions: spec.sandbox_permissions,
|
||||
justification: spec.justification,
|
||||
arg0: arg0_override,
|
||||
|
||||
@@ -19,6 +19,8 @@ use tokio::fs;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::watch;
|
||||
use tokio::time::timeout;
|
||||
use tracing::Instrument;
|
||||
use tracing::info_span;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ShellSnapshot {
|
||||
@@ -42,17 +44,21 @@ impl ShellSnapshot {
|
||||
|
||||
let snapshot_shell = shell.clone();
|
||||
let snapshot_session_id = session_id;
|
||||
tokio::spawn(async move {
|
||||
let timer = otel_manager.start_timer("codex.shell_snapshot.duration_ms", &[]);
|
||||
let snapshot =
|
||||
ShellSnapshot::try_new(&codex_home, snapshot_session_id, &snapshot_shell)
|
||||
.await
|
||||
.map(Arc::new);
|
||||
let success = if snapshot.is_some() { "true" } else { "false" };
|
||||
let _ = timer.map(|timer| timer.record(&[("success", success)]));
|
||||
otel_manager.counter("codex.shell_snapshot", 1, &[("success", success)]);
|
||||
let _ = shell_snapshot_tx.send(snapshot);
|
||||
});
|
||||
let snapshot_span = info_span!("shell_snapshot", thread_id = %snapshot_session_id);
|
||||
tokio::spawn(
|
||||
async move {
|
||||
let timer = otel_manager.start_timer("codex.shell_snapshot.duration_ms", &[]);
|
||||
let snapshot =
|
||||
ShellSnapshot::try_new(&codex_home, snapshot_session_id, &snapshot_shell)
|
||||
.await
|
||||
.map(Arc::new);
|
||||
let success = if snapshot.is_some() { "true" } else { "false" };
|
||||
let _ = timer.map(|timer| timer.record(&[("success", success)]));
|
||||
otel_manager.counter("codex.shell_snapshot", 1, &[("success", success)]);
|
||||
let _ = shell_snapshot_tx.send(snapshot);
|
||||
}
|
||||
.instrument(snapshot_span),
|
||||
);
|
||||
}
|
||||
|
||||
async fn try_new(codex_home: &Path, session_id: ThreadId, shell: &Shell) -> Option<Self> {
|
||||
|
||||
162
codex-rs/core/src/skills/env_var_dependencies.rs
Normal file
162
codex-rs/core/src/skills/env_var_dependencies.rs
Normal file
@@ -0,0 +1,162 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex_protocol::request_user_input::RequestUserInputArgs;
|
||||
use codex_protocol::request_user_input::RequestUserInputQuestion;
|
||||
use codex_protocol::request_user_input::RequestUserInputResponse;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::skills::SkillMetadata;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct SkillDependencyInfo {
|
||||
pub(crate) skill_name: String,
|
||||
pub(crate) name: String,
|
||||
pub(crate) description: Option<String>,
|
||||
}
|
||||
|
||||
/// Resolve required dependency values (session cache, then env vars),
|
||||
/// and prompt the UI for any missing ones.
|
||||
pub(crate) async fn resolve_skill_dependencies_for_turn(
|
||||
sess: &Arc<Session>,
|
||||
turn_context: &Arc<TurnContext>,
|
||||
dependencies: &[SkillDependencyInfo],
|
||||
) {
|
||||
if dependencies.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let existing_env = sess.dependency_env().await;
|
||||
let mut loaded_values = HashMap::new();
|
||||
let mut missing = Vec::new();
|
||||
let mut seen_names = HashSet::new();
|
||||
|
||||
for dependency in dependencies {
|
||||
let name = dependency.name.clone();
|
||||
if !seen_names.insert(name.clone()) {
|
||||
continue;
|
||||
}
|
||||
if existing_env.contains_key(&name) {
|
||||
continue;
|
||||
}
|
||||
match env::var(&name) {
|
||||
Ok(value) => {
|
||||
loaded_values.insert(name.clone(), value);
|
||||
continue;
|
||||
}
|
||||
Err(env::VarError::NotPresent) => {}
|
||||
Err(err) => {
|
||||
warn!("failed to read env var {name}: {err}");
|
||||
}
|
||||
}
|
||||
missing.push(dependency.clone());
|
||||
}
|
||||
|
||||
if !loaded_values.is_empty() {
|
||||
sess.set_dependency_env(loaded_values).await;
|
||||
}
|
||||
|
||||
if !missing.is_empty() {
|
||||
request_skill_dependencies(sess, turn_context, &missing).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn collect_env_var_dependencies(
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
) -> Vec<SkillDependencyInfo> {
|
||||
let mut dependencies = Vec::new();
|
||||
for skill in mentioned_skills {
|
||||
let Some(skill_dependencies) = &skill.dependencies else {
|
||||
continue;
|
||||
};
|
||||
for tool in &skill_dependencies.tools {
|
||||
if tool.r#type != "env_var" {
|
||||
continue;
|
||||
}
|
||||
if tool.value.is_empty() {
|
||||
continue;
|
||||
}
|
||||
dependencies.push(SkillDependencyInfo {
|
||||
skill_name: skill.name.clone(),
|
||||
name: tool.value.clone(),
|
||||
description: tool.description.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
dependencies
|
||||
}
|
||||
|
||||
/// Prompt via request_user_input to gather missing env vars.
|
||||
pub(crate) async fn request_skill_dependencies(
|
||||
sess: &Arc<Session>,
|
||||
turn_context: &Arc<TurnContext>,
|
||||
dependencies: &[SkillDependencyInfo],
|
||||
) {
|
||||
let questions = dependencies
|
||||
.iter()
|
||||
.map(|dep| {
|
||||
let requirement = dep.description.as_ref().map_or_else(
|
||||
|| format!("The skill \"{}\" requires \"{}\" to be set.", dep.skill_name, dep.name),
|
||||
|description| {
|
||||
format!(
|
||||
"The skill \"{}\" requires \"{}\" to be set ({}).",
|
||||
dep.skill_name, dep.name, description
|
||||
)
|
||||
},
|
||||
);
|
||||
let question = format!(
|
||||
"{requirement} This is an experimental internal feature. The value is stored in memory for this session only.",
|
||||
);
|
||||
RequestUserInputQuestion {
|
||||
id: dep.name.clone(),
|
||||
header: "Skill requires environment variable".to_string(),
|
||||
question,
|
||||
is_other: false,
|
||||
is_secret: true,
|
||||
options: None,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if questions.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let args = RequestUserInputArgs { questions };
|
||||
let call_id = format!("skill-deps-{}", turn_context.sub_id);
|
||||
let response = sess
|
||||
.request_user_input(turn_context, call_id, args)
|
||||
.await
|
||||
.unwrap_or_else(|| RequestUserInputResponse {
|
||||
answers: HashMap::new(),
|
||||
});
|
||||
|
||||
if response.answers.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut values = HashMap::new();
|
||||
for (name, answer) in response.answers {
|
||||
let mut user_note = None;
|
||||
for entry in &answer.answers {
|
||||
if let Some(note) = entry.strip_prefix("user_note: ")
|
||||
&& !note.trim().is_empty()
|
||||
{
|
||||
user_note = Some(note.trim().to_string());
|
||||
}
|
||||
}
|
||||
if let Some(value) = user_note {
|
||||
values.insert(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
if values.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
sess.set_dependency_env(values).await;
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::instructions::SkillInstructions;
|
||||
use crate::skills::SkillLoadOutcome;
|
||||
use crate::skills::SkillMetadata;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
@@ -16,20 +16,9 @@ pub(crate) struct SkillInjections {
|
||||
}
|
||||
|
||||
pub(crate) async fn build_skill_injections(
|
||||
inputs: &[UserInput],
|
||||
skills: Option<&SkillLoadOutcome>,
|
||||
mentioned_skills: &[SkillMetadata],
|
||||
otel: Option<&OtelManager>,
|
||||
) -> SkillInjections {
|
||||
if inputs.is_empty() {
|
||||
return SkillInjections::default();
|
||||
}
|
||||
|
||||
let Some(outcome) = skills else {
|
||||
return SkillInjections::default();
|
||||
};
|
||||
|
||||
let mentioned_skills =
|
||||
collect_explicit_skill_mentions(inputs, &outcome.skills, &outcome.disabled_paths);
|
||||
if mentioned_skills.is_empty() {
|
||||
return SkillInjections::default();
|
||||
}
|
||||
@@ -42,15 +31,15 @@ pub(crate) async fn build_skill_injections(
|
||||
for skill in mentioned_skills {
|
||||
match fs::read_to_string(&skill.path).await {
|
||||
Ok(contents) => {
|
||||
emit_skill_injected_metric(otel, &skill, "ok");
|
||||
emit_skill_injected_metric(otel, skill, "ok");
|
||||
result.items.push(ResponseItem::from(SkillInstructions {
|
||||
name: skill.name,
|
||||
name: skill.name.clone(),
|
||||
path: skill.path.to_string_lossy().into_owned(),
|
||||
contents,
|
||||
}));
|
||||
}
|
||||
Err(err) => {
|
||||
emit_skill_injected_metric(otel, &skill, "error");
|
||||
emit_skill_injected_metric(otel, skill, "error");
|
||||
let message = format!(
|
||||
"Failed to load skill {name} at {path}: {err:#}",
|
||||
name = skill.name,
|
||||
@@ -76,23 +65,673 @@ fn emit_skill_injected_metric(otel: Option<&OtelManager>, skill: &SkillMetadata,
|
||||
);
|
||||
}
|
||||
|
||||
fn collect_explicit_skill_mentions(
|
||||
/// Collect explicitly mentioned skills from `$name` text mentions.
|
||||
///
|
||||
/// Text inputs are scanned once to extract `$skill-name` tokens, then we iterate `skills`
|
||||
/// in their existing order to preserve prior ordering semantics. Explicit links are
|
||||
/// resolved by path and plain names are only used when the match is unambiguous.
|
||||
///
|
||||
/// Complexity: `O(S + T + N_t * S)` time, `O(S)` space, where:
|
||||
/// `S` = number of skills, `T` = total text length, `N_t` = number of text inputs.
|
||||
pub(crate) fn collect_explicit_skill_mentions(
|
||||
inputs: &[UserInput],
|
||||
skills: &[SkillMetadata],
|
||||
disabled_paths: &HashSet<PathBuf>,
|
||||
skill_name_counts: &HashMap<String, usize>,
|
||||
connector_slug_counts: &HashMap<String, usize>,
|
||||
) -> Vec<SkillMetadata> {
|
||||
let selection_context = SkillSelectionContext {
|
||||
skills,
|
||||
disabled_paths,
|
||||
skill_name_counts,
|
||||
connector_slug_counts,
|
||||
};
|
||||
let mut selected: Vec<SkillMetadata> = Vec::new();
|
||||
let mut seen: HashSet<String> = HashSet::new();
|
||||
let mut seen_names: HashSet<String> = HashSet::new();
|
||||
let mut seen_paths: HashSet<PathBuf> = HashSet::new();
|
||||
|
||||
for input in inputs {
|
||||
if let UserInput::Skill { name, path } = input
|
||||
&& seen.insert(name.clone())
|
||||
&& let Some(skill) = skills.iter().find(|s| s.name == *name && s.path == *path)
|
||||
&& !disabled_paths.contains(&skill.path)
|
||||
{
|
||||
selected.push(skill.clone());
|
||||
if let UserInput::Text { text, .. } = input {
|
||||
let mentioned_names = extract_tool_mentions(text);
|
||||
select_skills_from_mentions(
|
||||
&selection_context,
|
||||
&mentioned_names,
|
||||
&mut seen_names,
|
||||
&mut seen_paths,
|
||||
&mut selected,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
selected
|
||||
}
|
||||
|
||||
struct SkillSelectionContext<'a> {
|
||||
skills: &'a [SkillMetadata],
|
||||
disabled_paths: &'a HashSet<PathBuf>,
|
||||
skill_name_counts: &'a HashMap<String, usize>,
|
||||
connector_slug_counts: &'a HashMap<String, usize>,
|
||||
}
|
||||
|
||||
pub(crate) struct ToolMentions<'a> {
|
||||
names: HashSet<&'a str>,
|
||||
paths: HashSet<&'a str>,
|
||||
plain_names: HashSet<&'a str>,
|
||||
}
|
||||
|
||||
impl<'a> ToolMentions<'a> {
|
||||
fn is_empty(&self) -> bool {
|
||||
self.names.is_empty() && self.paths.is_empty()
|
||||
}
|
||||
|
||||
pub(crate) fn plain_names(&self) -> impl Iterator<Item = &'a str> + '_ {
|
||||
self.plain_names.iter().copied()
|
||||
}
|
||||
|
||||
pub(crate) fn paths(&self) -> impl Iterator<Item = &'a str> + '_ {
|
||||
self.paths.iter().copied()
|
||||
}
|
||||
}
|
||||
|
||||
/// Category of a mentioned resource, derived from its path.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum ToolMentionKind {
    App,
    Mcp,
    Skill,
    Other,
}

const APP_PATH_PREFIX: &str = "app://";
const MCP_PATH_PREFIX: &str = "mcp://";
const SKILL_PATH_PREFIX: &str = "skill://";
const SKILL_FILENAME: &str = "SKILL.md";

/// Classify a mention's resource path by its scheme prefix; a path whose
/// final component is `SKILL.md` also counts as a skill.
pub(crate) fn tool_kind_for_path(path: &str) -> ToolMentionKind {
    if path.starts_with(APP_PATH_PREFIX) {
        return ToolMentionKind::App;
    }
    if path.starts_with(MCP_PATH_PREFIX) {
        return ToolMentionKind::Mcp;
    }
    if path.starts_with(SKILL_PATH_PREFIX) || is_skill_filename(path) {
        return ToolMentionKind::Skill;
    }
    ToolMentionKind::Other
}

/// True when the last path component equals `SKILL.md` (case-insensitive).
/// Both `/` and `\` are accepted as separators.
fn is_skill_filename(path: &str) -> bool {
    path.rsplit(['/', '\\'])
        .next()
        .unwrap_or(path)
        .eq_ignore_ascii_case(SKILL_FILENAME)
}

/// Extract the app id from an `app://<id>` path. Returns `None` for other
/// paths and for an empty id.
pub(crate) fn app_id_from_path(path: &str) -> Option<&str> {
    match path.strip_prefix(APP_PATH_PREFIX) {
        None | Some("") => None,
        Some(id) => Some(id),
    }
}

/// Strip a leading `skill://` scheme; other paths pass through unchanged.
pub(crate) fn normalize_skill_path(path: &str) -> &str {
    path.strip_prefix(SKILL_PATH_PREFIX).unwrap_or(path)
}
|
||||
|
||||
/// Extract `$tool-name` mentions from a single text input.
///
/// Supports explicit resource links in the form `[$tool-name](resource path)`. When a
/// resource path is present, it is captured for exact path matching while also tracking
/// the name for fallback matching.
pub(crate) fn extract_tool_mentions(text: &str) -> ToolMentions<'_> {
    let text_bytes = text.as_bytes();
    let mut mentioned_names: HashSet<&str> = HashSet::new();
    let mut mentioned_paths: HashSet<&str> = HashSet::new();
    // Names seen as bare `$name` (no link); used for name-based fallback.
    let mut plain_names: HashSet<&str> = HashSet::new();

    let mut index = 0;
    while index < text_bytes.len() {
        let byte = text_bytes[index];
        // `[` may open a `[$name](path)` link; try to parse it first.
        if byte == b'['
            && let Some((name, path, end_index)) =
                parse_linked_tool_mention(text, text_bytes, index)
        {
            if !is_common_env_var(name) {
                let kind = tool_kind_for_path(path);
                // App/MCP links are tracked by path only, never by name.
                if !matches!(kind, ToolMentionKind::App | ToolMentionKind::Mcp) {
                    mentioned_names.insert(name);
                }
                mentioned_paths.insert(path);
            }
            // Resume scanning just past the link's closing `)`.
            index = end_index;
            continue;
        }

        if byte != b'$' {
            index += 1;
            continue;
        }

        // Bare `$name` mention: the byte after `$` must be a name char.
        let name_start = index + 1;
        let Some(first_name_byte) = text_bytes.get(name_start) else {
            index += 1;
            continue;
        };
        if !is_mention_name_char(*first_name_byte) {
            index += 1;
            continue;
        }

        // Consume the maximal run of name characters.
        let mut name_end = name_start + 1;
        while let Some(next_byte) = text_bytes.get(name_end)
            && is_mention_name_char(*next_byte)
        {
            name_end += 1;
        }

        let name = &text[name_start..name_end];
        // Skip well-known environment variables (e.g. `$PATH`) to avoid
        // treating shell-style text as tool mentions.
        if !is_common_env_var(name) {
            mentioned_names.insert(name);
            plain_names.insert(name);
        }
        index = name_end;
    }

    ToolMentions {
        names: mentioned_names,
        paths: mentioned_paths,
        plain_names,
    }
}
|
||||
|
||||
/// Select mentioned skills while preserving the order of `skills`.
|
||||
/// Select mentioned skills while preserving the order of `skills`.
///
/// Runs two passes, both skipping disabled skills and skills whose path was
/// already selected:
/// 1. exact resource-path matches from `[$name](path)` links;
/// 2. bare `$name` matches, accepted only when the name maps to exactly one
///    selectable skill and does not collide with a connector slug.
fn select_skills_from_mentions(
    selection_context: &SkillSelectionContext<'_>,
    mentions: &ToolMentions<'_>,
    seen_names: &mut HashSet<String>,
    seen_paths: &mut HashSet<PathBuf>,
    selected: &mut Vec<SkillMetadata>,
) {
    if mentions.is_empty() {
        return;
    }

    // Linked paths that could refer to skills (app/MCP links excluded),
    // with any `skill://` scheme stripped for exact comparison.
    let mention_skill_paths: HashSet<&str> = mentions
        .paths()
        .filter(|path| {
            !matches!(
                tool_kind_for_path(path),
                ToolMentionKind::App | ToolMentionKind::Mcp
            )
        })
        .map(normalize_skill_path)
        .collect();

    // Pass 1: exact path matches take priority over name-based fallback.
    for skill in selection_context.skills {
        if selection_context.disabled_paths.contains(&skill.path)
            || seen_paths.contains(&skill.path)
        {
            continue;
        }

        let path_str = skill.path.to_string_lossy();
        if mention_skill_paths.contains(path_str.as_ref()) {
            seen_paths.insert(skill.path.clone());
            seen_names.insert(skill.name.clone());
            selected.push(skill.clone());
        }
    }

    // Pass 2: bare-name fallback for `$name` mentions without a link.
    for skill in selection_context.skills {
        if selection_context.disabled_paths.contains(&skill.path)
            || seen_paths.contains(&skill.path)
        {
            continue;
        }

        if !mentions.plain_names.contains(skill.name.as_str()) {
            continue;
        }

        // Ambiguity guards: skip names shared by more than one selectable
        // skill, and names that also match a connector slug
        // (slug lookup is case-insensitive via lowercasing).
        let skill_count = selection_context
            .skill_name_counts
            .get(skill.name.as_str())
            .copied()
            .unwrap_or(0);
        let connector_count = selection_context
            .connector_slug_counts
            .get(&skill.name.to_ascii_lowercase())
            .copied()
            .unwrap_or(0);
        if skill_count != 1 || connector_count != 0 {
            continue;
        }

        // `insert` returning true means this name was not selected before.
        if seen_names.insert(skill.name.clone()) {
            seen_paths.insert(skill.path.clone());
            selected.push(skill.clone());
        }
    }
}
|
||||
|
||||
/// Parse a `[$name](path)` linked mention whose `[` sits at byte `start`.
///
/// Returns `(name, trimmed path, index just past the closing ')')`, or
/// `None` when the text at `start` is not a well-formed linked mention.
/// ASCII whitespace is permitted between `]` and `(`; the captured path is
/// trimmed and must be non-empty.
fn parse_linked_tool_mention<'a>(
    text: &'a str,
    text_bytes: &[u8],
    start: usize,
) -> Option<(&'a str, &'a str, usize)> {
    // `[` must be immediately followed by `$`.
    let dollar_index = start + 1;
    if text_bytes.get(dollar_index) != Some(&b'$') {
        return None;
    }

    // The name needs at least one valid leading character.
    let name_start = dollar_index + 1;
    let first_name_byte = text_bytes.get(name_start)?;
    if !is_mention_name_char(*first_name_byte) {
        return None;
    }

    // Consume the maximal run of name characters.
    let mut name_end = name_start + 1;
    while let Some(next_byte) = text_bytes.get(name_end)
        && is_mention_name_char(*next_byte)
    {
        name_end += 1;
    }

    // The name must be terminated by `]`.
    if text_bytes.get(name_end) != Some(&b']') {
        return None;
    }

    // Allow ASCII whitespace between `]` and `(`.
    let mut path_start = name_end + 1;
    while let Some(next_byte) = text_bytes.get(path_start)
        && next_byte.is_ascii_whitespace()
    {
        path_start += 1;
    }
    if text_bytes.get(path_start) != Some(&b'(') {
        return None;
    }

    // Scan forward to the closing `)`; bail if the text ends first.
    let mut path_end = path_start + 1;
    while let Some(next_byte) = text_bytes.get(path_end)
        && *next_byte != b')'
    {
        path_end += 1;
    }
    if text_bytes.get(path_end) != Some(&b')') {
        return None;
    }

    // Empty (or whitespace-only) paths are rejected.
    let path = text[path_start + 1..path_end].trim();
    if path.is_empty() {
        return None;
    }

    let name = &text[name_start..name_end];
    Some((name, path, path_end + 1))
}
|
||||
|
||||
/// True for well-known environment-variable names (case-insensitive), so
/// shell-style text like `$PATH` is not mistaken for a tool mention.
fn is_common_env_var(name: &str) -> bool {
    const COMMON_ENV_VARS: [&str; 11] = [
        "PATH",
        "HOME",
        "USER",
        "SHELL",
        "PWD",
        "TMPDIR",
        "TEMP",
        "TMP",
        "LANG",
        "TERM",
        "XDG_CONFIG_HOME",
    ];
    COMMON_ENV_VARS
        .iter()
        .any(|candidate| name.eq_ignore_ascii_case(candidate))
}
|
||||
|
||||
/// Test helper: does `text` contain a bare `$skill_name` mention with a
/// proper word boundary immediately after the name?
#[cfg(test)]
fn text_mentions_skill(text: &str, skill_name: &str) -> bool {
    if skill_name.is_empty() {
        return false;
    }

    let bytes = text.as_bytes();
    let needle = skill_name.as_bytes();

    (0..bytes.len()).any(|index| {
        if bytes[index] != b'$' {
            return false;
        }
        let candidate_start = index + 1;
        let Some(candidate) = bytes.get(candidate_start..) else {
            return false;
        };
        if !candidate.starts_with(needle) {
            return false;
        }
        // Reject e.g. `$alpha-skillx` when searching for `alpha-skill`:
        // the byte after the match must not extend the name.
        bytes
            .get(candidate_start + needle.len())
            .copied()
            .is_none_or(|b| !is_mention_name_char(b))
    })
}

/// Bytes allowed inside a `$mention` name: ASCII alphanumerics, `_`, `-`.
fn is_mention_name_char(byte: u8) -> bool {
    byte.is_ascii_alphanumeric() || byte == b'_' || byte == b'-'
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::collections::HashMap;
    use std::collections::HashSet;

    /// Build a minimal skill with only `name` and `path` populated.
    fn make_skill(name: &str, path: &str) -> SkillMetadata {
        SkillMetadata {
            name: name.to_string(),
            description: format!("{name} skill"),
            short_description: None,
            interface: None,
            dependencies: None,
            path: PathBuf::from(path),
            scope: codex_protocol::protocol::SkillScope::User,
        }
    }

    fn set<'a>(items: &'a [&'a str]) -> HashSet<&'a str> {
        items.iter().copied().collect()
    }

    /// Assert the names/paths `extract_tool_mentions` captures from `text`.
    fn assert_mentions(text: &str, expected_names: &[&str], expected_paths: &[&str]) {
        let mentions = extract_tool_mentions(text);
        assert_eq!(mentions.names, set(expected_names));
        assert_eq!(mentions.paths, set(expected_paths));
    }

    /// Count selectable (non-disabled) skills per name, mirroring what
    /// production callers pass as `skill_name_counts`.
    fn build_skill_name_counts(
        skills: &[SkillMetadata],
        disabled_paths: &HashSet<PathBuf>,
    ) -> HashMap<String, usize> {
        let mut counts = HashMap::new();
        for skill in skills {
            if disabled_paths.contains(&skill.path) {
                continue;
            }
            *counts.entry(skill.name.clone()).or_insert(0) += 1;
        }
        counts
    }

    /// Convenience wrapper around `collect_explicit_skill_mentions`.
    fn collect_mentions(
        inputs: &[UserInput],
        skills: &[SkillMetadata],
        disabled_paths: &HashSet<PathBuf>,
        connector_slug_counts: &HashMap<String, usize>,
    ) -> Vec<SkillMetadata> {
        let skill_name_counts = build_skill_name_counts(skills, disabled_paths);
        collect_explicit_skill_mentions(
            inputs,
            skills,
            disabled_paths,
            &skill_name_counts,
            connector_slug_counts,
        )
    }

    /// Build the single text input used by most selection tests.
    fn text_input(text: &str) -> Vec<UserInput> {
        vec![UserInput::Text {
            text: text.to_string(),
            text_elements: Vec::new(),
        }]
    }

    #[test]
    fn text_mentions_skill_requires_exact_boundary() {
        assert_eq!(
            true,
            text_mentions_skill("use $notion-research-doc please", "notion-research-doc")
        );
        assert_eq!(
            true,
            text_mentions_skill("($notion-research-doc)", "notion-research-doc")
        );
        assert_eq!(
            true,
            text_mentions_skill("$notion-research-doc.", "notion-research-doc")
        );
        assert_eq!(
            false,
            text_mentions_skill("$notion-research-docs", "notion-research-doc")
        );
        assert_eq!(
            false,
            text_mentions_skill("$notion-research-doc_extra", "notion-research-doc")
        );
    }

    #[test]
    fn text_mentions_skill_handles_end_boundary_and_near_misses() {
        assert_eq!(true, text_mentions_skill("$alpha-skill", "alpha-skill"));
        assert_eq!(false, text_mentions_skill("$alpha-skillx", "alpha-skill"));
        assert_eq!(
            true,
            text_mentions_skill("$alpha-skillx and later $alpha-skill ", "alpha-skill")
        );
    }

    #[test]
    fn text_mentions_skill_handles_many_dollars_without_looping() {
        let prefix = "$".repeat(256);
        let text = format!("{prefix} not-a-mention");
        assert_eq!(false, text_mentions_skill(&text, "alpha-skill"));
    }

    #[test]
    fn extract_tool_mentions_handles_plain_and_linked_mentions() {
        assert_mentions(
            "use $alpha and [$beta](/tmp/beta)",
            &["alpha", "beta"],
            &["/tmp/beta"],
        );
    }

    #[test]
    fn extract_tool_mentions_skips_common_env_vars() {
        assert_mentions("use $PATH and $alpha", &["alpha"], &[]);
        assert_mentions("use [$HOME](/tmp/skill)", &[], &[]);
        assert_mentions("use $XDG_CONFIG_HOME and $beta", &["beta"], &[]);
    }

    #[test]
    fn extract_tool_mentions_requires_link_syntax() {
        assert_mentions("[beta](/tmp/beta)", &[], &[]);
        assert_mentions("[$beta] /tmp/beta", &["beta"], &[]);
        assert_mentions("[$beta]()", &["beta"], &[]);
    }

    #[test]
    fn extract_tool_mentions_trims_linked_paths_and_allows_spacing() {
        assert_mentions("use [$beta] ( /tmp/beta )", &["beta"], &["/tmp/beta"]);
    }

    #[test]
    fn extract_tool_mentions_stops_at_non_name_chars() {
        assert_mentions(
            "use $alpha.skill and $beta_extra",
            &["alpha", "beta_extra"],
            &[],
        );
    }

    #[test]
    fn collect_explicit_skill_mentions_text_respects_skill_order() {
        let alpha = make_skill("alpha-skill", "/tmp/alpha");
        let beta = make_skill("beta-skill", "/tmp/beta");
        let skills = vec![beta.clone(), alpha.clone()];
        let inputs = text_input("first $alpha-skill then $beta-skill");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        // Text scanning should not change the previous selection ordering semantics.
        assert_eq!(selected, vec![beta, alpha]);
    }

    #[test]
    fn collect_explicit_skill_mentions_ignores_structured_inputs() {
        let alpha = make_skill("alpha-skill", "/tmp/alpha");
        let beta = make_skill("beta-skill", "/tmp/beta");
        let skills = vec![alpha.clone(), beta];
        let inputs = vec![
            UserInput::Text {
                text: "please run $alpha-skill".to_string(),
                text_elements: Vec::new(),
            },
            UserInput::Skill {
                name: "beta-skill".to_string(),
                path: PathBuf::from("/tmp/beta"),
            },
        ];
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, vec![alpha]);
    }

    #[test]
    fn collect_explicit_skill_mentions_dedupes_by_path() {
        let alpha = make_skill("alpha-skill", "/tmp/alpha");
        let skills = vec![alpha.clone()];
        let inputs = text_input("use $alpha-skill and [$alpha-skill](/tmp/alpha)");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, vec![alpha]);
    }

    #[test]
    fn collect_explicit_skill_mentions_skips_ambiguous_name() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let beta = make_skill("demo-skill", "/tmp/beta");
        let skills = vec![alpha, beta];
        let inputs = text_input("use $demo-skill and again $demo-skill");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, Vec::new());
    }

    #[test]
    fn collect_explicit_skill_mentions_prefers_linked_path_over_name() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let beta = make_skill("demo-skill", "/tmp/beta");
        let skills = vec![alpha, beta.clone()];
        let inputs = text_input("use $demo-skill and [$demo-skill](/tmp/beta)");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, vec![beta]);
    }

    #[test]
    fn collect_explicit_skill_mentions_skips_plain_name_when_connector_matches() {
        let alpha = make_skill("alpha-skill", "/tmp/alpha");
        let skills = vec![alpha];
        let inputs = text_input("use $alpha-skill");
        let connector_counts = HashMap::from([("alpha-skill".to_string(), 1)]);

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, Vec::new());
    }

    #[test]
    fn collect_explicit_skill_mentions_allows_explicit_path_with_connector_conflict() {
        let alpha = make_skill("alpha-skill", "/tmp/alpha");
        let skills = vec![alpha.clone()];
        let inputs = text_input("use [$alpha-skill](/tmp/alpha)");
        let connector_counts = HashMap::from([("alpha-skill".to_string(), 1)]);

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, vec![alpha]);
    }

    #[test]
    fn collect_explicit_skill_mentions_skips_when_linked_path_disabled() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let beta = make_skill("demo-skill", "/tmp/beta");
        let skills = vec![alpha, beta];
        let inputs = text_input("use [$demo-skill](/tmp/alpha)");
        let disabled = HashSet::from([PathBuf::from("/tmp/alpha")]);
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &disabled, &connector_counts);

        assert_eq!(selected, Vec::new());
    }

    #[test]
    fn collect_explicit_skill_mentions_prefers_resource_path() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let beta = make_skill("demo-skill", "/tmp/beta");
        let skills = vec![alpha, beta.clone()];
        let inputs = text_input("see [$demo-skill](/tmp/beta)");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, vec![beta]);
    }

    #[test]
    fn collect_explicit_skill_mentions_skips_missing_path_with_no_fallback() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let beta = make_skill("demo-skill", "/tmp/beta");
        let skills = vec![alpha, beta];
        let inputs = text_input("use [$demo-skill](/tmp/missing)");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, Vec::new());
    }

    #[test]
    fn collect_explicit_skill_mentions_skips_missing_path_without_fallback() {
        let alpha = make_skill("demo-skill", "/tmp/alpha");
        let skills = vec![alpha];
        let inputs = text_input("use [$demo-skill](/tmp/missing)");
        let connector_counts = HashMap::new();

        let selected = collect_mentions(&inputs, &skills, &HashSet::new(), &connector_counts);

        assert_eq!(selected, Vec::new());
    }
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use crate::config::Config;
|
||||
use crate::config_loader::ConfigLayerStack;
|
||||
use crate::config_loader::ConfigLayerStackOrdering;
|
||||
use crate::skills::model::SkillDependencies;
|
||||
use crate::skills::model::SkillError;
|
||||
use crate::skills::model::SkillInterface;
|
||||
use crate::skills::model::SkillLoadOutcome;
|
||||
use crate::skills::model::SkillMetadata;
|
||||
use crate::skills::model::SkillToolDependency;
|
||||
use crate::skills::system::system_cache_root_dir;
|
||||
use codex_app_server_protocol::ConfigLayerSource;
|
||||
use codex_protocol::protocol::SkillScope;
|
||||
@@ -35,9 +37,11 @@ struct SkillFrontmatterMetadata {
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
struct SkillToml {
|
||||
struct SkillMetadataFile {
|
||||
#[serde(default)]
|
||||
interface: Option<Interface>,
|
||||
#[serde(default)]
|
||||
dependencies: Option<Dependencies>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
@@ -50,13 +54,36 @@ struct Interface {
|
||||
default_prompt: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
struct Dependencies {
|
||||
#[serde(default)]
|
||||
tools: Vec<DependencyTool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
struct DependencyTool {
|
||||
#[serde(rename = "type")]
|
||||
kind: Option<String>,
|
||||
value: Option<String>,
|
||||
description: Option<String>,
|
||||
transport: Option<String>,
|
||||
command: Option<String>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
const SKILLS_FILENAME: &str = "SKILL.md";
|
||||
const SKILLS_TOML_FILENAME: &str = "SKILL.toml";
|
||||
const SKILLS_JSON_FILENAME: &str = "SKILL.json";
|
||||
const SKILLS_DIR_NAME: &str = "skills";
|
||||
const MAX_NAME_LEN: usize = 64;
|
||||
const MAX_DESCRIPTION_LEN: usize = 1024;
|
||||
const MAX_SHORT_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
const MAX_DEFAULT_PROMPT_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
const MAX_DEPENDENCY_TYPE_LEN: usize = MAX_NAME_LEN;
|
||||
const MAX_DEPENDENCY_TRANSPORT_LEN: usize = MAX_NAME_LEN;
|
||||
const MAX_DEPENDENCY_VALUE_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
const MAX_DEPENDENCY_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
const MAX_DEPENDENCY_COMMAND_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
const MAX_DEPENDENCY_URL_LEN: usize = MAX_DESCRIPTION_LEN;
|
||||
// Traversal depth from the skills root.
|
||||
const MAX_SCAN_DEPTH: usize = 6;
|
||||
const MAX_SKILLS_DIRS_PER_ROOT: usize = 2000;
|
||||
@@ -345,7 +372,7 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
|
||||
.as_deref()
|
||||
.map(sanitize_single_line)
|
||||
.filter(|value| !value.is_empty());
|
||||
let interface = load_skill_interface(path);
|
||||
let (interface, dependencies) = load_skill_metadata(path);
|
||||
|
||||
validate_len(&name, MAX_NAME_LEN, "name")?;
|
||||
validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
|
||||
@@ -364,41 +391,54 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
|
||||
description,
|
||||
short_description,
|
||||
interface,
|
||||
dependencies,
|
||||
path: resolved_path,
|
||||
scope,
|
||||
})
|
||||
}
|
||||
|
||||
fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
|
||||
// Fail open: optional SKILL.toml metadata should not block loading SKILL.md.
|
||||
let skill_dir = skill_path.parent()?;
|
||||
let interface_path = skill_dir.join(SKILLS_TOML_FILENAME);
|
||||
if !interface_path.exists() {
|
||||
return None;
|
||||
fn load_skill_metadata(skill_path: &Path) -> (Option<SkillInterface>, Option<SkillDependencies>) {
|
||||
// Fail open: optional metadata should not block loading SKILL.md.
|
||||
let Some(skill_dir) = skill_path.parent() else {
|
||||
return (None, None);
|
||||
};
|
||||
let metadata_path = skill_dir.join(SKILLS_JSON_FILENAME);
|
||||
if !metadata_path.exists() {
|
||||
return (None, None);
|
||||
}
|
||||
|
||||
let contents = match fs::read_to_string(&interface_path) {
|
||||
let contents = match fs::read_to_string(&metadata_path) {
|
||||
Ok(contents) => contents,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: failed to read SKILL.toml: {error}",
|
||||
path = interface_path.display()
|
||||
"ignoring {path}: failed to read {label}: {error}",
|
||||
path = metadata_path.display(),
|
||||
label = SKILLS_JSON_FILENAME
|
||||
);
|
||||
return None;
|
||||
return (None, None);
|
||||
}
|
||||
};
|
||||
let parsed: SkillToml = match toml::from_str(&contents) {
|
||||
|
||||
let parsed: SkillMetadataFile = match serde_json::from_str(&contents) {
|
||||
Ok(parsed) => parsed,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: invalid TOML: {error}",
|
||||
path = interface_path.display()
|
||||
"ignoring {path}: invalid {label}: {error}",
|
||||
path = metadata_path.display(),
|
||||
label = SKILLS_JSON_FILENAME
|
||||
);
|
||||
return None;
|
||||
return (None, None);
|
||||
}
|
||||
};
|
||||
let interface = parsed.interface?;
|
||||
|
||||
(
|
||||
resolve_interface(parsed.interface, skill_dir),
|
||||
resolve_dependencies(parsed.dependencies),
|
||||
)
|
||||
}
|
||||
|
||||
fn resolve_interface(interface: Option<Interface>, skill_dir: &Path) -> Option<SkillInterface> {
|
||||
let interface = interface?;
|
||||
let interface = SkillInterface {
|
||||
display_name: resolve_str(
|
||||
interface.display_name,
|
||||
@@ -428,6 +468,58 @@ fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
|
||||
if has_fields { Some(interface) } else { None }
|
||||
}
|
||||
|
||||
fn resolve_dependencies(dependencies: Option<Dependencies>) -> Option<SkillDependencies> {
|
||||
let dependencies = dependencies?;
|
||||
let tools: Vec<SkillToolDependency> = dependencies
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(resolve_dependency_tool)
|
||||
.collect();
|
||||
if tools.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SkillDependencies { tools })
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_dependency_tool(tool: DependencyTool) -> Option<SkillToolDependency> {
|
||||
let r#type = resolve_required_str(
|
||||
tool.kind,
|
||||
MAX_DEPENDENCY_TYPE_LEN,
|
||||
"dependencies.tools.type",
|
||||
)?;
|
||||
let value = resolve_required_str(
|
||||
tool.value,
|
||||
MAX_DEPENDENCY_VALUE_LEN,
|
||||
"dependencies.tools.value",
|
||||
)?;
|
||||
let description = resolve_str(
|
||||
tool.description,
|
||||
MAX_DEPENDENCY_DESCRIPTION_LEN,
|
||||
"dependencies.tools.description",
|
||||
);
|
||||
let transport = resolve_str(
|
||||
tool.transport,
|
||||
MAX_DEPENDENCY_TRANSPORT_LEN,
|
||||
"dependencies.tools.transport",
|
||||
);
|
||||
let command = resolve_str(
|
||||
tool.command,
|
||||
MAX_DEPENDENCY_COMMAND_LEN,
|
||||
"dependencies.tools.command",
|
||||
);
|
||||
let url = resolve_str(tool.url, MAX_DEPENDENCY_URL_LEN, "dependencies.tools.url");
|
||||
|
||||
Some(SkillToolDependency {
|
||||
r#type,
|
||||
value,
|
||||
description,
|
||||
transport,
|
||||
command,
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_asset_path(
|
||||
skill_dir: &Path,
|
||||
field: &'static str,
|
||||
@@ -511,6 +603,18 @@ fn resolve_str(value: Option<String>, max_len: usize, field: &'static str) -> Op
|
||||
Some(value)
|
||||
}
|
||||
|
||||
fn resolve_required_str(
|
||||
value: Option<String>,
|
||||
max_len: usize,
|
||||
field: &'static str,
|
||||
) -> Option<String> {
|
||||
let Some(value) = value else {
|
||||
tracing::warn!("ignoring {field}: value is missing");
|
||||
return None;
|
||||
};
|
||||
resolve_str(Some(value), max_len, field)
|
||||
}
|
||||
|
||||
fn resolve_color_str(value: Option<String>, field: &'static str) -> Option<String> {
|
||||
let value = value?;
|
||||
let value = value.trim();
|
||||
@@ -755,29 +859,136 @@ mod tests {
|
||||
path
|
||||
}
|
||||
|
||||
fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
|
||||
let path = skill_dir.join(SKILLS_TOML_FILENAME);
|
||||
fn write_skill_metadata_at(skill_dir: &Path, filename: &str, contents: &str) -> PathBuf {
|
||||
let path = skill_dir.join(filename);
|
||||
fs::write(&path, contents).unwrap();
|
||||
path
|
||||
}
|
||||
|
||||
fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
|
||||
write_skill_metadata_at(skill_dir, SKILLS_JSON_FILENAME, contents)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn loads_skill_interface_metadata_happy_path() {
|
||||
async fn loads_skill_dependencies_metadata_from_json() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
|
||||
let skill_path = write_skill(&codex_home, "demo", "dep-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
|
||||
write_skill_metadata_at(
|
||||
skill_dir,
|
||||
SKILLS_JSON_FILENAME,
|
||||
r#"
|
||||
{
|
||||
"dependencies": {
|
||||
"tools": [
|
||||
{
|
||||
"type": "env_var",
|
||||
"value": "GITHUB_TOKEN",
|
||||
"description": "GitHub API token with repo scopes"
|
||||
},
|
||||
{
|
||||
"type": "mcp",
|
||||
"value": "github",
|
||||
"description": "GitHub MCP server",
|
||||
"transport": "streamable_http",
|
||||
"url": "https://example.com/mcp"
|
||||
},
|
||||
{
|
||||
"type": "cli",
|
||||
"value": "gh",
|
||||
"description": "GitHub CLI"
|
||||
},
|
||||
{
|
||||
"type": "mcp",
|
||||
"value": "local-gh",
|
||||
"description": "Local GH MCP server",
|
||||
"transport": "stdio",
|
||||
"command": "gh-mcp"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
let cfg = make_config(&codex_home).await;
|
||||
let outcome = load_skills(&cfg);
|
||||
|
||||
assert!(
|
||||
outcome.errors.is_empty(),
|
||||
"unexpected errors: {:?}",
|
||||
outcome.errors
|
||||
);
|
||||
assert_eq!(
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "dep-skill".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: Some(SkillDependencies {
|
||||
tools: vec![
|
||||
SkillToolDependency {
|
||||
r#type: "env_var".to_string(),
|
||||
value: "GITHUB_TOKEN".to_string(),
|
||||
description: Some("GitHub API token with repo scopes".to_string()),
|
||||
transport: None,
|
||||
command: None,
|
||||
url: None,
|
||||
},
|
||||
SkillToolDependency {
|
||||
r#type: "mcp".to_string(),
|
||||
value: "github".to_string(),
|
||||
description: Some("GitHub MCP server".to_string()),
|
||||
transport: Some("streamable_http".to_string()),
|
||||
command: None,
|
||||
url: Some("https://example.com/mcp".to_string()),
|
||||
},
|
||||
SkillToolDependency {
|
||||
r#type: "cli".to_string(),
|
||||
value: "gh".to_string(),
|
||||
description: Some("GitHub CLI".to_string()),
|
||||
transport: None,
|
||||
command: None,
|
||||
url: None,
|
||||
},
|
||||
SkillToolDependency {
|
||||
r#type: "mcp".to_string(),
|
||||
value: "local-gh".to_string(),
|
||||
description: Some("Local GH MCP server".to_string()),
|
||||
transport: Some("stdio".to_string()),
|
||||
command: Some("gh-mcp".to_string()),
|
||||
url: None,
|
||||
},
|
||||
],
|
||||
}),
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn loads_skill_interface_metadata_from_json() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
let normalized_skill_dir = normalized(skill_dir);
|
||||
|
||||
write_skill_interface_at(
|
||||
skill_dir,
|
||||
r##"
|
||||
[interface]
|
||||
display_name = "UI Skill"
|
||||
short_description = " short desc "
|
||||
icon_small = "./assets/small-400px.png"
|
||||
icon_large = "./assets/large-logo.svg"
|
||||
brand_color = "#3B82F6"
|
||||
default_prompt = " default prompt "
|
||||
{
|
||||
"interface": {
|
||||
"display_name": "UI Skill",
|
||||
"short_description": " short desc ",
|
||||
"icon_small": "./assets/small-400px.png",
|
||||
"icon_large": "./assets/large-logo.svg",
|
||||
"brand_color": "#3B82F6",
|
||||
"default_prompt": " default prompt "
|
||||
}
|
||||
}
|
||||
"##,
|
||||
);
|
||||
|
||||
@@ -793,7 +1004,7 @@ default_prompt = " default prompt "
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from toml".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: Some(SkillInterface {
|
||||
display_name: Some("UI Skill".to_string()),
|
||||
@@ -803,7 +1014,8 @@ default_prompt = " default prompt "
|
||||
brand_color: Some("#3B82F6".to_string()),
|
||||
default_prompt: Some("default prompt".to_string()),
|
||||
}),
|
||||
path: normalized(&skill_path),
|
||||
dependencies: None,
|
||||
path: normalized(skill_path.as_path()),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
);
|
||||
@@ -812,17 +1024,20 @@ default_prompt = " default prompt "
|
||||
#[tokio::test]
|
||||
async fn accepts_icon_paths_under_assets_dir() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
let normalized_skill_dir = normalized(skill_dir);
|
||||
|
||||
write_skill_interface_at(
|
||||
skill_dir,
|
||||
r#"
|
||||
[interface]
|
||||
display_name = "UI Skill"
|
||||
icon_small = "assets/icon.png"
|
||||
icon_large = "./assets/logo.svg"
|
||||
{
|
||||
"interface": {
|
||||
"display_name": "UI Skill",
|
||||
"icon_small": "assets/icon.png",
|
||||
"icon_large": "./assets/logo.svg"
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
@@ -838,7 +1053,7 @@ icon_large = "./assets/logo.svg"
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from toml".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: Some(SkillInterface {
|
||||
display_name: Some("UI Skill".to_string()),
|
||||
@@ -848,6 +1063,7 @@ icon_large = "./assets/logo.svg"
|
||||
brand_color: None,
|
||||
default_prompt: None,
|
||||
}),
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -857,14 +1073,17 @@ icon_large = "./assets/logo.svg"
|
||||
#[tokio::test]
|
||||
async fn ignores_invalid_brand_color() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
|
||||
write_skill_interface_at(
|
||||
skill_dir,
|
||||
r#"
|
||||
[interface]
|
||||
brand_color = "blue"
|
||||
{
|
||||
"interface": {
|
||||
"brand_color": "blue"
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
@@ -880,9 +1099,10 @@ brand_color = "blue"
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from toml".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -892,7 +1112,7 @@ brand_color = "blue"
|
||||
#[tokio::test]
|
||||
async fn ignores_default_prompt_over_max_length() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
let normalized_skill_dir = normalized(skill_dir);
|
||||
let too_long = "x".repeat(MAX_DEFAULT_PROMPT_LEN + 1);
|
||||
@@ -901,10 +1121,13 @@ brand_color = "blue"
|
||||
skill_dir,
|
||||
&format!(
|
||||
r##"
|
||||
[interface]
|
||||
display_name = "UI Skill"
|
||||
icon_small = "./assets/small-400px.png"
|
||||
default_prompt = "{too_long}"
|
||||
{{
|
||||
"interface": {{
|
||||
"display_name": "UI Skill",
|
||||
"icon_small": "./assets/small-400px.png",
|
||||
"default_prompt": "{too_long}"
|
||||
}}
|
||||
}}
|
||||
"##
|
||||
),
|
||||
);
|
||||
@@ -921,7 +1144,7 @@ default_prompt = "{too_long}"
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from toml".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: Some(SkillInterface {
|
||||
display_name: Some("UI Skill".to_string()),
|
||||
@@ -931,6 +1154,7 @@ default_prompt = "{too_long}"
|
||||
brand_color: None,
|
||||
default_prompt: None,
|
||||
}),
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -940,15 +1164,18 @@ default_prompt = "{too_long}"
|
||||
#[tokio::test]
|
||||
async fn drops_interface_when_icons_are_invalid() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from toml");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
|
||||
write_skill_interface_at(
|
||||
skill_dir,
|
||||
r#"
|
||||
[interface]
|
||||
icon_small = "icon.png"
|
||||
icon_large = "./assets/../logo.svg"
|
||||
{
|
||||
"interface": {
|
||||
"icon_small": "icon.png",
|
||||
"icon_large": "./assets/../logo.svg"
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
@@ -964,9 +1191,10 @@ icon_large = "./assets/../logo.svg"
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from toml".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1009,6 +1237,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from link".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&shared_skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1067,6 +1296,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "still loads".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1101,6 +1331,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from link".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&shared_skill_path),
|
||||
scope: SkillScope::Admin,
|
||||
}]
|
||||
@@ -1139,6 +1370,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from link".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&linked_skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
}]
|
||||
@@ -1200,6 +1432,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "loads".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&within_depth_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1225,6 +1458,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "does things carefully".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1254,6 +1488,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "long description".to_string(),
|
||||
short_description: Some("short summary".to_string()),
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
@@ -1364,6 +1599,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from repo".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
}]
|
||||
@@ -1415,6 +1651,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from nested".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&nested_skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
},
|
||||
@@ -1423,6 +1660,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from root".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&root_skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
},
|
||||
@@ -1460,6 +1698,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from cwd".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
}]
|
||||
@@ -1495,6 +1734,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from repo".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
}]
|
||||
@@ -1534,6 +1774,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from repo".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&repo_skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
},
|
||||
@@ -1542,6 +1783,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from user".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&user_skill_path),
|
||||
scope: SkillScope::User,
|
||||
},
|
||||
@@ -1604,6 +1846,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: first_description.to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: first_path,
|
||||
scope: SkillScope::Repo,
|
||||
},
|
||||
@@ -1612,6 +1855,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: second_description.to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: second_path,
|
||||
scope: SkillScope::Repo,
|
||||
},
|
||||
@@ -1681,6 +1925,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from repo".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::Repo,
|
||||
}]
|
||||
@@ -1737,6 +1982,7 @@ icon_large = "./assets/../logo.svg"
|
||||
description: "from system".to_string(),
|
||||
short_description: None,
|
||||
interface: None,
|
||||
dependencies: None,
|
||||
path: normalized(&skill_path),
|
||||
scope: SkillScope::System,
|
||||
}]
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
mod env_var_dependencies;
|
||||
pub mod injection;
|
||||
pub mod loader;
|
||||
pub mod manager;
|
||||
@@ -5,8 +6,11 @@ pub mod model;
|
||||
pub mod render;
|
||||
pub mod system;
|
||||
|
||||
pub(crate) use env_var_dependencies::collect_env_var_dependencies;
|
||||
pub(crate) use env_var_dependencies::resolve_skill_dependencies_for_turn;
|
||||
pub(crate) use injection::SkillInjections;
|
||||
pub(crate) use injection::build_skill_injections;
|
||||
pub(crate) use injection::collect_explicit_skill_mentions;
|
||||
pub use loader::load_skills;
|
||||
pub use manager::SkillsManager;
|
||||
pub use model::SkillError;
|
||||
|
||||
@@ -9,6 +9,7 @@ pub struct SkillMetadata {
|
||||
pub description: String,
|
||||
pub short_description: Option<String>,
|
||||
pub interface: Option<SkillInterface>,
|
||||
pub dependencies: Option<SkillDependencies>,
|
||||
pub path: PathBuf,
|
||||
pub scope: SkillScope,
|
||||
}
|
||||
@@ -23,6 +24,21 @@ pub struct SkillInterface {
|
||||
pub default_prompt: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SkillDependencies {
|
||||
pub tools: Vec<SkillToolDependency>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SkillToolDependency {
|
||||
pub r#type: String,
|
||||
pub value: String,
|
||||
pub description: Option<String>,
|
||||
pub transport: Option<String>,
|
||||
pub command: Option<String>,
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SkillError {
|
||||
pub path: PathBuf,
|
||||
|
||||
@@ -7,7 +7,9 @@ use crate::exec_policy::ExecPolicyManager;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
use crate::models_manager::manager::ModelsManager;
|
||||
use crate::skills::SkillsManager;
|
||||
use crate::state_db::StateDbHandle;
|
||||
use crate::tools::sandboxing::ApprovalStore;
|
||||
use crate::transport_manager::TransportManager;
|
||||
use crate::unified_exec::UnifiedExecProcessManager;
|
||||
use crate::user_notification::UserNotifier;
|
||||
use codex_otel::OtelManager;
|
||||
@@ -30,4 +32,6 @@ pub(crate) struct SessionServices {
|
||||
pub(crate) tool_approvals: Mutex<ApprovalStore>,
|
||||
pub(crate) skills_manager: Arc<SkillsManager>,
|
||||
pub(crate) agent_control: AgentControl,
|
||||
pub(crate) state_db: Option<StateDbHandle>,
|
||||
pub(crate) transport_manager: TransportManager,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
//! Session-wide mutable state.
|
||||
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use crate::codex::SessionConfiguration;
|
||||
use crate::context_manager::ContextManager;
|
||||
@@ -15,6 +17,13 @@ pub(crate) struct SessionState {
|
||||
pub(crate) history: ContextManager,
|
||||
pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
|
||||
pub(crate) server_reasoning_included: bool,
|
||||
pub(crate) dependency_env: HashMap<String, String>,
|
||||
pub(crate) mcp_dependency_prompted: HashSet<String>,
|
||||
/// Whether the session's initial context has been seeded into history.
|
||||
///
|
||||
/// TODO(owen): This is a temporary solution to avoid updating a thread's updated_at
|
||||
/// timestamp when resuming a session. Remove this once SQLite is in place.
|
||||
pub(crate) initial_context_seeded: bool,
|
||||
}
|
||||
|
||||
impl SessionState {
|
||||
@@ -26,6 +35,9 @@ impl SessionState {
|
||||
history,
|
||||
latest_rate_limits: None,
|
||||
server_reasoning_included: false,
|
||||
dependency_env: HashMap::new(),
|
||||
mcp_dependency_prompted: HashSet::new(),
|
||||
initial_context_seeded: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -92,6 +104,27 @@ impl SessionState {
|
||||
pub(crate) fn server_reasoning_included(&self) -> bool {
|
||||
self.server_reasoning_included
|
||||
}
|
||||
|
||||
pub(crate) fn record_mcp_dependency_prompted<I>(&mut self, names: I)
|
||||
where
|
||||
I: IntoIterator<Item = String>,
|
||||
{
|
||||
self.mcp_dependency_prompted.extend(names);
|
||||
}
|
||||
|
||||
pub(crate) fn mcp_dependency_prompted(&self) -> HashSet<String> {
|
||||
self.mcp_dependency_prompted.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn set_dependency_env(&mut self, values: HashMap<String, String>) {
|
||||
for (key, value) in values {
|
||||
self.dependency_env.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn dependency_env(&self) -> HashMap<String, String> {
|
||||
self.dependency_env.clone()
|
||||
}
|
||||
}
|
||||
|
||||
// Sometimes new snapshots don't include credits or plan information.
|
||||
|
||||
317
codex-rs/core/src/state_db.rs
Normal file
317
codex-rs/core/src/state_db.rs
Normal file
@@ -0,0 +1,317 @@
|
||||
use crate::config::Config;
|
||||
use crate::features::Feature;
|
||||
use crate::rollout::list::Cursor;
|
||||
use crate::rollout::list::ThreadSortKey;
|
||||
use crate::rollout::metadata;
|
||||
use chrono::DateTime;
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::Timelike;
|
||||
use chrono::Utc;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
pub use codex_state::LogEntry;
|
||||
use codex_state::STATE_DB_FILENAME;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
use serde_json::Value;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Core-facing handle to the optional SQLite-backed state runtime.
|
||||
pub type StateDbHandle = Arc<codex_state::StateRuntime>;
|
||||
|
||||
/// Initialize the state runtime when the `sqlite` feature flag is enabled. To only be used
|
||||
/// inside `core`. The initialization should not be done anywhere else.
|
||||
pub(crate) async fn init_if_enabled(
|
||||
config: &Config,
|
||||
otel: Option<&OtelManager>,
|
||||
) -> Option<StateDbHandle> {
|
||||
let state_path = config.codex_home.join(STATE_DB_FILENAME);
|
||||
if !config.features.enabled(Feature::Sqlite) {
|
||||
// We delete the file on best effort basis to maintain retro-compatibility in the future.
|
||||
let wal_path = state_path.with_extension("sqlite-wal");
|
||||
let shm_path = state_path.with_extension("sqlite-shm");
|
||||
for path in [state_path.as_path(), wal_path.as_path(), shm_path.as_path()] {
|
||||
tokio::fs::remove_file(path).await.ok();
|
||||
}
|
||||
return None;
|
||||
}
|
||||
let existed = tokio::fs::try_exists(&state_path).await.unwrap_or(false);
|
||||
let runtime = match codex_state::StateRuntime::init(
|
||||
config.codex_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
otel.cloned(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(runtime) => runtime,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to initialize state runtime at {}: {err}",
|
||||
config.codex_home.display()
|
||||
);
|
||||
if let Some(otel) = otel {
|
||||
otel.counter("codex.db.init", 1, &[("status", "init_error")]);
|
||||
}
|
||||
return None;
|
||||
}
|
||||
};
|
||||
if !existed {
|
||||
let runtime_for_backfill = Arc::clone(&runtime);
|
||||
let config_for_backfill = config.clone();
|
||||
let otel_for_backfill = otel.cloned();
|
||||
tokio::task::spawn(async move {
|
||||
metadata::backfill_sessions(
|
||||
runtime_for_backfill.as_ref(),
|
||||
&config_for_backfill,
|
||||
otel_for_backfill.as_ref(),
|
||||
)
|
||||
.await;
|
||||
});
|
||||
}
|
||||
Some(runtime)
|
||||
}
|
||||
|
||||
/// Get the DB if the feature is enabled and the DB exists.
|
||||
pub async fn get_state_db(config: &Config, otel: Option<&OtelManager>) -> Option<StateDbHandle> {
|
||||
let state_path = config.codex_home.join(STATE_DB_FILENAME);
|
||||
if !config.features.enabled(Feature::Sqlite)
|
||||
|| !tokio::fs::try_exists(&state_path).await.unwrap_or(false)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
codex_state::StateRuntime::init(
|
||||
config.codex_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
otel.cloned(),
|
||||
)
|
||||
.await
|
||||
.ok()
|
||||
}
|
||||
|
||||
/// Open the state runtime when the SQLite file exists, without feature gating.
|
||||
///
|
||||
/// This is used for parity checks during the SQLite migration phase.
|
||||
pub async fn open_if_present(codex_home: &Path, default_provider: &str) -> Option<StateDbHandle> {
|
||||
let db_path = codex_home.join(STATE_DB_FILENAME);
|
||||
if !tokio::fs::try_exists(&db_path).await.unwrap_or(false) {
|
||||
return None;
|
||||
}
|
||||
let runtime = codex_state::StateRuntime::init(
|
||||
codex_home.to_path_buf(),
|
||||
default_provider.to_string(),
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
Some(runtime)
|
||||
}
|
||||
|
||||
fn cursor_to_anchor(cursor: Option<&Cursor>) -> Option<codex_state::Anchor> {
|
||||
let cursor = cursor?;
|
||||
let value = serde_json::to_value(cursor).ok()?;
|
||||
let cursor_str = value.as_str()?;
|
||||
let (ts_str, id_str) = cursor_str.split_once('|')?;
|
||||
if id_str.contains('|') {
|
||||
return None;
|
||||
}
|
||||
let id = Uuid::parse_str(id_str).ok()?;
|
||||
let ts = if let Ok(naive) = NaiveDateTime::parse_from_str(ts_str, "%Y-%m-%dT%H-%M-%S") {
|
||||
DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
|
||||
} else if let Ok(dt) = DateTime::parse_from_rfc3339(ts_str) {
|
||||
dt.with_timezone(&Utc)
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
.with_nanosecond(0)?;
|
||||
Some(codex_state::Anchor { ts, id })
|
||||
}
|
||||
|
||||
/// List thread ids from SQLite for parity checks without rollout scanning.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn list_thread_ids_db(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
codex_home: &Path,
|
||||
page_size: usize,
|
||||
cursor: Option<&Cursor>,
|
||||
sort_key: ThreadSortKey,
|
||||
allowed_sources: &[SessionSource],
|
||||
model_providers: Option<&[String]>,
|
||||
archived_only: bool,
|
||||
stage: &str,
|
||||
) -> Option<Vec<ThreadId>> {
|
||||
let ctx = context?;
|
||||
if ctx.codex_home() != codex_home {
|
||||
warn!(
|
||||
"state db codex_home mismatch: expected {}, got {}",
|
||||
ctx.codex_home().display(),
|
||||
codex_home.display()
|
||||
);
|
||||
}
|
||||
|
||||
let anchor = cursor_to_anchor(cursor);
|
||||
let allowed_sources: Vec<String> = allowed_sources
|
||||
.iter()
|
||||
.map(|value| match serde_json::to_value(value) {
|
||||
Ok(Value::String(s)) => s,
|
||||
Ok(other) => other.to_string(),
|
||||
Err(_) => String::new(),
|
||||
})
|
||||
.collect();
|
||||
let model_providers = model_providers.map(<[String]>::to_vec);
|
||||
match ctx
|
||||
.list_thread_ids(
|
||||
page_size,
|
||||
anchor.as_ref(),
|
||||
match sort_key {
|
||||
ThreadSortKey::CreatedAt => codex_state::SortKey::CreatedAt,
|
||||
ThreadSortKey::UpdatedAt => codex_state::SortKey::UpdatedAt,
|
||||
},
|
||||
allowed_sources.as_slice(),
|
||||
model_providers.as_deref(),
|
||||
archived_only,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(ids) => Some(ids),
|
||||
Err(err) => {
|
||||
warn!("state db list_thread_ids failed during {stage}: {err}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Look up the rollout path for a thread id using SQLite.
|
||||
pub async fn find_rollout_path_by_id(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
thread_id: ThreadId,
|
||||
archived_only: Option<bool>,
|
||||
stage: &str,
|
||||
) -> Option<PathBuf> {
|
||||
let ctx = context?;
|
||||
ctx.find_rollout_path_by_id(thread_id, archived_only)
|
||||
.await
|
||||
.unwrap_or_else(|err| {
|
||||
warn!("state db find_rollout_path_by_id failed during {stage}: {err}");
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Reconcile rollout items into SQLite, falling back to scanning the rollout file.
|
||||
pub async fn reconcile_rollout(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
rollout_path: &Path,
|
||||
default_provider: &str,
|
||||
builder: Option<&ThreadMetadataBuilder>,
|
||||
items: &[RolloutItem],
|
||||
) {
|
||||
let Some(ctx) = context else {
|
||||
return;
|
||||
};
|
||||
if builder.is_some() || !items.is_empty() {
|
||||
apply_rollout_items(
|
||||
Some(ctx),
|
||||
rollout_path,
|
||||
default_provider,
|
||||
builder,
|
||||
items,
|
||||
"reconcile_rollout",
|
||||
)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
let outcome =
|
||||
match metadata::extract_metadata_from_rollout(rollout_path, default_provider, None).await {
|
||||
Ok(outcome) => outcome,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"state db reconcile_rollout extraction failed {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
if let Err(err) = ctx.upsert_thread(&outcome.metadata).await {
|
||||
warn!(
|
||||
"state db reconcile_rollout upsert failed {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply rollout items incrementally to SQLite.
|
||||
pub async fn apply_rollout_items(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
rollout_path: &Path,
|
||||
_default_provider: &str,
|
||||
builder: Option<&ThreadMetadataBuilder>,
|
||||
items: &[RolloutItem],
|
||||
stage: &str,
|
||||
) {
|
||||
let Some(ctx) = context else {
|
||||
return;
|
||||
};
|
||||
let mut builder = match builder {
|
||||
Some(builder) => builder.clone(),
|
||||
None => match metadata::builder_from_items(items, rollout_path) {
|
||||
Some(builder) => builder,
|
||||
None => {
|
||||
warn!(
|
||||
"state db apply_rollout_items missing builder during {stage}: {}",
|
||||
rollout_path.display()
|
||||
);
|
||||
record_discrepancy(stage, "missing_builder");
|
||||
return;
|
||||
}
|
||||
},
|
||||
};
|
||||
builder.rollout_path = rollout_path.to_path_buf();
|
||||
if let Err(err) = ctx.apply_rollout_items(&builder, items, None).await {
|
||||
warn!(
|
||||
"state db apply_rollout_items failed during {stage} for {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Record a state discrepancy metric with a stage and reason tag.
|
||||
pub fn record_discrepancy(stage: &str, reason: &str) {
|
||||
// We access the global metric because the call sites might not have access to the broader
|
||||
// OtelManager.
|
||||
if let Some(metric) = codex_otel::metrics::global() {
|
||||
let _ = metric.counter(
|
||||
"codex.db.discrepancy",
|
||||
1,
|
||||
&[("stage", stage), ("reason", reason)],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::rollout::list::parse_cursor;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn cursor_to_anchor_normalizes_timestamp_format() {
|
||||
let uuid = Uuid::new_v4();
|
||||
let ts_str = "2026-01-27T12-34-56";
|
||||
let token = format!("{ts_str}|{uuid}");
|
||||
let cursor = parse_cursor(token.as_str()).expect("cursor should parse");
|
||||
let anchor = cursor_to_anchor(Some(&cursor)).expect("anchor should parse");
|
||||
|
||||
let naive =
|
||||
NaiveDateTime::parse_from_str(ts_str, "%Y-%m-%dT%H-%M-%S").expect("ts should parse");
|
||||
let expected_ts = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
|
||||
.with_nanosecond(0)
|
||||
.expect("nanosecond");
|
||||
|
||||
assert_eq!(anchor.id, uuid);
|
||||
assert_eq!(anchor.ts, expected_ts);
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user