Mirror of https://github.com/openai/codex.git, synced 2026-02-02 06:57:03 +00:00

Compare commits: compaction...gt/blockin (113 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 761a37d5c1 | |
| | e04cfc3601 | |
| | 149f3aa27a | |
| | a046481ad9 | |
| | 10ea117ee1 | |
| | 8d142fd63d | |
| | 13e85b1549 | |
| | 31d1e49340 | |
| | d59685f6d4 | |
| | 748141bdda | |
| | 0fac2744f7 | |
| | 5f81e8e70b | |
| | 9008a0eff9 | |
| | aacd530a41 | |
| | dd6c1d3787 | |
| | 83317ed4bf | |
| | b7351f7f53 | |
| | 2457bb3c40 | |
| | 9b29a48a09 | |
| | e6d913af2d | |
| | 2d10aa6859 | |
| | ec4a2d07e4 | |
| | 40bf11bd52 | |
| | 1ce722ed2e | |
| | 5662eb8b75 | |
| | 23db79fae2 | |
| | dfafc546ab | |
| | 377ab0c77c | |
| | 0212f4010e | |
| | 079f4952e0 | |
| | eff11f792b | |
| | 887bec0dee | |
| | 09d25e91e9 | |
| | 6cee538380 | |
| | e85d019daa | |
| | 1ef5455eb6 | |
| | 25ad414680 | |
| | 129787493f | |
| | a0ccef9d5c | |
| | c0cad80668 | |
| | f8056e62d4 | |
| | a270a28a06 | |
| | 34f89b12d0 | |
| | e3ab0bd973 | |
| | d550fbf41a | |
| | 36f2fe8af9 | |
| | a9cf449a80 | |
| | 7151387474 | |
| | c6e1288ef1 | |
| | 11958221a3 | |
| | 81a17bb2c1 | |
| | b79bf69af6 | |
| | ca9d417633 | |
| | fbb3a30953 | |
| | 2d9ac8227a | |
| | 03aee7140f | |
| | 48f203120d | |
| | bdd8a7d58b | |
| | b7f26d74f0 | |
| | 3b1cddf001 | |
| | 798c4b3260 | |
| | 3e798c5a7d | |
| | e6c4f548ab | |
| | d6631fb5a9 | |
| | 89c5f3c4d4 | |
| | b654b7a9ae | |
| | 2945667dcc | |
| | d29129f352 | |
| | 4ba911d48c | |
| | 6a06726af2 | |
| | 714dc8d8bd | |
| | 780482da84 | |
| | 4d9ae3a298 | |
| | e70592f85a | |
| | b4b4763009 | |
| | be33de3f87 | |
| | 8cc338aecf | |
| | 335713f7e9 | |
| | b9cd089d1f | |
| | ecc66f4f52 | |
| | 9757e1418d | |
| | 52609c6f42 | |
| | ce3d764ae1 | |
| | 26590d7927 | |
| | 8497163363 | |
| | 83d7c44500 | |
| | 7b34cad1b1 | |
| | ff9fa56368 | |
| | fe920d7804 | |
| | 147e7118e0 | |
| | f7699e0487 | |
| | 66de985e4e | |
| | b7edeee8ca | |
| | 851617ff5a | |
| | b8156706e6 | |
| | 35e03a0716 | |
| | ad5f9e7370 | |
| | 96386755b6 | |
| | 74bd6d7178 | |
| | 2a624661ef | |
| | 231406bd04 | |
| | 3878c3dc7c | |
| | dabafe204a | |
| | 71b8d937ed | |
| | 996e09ca24 | |
| | 9f79365691 | |
| | fef3e36f67 | |
| | 3bb8e69dd3 | |
| | add648df82 | |
| | 1609f6aa81 | |
| | a90ab789c2 | |
| | 3f3916e595 | |
| | 19d8f71a98 | |
```diff
@@ -1,3 +1,4 @@
 # Without this, Bazel will consider BUILD.bazel files in
 # .git/sl/origbackups (which can be populated by Sapling SCM).
 .git
 codex-rs/target
```
7 .bazelrc

```diff
@@ -1,13 +1,19 @@
 common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
 common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
 # Dummy xcode config so we don't need to build xcode_locator in repo rule.
 common --xcode_version_config=//:disable_xcode
 
 common --disk_cache=~/.cache/bazel-disk-cache
 common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
 common --repository_cache=~/.cache/bazel-repo-cache
 common --remote_cache_compression
 startup --experimental_remote_repo_contents_cache
 
 common --experimental_platform_in_output_dir
 
 # Runfiles strategy rationale: codex-rs/utils/cargo-bin/README.md
 common --noenable_runfiles
 
 common --enable_platform_specific_config
 # TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
 common:linux --host_platform=//:local
@@ -43,4 +49,3 @@ common --jobs=30
 common:remote --extra_execution_platforms=//:rbe
 common:remote --remote_executor=grpcs://remote.buildbuddy.io
 common:remote --jobs=800
```
96 .github/workflows/rust-ci.yml (vendored)

```diff
@@ -59,7 +59,7 @@ jobs:
     working-directory: codex-rs
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.92
+      - uses: dtolnay/rust-toolchain@1.93
         with:
           components: rustfmt
       - name: cargo fmt
@@ -77,7 +77,7 @@ jobs:
     working-directory: codex-rs
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.92
+      - uses: dtolnay/rust-toolchain@1.93
       - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
         with:
           tool: cargo-shear
@@ -177,11 +177,31 @@
 
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.92
+      - name: Install UBSan runtime (musl)
+        if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          if command -v apt-get >/dev/null 2>&1; then
+            sudo apt-get update -y
+            sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+          fi
+      - uses: dtolnay/rust-toolchain@1.93
         with:
           targets: ${{ matrix.target }}
           components: clippy
 
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Use hermetic Cargo home (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
+          mkdir -p "${cargo_home}/bin"
+          echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
+          echo "${cargo_home}/bin" >> "$GITHUB_PATH"
+          : > "${cargo_home}/config.toml"
+
       - name: Compute lockfile hash
         id: lockhash
         working-directory: codex-rs
@@ -202,6 +222,10 @@
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
+            ${{ github.workspace }}/.cargo-home/bin/
+            ${{ github.workspace }}/.cargo-home/registry/index/
+            ${{ github.workspace }}/.cargo-home/registry/cache/
+            ${{ github.workspace }}/.cargo-home/git/db/
           key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
           restore-keys: |
             cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -244,6 +268,14 @@
             sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
             sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
 
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Disable sccache wrapper (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
+          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
       - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
         name: Prepare APT cache directories (musl)
         shell: bash
@@ -277,6 +309,58 @@
         shell: bash
         run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
 
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Configure rustc UBSan wrapper (musl host)
+        shell: bash
+        run: |
+          set -euo pipefail
+          ubsan=""
+          if command -v ldconfig >/dev/null 2>&1; then
+            ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+          fi
+          wrapper_root="${RUNNER_TEMP:-/tmp}"
+          wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+          cat > "${wrapper}" <<EOF
+          #!/usr/bin/env bash
+          set -euo pipefail
+          if [[ -n "${ubsan}" ]]; then
+            export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+          fi
+          exec "\$1" "\${@:2}"
+          EOF
+          chmod +x "${wrapper}"
+          echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Clear sanitizer flags (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          # Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+          echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+          # Override any runner-level Cargo config rustflags as well.
+          echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+          sanitize_flags() {
+            local input="$1"
+            input="${input//-fsanitize=undefined/}"
+            input="${input//-fno-sanitize-recover=undefined/}"
+            input="${input//-fno-sanitize-trap=undefined/}"
+            echo "$input"
+          }
+
+          cflags="$(sanitize_flags "${CFLAGS-}")"
+          cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+          echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+          echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
       - name: Install cargo-chef
         if: ${{ matrix.profile == 'release' }}
         uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
```
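A note on the heredoc in the wrapper step above: because the `EOF` delimiter is unquoted, `${ubsan}` is expanded once, when the step runs, while the escaped `\$1`, `\${@:2}`, and `\${LD_PRELOAD:+...}` survive verbatim into the generated file. As a minimal sketch, assuming `ldconfig` reported the runtime at `/usr/lib/x86_64-linux-gnu/libubsan.so.1` (an illustrative path, not taken from this diff), the wrapper on disk would read:

```bash
#!/usr/bin/env bash
# Sketch of the generated rustc wrapper. Cargo invokes RUSTC_WRAPPER as
#   <wrapper> /path/to/rustc <args...>
# so "$1" is the real rustc and "${@:2}" are its arguments.
set -euo pipefail
if [[ -n "/usr/lib/x86_64-linux-gnu/libubsan.so.1" ]]; then  # assumed path, for illustration
  # Preload the UBSan runtime so sanitized host tools can resolve its symbols.
  export LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libubsan.so.1${LD_PRELOAD:+:${LD_PRELOAD}}"
fi
exec "$1" "${@:2}"
```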
```diff
@@ -322,6 +406,10 @@
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
+            ${{ github.workspace }}/.cargo-home/bin/
+            ${{ github.workspace }}/.cargo-home/registry/index/
+            ${{ github.workspace }}/.cargo-home/registry/cache/
+            ${{ github.workspace }}/.cargo-home/git/db/
           key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
 
       - name: Save sccache cache (fallback)
@@ -422,7 +510,7 @@
       - name: Install DotSlash
         uses: facebook/install-dotslash@v2
 
-      - uses: dtolnay/rust-toolchain@1.92
+      - uses: dtolnay/rust-toolchain@1.93
         with:
           targets: ${{ matrix.target }}
```
79 .github/workflows/rust-release.yml (vendored)

```diff
@@ -21,7 +21,6 @@ jobs:
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.92
 
       - name: Validate tag matches Cargo.toml version
         shell: bash
         run: |
@@ -90,10 +89,30 @@
 
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.92
+      - name: Install UBSan runtime (musl)
+        if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          if command -v apt-get >/dev/null 2>&1; then
+            sudo apt-get update -y
+            sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+          fi
+      - uses: dtolnay/rust-toolchain@1.93
         with:
           targets: ${{ matrix.target }}
 
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Use hermetic Cargo home (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
+          mkdir -p "${cargo_home}/bin"
+          echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
+          echo "${cargo_home}/bin" >> "$GITHUB_PATH"
+          : > "${cargo_home}/config.toml"
+
       - uses: actions/cache@v5
         with:
           path: |
@@ -101,6 +120,10 @@
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
+            ${{ github.workspace }}/.cargo-home/bin/
+            ${{ github.workspace }}/.cargo-home/registry/index/
+            ${{ github.workspace }}/.cargo-home/registry/cache/
+            ${{ github.workspace }}/.cargo-home/git/db/
             ${{ github.workspace }}/codex-rs/target/
           key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}
 
@@ -116,6 +139,58 @@
           TARGET: ${{ matrix.target }}
         run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
 
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Configure rustc UBSan wrapper (musl host)
+        shell: bash
+        run: |
+          set -euo pipefail
+          ubsan=""
+          if command -v ldconfig >/dev/null 2>&1; then
+            ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+          fi
+          wrapper_root="${RUNNER_TEMP:-/tmp}"
+          wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+          cat > "${wrapper}" <<EOF
+          #!/usr/bin/env bash
+          set -euo pipefail
+          if [[ -n "${ubsan}" ]]; then
+            export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+          fi
+          exec "\$1" "\${@:2}"
+          EOF
+          chmod +x "${wrapper}"
+          echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Clear sanitizer flags (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          # Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+          echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+          # Override any runner-level Cargo config rustflags as well.
+          echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+          sanitize_flags() {
+            local input="$1"
+            input="${input//-fsanitize=undefined/}"
+            input="${input//-fno-sanitize-recover=undefined/}"
+            input="${input//-fno-sanitize-trap=undefined/}"
+            echo "$input"
+          }
+
+          cflags="$(sanitize_flags "${CFLAGS-}")"
+          cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+          echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+          echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
       - name: Cargo build
         shell: bash
         run: |
```
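The `sanitize_flags` helper in the step above is plain substring removal via bash parameter expansion; nothing tokenizes the flag string. A standalone sketch of its effect on a hypothetical runner-supplied value (the input flags are invented for illustration):

```bash
#!/usr/bin/env bash
# Same substitutions as the "Clear sanitizer flags (musl)" step, run on a sample value.
sanitize_flags() {
  local input="$1"
  input="${input//-fsanitize=undefined/}"            # drop the UBSan enable flag
  input="${input//-fno-sanitize-recover=undefined/}" # and its companion flags
  input="${input//-fno-sanitize-trap=undefined/}"
  echo "$input"
}

sanitize_flags "-O2 -fsanitize=undefined -fno-sanitize-recover=undefined -g"
# Prints "-O2   -g": the UBSan flags are gone; leftover spaces are harmless to compilers.
```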
2 .github/workflows/sdk.yml (vendored)

```diff
@@ -24,7 +24,7 @@ jobs:
           node-version: 22
           cache: pnpm
 
-      - uses: dtolnay/rust-toolchain@1.92
+      - uses: dtolnay/rust-toolchain@1.93
 
       - name: build codex
         run: cargo build --bin codex
```
72 .github/workflows/shell-tool-mcp.yml (vendored)

```diff
@@ -93,7 +93,17 @@
       - name: Checkout repository
         uses: actions/checkout@v6
 
-      - uses: dtolnay/rust-toolchain@1.92
+      - name: Install UBSan runtime (musl)
+        if: ${{ matrix.install_musl }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          if command -v apt-get >/dev/null 2>&1; then
+            sudo apt-get update -y
+            sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
+          fi
+
+      - uses: dtolnay/rust-toolchain@1.93
         with:
           targets: ${{ matrix.target }}
@@ -109,6 +119,58 @@
           TARGET: ${{ matrix.target }}
         run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
 
+      - if: ${{ matrix.install_musl }}
+        name: Configure rustc UBSan wrapper (musl host)
+        shell: bash
+        run: |
+          set -euo pipefail
+          ubsan=""
+          if command -v ldconfig >/dev/null 2>&1; then
+            ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
+          fi
+          wrapper_root="${RUNNER_TEMP:-/tmp}"
+          wrapper="${wrapper_root}/rustc-ubsan-wrapper"
+          cat > "${wrapper}" <<EOF
+          #!/usr/bin/env bash
+          set -euo pipefail
+          if [[ -n "${ubsan}" ]]; then
+            export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
+          fi
+          exec "\$1" "\${@:2}"
+          EOF
+          chmod +x "${wrapper}"
+          echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
+          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
+
+      - if: ${{ matrix.install_musl }}
+        name: Clear sanitizer flags (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          # Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
+          echo "RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
+          # Override any runner-level Cargo config rustflags as well.
+          echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
+
+          sanitize_flags() {
+            local input="$1"
+            input="${input//-fsanitize=undefined/}"
+            input="${input//-fno-sanitize-recover=undefined/}"
+            input="${input//-fno-sanitize-trap=undefined/}"
+            echo "$input"
+          }
+
+          cflags="$(sanitize_flags "${CFLAGS-}")"
+          cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
+          echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
+          echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
+
       - name: Build exec server binaries
         run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
@@ -282,7 +344,6 @@
       - name: Setup pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 10.8.1
           run_install: false
 
       - name: Setup Node.js
@@ -375,12 +436,6 @@
       id-token: write
       contents: read
     steps:
-      - name: Setup pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10.8.1
-          run_install: false
-
       - name: Setup Node.js
         uses: actions/setup-node@v6
         with:
@@ -388,6 +443,7 @@
           registry-url: https://registry.npmjs.org
           scope: "@openai"
 
+      # Trusted publishing requires npm CLI version 11.5.1 or later.
       - name: Update npm
         run: npm install -g npm@latest
```
```diff
@@ -1,3 +1,7 @@
+load("@apple_support//xcode:xcode_config.bzl", "xcode_config")
+
+xcode_config(name = "disable_xcode")
+
 # We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
 # TODO(zbarsky): Upstream a better libc constraint into rules_rust.
 # We only enable this on linux though for sanity, and because it breaks remote execution.
```
14 MODULE.bazel

```diff
@@ -27,6 +27,8 @@ register_toolchains(
     "@toolchains_llvm_bootstrapped//toolchain:all",
 )
 
+# Needed to disable xcode...
+bazel_dep(name = "apple_support", version = "2.1.0")
 bazel_dep(name = "rules_cc", version = "0.2.16")
 bazel_dep(name = "rules_platform", version = "0.1.0")
 bazel_dep(name = "rules_rust", version = "0.68.1")
@@ -53,7 +55,7 @@
 rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
 rust.toolchain(
     edition = "2024",
     extra_target_triples = RUST_TRIPLES,
-    versions = ["1.90.0"],
+    versions = ["1.93.0"],
 )
 use_repo(rust, "rust_toolchains")
@@ -67,6 +69,11 @@ crate.from_cargo(
     cargo_toml = "//codex-rs:Cargo.toml",
     platform_triples = RUST_TRIPLES,
 )
+crate.annotation(
+    crate = "nucleo-matcher",
+    strip_prefix = "matcher",
+    version = "0.3.1",
+)
 
 bazel_dep(name = "openssl", version = "3.5.4.bcr.0")
@@ -85,6 +92,11 @@ crate.annotation(
 
 inject_repo(crate, "openssl")
 
+crate.annotation(
+    crate = "runfiles",
+    workspace_cargo_toml = "rust/runfiles/Cargo.toml",
+)
+
 # Fix readme inclusions
 crate.annotation(
     crate = "windows-link",
```
119 MODULE.bazel.lock (generated)

File diff suppressed because one or more lines are too long
70 PNPM.md

````diff
@@ -1,70 +0,0 @@
-# Migration to pnpm
-
-This project has been migrated from npm to pnpm to improve dependency management and developer experience.
-
-## Why pnpm?
-
-- **Faster installation**: pnpm is significantly faster than npm and yarn
-- **Disk space savings**: pnpm uses a content-addressable store to avoid duplication
-- **Phantom dependency prevention**: pnpm creates a strict node_modules structure
-- **Native workspaces support**: simplified monorepo management
-
-## How to use pnpm
-
-### Installation
-
-```bash
-# Global installation of pnpm
-npm install -g pnpm@10.28.2
-
-# Or with corepack (available with Node.js 22+)
-corepack enable
-corepack prepare pnpm@10.8.1 --activate
-```
-
-### Common commands
-
-| npm command | pnpm equivalent |
-| --------------- | ---------------- |
-| `npm install` | `pnpm install` |
-| `npm run build` | `pnpm run build` |
-| `npm test` | `pnpm test` |
-| `npm run lint` | `pnpm run lint` |
-
-### Workspace-specific commands
-
-| Action | Command |
-| ------------------------------------------ | ---------------------------------------- |
-| Run a command in a specific package | `pnpm --filter @openai/codex run build` |
-| Install a dependency in a specific package | `pnpm --filter @openai/codex add lodash` |
-| Run a command in all packages | `pnpm -r run test` |
-
-## Monorepo structure
-
-```
-codex/
-├── pnpm-workspace.yaml # Workspace configuration
-├── .npmrc # pnpm configuration
-├── package.json # Root dependencies and scripts
-├── codex-cli/ # Main package
-│   └── package.json # codex-cli specific dependencies
-└── docs/ # Documentation (future package)
-```
-
-## Configuration files
-
-- **pnpm-workspace.yaml**: Defines the packages included in the monorepo
-- **.npmrc**: Configures pnpm behavior
-- **Root package.json**: Contains shared scripts and dependencies
-
-## CI/CD
-
-CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.28.2 or higher.
-
-## Known issues
-
-If you encounter issues with pnpm, try the following solutions:
-
-1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
-2. Make sure you're using pnpm 10.28.2 or higher
-3. Verify that Node.js 22 or higher is installed
````
@@ -14,4 +14,4 @@ target_app = "cli"
|
||||
[[announcements]]
|
||||
content = "This is a test announcement"
|
||||
version_regex = "^0\\.0\\.0$"
|
||||
to_date = "2026-01-10"
|
||||
to_date = "2026-05-10"
|
||||
|
||||
18 codex-cli/package-lock.json (generated)

```diff
@@ -1,18 +0,0 @@
-{
-  "name": "@openai/codex",
-  "version": "0.0.0-dev",
-  "lockfileVersion": 3,
-  "packages": {
-    "": {
-      "name": "@openai/codex",
-      "version": "0.0.0-dev",
-      "license": "Apache-2.0",
-      "bin": {
-        "codex": "bin/codex.js"
-      },
-      "engines": {
-        "node": ">=16"
-      }
-    }
-  }
-}
```
```diff
@@ -17,5 +17,6 @@
     "type": "git",
     "url": "git+https://github.com/openai/codex.git",
     "directory": "codex-cli"
-  }
+  },
+  "packageManager": "pnpm@10.28.2+sha512.41872f037ad22f7348e3b1debbaf7e867cfd448f2726d9cf74c08f19507c31d2c8e7a11525b983febc2df640b5438dee6023ebb1f84ed43cc2d654d2bc326264"
 }
```
596 codex-rs/Cargo.lock (generated)

```diff
@@ -361,7 +361,7 @@ dependencies = [
  "objc2-foundation",
  "parking_lot",
  "percent-encoding",
- "windows-sys 0.52.0",
+ "windows-sys 0.60.2",
  "wl-clipboard-rs",
  "x11rb",
 ]
@@ -602,6 +602,15 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "atoi"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "atomic-waker"
 version = "1.1.2"
@@ -739,6 +748,9 @@ name = "bitflags"
 version = "2.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
+dependencies = [
+ "serde_core",
+]
 
 [[package]]
 name = "block-buffer"
@@ -1068,6 +1080,7 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "app_test_support",
+ "async-trait",
  "axum",
  "base64",
  "chrono",
@@ -1075,8 +1088,10 @@ dependencies = [
  "codex-arg0",
  "codex-backend-client",
  "codex-chatgpt",
+ "codex-cloud-requirements",
  "codex-common",
  "codex-core",
+ "codex-execpolicy",
  "codex-feedback",
  "codex-file-search",
  "codex-login",
@@ -1207,10 +1222,12 @@ dependencies = [
  "codex-core",
  "codex-git",
  "codex-utils-cargo-bin",
+ "pretty_assertions",
  "serde",
  "serde_json",
  "tempfile",
  "tokio",
+ "urlencoding",
 ]
@@ -1276,6 +1293,26 @@ dependencies = [
  "zstd",
 ]
 
+[[package]]
+name = "codex-cloud-requirements"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "base64",
+ "codex-backend-client",
+ "codex-core",
+ "codex-otel",
+ "codex-protocol",
+ "pretty_assertions",
+ "serde_json",
+ "tempfile",
+ "thiserror 2.0.17",
+ "tokio",
+ "toml 0.9.5",
+ "tracing",
+]
+
 [[package]]
 name = "codex-cloud-tasks"
 version = "0.0.0"
@@ -1363,8 +1400,10 @@ dependencies = [
  "codex-otel",
  "codex-protocol",
  "codex-rmcp-client",
+ "codex-state",
  "codex-utils-absolute-path",
  "codex-utils-cargo-bin",
+ "codex-utils-home-dir",
  "codex-utils-pty",
  "codex-utils-readiness",
  "codex-utils-string",
@@ -1372,7 +1411,6 @@ dependencies = [
  "core-foundation 0.9.4",
  "core_test_support",
  "ctor 0.6.3",
- "dirs",
  "dunce",
  "encoding_rs",
  "env-flags",
@@ -1388,6 +1426,7 @@ dependencies = [
  "libc",
  "maplit",
  "mcp-types",
+ "multimap",
  "once_cell",
  "openssl-sys",
  "os_info",
@@ -1451,6 +1490,7 @@ dependencies = [
  "assert_cmd",
  "clap",
  "codex-arg0",
+ "codex-cloud-requirements",
  "codex-common",
  "codex-core",
  "codex-protocol",
@@ -1557,11 +1597,13 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "clap",
+ "crossbeam-channel",
  "ignore",
- "nucleo-matcher",
+ "nucleo",
  "pretty_assertions",
  "serde",
  "serde_json",
  "tempfile",
  "tokio",
 ]
@@ -1756,6 +1798,7 @@ name = "codex-protocol"
 version = "0.0.0"
 dependencies = [
  "anyhow",
+ "codex-execpolicy",
  "codex-git",
  "codex-utils-absolute-path",
  "codex-utils-image",
@@ -1803,7 +1846,7 @@ dependencies = [
  "codex-keyring-store",
  "codex-protocol",
  "codex-utils-cargo-bin",
- "dirs",
+ "codex-utils-home-dir",
  "futures",
  "keyring",
  "mcp-types",
@@ -1825,6 +1868,27 @@ dependencies = [
  "which",
 ]
 
+[[package]]
+name = "codex-state"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "chrono",
+ "clap",
+ "codex-otel",
+ "codex-protocol",
+ "dirs",
+ "owo-colors",
+ "pretty_assertions",
+ "serde",
+ "serde_json",
+ "sqlx",
+ "tokio",
+ "tracing",
+ "tracing-subscriber",
+ "uuid",
+]
+
 [[package]]
 name = "codex-stdio-to-uds"
 version = "0.0.0"
@@ -1851,7 +1915,9 @@ dependencies = [
  "codex-app-server-protocol",
  "codex-arg0",
  "codex-backend-client",
+ "codex-chatgpt",
  "codex-cli",
+ "codex-cloud-requirements",
  "codex-common",
  "codex-core",
  "codex-feedback",
@@ -1859,6 +1925,7 @@ dependencies = [
  "codex-login",
  "codex-otel",
  "codex-protocol",
+ "codex-state",
  "codex-utils-absolute-path",
  "codex-utils-cargo-bin",
  "codex-utils-pty",
@@ -1940,10 +2007,19 @@ name = "codex-utils-cargo-bin"
 version = "0.0.0"
 dependencies = [
  "assert_cmd",
  "path-absolutize",
  "runfiles",
  "thiserror 2.0.17",
 ]
 
+[[package]]
+name = "codex-utils-home-dir"
+version = "0.0.0"
+dependencies = [
+ "dirs",
+ "pretty_assertions",
+ "tempfile",
+]
+
 [[package]]
 name = "codex-utils-image"
 version = "0.0.0"
@@ -2108,6 +2184,12 @@ dependencies = [
  "serde_core",
 ]
 
+[[package]]
+name = "const-oid"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
+
 [[package]]
 name = "const_format"
 version = "0.2.35"
@@ -2195,6 +2277,7 @@ dependencies = [
  "tokio-tungstenite",
  "walkdir",
  "wiremock",
+ "zstd",
 ]
@@ -2206,6 +2289,21 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "crc"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
+dependencies = [
+ "crc-catalog",
+]
+
+[[package]]
+name = "crc-catalog"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
+
 [[package]]
 name = "crc32fast"
 version = "1.5.0"
@@ -2249,6 +2347,15 @@ dependencies = [
  "crossbeam-utils",
 ]
 
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
+dependencies = [
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-utils"
 version = "0.8.21"
@@ -2527,6 +2634,7 @@ version = "0.7.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
 dependencies = [
+ "const-oid",
  "pem-rfc7468",
  "zeroize",
 ]
@@ -2625,6 +2733,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
 dependencies = [
  "block-buffer",
+ "const-oid",
  "crypto-common",
  "subtle",
 ]
@@ -2772,6 +2881,9 @@ name = "either"
 version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+dependencies = [
+ "serde",
+]
 
 [[package]]
 name = "ena"
@@ -2905,7 +3017,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
 dependencies = [
  "libc",
- "windows-sys 0.52.0",
+ "windows-sys 0.60.2",
 ]
@@ -2914,6 +3026,17 @@ version = "3.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
 
+[[package]]
+name = "etcetera"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
+dependencies = [
+ "cfg-if",
+ "home",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "event-listener"
 version = "5.4.0"
@@ -3005,7 +3128,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
 dependencies = [
  "cfg-if",
  "rustix 1.0.8",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
@@ -3082,6 +3205,17 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "flume"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "spin",
+]
+
 [[package]]
 name = "flume"
 version = "0.12.0"
@@ -3230,6 +3364,17 @@ dependencies = [
  "futures-util",
 ]
 
+[[package]]
+name = "futures-intrusive"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
+dependencies = [
+ "futures-core",
+ "lock_api",
+ "parking_lot",
+]
+
 [[package]]
 name = "futures-io"
 version = "0.3.31"
@@ -3310,7 +3455,7 @@ dependencies = [
  "libc",
  "log",
  "rustversion",
- "windows-link 0.1.3",
+ "windows-link 0.2.0",
  "windows-result 0.3.4",
 ]
@@ -3462,6 +3607,15 @@ dependencies = [
  "foldhash 0.2.0",
 ]
 
+[[package]]
+name = "hashlink"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
+dependencies = [
+ "hashbrown 0.15.4",
+]
+
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -3665,7 +3819,7 @@ dependencies = [
  "tokio",
  "tokio-rustls",
  "tower-service",
- "webpki-roots",
+ "webpki-roots 1.0.2",
 ]
@@ -4092,7 +4246,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
 dependencies = [
  "hermit-abi",
  "libc",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
@@ -4297,6 +4451,9 @@ name = "lazy_static"
 version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+dependencies = [
+ "spin",
+]
 
 [[package]]
 name = "libc"
@@ -4323,6 +4480,12 @@ dependencies = [
  "windows-link 0.2.0",
 ]
 
+[[package]]
+name = "libm"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981"
+
 [[package]]
 name = "libredox"
 version = "0.1.6"
@@ -4331,6 +4494,18 @@ checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0"
 dependencies = [
  "bitflags 2.10.0",
  "libc",
+ "redox_syscall",
 ]
 
+[[package]]
+name = "libsqlite3-sys"
+version = "0.30.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
+dependencies = [
+ "cc",
+ "pkg-config",
+ "vcpkg",
+]
+
 [[package]]
@@ -4515,6 +4690,16 @@ dependencies = [
  "wiremock",
 ]
 
+[[package]]
+name = "md-5"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
+dependencies = [
+ "cfg-if",
+ "digest",
+]
+
 [[package]]
 name = "md5"
 version = "0.8.0"
@@ -4758,11 +4943,20 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "nucleo"
+version = "0.5.0"
+source = "git+https://github.com/helix-editor/nucleo.git?rev=4253de9faabb4e5c6d81d946a5e35a90f87347ee#4253de9faabb4e5c6d81d946a5e35a90f87347ee"
+dependencies = [
+ "nucleo-matcher",
+ "parking_lot",
+ "rayon",
+]
+
 [[package]]
 name = "nucleo-matcher"
 version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
+source = "git+https://github.com/helix-editor/nucleo.git?rev=4253de9faabb4e5c6d81d946a5e35a90f87347ee#4253de9faabb4e5c6d81d946a5e35a90f87347ee"
 dependencies = [
  "memchr",
  "unicode-segmentation",
@@ -4792,6 +4986,22 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "num-bigint-dig"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
+dependencies = [
+ "lazy_static",
+ "libm",
+ "num-integer",
+ "num-iter",
+ "num-traits",
+ "rand 0.8.5",
+ "smallvec",
+ "zeroize",
+]
+
 [[package]]
 name = "num-complex"
 version = "0.4.6"
@@ -4845,6 +5055,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
 dependencies = [
  "autocfg",
+ "libm",
 ]
@@ -5327,6 +5538,27 @@ dependencies = [
  "futures-io",
 ]
 
+[[package]]
+name = "pkcs1"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
+dependencies = [
+ "der",
+ "pkcs8",
+ "spki",
+]
+
+[[package]]
+name = "pkcs8"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
+dependencies = [
+ "der",
+ "spki",
+]
+
 [[package]]
 name = "pkg-config"
 version = "0.3.32"
@@ -5670,7 +5902,7 @@ dependencies = [
  "once_cell",
  "socket2 0.6.1",
  "tracing",
- "windows-sys 0.52.0",
+ "windows-sys 0.60.2",
 ]
@@ -5940,7 +6172,7 @@ checksum = "b28ee9e1e5d39264414b71f5c33e7fbb66b382c3fac456fe0daad39cf5509933"
 dependencies = [
  "ahash",
  "const_format",
- "flume",
+ "flume 0.12.0",
  "hex",
  "ipnet",
  "itertools 0.14.0",
@@ -5998,7 +6230,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "def3d5d06d3ca3a2d2e4376cf93de0555cd9c7960f085bf77be9562f5c9ace8f"
 dependencies = [
  "ahash",
- "flume",
+ "flume 0.12.0",
  "itertools 0.14.0",
  "moka",
  "parking_lot",
@@ -6152,6 +6384,26 @@ dependencies = [
  "ratatui",
 ]
 
+[[package]]
+name = "rayon"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
+dependencies = [
+ "crossbeam-deque",
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "redox_syscall"
 version = "0.5.15"
@@ -6290,7 +6542,7 @@ dependencies = [
  "wasm-bindgen-futures",
  "wasm-streams",
  "web-sys",
- "webpki-roots",
+ "webpki-roots 1.0.2",
 ]
@@ -6361,6 +6613,31 @@ dependencies = [
  "syn 2.0.104",
 ]
 
+[[package]]
+name = "rsa"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d"
+dependencies = [
+ "const-oid",
+ "digest",
+ "num-bigint-dig",
+ "num-integer",
+ "num-traits",
+ "pkcs1",
+ "pkcs8",
+ "rand_core 0.6.4",
+ "signature",
+ "spki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "runfiles"
+version = "0.1.0"
+source = "git+https://github.com/dzbarsky/rules_rust?rev=b56cbaa8465e74127f1ea216f813cd377295ad81#b56cbaa8465e74127f1ea216f813cd377295ad81"
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.25"
@@ -6392,7 +6669,7 @@ dependencies = [
  "errno",
  "libc",
  "linux-raw-sys 0.4.15",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
@@ -6405,7 +6682,7 @@ dependencies = [
  "errno",
  "libc",
  "linux-raw-sys 0.9.4",
- "windows-sys 0.52.0",
+ "windows-sys 0.60.2",
 ]
@@ -7110,6 +7387,16 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "signature"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
+dependencies = [
+ "digest",
+ "rand_core 0.6.4",
+]
+
 [[package]]
 name = "simd-adler32"
 version = "0.3.7"
@@ -7194,6 +7481,218 @@ dependencies = [
  "lock_api",
 ]
 
+[[package]]
+name = "spki"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
+[[package]]
+name = "sqlx"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
+dependencies = [
+ "sqlx-core",
+ "sqlx-macros",
+ "sqlx-mysql",
+ "sqlx-postgres",
+ "sqlx-sqlite",
+]
+
+[[package]]
+name = "sqlx-core"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
+dependencies = [
+ "base64",
+ "bytes",
+ "chrono",
+ "crc",
+ "crossbeam-queue",
+ "either",
+ "event-listener",
+ "futures-core",
+ "futures-intrusive",
+ "futures-io",
+ "futures-util",
+ "hashbrown 0.15.4",
+ "hashlink",
+ "indexmap 2.12.0",
+ "log",
+ "memchr",
+ "once_cell",
+ "percent-encoding",
+ "rustls",
+ "serde",
+ "serde_json",
+ "sha2",
+ "smallvec",
+ "thiserror 2.0.17",
+ "time",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+ "url",
+ "uuid",
+ "webpki-roots 0.26.11",
+]
+
+[[package]]
+name = "sqlx-macros"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "sqlx-core",
+ "sqlx-macros-core",
+ "syn 2.0.104",
+]
+
+[[package]]
+name = "sqlx-macros-core"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
+dependencies = [
+ "dotenvy",
+ "either",
+ "heck",
+ "hex",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "serde_json",
+ "sha2",
+ "sqlx-core",
+ "sqlx-mysql",
+ "sqlx-postgres",
+ "sqlx-sqlite",
+ "syn 2.0.104",
+ "tokio",
+ "url",
+]
+
+[[package]]
+name = "sqlx-mysql"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
+dependencies = [
+ "atoi",
+ "base64",
+ "bitflags 2.10.0",
+ "byteorder",
+ "bytes",
+ "chrono",
+ "crc",
+ "digest",
+ "dotenvy",
+ "either",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-util",
+ "generic-array",
+ "hex",
+ "hkdf",
+ "hmac",
+ "itoa",
+ "log",
+ "md-5",
+ "memchr",
+ "once_cell",
+ "percent-encoding",
+ "rand 0.8.5",
+ "rsa",
+ "serde",
+ "sha1",
+ "sha2",
+ "smallvec",
+ "sqlx-core",
+ "stringprep",
+ "thiserror 2.0.17",
+ "time",
+ "tracing",
+ "uuid",
+ "whoami",
+]
+
+[[package]]
+name = "sqlx-postgres"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
+dependencies = [
+ "atoi",
+ "base64",
+ "bitflags 2.10.0",
+ "byteorder",
+ "chrono",
+ "crc",
+ "dotenvy",
+ "etcetera",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "hex",
+ "hkdf",
+ "hmac",
+ "home",
+ "itoa",
+ "log",
+ "md-5",
+ "memchr",
+ "once_cell",
+ "rand 0.8.5",
+ "serde",
+ "serde_json",
+ "sha2",
+ "smallvec",
+ "sqlx-core",
+ "stringprep",
+ "thiserror 2.0.17",
+ "time",
+ "tracing",
+ "uuid",
+ "whoami",
+]
+
+[[package]]
+name = "sqlx-sqlite"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
+dependencies = [
+ "atoi",
+ "chrono",
+ "flume 0.11.1",
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-intrusive",
+ "futures-util",
+ "libsqlite3-sys",
+ "log",
+ "percent-encoding",
+ "serde",
+ "serde_urlencoded",
+ "sqlx-core",
+ "thiserror 2.0.17",
+ "time",
+ "tracing",
+ "url",
+ "uuid",
+]
+
 [[package]]
 name = "sse-stream"
 version = "0.2.1"
@@ -7327,6 +7826,17 @@ dependencies = [
  "precomputed-hash",
 ]
 
+[[package]]
+name = "stringprep"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+ "unicode-properties",
+]
+
 [[package]]
 name = "strsim"
 version = "0.10.0"
@@ -7492,7 +8002,7 @@ dependencies = [
  "getrandom 0.3.3",
  "once_cell",
  "rustix 1.0.8",
- "windows-sys 0.52.0",
+ "windows-sys 0.61.1",
 ]
@@ -8287,6 +8797,12 @@ version = "2.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
 
+[[package]]
+name = "unicode-bidi"
+version = "0.3.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
+
 [[package]]
 name = "unicode-ident"
 version = "1.0.18"
@@ -8299,6 +8815,21 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
 
+[[package]]
+name = "unicode-normalization"
+version = "0.1.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-properties"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
+
 [[package]]
 name = "unicode-segmentation"
 version = "1.12.0"
@@ -8506,6 +9037,12 @@ dependencies = [
  "wit-bindgen-rt",
 ]
 
+[[package]]
+name = "wasite"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
+
 [[package]]
 name = "wasm-bindgen"
 version = "0.2.100"
@@ -8705,6 +9242,15 @@ dependencies = [
  "rustls-pki-types",
 ]
 
+[[package]]
+name = "webpki-roots"
+version = "0.26.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
+dependencies = [
+ "webpki-roots 1.0.2",
+]
+
 [[package]]
 name = "webpki-roots"
 version = "1.0.2"
@@ -8731,6 +9277,16 @@ dependencies = [
  "winsafe",
 ]
 
+[[package]]
+name = "whoami"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
+dependencies = [
+ "libredox",
+ "wasite",
+]
+
 [[package]]
 name = "widestring"
 version = "1.2.1"
@@ -8774,7 +9330,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
```
```diff
@@ -11,6 +11,7 @@ members = [
     "arg0",
     "feedback",
     "codex-backend-openapi-models",
+    "cloud-requirements",
     "cloud-tasks",
     "cloud-tasks-client",
     "cli",
@@ -42,11 +43,13 @@ members = [
     "utils/cache",
     "utils/image",
     "utils/json-to-toml",
+    "utils/home-dir",
     "utils/pty",
     "utils/readiness",
     "utils/string",
     "codex-client",
     "codex-api",
+    "state",
 ]
 resolver = "2"
@@ -70,6 +73,7 @@ codex-apply-patch = { path = "apply-patch" }
 codex-arg0 = { path = "arg0" }
 codex-async-utils = { path = "async-utils" }
 codex-backend-client = { path = "backend-client" }
+codex-cloud-requirements = { path = "cloud-requirements" }
 codex-chatgpt = { path = "chatgpt" }
 codex-cli = { path = "cli"}
 codex-client = { path = "codex-client" }
@@ -91,6 +95,7 @@ codex-process-hardening = { path = "process-hardening" }
 codex-protocol = { path = "protocol" }
 codex-responses-api-proxy = { path = "responses-api-proxy" }
 codex-rmcp-client = { path = "rmcp-client" }
+codex-state = { path = "state" }
 codex-stdio-to-uds = { path = "stdio-to-uds" }
 codex-tui = { path = "tui" }
 codex-utils-absolute-path = { path = "utils/absolute-path" }
@@ -98,6 +103,7 @@ codex-utils-cache = { path = "utils/cache" }
 codex-utils-cargo-bin = { path = "utils/cargo-bin" }
 codex-utils-image = { path = "utils/image" }
 codex-utils-json-to-toml = { path = "utils/json-to-toml" }
+codex-utils-home-dir = { path = "utils/home-dir" }
 codex-utils-pty = { path = "utils/pty" }
 codex-utils-readiness = { path = "utils/readiness" }
 codex-utils-string = { path = "utils/string" }
@@ -126,6 +132,7 @@ clap = "4"
 clap_complete = "4"
 color-eyre = "0.6.3"
 crossterm = "0.28.1"
+crossbeam-channel = "0.5.15"
 ctor = "0.6.3"
 derive_more = "2"
 diffy = "0.4.2"
@@ -159,7 +166,7 @@ maplit = "1.0.2"
 mime_guess = "2.0.5"
 multimap = "0.10.0"
 notify = "8.2.0"
-nucleo-matcher = "0.3.1"
+nucleo = { git = "https://github.com/helix-editor/nucleo.git", rev = "4253de9faabb4e5c6d81d946a5e35a90f87347ee" }
 once_cell = "1.20.2"
 openssl-sys = "*"
 opentelemetry = "0.31.0"
@@ -183,6 +190,7 @@ regex = "1.12.2"
 regex-lite = "0.1.8"
 reqwest = "0.12"
 rmcp = { version = "0.12.0", default-features = false }
+runfiles = { git = "https://github.com/dzbarsky/rules_rust", rev = "b56cbaa8465e74127f1ea216f813cd377295ad81" }
 schemars = "0.8.22"
 seccompiler = "0.5.0"
 sentry = "0.46.0"
@@ -198,6 +206,7 @@ semver = "1.0"
 shlex = "1.3.0"
 similar = "2.7.0"
 socket2 = "0.6.1"
+sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
 starlark = "0.13.0"
 strum = "0.27.2"
 strum_macros = "0.27.2"
```
@@ -23,11 +23,22 @@ impl GitSha {
    }
}

/// Authentication mode for OpenAI-backed providers.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    /// OpenAI API key provided by the caller and stored by Codex.
    ApiKey,
    ChatGPT,
    /// ChatGPT OAuth managed by Codex (tokens persisted and refreshed by Codex).
    Chatgpt,
    /// [UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.
    ///
    /// ChatGPT auth tokens are supplied by an external host app and are only
    /// stored in memory. Token refresh must be handled by the external host app.
    #[serde(rename = "chatgptAuthTokens")]
    #[ts(rename = "chatgptAuthTokens")]
    #[strum(serialize = "chatgptAuthTokens")]
    ChatgptAuthTokens,
}

/// Generates an `enum ClientRequest` where each variant is a request that the
@@ -117,6 +128,10 @@ client_request_definitions! {
        params: v2::ThreadArchiveParams,
        response: v2::ThreadArchiveResponse,
    },
    ThreadSetName => "thread/name/set" {
        params: v2::ThreadSetNameParams,
        response: v2::ThreadSetNameResponse,
    },
    ThreadUnarchive => "thread/unarchive" {
        params: v2::ThreadUnarchiveParams,
        response: v2::ThreadUnarchiveResponse,
@@ -534,6 +549,11 @@ server_request_definitions! {
        response: v2::DynamicToolCallResponse,
    },

    ChatgptAuthTokensRefresh => "account/chatgptAuthTokens/refresh" {
        params: v2::ChatgptAuthTokensRefreshParams,
        response: v2::ChatgptAuthTokensRefreshResponse,
    },

    /// DEPRECATED APIs below
    /// Request to approve a patch.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
@@ -578,6 +598,7 @@ server_notification_definitions! {
    /// NEW NOTIFICATIONS
    Error => "error" (v2::ErrorNotification),
    ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
    ThreadNameUpdated => "thread/name/updated" (v2::ThreadNameUpdatedNotification),
    ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
    TurnStarted => "turn/started" (v2::TurnStartedNotification),
    TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
@@ -588,6 +609,8 @@ server_notification_definitions! {
    /// This event is internal-only. Used by Codex Cloud.
    RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
    AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
    /// EXPERIMENTAL - proposed plan streaming deltas for plan items.
    PlanDelta => "item/plan/delta" (v2::PlanDeltaNotification),
    CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
    TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
    FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
@@ -598,7 +621,7 @@ server_notification_definitions! {
    ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
    ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
    ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
    ContextCompactionStarted => "thread/compaction/started" (v2::ContextCompactionStartedNotification),
    /// Deprecated: Use `ContextCompaction` item type instead.
    ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
    DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
    ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
@@ -753,6 +776,29 @@ mod tests {
        Ok(())
    }

    #[test]
    fn serialize_chatgpt_auth_tokens_refresh_request() -> Result<()> {
        let request = ServerRequest::ChatgptAuthTokensRefresh {
            request_id: RequestId::Integer(8),
            params: v2::ChatgptAuthTokensRefreshParams {
                reason: v2::ChatgptAuthTokensRefreshReason::Unauthorized,
                previous_account_id: Some("org-123".to_string()),
            },
        };
        assert_eq!(
            json!({
                "method": "account/chatgptAuthTokens/refresh",
                "id": 8,
                "params": {
                    "reason": "unauthorized",
                    "previousAccountId": "org-123"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account_rate_limits() -> Result<()> {
        let request = ClientRequest::GetAccountRateLimits {
@@ -842,10 +888,34 @@ mod tests {
        Ok(())
    }

    #[test]
    fn serialize_account_login_chatgpt_auth_tokens() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(5),
            params: v2::LoginAccountParams::ChatgptAuthTokens {
                access_token: "access-token".to_string(),
                id_token: "id-token".to_string(),
            },
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 5,
                "params": {
                    "type": "chatgptAuthTokens",
                    "accessToken": "access-token",
                    "idToken": "id-token"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account() -> Result<()> {
        let request = ClientRequest::GetAccount {
            request_id: RequestId::Integer(5),
            request_id: RequestId::Integer(6),
            params: v2::GetAccountParams {
                refresh_token: false,
            },
@@ -853,7 +923,7 @@ mod tests {
        assert_eq!(
            json!({
                "method": "account/read",
                "id": 5,
                "id": 6,
                "params": {
                    "refreshToken": false
                }

@@ -6,6 +6,7 @@ use crate::protocol::v2::UserInput;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::ItemCompletedEvent;
use codex_protocol::protocol::ThreadRolledBackEvent;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::UserMessageEvent;
@@ -55,9 +56,8 @@ impl ThreadHistoryBuilder {
            EventMsg::AgentReasoningRawContent(payload) => {
                self.handle_agent_reasoning_raw_content(payload)
            }
            EventMsg::ItemCompleted(payload) => self.handle_item_completed(payload),
            EventMsg::TokenCount(_) => {}
            EventMsg::ContextCompactionStarted(_) => {}
            EventMsg::ContextCompactionEnded(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
@@ -127,6 +127,19 @@ impl ThreadHistoryBuilder {
        });
    }

    fn handle_item_completed(&mut self, payload: &ItemCompletedEvent) {
        if let codex_protocol::items::TurnItem::Plan(plan) = &payload.item {
            if plan.text.is_empty() {
                return;
            }
            let id = self.next_item_id();
            self.ensure_turn().items.push(ThreadItem::Plan {
                id,
                text: plan.text.clone(),
            });
        }
    }

    fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
        let Some(turn) = self.current_turn.as_mut() else {
            return;

@@ -27,10 +27,12 @@ use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SkillDependencies as CoreSkillDependencies;
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
use codex_protocol::protocol::SkillScope as CoreSkillScope;
use codex_protocol::protocol::SkillToolDependency as CoreSkillToolDependency;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
@@ -84,6 +86,10 @@ macro_rules! v2_enum_from_core {
pub enum CodexErrorInfo {
    ContextWindowExceeded,
    UsageLimitExceeded,
    ModelCap {
        model: String,
        reset_after_seconds: Option<u64>,
    },
    HttpConnectionFailed {
        #[serde(rename = "httpStatusCode")]
        #[ts(rename = "httpStatusCode")]
@@ -120,6 +126,13 @@ impl From<CoreCodexErrorInfo> for CodexErrorInfo {
        match value {
            CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
            CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
            CoreCodexErrorInfo::ModelCap {
                model,
                reset_after_seconds,
            } => CodexErrorInfo::ModelCap {
                model,
                reset_after_seconds,
            },
            CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
                CodexErrorInfo::HttpConnectionFailed { http_status_code }
            }
@@ -484,6 +497,14 @@ pub struct ConfigReadResponse {
pub struct ConfigRequirements {
    pub allowed_approval_policies: Option<Vec<AskForApproval>>,
    pub allowed_sandbox_modes: Option<Vec<SandboxMode>>,
    pub enforce_residency: Option<ResidencyRequirement>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export_to = "v2/")]
pub enum ResidencyRequirement {
    Us,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -822,6 +843,24 @@ pub enum LoginAccountParams {
    #[serde(rename = "chatgpt")]
    #[ts(rename = "chatgpt")]
    Chatgpt,
    /// [UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.
    /// The access token must contain the same scopes that Codex-managed ChatGPT auth tokens have.
    #[serde(rename = "chatgptAuthTokens")]
    #[ts(rename = "chatgptAuthTokens")]
    ChatgptAuthTokens {
        /// ID token (JWT) supplied by the client.
        ///
        /// This token is used for identity and account metadata (email, plan type,
        /// workspace id).
        #[serde(rename = "idToken")]
        #[ts(rename = "idToken")]
        id_token: String,
        /// Access token (JWT) supplied by the client.
        /// This token is used for backend API requests.
        #[serde(rename = "accessToken")]
        #[ts(rename = "accessToken")]
        access_token: String,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -841,6 +880,9 @@ pub enum LoginAccountResponse {
        /// URL the client should open in a browser to initiate the OAuth flow.
        auth_url: String,
    },
    #[serde(rename = "chatgptAuthTokens", rename_all = "camelCase")]
    #[ts(rename = "chatgptAuthTokens", rename_all = "camelCase")]
    ChatgptAuthTokens {},
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -871,6 +913,37 @@ pub struct CancelLoginAccountResponse {
#[ts(export_to = "v2/")]
pub struct LogoutAccountResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum ChatgptAuthTokensRefreshReason {
    /// Codex attempted a backend request and received `401 Unauthorized`.
    Unauthorized,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ChatgptAuthTokensRefreshParams {
    pub reason: ChatgptAuthTokensRefreshReason,
    /// Workspace/account identifier that Codex was previously using.
    ///
    /// Clients that manage multiple accounts/workspaces can use this as a hint
    /// to refresh the token for the correct workspace.
    ///
    /// This may be `null` when the prior ID token did not include a workspace
    /// identifier (`chatgpt_account_id`) or when the token could not be parsed.
    pub previous_account_id: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ChatgptAuthTokensRefreshResponse {
    pub id_token: String,
    pub access_token: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -882,6 +955,11 @@ pub struct GetAccountRateLimitsResponse {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountParams {
    /// When `true`, requests a proactive token refresh before returning.
    ///
    /// In managed auth mode this triggers the normal refresh-token flow. In
    /// external auth mode this flag is ignored. Clients should refresh tokens
    /// themselves and call `account/login/start` with `chatgptAuthTokens`.
    #[serde(default)]
    pub refresh_token: bool,
}
@@ -1001,6 +1079,8 @@ pub struct AppInfo {
    pub name: String,
    pub description: Option<String>,
    pub logo_url: Option<String>,
    pub logo_url_dark: Option<String>,
    pub distribution_channel: Option<String>,
    pub install_url: Option<String>,
    #[serde(default)]
    pub is_accessible: bool,
@@ -1223,6 +1303,14 @@ pub struct ThreadArchiveParams {
#[ts(export_to = "v2/")]
pub struct ThreadArchiveResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadSetNameParams {
    pub thread_id: String,
    pub name: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1230,6 +1318,11 @@ pub struct ThreadUnarchiveParams {
    pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadSetNameResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1395,11 +1488,14 @@ pub struct SkillMetadata {
    pub description: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    /// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
    /// Legacy short_description from SKILL.md. Prefer SKILL.json interface.short_description.
    pub short_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub interface: Option<SkillInterface>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub dependencies: Option<SkillDependencies>,
    pub path: PathBuf,
    pub scope: SkillScope,
    pub enabled: bool,
@@ -1423,6 +1519,35 @@ pub struct SkillInterface {
    pub default_prompt: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillToolDependency {
    #[serde(rename = "type")]
    #[ts(rename = "type")]
    pub r#type: String,
    pub value: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub transport: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub command: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1462,6 +1587,7 @@ impl From<CoreSkillMetadata> for SkillMetadata {
            description: value.description,
            short_description: value.short_description,
            interface: value.interface.map(SkillInterface::from),
            dependencies: value.dependencies.map(SkillDependencies::from),
            path: value.path,
            scope: value.scope.into(),
            enabled: true,
@@ -1482,6 +1608,31 @@ impl From<CoreSkillInterface> for SkillInterface {
    }
}

impl From<CoreSkillDependencies> for SkillDependencies {
    fn from(value: CoreSkillDependencies) -> Self {
        Self {
            tools: value
                .tools
                .into_iter()
                .map(SkillToolDependency::from)
                .collect(),
        }
    }
}

impl From<CoreSkillToolDependency> for SkillToolDependency {
    fn from(value: CoreSkillToolDependency) -> Self {
        Self {
            r#type: value.r#type,
            value: value.value,
            description: value.description,
            transport: value.transport,
            command: value.command,
            url: value.url,
        }
    }
}

impl From<CoreSkillScope> for SkillScope {
    fn from(value: CoreSkillScope) -> Self {
        match value {
@@ -1837,6 +1988,10 @@ pub enum UserInput {
        name: String,
        path: PathBuf,
    },
    Mention {
        name: String,
        path: String,
    },
}

impl UserInput {
@@ -1852,6 +2007,7 @@ impl UserInput {
            UserInput::Image { url } => CoreUserInput::Image { image_url: url },
            UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
            UserInput::Skill { name, path } => CoreUserInput::Skill { name, path },
            UserInput::Mention { name, path } => CoreUserInput::Mention { name, path },
        }
    }
}
@@ -1869,6 +2025,7 @@ impl From<CoreUserInput> for UserInput {
            CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
            CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
            CoreUserInput::Skill { name, path } => UserInput::Skill { name, path },
            CoreUserInput::Mention { name, path } => UserInput::Mention { name, path },
            _ => unreachable!("unsupported user input variant"),
        }
    }
@@ -1887,6 +2044,11 @@ pub enum ThreadItem {
    AgentMessage { id: String, text: String },
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    /// EXPERIMENTAL - proposed plan item content. The completed plan item is
    /// authoritative and may not match the concatenation of `PlanDelta` text.
    Plan { id: String, text: String },
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    Reasoning {
        id: String,
        #[serde(default)]
@@ -1991,6 +2153,10 @@ impl From<CoreTurnItem> for ThreadItem {
                    .collect::<String>();
                ThreadItem::AgentMessage { id: agent.id, text }
            }
            CoreTurnItem::Plan(plan) => ThreadItem::Plan {
                id: plan.id,
                text: plan.text,
            },
            CoreTurnItem::Reasoning(reasoning) => ThreadItem::Reasoning {
                id: reasoning.id,
                summary: reasoning.summary_text,
@@ -2149,6 +2315,16 @@ pub struct ThreadStartedNotification {
    pub thread: Thread,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadNameUpdatedNotification {
    pub thread_id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub thread_name: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2269,6 +2445,18 @@ pub struct AgentMessageDeltaNotification {
    pub delta: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
/// EXPERIMENTAL - proposed plan streaming deltas for plan items. Clients should
/// not assume concatenated deltas match the completed plan item content.
pub struct PlanDeltaNotification {
    pub thread_id: String,
    pub turn_id: String,
    pub item_id: String,
    pub delta: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2365,6 +2553,7 @@ pub struct WindowsWorldWritableWarningNotification {
    pub failed_scan: bool,
}

/// Deprecated: Use `ContextCompaction` item type instead.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2373,14 +2562,6 @@ pub struct ContextCompactedNotification {
    pub turn_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ContextCompactionStartedNotification {
    pub thread_id: String,
    pub turn_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2464,13 +2645,15 @@ pub struct ToolRequestUserInputOption {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
/// EXPERIMENTAL. Represents one request_user_input question and its optional options.
/// EXPERIMENTAL. Represents one request_user_input question and its required options.
pub struct ToolRequestUserInputQuestion {
    pub id: String,
    pub header: String,
    pub question: String,
    #[serde(default)]
    pub is_other: bool,
    #[serde(default)]
    pub is_secret: bool,
    pub options: Option<Vec<ToolRequestUserInputOption>>,
}

@@ -2631,7 +2814,6 @@ mod tests {
    use super::*;
    use codex_protocol::items::AgentMessageContent;
    use codex_protocol::items::AgentMessageItem;
    use codex_protocol::items::ContextCompactionItem;
    use codex_protocol::items::ReasoningItem;
    use codex_protocol::items::TurnItem;
    use codex_protocol::items::UserMessageItem;
@@ -2680,6 +2862,10 @@ mod tests {
                name: "skill-creator".to_string(),
                path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
            },
            CoreUserInput::Mention {
                name: "Demo App".to_string(),
                path: "app://demo-app".to_string(),
            },
        ],
    });

@@ -2702,6 +2888,10 @@ mod tests {
                name: "skill-creator".to_string(),
                path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
            },
            UserInput::Mention {
                name: "Demo App".to_string(),
                path: "app://demo-app".to_string(),
            },
        ],
    }
);
@@ -2756,17 +2946,6 @@ mod tests {
            query: "docs".to_string(),
        }
    );

    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
        id: "compact-1".to_string(),
    });

    assert_eq!(
        ThreadItem::from(compaction_item),
        ThreadItem::ContextCompaction {
            id: "compact-1".to_string(),
        }
    );
}

#[test]

@@ -17,7 +17,9 @@ workspace = true

[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
@@ -56,6 +58,7 @@ axum = { workspace = true, default-features = false, features = [
    "tokio",
] }
base64 = { workspace = true }
codex-execpolicy = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }

@@ -13,6 +13,7 @@
- [Events](#events)
- [Approvals](#approvals)
- [Skills](#skills)
- [Apps](#apps)
- [Auth endpoints](#auth-endpoints)

## Protocol
@@ -81,6 +82,7 @@ Example (from OpenAI's official VSCode extension):
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `thread/name/set` — set or update a thread’s user-facing name; returns `{}` on success. Thread names are not required to be unique; name lookups resolve to the most recently updated thread (see the example after this list).
- `thread/unarchive` — move an archived rollout file back into the sessions directory; returns the restored `thread` on success.
- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
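
A minimal `thread/name/set` sketch. The request and response shapes follow `ThreadSetNameParams` and `ThreadSetNameResponse` from this change; the ids, the name, and whether a `thread/name/updated` notification fires on a manual set are illustrative assumptions:

```json
{ "method": "thread/name/set", "id": 35, "params": {
  "threadId": "thr_123",
  "name": "Refactor auth module"
} }
{ "id": 35, "result": {} }
{ "method": "thread/name/updated", "params": {
  "threadId": "thr_123",
  "threadName": "Refactor auth module"
} }
```
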
@@ -101,7 +103,7 @@ Example (from OpenAI's official VSCode extension):
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).
- `configRequirements/read` — fetch the loaded requirements allow-lists and `enforceResidency` from `requirements.toml` and/or MDM (or `null` if none are configured).
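
A hedged sketch of a `configRequirements/read` exchange. The response fields mirror the `ConfigRequirements` struct (camelCase) and `ResidencyRequirement` serializes as lowercase `"us"`; the request params and the specific approval-policy and sandbox-mode values shown are illustrative assumptions, not confirmed wire values:

```json
{ "method": "configRequirements/read", "id": 40, "params": {} }
{ "id": 40, "result": {
  "allowedApprovalPolicies": ["on-request", "never"],
  "allowedSandboxModes": ["workspace-write"],
  "enforceResidency": "us"
} }
```
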
### Example: Start or resume a thread

@@ -296,6 +298,26 @@ Invoke a skill explicitly by including `$<skill-name>` in the text input and add
} } }
```

### Example: Start a turn (invoke an app)

Invoke an app by including `$<app-slug>` in the text input and adding a `mention` input item with the app id in `app://<connector-id>` form.

```json
{ "method": "turn/start", "id": 34, "params": {
  "threadId": "thr_123",
  "input": [
    { "type": "text", "text": "$demo-app Summarize the latest updates." },
    { "type": "mention", "name": "Demo App", "path": "app://demo-app" }
  ]
} }
{ "id": 34, "result": { "turn": {
  "id": "turn_458",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

### Example: Interrupt an active turn

You can cancel a running Turn with `turn/interrupt`.
@@ -422,6 +444,7 @@ Today both notifications carry an empty `items` array even when item events were

- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
- `plan` — `{id, text}` emitted for plan-mode turns; plan text can stream via `item/plan/delta` (experimental).
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
@@ -431,7 +454,8 @@ Today both notifications carry an empty `items` array even when item events were
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
- `compacted` — `{threadId, turnId}` when Codex compacts the conversation history. This can happen automatically.
- `contextCompaction` — `{id}` emitted when Codex compacts the conversation history. This can happen automatically.
- `compacted` — `{threadId, turnId}` when Codex compacts the conversation history. This can happen automatically. **Deprecated:** Use `contextCompaction` instead.

All items emit two shared lifecycle events:

@@ -444,6 +468,10 @@ There are additional item-specific events:

- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.

#### plan

- `item/plan/delta` — streams proposed plan content for plan items (experimental); concatenate `delta` values for the same plan `itemId`. These deltas correspond to the `<proposed_plan>` block. A sketch of one delta notification follows.
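
One such notification might look like this sketch (the field names follow `PlanDeltaNotification`; the ids and delta text are illustrative):

```json
{ "method": "item/plan/delta", "params": {
  "threadId": "thr_123",
  "turnId": "turn_458",
  "itemId": "item_9",
  "delta": "1. Audit the current login flow\n"
} }
```

Remember that the completed `plan` item is authoritative; clients should not assume the concatenated deltas equal its final `text`.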

#### reasoning

- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
@@ -582,14 +610,72 @@ To enable or disable a skill by path:
}
```

## Apps

Use `app/list` to fetch available apps (connectors). Each entry includes metadata like the app `id`, display `name`, `installUrl`, and whether it is currently accessible.

```json
{ "method": "app/list", "id": 50, "params": {
  "cursor": null,
  "limit": 50
} }
{ "id": 50, "result": {
  "data": [
    {
      "id": "demo-app",
      "name": "Demo App",
      "description": "Example connector for documentation.",
      "logoUrl": "https://example.com/demo-app.png",
      "logoUrlDark": null,
      "distributionChannel": null,
      "installUrl": "https://chatgpt.com/apps/demo-app/demo-app",
      "isAccessible": true
    }
  ],
  "nextCursor": null
} }
```

Invoke an app by inserting `$<app-slug>` in the text input. The slug is derived from the app name and lowercased with non-alphanumeric characters replaced by `-` (for example, "Demo App" becomes `$demo-app`). Add a `mention` input item (recommended) so the server uses the exact `app://<connector-id>` path rather than guessing by name.

Example:

```
$demo-app Pull the latest updates from the team.
```

```json
{
  "method": "turn/start",
  "id": 51,
  "params": {
    "threadId": "thread-1",
    "input": [
      {
        "type": "text",
        "text": "$demo-app Pull the latest updates from the team."
      },
      { "type": "mention", "name": "Demo App", "path": "app://demo-app" }
    ]
  }
}
```

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

### Authentication modes

Codex supports these authentication modes. The current mode is surfaced in `account/updated` (`authMode`) and can be inferred from `account/read`.

- **API key (`apiKey`)**: Caller supplies an OpenAI API key via `account/login/start` with `type: "apiKey"`. The API key is saved and used for API requests.
- **ChatGPT managed (`chatgpt`)** (recommended): Codex owns the ChatGPT OAuth flow and refresh tokens. Start via `account/login/start` with `type: "chatgpt"`; Codex persists tokens to disk and refreshes them automatically.

### API Overview

- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/start` — begin login (`apiKey`, `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
- `account/logout` — sign out; triggers `account/updated`.
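
This change also adds an unstable, OpenAI-internal `chatgptAuthTokens` mode in which an external host app supplies the tokens. The login request below is copied from this change's serialization tests (the token strings are placeholders). The `account/chatgptAuthTokens/refresh` exchange that follows, where the server asks the host app for fresh tokens after a `401 Unauthorized`, likewise mirrors the tested request shape; the client's reply follows `ChatgptAuthTokensRefreshResponse`, with placeholder token values:

```json
{ "method": "account/login/start", "id": 5, "params": {
  "type": "chatgptAuthTokens",
  "accessToken": "access-token",
  "idToken": "id-token"
} }
{ "method": "account/chatgptAuthTokens/refresh", "id": 8, "params": {
  "reason": "unauthorized",
  "previousAccountId": "org-123"
} }
{ "id": 8, "result": {
  "idToken": "id-token-2",
  "accessToken": "access-token-2"
} }
```
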
@@ -24,7 +24,6 @@ use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::ContextCompactedNotification;
use codex_app_server_protocol::ContextCompactionStartedNotification;
use codex_app_server_protocol::DeprecationNoticeNotification;
use codex_app_server_protocol::DynamicToolCallParams;
use codex_app_server_protocol::ErrorNotification;
@@ -45,6 +44,7 @@ use codex_app_server_protocol::McpToolCallResult;
use codex_app_server_protocol::McpToolCallStatus;
use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind as V2PatchChangeKind;
use codex_app_server_protocol::PlanDeltaNotification;
use codex_app_server_protocol::RawResponseItemCompletedNotification;
use codex_app_server_protocol::ReasoningSummaryPartAddedNotification;
use codex_app_server_protocol::ReasoningSummaryTextDeltaNotification;
@@ -53,6 +53,7 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::TerminalInteractionNotification;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadNameUpdatedNotification;
use codex_app_server_protocol::ThreadRollbackResponse;
use codex_app_server_protocol::ThreadTokenUsage;
use codex_app_server_protocol::ThreadTokenUsageUpdatedNotification;
@@ -118,6 +119,7 @@ pub(crate) async fn apply_bespoke_event_handling(
        msg,
    } = event;
    match msg {
        EventMsg::TurnStarted(_) => {}
        EventMsg::TurnComplete(_ev) => {
            handle_turn_complete(
                conversation_id,
@@ -280,6 +282,7 @@ pub(crate) async fn apply_bespoke_event_handling(
                            header: question.header,
                            question: question.question,
                            is_other: question.is_other,
                            is_secret: question.is_secret,
                            options: question.options.map(|options| {
                                options
                                    .into_iter()
@@ -592,28 +595,30 @@ pub(crate) async fn apply_bespoke_event_handling(
                .await;
        }
        EventMsg::AgentMessageContentDelta(event) => {
            let codex_protocol::protocol::AgentMessageContentDeltaEvent { item_id, delta, .. } =
                event;
            let notification = AgentMessageDeltaNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),
                item_id,
                delta,
            };
            outgoing
                .send_server_notification(ServerNotification::AgentMessageDelta(notification))
                .await;
        }
        EventMsg::PlanDelta(event) => {
            let notification = PlanDeltaNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),
                item_id: event.item_id,
                delta: event.delta,
            };
            outgoing
                .send_server_notification(ServerNotification::AgentMessageDelta(notification))
                .send_server_notification(ServerNotification::PlanDelta(notification))
                .await;
        }
        EventMsg::ContextCompactionStarted(..) => {
            let notification = ContextCompactionStartedNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),
            };
            outgoing
                .send_server_notification(ServerNotification::ContextCompactionStarted(
                    notification,
                ))
                .await;
        }
        EventMsg::ContextCompactionEnded(..) => {
        EventMsg::ContextCompacted(..) => {
            let notification = ContextCompactedNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),
@@ -1108,6 +1113,17 @@ pub(crate) async fn apply_bespoke_event_handling(
                outgoing.send_response(request_id, response).await;
            }
        }
        EventMsg::ThreadNameUpdated(thread_name_event) => {
            if let ApiVersion::V2 = api_version {
                let notification = ThreadNameUpdatedNotification {
                    thread_id: thread_name_event.thread_id.to_string(),
                    thread_name: thread_name_event.thread_name,
                };
                outgoing
                    .send_server_notification(ServerNotification::ThreadNameUpdated(notification))
                    .await;
            }
        }
        EventMsg::TurnDiff(turn_diff_event) => {
            handle_turn_diff(
                conversation_id,
@@ -1159,6 +1175,7 @@ async fn handle_turn_plan_update(
    api_version: ApiVersion,
    outgoing: &OutgoingMessageSender,
) {
    // `update_plan` is a todo/checklist tool; it is not related to plan-mode updates
    if let ApiVersion::V2 = api_version {
        let notification = TurnPlanUpdatedNotification {
            thread_id: conversation_id.to_string(),

@@ -13,7 +13,6 @@ use codex_app_server_protocol::AccountLoginCompletedNotification;
use codex_app_server_protocol::AccountUpdatedNotification;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::AppInfo as ApiAppInfo;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::ArchiveConversationParams;
@@ -58,6 +57,7 @@ use codex_app_server_protocol::ListConversationsResponse;
use codex_app_server_protocol::ListMcpServerStatusParams;
use codex_app_server_protocol::ListMcpServerStatusResponse;
use codex_app_server_protocol::LoginAccountParams;
use codex_app_server_protocol::LoginAccountResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
@@ -110,6 +110,8 @@ use codex_app_server_protocol::ThreadReadResponse;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadSetNameParams;
use codex_app_server_protocol::ThreadSetNameResponse;
use codex_app_server_protocol::ThreadSortKey;
use codex_app_server_protocol::ThreadSourceKind;
use codex_app_server_protocol::ThreadStartParams;
@@ -131,6 +133,7 @@ use codex_app_server_protocol::build_turns_from_event_msgs;
use codex_backend_client::Client as BackendClient;
use codex_chatgpt::connectors;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::CodexThread;
use codex_core::Cursor as RolloutCursor;
use codex_core::InitialHistory;
@@ -142,12 +145,14 @@ use codex_core::ThreadManager;
use codex_core::ThreadSortKey as CoreThreadSortKey;
use codex_core::auth::CLIENT_ID;
use codex_core::auth::login_with_api_key;
use codex_core::auth::login_with_chatgpt_auth_tokens;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigService;
use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::default_client::get_codex_user_agent;
use codex_core::error::CodexErr;
use codex_core::exec::ExecParams;
@@ -169,6 +174,8 @@ use codex_core::read_head_for_summary;
use codex_core::read_session_meta_line;
use codex_core::rollout_date_parts;
use codex_core::sandboxing::SandboxPermissions;
use codex_core::state_db::get_state_db;
use codex_core::token_data::parse_id_token;
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
use codex_feedback::CodexFeedback;
use codex_login::ServerOptions as LoginServerOptions;
@@ -257,6 +264,7 @@ pub(crate) struct CodexMessageProcessor {
    codex_linux_sandbox_exe: Option<PathBuf>,
    config: Arc<Config>,
    cli_overrides: Vec<(String, TomlValue)>,
    cloud_requirements: CloudRequirementsLoader,
    conversation_listeners: HashMap<Uuid, oneshot::Sender<()>>,
    listener_thread_ids_by_subscription: HashMap<Uuid, ThreadId>,
    active_login: Arc<Mutex<Option<ActiveLogin>>>,
@@ -275,6 +283,17 @@ pub(crate) enum ApiVersion {
    V2,
}

pub(crate) struct CodexMessageProcessorArgs {
    pub(crate) auth_manager: Arc<AuthManager>,
    pub(crate) thread_manager: Arc<ThreadManager>,
    pub(crate) outgoing: Arc<OutgoingMessageSender>,
    pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) config: Arc<Config>,
    pub(crate) cli_overrides: Vec<(String, TomlValue)>,
    pub(crate) cloud_requirements: CloudRequirementsLoader,
    pub(crate) feedback: CodexFeedback,
}

impl CodexMessageProcessor {
    async fn load_thread(
        &self,
@@ -299,15 +318,17 @@ impl CodexMessageProcessor {

        Ok((thread_id, thread))
    }
    pub fn new(
        auth_manager: Arc<AuthManager>,
        thread_manager: Arc<ThreadManager>,
        outgoing: Arc<OutgoingMessageSender>,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
        cli_overrides: Vec<(String, TomlValue)>,
        feedback: CodexFeedback,
    ) -> Self {
    pub fn new(args: CodexMessageProcessorArgs) -> Self {
        let CodexMessageProcessorArgs {
            auth_manager,
            thread_manager,
            outgoing,
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            cloud_requirements,
            feedback,
        } = args;
        Self {
            auth_manager,
            thread_manager,
@@ -315,6 +336,7 @@ impl CodexMessageProcessor {
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            cloud_requirements,
            conversation_listeners: HashMap::new(),
            listener_thread_ids_by_subscription: HashMap::new(),
            active_login: Arc::new(Mutex::new(None)),
@@ -327,7 +349,10 @@ impl CodexMessageProcessor {
    }

    async fn load_latest_config(&self) -> Result<Config, JSONRPCErrorError> {
        Config::load_with_cli_overrides(self.cli_overrides.clone())
        codex_core::config::ConfigBuilder::default()
            .cli_overrides(self.cli_overrides.clone())
            .cloud_requirements(self.cloud_requirements.clone())
            .build()
            .await
            .map_err(|err| JSONRPCErrorError {
                code: INTERNAL_ERROR_CODE,
@@ -413,6 +438,9 @@ impl CodexMessageProcessor {
            ClientRequest::ThreadArchive { request_id, params } => {
                self.thread_archive(request_id, params).await;
            }
            ClientRequest::ThreadSetName { request_id, params } => {
                self.thread_set_name(request_id, params).await;
            }
            ClientRequest::ThreadUnarchive { request_id, params } => {
                self.thread_unarchive(request_id, params).await;
            }
@@ -607,6 +635,22 @@ impl CodexMessageProcessor {
            LoginAccountParams::Chatgpt => {
                self.login_chatgpt_v2(request_id).await;
            }
            LoginAccountParams::ChatgptAuthTokens {
                id_token,
                access_token,
            } => {
                self.login_chatgpt_auth_tokens(request_id, id_token, access_token)
                    .await;
            }
        }
    }

    fn external_auth_active_error(&self) -> JSONRPCErrorError {
        JSONRPCErrorError {
            code: INVALID_REQUEST_ERROR_CODE,
            message: "External auth is active. Use account/login/start (chatgptAuthTokens) to update it or account/logout to clear it."
                .to_string(),
            data: None,
        }
    }

@@ -614,6 +658,10 @@ impl CodexMessageProcessor {
        &mut self,
        params: &LoginApiKeyParams,
    ) -> std::result::Result<(), JSONRPCErrorError> {
        if self.auth_manager.is_external_auth_active() {
            return Err(self.external_auth_active_error());
        }

        if matches!(
            self.config.forced_login_method,
            Some(ForcedLoginMethod::Chatgpt)
@@ -658,7 +706,11 @@ impl CodexMessageProcessor {
            .await;

        let payload = AuthStatusChangeNotification {
            auth_method: self.auth_manager.auth_cached().map(|auth| auth.mode),
            auth_method: self
                .auth_manager
                .auth_cached()
                .as_ref()
                .map(CodexAuth::api_auth_mode),
        };
        self.outgoing
            .send_server_notification(ServerNotification::AuthStatusChange(payload))
@@ -688,7 +740,11 @@ impl CodexMessageProcessor {
            .await;

        let payload_v2 = AccountUpdatedNotification {
            auth_mode: self.auth_manager.auth_cached().map(|auth| auth.mode),
            auth_mode: self
                .auth_manager
                .auth_cached()
                .as_ref()
                .map(CodexAuth::api_auth_mode),
        };
        self.outgoing
            .send_server_notification(ServerNotification::AccountUpdated(payload_v2))
@@ -706,6 +762,10 @@ impl CodexMessageProcessor {
    ) -> std::result::Result<LoginServerOptions, JSONRPCErrorError> {
        let config = self.config.as_ref();

        if self.auth_manager.is_external_auth_active() {
            return Err(self.external_auth_active_error());
        }

        if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
            return Err(JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
@@ -780,7 +840,10 @@ impl CodexMessageProcessor {
        auth_manager.reload();

        // Notify clients with the actual current auth mode.
        let current_auth_method = auth_manager.auth_cached().map(|a| a.mode);
        let current_auth_method = auth_manager
            .auth_cached()
            .as_ref()
            .map(CodexAuth::api_auth_mode);
        let payload = AuthStatusChangeNotification {
            auth_method: current_auth_method,
        };
@@ -870,7 +933,10 @@ impl CodexMessageProcessor {
        auth_manager.reload();

        // Notify clients with the actual current auth mode.
        let current_auth_method = auth_manager.auth_cached().map(|a| a.mode);
        let current_auth_method = auth_manager
            .auth_cached()
            .as_ref()
            .map(CodexAuth::api_auth_mode);
        let payload_v2 = AccountUpdatedNotification {
            auth_mode: current_auth_method,
        };
@@ -964,6 +1030,98 @@ impl CodexMessageProcessor {
        }
    }

async fn login_chatgpt_auth_tokens(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
id_token: String,
|
||||
access_token: String,
|
||||
) {
|
||||
if matches!(
|
||||
self.config.forced_login_method,
|
||||
Some(ForcedLoginMethod::Api)
|
||||
) {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "External ChatGPT auth is disabled. Use API key login instead."
|
||||
.to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
|
||||
// Cancel any active login attempt to avoid persisting managed auth state.
|
||||
{
|
||||
let mut guard = self.active_login.lock().await;
|
||||
if let Some(active) = guard.take() {
|
||||
drop(active);
|
||||
}
|
||||
}
|
||||
|
||||
let id_token_info = match parse_id_token(&id_token) {
|
||||
Ok(info) => info,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("invalid id token: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(expected_workspace) = self.config.forced_chatgpt_workspace_id.as_deref()
|
||||
&& id_token_info.chatgpt_account_id.as_deref() != Some(expected_workspace)
|
||||
{
|
||||
let account_id = id_token_info.chatgpt_account_id;
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"External auth must use workspace {expected_workspace}, but received {account_id:?}."
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) =
|
||||
login_with_chatgpt_auth_tokens(&self.config.codex_home, &id_token, &access_token)
|
||||
{
|
||||
let error = JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("failed to set external auth: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
self.auth_manager.reload();
|
||||
|
||||
self.outgoing
|
||||
.send_response(request_id, LoginAccountResponse::ChatgptAuthTokens {})
|
||||
.await;
|
||||
|
||||
let payload_login_completed = AccountLoginCompletedNotification {
|
||||
login_id: None,
|
||||
success: true,
|
||||
error: None,
|
||||
};
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::AccountLoginCompleted(
|
||||
payload_login_completed,
|
||||
))
|
||||
.await;
|
||||
|
||||
let payload_v2 = AccountUpdatedNotification {
|
||||
auth_mode: self.auth_manager.get_auth_mode(),
|
||||
};
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::AccountUpdated(payload_v2))
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn logout_common(&mut self) -> std::result::Result<Option<AuthMode>, JSONRPCErrorError> {
|
||||
// Cancel any active login attempt.
|
||||
{
|
||||
@@ -982,7 +1140,11 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
// Reflect the current auth method after logout (likely None).
|
||||
Ok(self.auth_manager.auth_cached().map(|auth| auth.mode))
|
||||
Ok(self
|
||||
.auth_manager
|
||||
.auth_cached()
|
||||
.as_ref()
|
||||
.map(CodexAuth::api_auth_mode))
|
||||
}
|
||||
|
||||
async fn logout_v1(&mut self, request_id: RequestId) {
|
||||
@@ -1026,6 +1188,9 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
async fn refresh_token_if_requested(&self, do_refresh: bool) {
|
||||
if self.auth_manager.is_external_auth_active() {
|
||||
return;
|
||||
}
|
||||
if do_refresh && let Err(err) = self.auth_manager.refresh_token().await {
|
||||
tracing::warn!("failed to refresh token while getting account: {err}");
|
||||
}
|
||||
@@ -1051,7 +1216,7 @@ impl CodexMessageProcessor {
|
||||
} else {
|
||||
match self.auth_manager.auth().await {
|
||||
Some(auth) => {
|
||||
let auth_mode = auth.mode;
|
||||
let auth_mode = auth.api_auth_mode();
|
||||
let (reported_auth_method, token_opt) = match auth.get_token() {
|
||||
Ok(token) if !token.is_empty() => {
|
||||
let tok = if include_token { Some(token) } else { None };
|
||||
@@ -1098,9 +1263,9 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
let account = match self.auth_manager.auth_cached() {
|
||||
Some(auth) => Some(match auth.mode {
|
||||
AuthMode::ApiKey => Account::ApiKey {},
|
||||
AuthMode::ChatGPT => {
|
||||
Some(auth) => Some(match auth {
|
||||
CodexAuth::ApiKey(_) => Account::ApiKey {},
|
||||
CodexAuth::Chatgpt(_) | CodexAuth::ChatgptAuthTokens(_) => {
|
||||
let email = auth.get_account_email();
|
||||
let plan_type = auth.account_plan_type();
|
||||
|
||||
@@ -1159,7 +1324,7 @@ impl CodexMessageProcessor {
|
||||
});
|
||||
};
|
||||
|
||||
if auth.mode != AuthMode::ChatGPT {
|
||||
if !auth.is_chatgpt_auth() {
|
||||
return Err(JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "chatgpt authentication required to read rate limits".to_string(),
|
||||
@@ -1373,6 +1538,7 @@ impl CodexMessageProcessor {
|
||||
&self.cli_overrides,
|
||||
Some(request_overrides),
|
||||
typesafe_overrides,
|
||||
&self.cloud_requirements,
|
||||
)
|
||||
.await
|
||||
{
|
||||
@@ -1457,6 +1623,7 @@ impl CodexMessageProcessor {
|
||||
&self.cli_overrides,
|
||||
config,
|
||||
typesafe_overrides,
|
||||
&self.cloud_requirements,
|
||||
)
|
||||
.await
|
||||
{
|
||||
@@ -1609,6 +1776,7 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
async fn thread_archive(&mut self, request_id: RequestId, params: ThreadArchiveParams) {
|
||||
// TODO(jif) mostly rewrite this using sqlite after phase 1
|
||||
let thread_id = match ThreadId::from_string(¶ms.thread_id) {
|
||||
Ok(id) => id,
|
||||
Err(err) => {
|
||||
@@ -1657,7 +1825,38 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
async fn thread_set_name(&self, request_id: RequestId, params: ThreadSetNameParams) {
|
||||
let ThreadSetNameParams { thread_id, name } = params;
|
||||
let Some(name) = codex_core::util::normalize_thread_name(&name) else {
|
||||
self.send_invalid_request_error(
|
||||
request_id,
|
||||
"thread name must not be empty".to_string(),
|
||||
)
|
||||
.await;
|
||||
return;
|
||||
};
|
||||
|
||||
let (_, thread) = match self.load_thread(&thread_id).await {
|
||||
Ok(v) => v,
|
||||
Err(error) => {
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
if let Err(err) = thread.submit(Op::SetThreadName { name }).await {
|
||||
self.send_internal_error(request_id, format!("failed to set thread name: {err}"))
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
|
||||
self.outgoing
|
||||
.send_response(request_id, ThreadSetNameResponse {})
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn thread_unarchive(&mut self, request_id: RequestId, params: ThreadUnarchiveParams) {
|
||||
// TODO(jif) mostly rewrite this using sqlite after phase 1
|
||||
let thread_id = match ThreadId::from_string(¶ms.thread_id) {
|
||||
Ok(id) => id,
|
||||
Err(err) => {
|
||||
@@ -1700,6 +1899,7 @@ impl CodexMessageProcessor {
|
||||
|
||||
        let rollout_path_display = archived_path.display().to_string();
        let fallback_provider = self.config.model_provider_id.clone();
        let state_db_ctx = get_state_db(&self.config, None).await;
        let archived_folder = self
            .config
            .codex_home
@@ -1778,6 +1978,11 @@ impl CodexMessageProcessor {
                message: format!("failed to unarchive thread: {err}"),
                data: None,
            })?;
        if let Some(ctx) = state_db_ctx {
            let _ = ctx
                .mark_unarchived(thread_id, restored_path.as_path())
                .await;
        }
        let summary =
            read_summary_from_rollout(restored_path.as_path(), fallback_provider.as_str())
                .await
@@ -2166,6 +2371,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {
@@ -2358,6 +2564,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {
@@ -2507,7 +2714,6 @@ impl CodexMessageProcessor {
        };

        let fallback_provider = self.config.model_provider_id.as_str();

        match read_summary_from_rollout(&path, fallback_provider).await {
            Ok(summary) => {
                let response = GetConversationSummaryResponse { summary };
@@ -3153,6 +3359,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {
@@ -3341,6 +3548,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {
@@ -3530,8 +3738,13 @@ impl CodexMessageProcessor {
            });
        }

        let mut state_db_ctx = None;

        // If the thread is active, request shutdown and wait briefly.
        if let Some(conversation) = self.thread_manager.remove_thread(&thread_id).await {
            if let Some(ctx) = conversation.state_db() {
                state_db_ctx = Some(ctx);
            }
            info!("thread {thread_id} was active; shutting down");
            // Request shutdown.
            match conversation.submit(Op::Shutdown).await {
@@ -3558,14 +3771,24 @@ impl CodexMessageProcessor {
            }
        }

        if state_db_ctx.is_none() {
            state_db_ctx = get_state_db(&self.config, None).await;
        }

        // Move the rollout file to archived.
        let result: std::io::Result<()> = async {
        let result: std::io::Result<()> = async move {
            let archive_folder = self
                .config
                .codex_home
                .join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
            tokio::fs::create_dir_all(&archive_folder).await?;
            tokio::fs::rename(&canonical_rollout_path, &archive_folder.join(&file_name)).await?;
            let archived_path = archive_folder.join(&file_name);
            tokio::fs::rename(&canonical_rollout_path, &archived_path).await?;
            if let Some(ctx) = state_db_ctx {
                let _ = ctx
                    .mark_archived(thread_id, archived_path.as_path(), Utc::now())
                    .await;
            }
            Ok(())
        }
        .await;
@@ -3689,7 +3912,7 @@ impl CodexMessageProcessor {
            }
        };

        if !config.features.enabled(Feature::Connectors) {
        if !config.features.enabled(Feature::Apps) {
            self.outgoing
                .send_response(
                    request_id,
@@ -3752,18 +3975,7 @@ impl CodexMessageProcessor {
        }

        let end = start.saturating_add(effective_limit).min(total);
        let data = connectors[start..end]
            .iter()
            .cloned()
            .map(|connector| ApiAppInfo {
                id: connector.connector_id,
                name: connector.connector_name,
                description: connector.connector_description,
                logo_url: connector.logo_url,
                install_url: connector.install_url,
                is_accessible: connector.is_accessible,
            })
            .collect();
        let data = connectors[start..end].to_vec();

        let next_cursor = if end < total {
            Some(end.to_string())
@@ -4522,6 +4734,22 @@ fn skills_to_info(
                default_prompt: interface.default_prompt,
            }
        }),
        dependencies: skill.dependencies.clone().map(|dependencies| {
            codex_app_server_protocol::SkillDependencies {
                tools: dependencies
                    .tools
                    .into_iter()
                    .map(|tool| codex_app_server_protocol::SkillToolDependency {
                        r#type: tool.r#type,
                        value: tool.value,
                        description: tool.description,
                        transport: tool.transport,
                        command: tool.command,
                        url: tool.url,
                    })
                    .collect(),
            }
        }),
        path: skill.path.clone(),
        scope: skill.scope.into(),
        enabled,
@@ -4591,6 +4819,7 @@ async fn derive_config_from_params(
    cli_overrides: &[(String, TomlValue)],
    request_overrides: Option<HashMap<String, serde_json::Value>>,
    typesafe_overrides: ConfigOverrides,
    cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
    let merged_cli_overrides = cli_overrides
        .iter()
@@ -4603,7 +4832,11 @@ async fn derive_config_from_params(
        )
        .collect::<Vec<_>>();

    Config::load_with_cli_overrides_and_harness_overrides(merged_cli_overrides, typesafe_overrides)
    codex_core::config::ConfigBuilder::default()
        .cli_overrides(merged_cli_overrides)
        .harness_overrides(typesafe_overrides)
        .cloud_requirements(cloud_requirements.clone())
        .build()
        .await
}

@@ -4612,6 +4845,7 @@ async fn derive_config_for_cwd(
    request_overrides: Option<HashMap<String, serde_json::Value>>,
    typesafe_overrides: ConfigOverrides,
    cwd: Option<PathBuf>,
    cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
    let merged_cli_overrides = cli_overrides
        .iter()
@@ -4628,6 +4862,7 @@ async fn derive_config_for_cwd(
        .cli_overrides(merged_cli_overrides)
        .harness_overrides(typesafe_overrides)
        .fallback_cwd(cwd)
        .cloud_requirements(cloud_requirements.clone())
        .build()
        .await
}

@@ -12,8 +12,10 @@ use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::SandboxMode;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::ResidencyRequirement as CoreResidencyRequirement;
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
use serde_json::json;
use std::path::PathBuf;
@@ -29,9 +31,15 @@ impl ConfigApi {
        codex_home: PathBuf,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        cloud_requirements: CloudRequirementsLoader,
    ) -> Self {
        Self {
            service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
            service: ConfigService::new(
                codex_home,
                cli_overrides,
                loader_overrides,
                cloud_requirements,
            ),
        }
    }

@@ -84,6 +92,9 @@ fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigR
                .filter_map(map_sandbox_mode_requirement_to_api)
                .collect()
        }),
        enforce_residency: requirements
            .enforce_residency
            .map(map_residency_requirement_to_api),
    }
}

@@ -96,6 +107,14 @@ fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Opti
    }
}

fn map_residency_requirement_to_api(
    residency: CoreResidencyRequirement,
) -> codex_app_server_protocol::ResidencyRequirement {
    match residency {
        CoreResidencyRequirement::Us => codex_app_server_protocol::ResidencyRequirement::Us,
    }
}

fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
    if let Some(code) = err.write_error_code() {
        return config_write_error(code, err.to_string());
@@ -136,6 +155,8 @@ mod tests {
            CoreSandboxModeRequirement::ExternalSandbox,
        ]),
        mcp_servers: None,
        rules: None,
        enforce_residency: Some(CoreResidencyRequirement::Us),
    };

    let mapped = map_requirements_toml_to_api(requirements);
@@ -151,5 +172,9 @@ mod tests {
        mapped.allowed_sandbox_modes,
        Some(vec![SandboxMode::ReadOnly]),
    );
    assert_eq!(
        mapped.enforce_residency,
        Some(codex_app_server_protocol::ResidencyRequirement::Us),
    );
    }
}

@@ -1,17 +1,14 @@
use std::num::NonZero;
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use codex_app_server_protocol::FuzzyFileSearchResult;
use codex_file_search as file_search;
use tokio::task::JoinSet;
use tracing::warn;

const LIMIT_PER_ROOT: usize = 50;
const MATCH_LIMIT: usize = 50;
const MAX_THREADS: usize = 12;
const COMPUTE_INDICES: bool = true;

pub(crate) async fn run_fuzzy_file_search(
    query: String,
@@ -23,64 +20,54 @@ pub(crate) async fn run_fuzzy_file_search(
    }

    #[expect(clippy::expect_used)]
    let limit_per_root =
        NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
    let limit = NonZero::new(MATCH_LIMIT).expect("MATCH_LIMIT should be a valid non-zero usize");

    let cores = std::thread::available_parallelism()
        .map(std::num::NonZero::get)
        .unwrap_or(1);
    let threads = cores.min(MAX_THREADS);
    let threads_per_root = (threads / roots.len()).max(1);
    let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN);
    #[expect(clippy::expect_used)]
    let threads = NonZero::new(threads.max(1)).expect("threads should be non-zero");
    let search_dirs: Vec<PathBuf> = roots.iter().map(PathBuf::from).collect();

    let mut files: Vec<FuzzyFileSearchResult> = Vec::new();
    let mut join_set = JoinSet::new();

    for root in roots {
        let search_dir = PathBuf::from(&root);
        let query = query.clone();
        let cancel_flag = cancellation_flag.clone();
        join_set.spawn_blocking(move || {
            match file_search::run(
                query.as_str(),
                limit_per_root,
                &search_dir,
                Vec::new(),
    let mut files = match tokio::task::spawn_blocking(move || {
        file_search::run(
            query.as_str(),
            search_dirs,
            file_search::FileSearchOptions {
                limit,
                threads,
                cancel_flag,
                COMPUTE_INDICES,
                true,
            ) {
                Ok(res) => Ok((root, res)),
                Err(err) => Err((root, err)),
            }
        });
    }

    while let Some(res) = join_set.join_next().await {
        match res {
            Ok(Ok((root, res))) => {
                for m in res.matches {
                    let path = m.path;
                    let file_name = file_search::file_name_from_path(&path);
                    let result = FuzzyFileSearchResult {
                        root: root.clone(),
                        path,
                        file_name,
                        score: m.score,
                        indices: m.indices,
                    };
                    files.push(result);
                compute_indices: true,
                ..Default::default()
            },
            Some(cancellation_flag),
        )
    })
    .await
    {
        Ok(Ok(res)) => res
            .matches
            .into_iter()
            .map(|m| {
                let file_name = m.path.file_name().unwrap_or_default();
                FuzzyFileSearchResult {
                    root: m.root.to_string_lossy().to_string(),
                    path: m.path.to_string_lossy().to_string(),
                    file_name: file_name.to_string_lossy().to_string(),
                    score: m.score,
                    indices: m.indices,
                }
            }
        Ok(Err((root, err))) => {
            warn!("fuzzy-file-search in dir '{root}' failed: {err}");
        }
        Err(err) => {
            warn!("fuzzy-file-search join_next failed: {err}");
        }
            })
            .collect::<Vec<_>>(),
        Ok(Err(err)) => {
            warn!("fuzzy-file-search failed: {err}");
            Vec::new()
        }
        }
        Err(err) => {
            warn!("fuzzy-file-search join failed: {err}");
            Vec::new()
        }
    };

    files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
        FuzzyFileSearchResult,

@@ -1,8 +1,11 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]

use codex_cloud_requirements::cloud_requirements_loader;
use codex_common::CliConfigOverrides;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
use std::io::ErrorKind;
@@ -10,6 +13,7 @@ use std::io::Result as IoResult;
use std::path::PathBuf;

use crate::message_processor::MessageProcessor;
use crate::message_processor::MessageProcessorArgs;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ConfigLayerSource;
@@ -204,11 +208,32 @@ pub async fn run_main(
            format!("error parsing -c overrides: {e}"),
        )
    })?;
    let cloud_requirements = match ConfigBuilder::default()
        .cli_overrides(cli_kv_overrides.clone())
        .loader_overrides(loader_overrides.clone())
        .build()
        .await
    {
        Ok(config) => {
            let auth_manager = AuthManager::shared(
                config.codex_home.clone(),
                false,
                config.cli_auth_credentials_store_mode,
            );
            cloud_requirements_loader(auth_manager, config.chatgpt_base_url)
        }
        Err(err) => {
            warn!(error = %err, "Failed to preload config for cloud requirements");
            // TODO(gt): Make cloud requirements preload failures blocking once we can fail-closed.
            CloudRequirementsLoader::default()
        }
    };
    let loader_overrides_for_config_api = loader_overrides.clone();
    let mut config_warnings = Vec::new();
    let config = match ConfigBuilder::default()
        .cli_overrides(cli_kv_overrides.clone())
        .loader_overrides(loader_overrides)
        .cloud_requirements(cloud_requirements.clone())
        .build()
        .await
    {
@@ -290,15 +315,16 @@ pub async fn run_main(
    let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
    let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
    let loader_overrides = loader_overrides_for_config_api;
    let mut processor = MessageProcessor::new(
        outgoing_message_sender,
    let mut processor = MessageProcessor::new(MessageProcessorArgs {
        outgoing: outgoing_message_sender,
        codex_linux_sandbox_exe,
        std::sync::Arc::new(config),
        config: std::sync::Arc::new(config),
        cli_overrides,
        loader_overrides,
        feedback.clone(),
        cloud_requirements: cloud_requirements.clone(),
        feedback: feedback.clone(),
        config_warnings,
    );
    });
    let mut thread_created_rx = processor.thread_created_receiver();
    async move {
        let mut listen_for_threads = true;
@@ -312,7 +338,7 @@ pub async fn run_main(
                        JSONRPCMessage::Request(r) => processor.process_request(r).await,
                        JSONRPCMessage::Response(r) => processor.process_response(r).await,
                        JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
                        JSONRPCMessage::Error(e) => processor.process_error(e),
                        JSONRPCMessage::Error(e) => processor.process_error(e).await,
                    }
                }
                created = thread_created_rx.recv(), if listen_for_threads => {

@@ -2,9 +2,14 @@ use std::path::PathBuf;
use std::sync::Arc;

use crate::codex_message_processor::CodexMessageProcessor;
use crate::codex_message_processor::CodexMessageProcessorArgs;
use crate::config_api::ConfigApi;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
use async_trait::async_trait;
use codex_app_server_protocol::ChatgptAuthTokensRefreshParams;
use codex_app_server_protocol::ChatgptAuthTokensRefreshReason;
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ConfigBatchWriteParams;
@@ -19,66 +24,154 @@ use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::auth::ExternalAuthRefreshContext;
use codex_core::auth::ExternalAuthRefreshReason;
use codex_core::auth::ExternalAuthRefresher;
use codex_core::auth::ExternalAuthTokens;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use tokio::sync::broadcast;
use tokio::time::Duration;
use tokio::time::timeout;
use toml::Value as TomlValue;

const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);

#[derive(Clone)]
struct ExternalAuthRefreshBridge {
    outgoing: Arc<OutgoingMessageSender>,
}

impl ExternalAuthRefreshBridge {
    fn map_reason(reason: ExternalAuthRefreshReason) -> ChatgptAuthTokensRefreshReason {
        match reason {
            ExternalAuthRefreshReason::Unauthorized => ChatgptAuthTokensRefreshReason::Unauthorized,
        }
    }
}

#[async_trait]
impl ExternalAuthRefresher for ExternalAuthRefreshBridge {
    async fn refresh(
        &self,
        context: ExternalAuthRefreshContext,
    ) -> std::io::Result<ExternalAuthTokens> {
        let params = ChatgptAuthTokensRefreshParams {
            reason: Self::map_reason(context.reason),
            previous_account_id: context.previous_account_id,
        };

        let (request_id, rx) = self
            .outgoing
            .send_request_with_id(ServerRequestPayload::ChatgptAuthTokensRefresh(params))
            .await;

        let result = match timeout(EXTERNAL_AUTH_REFRESH_TIMEOUT, rx).await {
            Ok(result) => result.map_err(|err| {
                std::io::Error::other(format!("auth refresh request canceled: {err}"))
            })?,
            Err(_) => {
                let _canceled = self.outgoing.cancel_request(&request_id).await;
                return Err(std::io::Error::other(format!(
                    "auth refresh request timed out after {}s",
                    EXTERNAL_AUTH_REFRESH_TIMEOUT.as_secs()
                )));
            }
        };

        let response: ChatgptAuthTokensRefreshResponse =
            serde_json::from_value(result).map_err(std::io::Error::other)?;

        Ok(ExternalAuthTokens {
            access_token: response.access_token,
            id_token: response.id_token,
        })
    }
}

pub(crate) struct MessageProcessor {
    outgoing: Arc<OutgoingMessageSender>,
    codex_message_processor: CodexMessageProcessor,
    config_api: ConfigApi,
    config: Arc<Config>,
    initialized: bool,
    config_warnings: Vec<ConfigWarningNotification>,
}

pub(crate) struct MessageProcessorArgs {
    pub(crate) outgoing: OutgoingMessageSender,
    pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) config: Arc<Config>,
    pub(crate) cli_overrides: Vec<(String, TomlValue)>,
    pub(crate) loader_overrides: LoaderOverrides,
    pub(crate) cloud_requirements: CloudRequirementsLoader,
    pub(crate) feedback: CodexFeedback,
    pub(crate) config_warnings: Vec<ConfigWarningNotification>,
}

impl MessageProcessor {
    /// Create a new `MessageProcessor`, retaining a handle to the outgoing
    /// `Sender` so handlers can enqueue messages to be written to stdout.
    pub(crate) fn new(
        outgoing: OutgoingMessageSender,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        feedback: CodexFeedback,
        config_warnings: Vec<ConfigWarningNotification>,
    ) -> Self {
    pub(crate) fn new(args: MessageProcessorArgs) -> Self {
        let MessageProcessorArgs {
            outgoing,
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            loader_overrides,
            cloud_requirements,
            feedback,
            config_warnings,
        } = args;
        let outgoing = Arc::new(outgoing);
        let auth_manager = AuthManager::shared(
            config.codex_home.clone(),
            false,
            config.cli_auth_credentials_store_mode,
        );
        auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
        auth_manager.set_external_auth_refresher(Arc::new(ExternalAuthRefreshBridge {
            outgoing: outgoing.clone(),
        }));
        let thread_manager = Arc::new(ThreadManager::new(
            config.codex_home.clone(),
            auth_manager.clone(),
            SessionSource::VSCode,
        ));
        let codex_message_processor = CodexMessageProcessor::new(
        let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {
            auth_manager,
            thread_manager,
            outgoing.clone(),
            outgoing: outgoing.clone(),
            codex_linux_sandbox_exe,
            Arc::clone(&config),
            cli_overrides.clone(),
            config: Arc::clone(&config),
            cli_overrides: cli_overrides.clone(),
            cloud_requirements: cloud_requirements.clone(),
            feedback,
        });
        let config_api = ConfigApi::new(
            config.codex_home.clone(),
            cli_overrides,
            loader_overrides,
            cloud_requirements,
        );
        let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);

        Self {
            outgoing,
            codex_message_processor,
            config_api,
            config,
            initialized: false,
            config_warnings,
        }
@@ -151,6 +244,7 @@ impl MessageProcessor {
                }
            }
        }
        set_default_client_residency_requirement(self.config.enforce_residency.value());
        let user_agent_suffix = format!("{name}; {version}");
        if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
            *suffix = Some(user_agent_suffix);
@@ -236,8 +330,9 @@ impl MessageProcessor {
    }

    /// Handle an error object received from the peer.
    pub(crate) fn process_error(&mut self, err: JSONRPCError) {
    pub(crate) async fn process_error(&mut self, err: JSONRPCError) {
        tracing::error!("<- error: {:?}", err);
        self.outgoing.notify_client_error(err.id, err.error).await;
    }

    async fn handle_config_read(&self, request_id: RequestId, params: ConfigReadParams) {

@@ -39,6 +39,14 @@ impl OutgoingMessageSender {
        &self,
        request: ServerRequestPayload,
    ) -> oneshot::Receiver<Result> {
        let (_id, rx) = self.send_request_with_id(request).await;
        rx
    }

    pub(crate) async fn send_request_with_id(
        &self,
        request: ServerRequestPayload,
    ) -> (RequestId, oneshot::Receiver<Result>) {
        let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
        let outgoing_message_id = id.clone();
        let (tx_approve, rx_approve) = oneshot::channel();
@@ -54,7 +62,7 @@ impl OutgoingMessageSender {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove(&outgoing_message_id);
        }
        rx_approve
        (outgoing_message_id, rx_approve)
    }

    pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
@@ -75,6 +83,30 @@ impl OutgoingMessageSender {
        }
    }

    pub(crate) async fn notify_client_error(&self, id: RequestId, error: JSONRPCErrorError) {
        let entry = {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove_entry(&id)
        };

        match entry {
            Some((id, _sender)) => {
                warn!("client responded with error for {id:?}: {error:?}");
            }
            None => {
                warn!("could not find callback for {id:?}");
            }
        }
    }

    pub(crate) async fn cancel_request(&self, id: &RequestId) -> bool {
        let entry = {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove_entry(id)
        };
        entry.is_some()
    }

    pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) {
        match serde_json::to_value(response) {
            Ok(result) => {

@@ -6,6 +6,7 @@ use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::DateTime;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
@@ -158,6 +159,7 @@ pub fn write_chatgpt_auth(
    let last_refresh = fixture.last_refresh.unwrap_or_else(|| Some(Utc::now()));

    let auth = AuthDotJson {
        auth_mode: Some(AuthMode::Chatgpt),
        openai_api_key: None,
        tokens: Some(tokens),
        last_refresh,

codex-rs/app-server/tests/common/config.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
use codex_core::features::FEATURES;
use codex_core::features::Feature;
use std::collections::BTreeMap;
use std::path::Path;

pub fn write_mock_responses_config_toml(
    codex_home: &Path,
    server_uri: &str,
    feature_flags: &BTreeMap<Feature, bool>,
    auto_compact_limit: i64,
    requires_openai_auth: Option<bool>,
    model_provider_id: &str,
    compact_prompt: &str,
) -> std::io::Result<()> {
    // Phase 1: build the features block for config.toml.
    let mut features = BTreeMap::from([(Feature::RemoteModels, false)]);
    for (feature, enabled) in feature_flags {
        features.insert(*feature, *enabled);
    }
    let feature_entries = features
        .into_iter()
        .map(|(feature, enabled)| {
            let key = FEATURES
                .iter()
                .find(|spec| spec.id == feature)
                .map(|spec| spec.key)
                .unwrap_or_else(|| panic!("missing feature key for {feature:?}"));
            format!("{key} = {enabled}")
        })
        .collect::<Vec<_>>()
        .join("\n");
    // Phase 2: build provider-specific config bits.
    let requires_line = match requires_openai_auth {
        Some(true) => "requires_openai_auth = true\n".to_string(),
        Some(false) | None => String::new(),
    };
    let provider_block = if model_provider_id == "openai" {
        String::new()
    } else {
        format!(
            r#"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
{requires_line}
"#
        )
    };
    // Phase 3: write the final config file.
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
compact_prompt = "{compact_prompt}"
model_auto_compact_token_limit = {auto_compact_limit}

model_provider = "{model_provider_id}"

[features]
{feature_entries}
{provider_block}
"#
        ),
    )
}
@@ -1,4 +1,5 @@
mod auth_fixtures;
mod config;
mod mcp_process;
mod mock_model_server;
mod models_cache;
@@ -10,6 +11,7 @@ pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use config::write_mock_responses_config_toml;
pub use core_test_support::format_with_current_shell;
pub use core_test_support::format_with_current_shell_display;
pub use core_test_support::format_with_current_shell_display_non_login;

@@ -29,11 +29,13 @@ use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InterruptConversationParams;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::LoginAccountParams;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::NewConversationParams;
@@ -298,6 +300,20 @@ impl McpProcess {
        self.send_request("account/read", params).await
    }

    /// Send an `account/login/start` JSON-RPC request with ChatGPT auth tokens.
    pub async fn send_chatgpt_auth_tokens_login_request(
        &mut self,
        id_token: String,
        access_token: String,
    ) -> anyhow::Result<i64> {
        let params = LoginAccountParams::ChatgptAuthTokens {
            id_token,
            access_token,
        };
        let params = Some(serde_json::to_value(params)?);
        self.send_request("account/login/start", params).await
    }

    /// Send a `feedback/upload` JSON-RPC request.
    pub async fn send_feedback_upload_request(
        &mut self,
@@ -608,6 +624,15 @@ impl McpProcess {
            .await
    }

    pub async fn send_error(
        &mut self,
        id: RequestId,
        error: JSONRPCErrorError,
    ) -> anyhow::Result<()> {
        self.send_jsonrpc_message(JSONRPCMessage::Error(JSONRPCError { id, error }))
            .await
    }

    pub async fn send_notification(
        &mut self,
        notification: ClientNotification,
@@ -711,6 +736,10 @@ impl McpProcess {
        Ok(notification)
    }

    pub async fn read_next_message(&mut self) -> anyhow::Result<JSONRPCMessage> {
        self.read_stream_until_message(|_| true).await
    }

    /// Clears any buffered messages so future reads only consider new stream items.
    ///
    /// We call this when e.g. we want to validate against the next turn and no longer care about

@@ -27,7 +27,7 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
        priority,
        upgrade: preset.upgrade.as_ref().map(|u| u.into()),
        base_instructions: "base instructions".to_string(),
        model_instructions_template: None,
        model_messages: None,
        supports_reasoning_summaries: false,
        support_verbosity: false,
        default_verbosity: None,

@@ -67,7 +67,6 @@ pub fn create_request_user_input_sse_response(call_id: &str) -> anyhow::Result<S
        "id": "confirm_path",
        "header": "Confirm",
        "question": "Proceed with the plan?",
        "isOther": false,
        "options": [{
            "label": "Yes (Recommended)",
            "description": "Continue the current plan."

@@ -81,6 +81,7 @@ pub fn create_fake_rollout_with_source(
        source,
        model_provider: model_provider.map(str::to_string),
        base_instructions: None,
        dynamic_tools: None,
    };
    let payload = serde_json::to_value(SessionMetaLine {
        meta,
@@ -159,6 +160,7 @@ pub fn create_fake_rollout_with_text_elements(
        source: SessionSource::Cli,
        model_provider: model_provider.map(str::to_string),
        base_instructions: None,
        dynamic_tools: None,
    };
    let payload = serde_json::to_value(SessionMetaLine {
        meta,

@@ -48,8 +48,7 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
    .await??;

    let value = resp.result;
    // The path separator on Windows affects the score.
    let expected_score = if cfg!(windows) { 69 } else { 72 };
    let expected_score = 72;

    assert_eq!(
        value,
@@ -59,16 +58,9 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
                "root": root_path.clone(),
                "path": "abexy",
                "file_name": "abexy",
                "score": 88,
                "score": 84,
                "indices": [0, 1, 2],
            },
            {
                "root": root_path.clone(),
                "path": "abcde",
                "file_name": "abcde",
                "score": 74,
                "indices": [0, 1, 4],
            },
            {
                "root": root_path.clone(),
                "path": sub_abce_rel,
@@ -76,6 +68,13 @@ async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
                "score": expected_score,
                "indices": [4, 5, 7],
            },
            {
                "root": root_path.clone(),
                "path": "abcde",
                "file_name": "abcde",
                "score": 71,
                "indices": [0, 1, 4],
            },
            ]
        })
    );

@@ -11,6 +11,7 @@ use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_execpolicy::Policy;
use codex_protocol::ThreadId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
@@ -358,6 +359,8 @@ fn assert_permissions_message(item: &ResponseItem) {
    let expected = DeveloperInstructions::from_policy(
        &SandboxPolicy::DangerFullAccess,
        AskForApproval::Never,
        &Policy::empty(),
        false,
        &PathBuf::from("/tmp"),
    )
    .into_text();

@@ -4,28 +4,43 @@ use app_test_support::McpProcess;
use app_test_support::to_response;

use app_test_support::ChatGptAuthFixture;
use app_test_support::ChatGptIdTokenClaims;
use app_test_support::encode_id_token;
use app_test_support::write_chatgpt_auth;
use app_test_support::write_models_cache;
use codex_app_server_protocol::Account;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::CancelLoginAccountParams;
use codex_app_server_protocol::CancelLoginAccountResponse;
use codex_app_server_protocol::CancelLoginAccountStatus;
use codex_app_server_protocol::ChatgptAuthTokensRefreshReason;
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
use codex_app_server_protocol::GetAccountParams;
use codex_app_server_protocol::GetAccountResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginAccountResponse;
use codex_app_server_protocol::LogoutAccountResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnStatus;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_login::login_with_api_key;
use codex_protocol::account::PlanType as AccountPlanType;
use core_test_support::responses;
use pretty_assertions::assert_eq;
use serde_json::json;
use serial_test::serial;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::MockServer;
use wiremock::ResponseTemplate;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

@@ -35,10 +50,14 @@ struct CreateConfigTomlParams {
    forced_method: Option<String>,
    forced_workspace_id: Option<String>,
    requires_openai_auth: Option<bool>,
    base_url: Option<String>,
}

fn create_config_toml(codex_home: &Path, params: CreateConfigTomlParams) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    let base_url = params
        .base_url
        .unwrap_or_else(|| "http://127.0.0.1:0/v1".to_string());
    let forced_line = if let Some(method) = params.forced_method {
        format!("forced_login_method = \"{method}\"\n")
    } else {
@@ -66,7 +85,7 @@ model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "http://127.0.0.1:0/v1"
base_url = "{base_url}"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
@@ -133,6 +152,627 @@ async fn logout_account_removes_auth_and_notifies() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn set_auth_token_updates_account_and_notifies() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mock_server = MockServer::start().await;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            requires_openai_auth: Some(true),
            base_url: Some(format!("{}/v1", mock_server.uri())),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("embedded@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-embedded"),
    )?;
    let access_token = "access-embedded".to_string();

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(id_token.clone(), access_token)
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});

    let note = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;
    let parsed: ServerNotification = note.try_into()?;
    let ServerNotification::AccountUpdated(payload) = parsed else {
        bail!("unexpected notification: {parsed:?}");
    };
    assert_eq!(payload.auth_mode, Some(AuthMode::ChatgptAuthTokens));

    let get_id = mcp
        .send_get_account_request(GetAccountParams {
            refresh_token: false,
        })
        .await?;
    let get_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
    )
    .await??;
    let account: GetAccountResponse = to_response(get_resp)?;
    assert_eq!(
        account,
        GetAccountResponse {
            account: Some(Account::Chatgpt {
                email: "embedded@example.com".to_string(),
                plan_type: AccountPlanType::Pro,
            }),
            requires_openai_auth: true,
        }
    );

    Ok(())
}

#[tokio::test]
async fn account_read_refresh_token_is_noop_in_external_mode() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            requires_openai_auth: Some(true),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("embedded@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-embedded"),
    )?;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(id_token, "access-embedded".to_string())
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    let get_id = mcp
        .send_get_account_request(GetAccountParams {
            refresh_token: true,
        })
        .await?;
    let get_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
    )
    .await??;
    let account: GetAccountResponse = to_response(get_resp)?;
    assert_eq!(
        account,
        GetAccountResponse {
            account: Some(Account::Chatgpt {
                email: "embedded@example.com".to_string(),
                plan_type: AccountPlanType::Pro,
            }),
            requires_openai_auth: true,
        }
    );

    let refresh_request = timeout(
        Duration::from_millis(250),
        mcp.read_stream_until_request_message(),
    )
    .await;
    assert!(
        refresh_request.is_err(),
        "external mode should not emit account/chatgptAuthTokens/refresh for refreshToken=true"
    );

    Ok(())
}

async fn respond_to_refresh_request(
    mcp: &mut McpProcess,
    access_token: &str,
    id_token: &str,
) -> Result<()> {
    let refresh_req: ServerRequest = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::ChatgptAuthTokensRefresh { request_id, params } = refresh_req else {
        bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
    };
    assert_eq!(params.reason, ChatgptAuthTokensRefreshReason::Unauthorized);
    let response = ChatgptAuthTokensRefreshResponse {
        access_token: access_token.to_string(),
        id_token: id_token.to_string(),
    };
    mcp.send_response(request_id, serde_json::to_value(response)?)
        .await?;
    Ok(())
}

#[tokio::test]
// 401 response triggers account/chatgptAuthTokens/refresh and retries with new tokens.
async fn external_auth_refreshes_on_unauthorized() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mock_server = MockServer::start().await;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            requires_openai_auth: Some(true),
            base_url: Some(format!("{}/v1", mock_server.uri())),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let success_sse = responses::sse(vec![
        responses::ev_response_created("resp-turn"),
        responses::ev_assistant_message("msg-turn", "turn ok"),
        responses::ev_completed("resp-turn"),
    ]);
    let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
        "error": { "message": "unauthorized" }
    }));
    let responses_mock = responses::mount_response_sequence(
        &mock_server,
        vec![unauthorized, responses::sse_response(success_sse)],
    )
    .await;

    let initial_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("initial@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-initial"),
    )?;
    let refreshed_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("refreshed@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-refreshed"),
    )?;
    let initial_access_token = "access-initial".to_string();
    let refreshed_access_token = "access-refreshed".to_string();

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(
            initial_id_token.clone(),
            initial_access_token.clone(),
        )
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(codex_app_server_protocol::TurnStartParams {
            thread_id: thread.thread.id,
            input: vec![codex_app_server_protocol::UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    respond_to_refresh_request(&mut mcp, &refreshed_access_token, &refreshed_id_token).await?;
    let _turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn_completed = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let requests = responses_mock.requests();
    assert_eq!(requests.len(), 2);
    assert_eq!(
        requests[0].header("authorization"),
        Some(format!("Bearer {initial_access_token}"))
    );
    assert_eq!(
        requests[1].header("authorization"),
        Some(format!("Bearer {refreshed_access_token}"))
    );

    Ok(())
}

#[tokio::test]
// Client returns JSON-RPC error to refresh; turn fails.
async fn external_auth_refresh_error_fails_turn() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mock_server = MockServer::start().await;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            requires_openai_auth: Some(true),
            base_url: Some(format!("{}/v1", mock_server.uri())),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
        "error": { "message": "unauthorized" }
    }));
    let _responses_mock =
        responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;

    let initial_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("initial@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-initial"),
    )?;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(codex_app_server_protocol::TurnStartParams {
            thread_id: thread.thread.id.clone(),
            input: vec![codex_app_server_protocol::UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;

    let refresh_req: ServerRequest = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
        bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
    };

    mcp.send_error(
        request_id,
        JSONRPCErrorError {
            code: -32_000,
            message: "refresh failed".to_string(),
            data: None,
        },
    )
    .await?;

    let _turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let completed_notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    let completed: TurnCompletedNotification = serde_json::from_value(
        completed_notif
            .params
            .expect("turn/completed params must be present"),
    )?;
    assert_eq!(completed.turn.status, TurnStatus::Failed);
    assert!(completed.turn.error.is_some());

    Ok(())
}

#[tokio::test]
// Refresh returns tokens for the wrong workspace; turn fails.
async fn external_auth_refresh_mismatched_workspace_fails_turn() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mock_server = MockServer::start().await;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            forced_workspace_id: Some("org-expected".to_string()),
            requires_openai_auth: Some(true),
            base_url: Some(format!("{}/v1", mock_server.uri())),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
        "error": { "message": "unauthorized" }
    }));
    let _responses_mock =
        responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;

    let initial_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("initial@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-expected"),
    )?;
    let refreshed_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("refreshed@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-other"),
    )?;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(codex_app_server_protocol::TurnStartParams {
            thread_id: thread.thread.id.clone(),
            input: vec![codex_app_server_protocol::UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;

    let refresh_req: ServerRequest = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
        bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
    };

    mcp.send_response(
        request_id,
        serde_json::to_value(ChatgptAuthTokensRefreshResponse {
            access_token: "access-refreshed".to_string(),
            id_token: refreshed_id_token,
        })?,
    )
    .await?;

    let _turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let completed_notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    let completed: TurnCompletedNotification = serde_json::from_value(
        completed_notif
            .params
            .expect("turn/completed params must be present"),
    )?;
    assert_eq!(completed.turn.status, TurnStatus::Failed);
    assert!(completed.turn.error.is_some());

    Ok(())
}

#[tokio::test]
// Refresh returns a malformed id_token; turn fails.
async fn external_auth_refresh_invalid_id_token_fails_turn() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mock_server = MockServer::start().await;
    create_config_toml(
        codex_home.path(),
        CreateConfigTomlParams {
            requires_openai_auth: Some(true),
            base_url: Some(format!("{}/v1", mock_server.uri())),
            ..Default::default()
        },
    )?;
    write_models_cache(codex_home.path())?;

    let unauthorized = ResponseTemplate::new(401).set_body_json(json!({
        "error": { "message": "unauthorized" }
    }));
    let _responses_mock =
        responses::mount_response_sequence(&mock_server, vec![unauthorized]).await;

    let initial_id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("initial@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-initial"),
    )?;

    let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(initial_id_token, "access-initial".to_string())
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(codex_app_server_protocol::ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let thread = to_response::<codex_app_server_protocol::ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(codex_app_server_protocol::TurnStartParams {
            thread_id: thread.thread.id.clone(),
            input: vec![codex_app_server_protocol::UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;

    let refresh_req: ServerRequest = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::ChatgptAuthTokensRefresh { request_id, .. } = refresh_req else {
        bail!("expected account/chatgptAuthTokens/refresh request, got {refresh_req:?}");
    };

    mcp.send_response(
        request_id,
        serde_json::to_value(ChatgptAuthTokensRefreshResponse {
            access_token: "access-refreshed".to_string(),
            id_token: "not-a-jwt".to_string(),
        })?,
    )
    .await?;

    let _turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let completed_notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    let completed: TurnCompletedNotification = serde_json::from_value(
        completed_notif
            .params
            .expect("turn/completed params must be present"),
    )?;
    assert_eq!(completed.turn.status, TurnStatus::Failed);
    assert!(completed.turn.error.is_some());

    Ok(())
}

#[tokio::test]
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -304,6 +944,71 @@ async fn login_account_chatgpt_start_can_be_cancelled() -> Result<()> {
    Ok(())
}

#[tokio::test]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]
async fn set_auth_token_cancels_active_chatgpt_login() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Initiate the ChatGPT login flow
    let request_id = mcp.send_login_account_chatgpt_request().await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let login: LoginAccountResponse = to_response(resp)?;
    let LoginAccountResponse::Chatgpt { login_id, .. } = login else {
        bail!("unexpected login response: {login:?}");
    };

    let id_token = encode_id_token(
        &ChatGptIdTokenClaims::new()
            .email("embedded@example.com")
            .plan_type("pro")
            .chatgpt_account_id("org-embedded"),
    )?;
    // Set an external auth token instead of completing the ChatGPT login flow.
    // This should cancel the active login attempt.
    let set_id = mcp
        .send_chatgpt_auth_tokens_login_request(id_token, "access-embedded".to_string())
        .await?;
    let set_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(set_id)),
    )
    .await??;
    let response: LoginAccountResponse = to_response(set_resp)?;
    assert_eq!(response, LoginAccountResponse::ChatgptAuthTokens {});
    let _updated = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("account/updated"),
    )
    .await??;

    // Verify that the active login attempt was cancelled.
    // We check this by trying to cancel it and expecting a not found error.
    let cancel_id = mcp
        .send_cancel_login_account_request(CancelLoginAccountParams {
            login_id: login_id.clone(),
        })
        .await?;
    let cancel_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
    )
    .await??;
    let cancel: CancelLoginAccountResponse = to_response(cancel_resp)?;
    assert_eq!(cancel.status, CancelLoginAccountStatus::NotFound);

    Ok(())
}

#[tokio::test]
// Serialize tests that launch the login server since it binds to a fixed port.
#[serial(login_port)]

@@ -13,14 +13,13 @@ use axum::extract::State;
use axum::http::HeaderMap;
use axum::http::StatusCode;
use axum::http::header::AUTHORIZATION;
use axum::routing::post;
use axum::routing::get;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::connectors::ConnectorInfo;
use pretty_assertions::assert_eq;
use rmcp::handler::server::ServerHandler;
use rmcp::model::JsonObject;
@@ -71,19 +70,23 @@ async fn list_apps_returns_empty_when_connectors_disabled() -> Result<()> {
#[tokio::test]
async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
        AppInfo {
            id: "beta".to_string(),
            name: "beta".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
@@ -127,6 +130,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
        },
@@ -135,6 +140,8 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: Some("https://example.com/alpha.png".to_string()),
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
        },
@@ -150,19 +157,23 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
#[tokio::test]
async fn list_apps_paginates_results() -> Result<()> {
    let connectors = vec![
        ConnectorInfo {
            connector_id: "alpha".to_string(),
            connector_name: "Alpha".to_string(),
            connector_description: Some("Alpha connector".to_string()),
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
        ConnectorInfo {
            connector_id: "beta".to_string(),
            connector_name: "beta".to_string(),
            connector_description: None,
        AppInfo {
            id: "beta".to_string(),
            name: "beta".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        },
@@ -206,6 +217,8 @@ async fn list_apps_paginates_results() -> Result<()> {
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
        }];
@@ -234,6 +247,8 @@ async fn list_apps_paginates_results() -> Result<()> {
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
        }];
@@ -289,13 +304,13 @@ impl ServerHandler for AppListMcpServer {
}

async fn start_apps_server(
    connectors: Vec<ConnectorInfo>,
    connectors: Vec<AppInfo>,
    tools: Vec<Tool>,
) -> Result<(String, JoinHandle<()>)> {
    let state = AppsServerState {
        expected_bearer: "Bearer chatgpt-token".to_string(),
        expected_account_id: "account-123".to_string(),
        response: json!({ "connectors": connectors }),
        response: json!({ "apps": connectors, "next_token": null }),
    };
    let state = Arc::new(state);
    let tools = Arc::new(tools);
@@ -313,7 +328,11 @@ async fn start_apps_server(
    );

    let router = Router::new()
        .route("/aip/connectors/list_accessible", post(list_connectors))
        .route("/connectors/directory/list", get(list_directory_connectors))
        .route(
            "/connectors/directory/list_workspace",
            get(list_directory_connectors),
        )
        .with_state(state)
        .nest_service("/api/codex/apps", mcp_service);

@@ -324,7 +343,7 @@ async fn start_apps_server(
    Ok((format!("http://{addr}"), handle))
}

async fn list_connectors(
async fn list_directory_connectors(
    State(state): State<Arc<AppsServerState>>,
    headers: HeaderMap,
) -> Result<impl axum::response::IntoResponse, StatusCode> {

282
codex-rs/app-server/tests/suite/v2/compaction.rs
Normal file
@@ -0,0 +1,282 @@
//! End-to-end compaction flow tests.
//!
//! Phases:
//! 1) Arrange: mock responses/compact endpoints + config.
//! 2) Act: start a thread and submit multiple turns to trigger auto-compaction.
//! 3) Assert: verify item/started + item/completed notifications for context compaction.
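//!
//! The trigger in step 2 comes entirely from the mocked token counts: each
//! `ev_completed_with_tokens` total far exceeds `AUTO_COMPACT_LIMIT`, so
//! (assuming reported usage is compared against that limit) compaction fires
//! without any real model traffic.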

#![expect(clippy::expect_used)]

use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use app_test_support::write_mock_responses_config_toml;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::features::Feature;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::collections::BTreeMap;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const AUTO_COMPACT_LIMIT: i64 = 1_000;
const COMPACT_PROMPT: &str = "Summarize the conversation.";

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auto_compaction_local_emits_started_and_completed_items() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let sse1 = responses::sse(vec![
        responses::ev_assistant_message("m1", "FIRST_REPLY"),
        responses::ev_completed_with_tokens("r1", 70_000),
    ]);
    let sse2 = responses::sse(vec![
        responses::ev_assistant_message("m2", "SECOND_REPLY"),
        responses::ev_completed_with_tokens("r2", 330_000),
    ]);
    let sse3 = responses::sse(vec![
        responses::ev_assistant_message("m3", "LOCAL_SUMMARY"),
        responses::ev_completed_with_tokens("r3", 200),
    ]);
    let sse4 = responses::sse(vec![
        responses::ev_assistant_message("m4", "FINAL_REPLY"),
        responses::ev_completed_with_tokens("r4", 120),
    ]);
    responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3, sse4]).await;

    let codex_home = TempDir::new()?;
    write_mock_responses_config_toml(
        codex_home.path(),
        &server.uri(),
        &BTreeMap::default(),
        AUTO_COMPACT_LIMIT,
        None,
        "mock_provider",
        COMPACT_PROMPT,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_id = start_thread(&mut mcp).await?;
    for message in ["first", "second", "third"] {
        send_turn_and_wait(&mut mcp, &thread_id, message).await?;
    }

    let started = wait_for_context_compaction_started(&mut mcp).await?;
    let completed = wait_for_context_compaction_completed(&mut mcp).await?;

    let ThreadItem::ContextCompaction { id: started_id } = started.item else {
        unreachable!("started item should be context compaction");
    };
    let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
        unreachable!("completed item should be context compaction");
    };

    assert_eq!(started.thread_id, thread_id);
    assert_eq!(completed.thread_id, thread_id);
    assert_eq!(started_id, completed_id);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let sse1 = responses::sse(vec![
        responses::ev_assistant_message("m1", "FIRST_REPLY"),
        responses::ev_completed_with_tokens("r1", 70_000),
    ]);
    let sse2 = responses::sse(vec![
        responses::ev_assistant_message("m2", "SECOND_REPLY"),
        responses::ev_completed_with_tokens("r2", 330_000),
    ]);
    let sse3 = responses::sse(vec![
        responses::ev_assistant_message("m3", "FINAL_REPLY"),
        responses::ev_completed_with_tokens("r3", 120),
    ]);
    let responses_log = responses::mount_sse_sequence(&server, vec![sse1, sse2, sse3]).await;

    let compacted_history = vec![
        ResponseItem::Message {
            id: None,
            role: "assistant".to_string(),
            content: vec![ContentItem::OutputText {
                text: "REMOTE_COMPACT_SUMMARY".to_string(),
            }],
            end_turn: None,
        },
        ResponseItem::Compaction {
            encrypted_content: "ENCRYPTED_COMPACTION_SUMMARY".to_string(),
        },
    ];
    let compact_mock = responses::mount_compact_json_once(
        &server,
        serde_json::json!({ "output": compacted_history }),
    )
    .await;

    let codex_home = TempDir::new()?;
    let mut features = BTreeMap::default();
    features.insert(Feature::RemoteCompaction, true);
    write_mock_responses_config_toml(
        codex_home.path(),
        &server.uri(),
        &features,
        AUTO_COMPACT_LIMIT,
        Some(true),
        "openai",
        COMPACT_PROMPT,
    )?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("access-chatgpt").plan_type("pro"),
        AuthCredentialsStoreMode::File,
    )?;

    let server_base_url = format!("{}/v1", server.uri());
    let mut mcp = McpProcess::new_with_env(
        codex_home.path(),
        &[
            ("OPENAI_BASE_URL", Some(server_base_url.as_str())),
            ("OPENAI_API_KEY", None),
        ],
    )
    .await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_id = start_thread(&mut mcp).await?;
    for message in ["first", "second", "third"] {
        send_turn_and_wait(&mut mcp, &thread_id, message).await?;
    }

    let started = wait_for_context_compaction_started(&mut mcp).await?;
    let completed = wait_for_context_compaction_completed(&mut mcp).await?;

    let ThreadItem::ContextCompaction { id: started_id } = started.item else {
        unreachable!("started item should be context compaction");
    };
    let ThreadItem::ContextCompaction { id: completed_id } = completed.item else {
        unreachable!("completed item should be context compaction");
    };

    assert_eq!(started.thread_id, thread_id);
    assert_eq!(completed.thread_id, thread_id);
    assert_eq!(started_id, completed_id);

    let compact_requests = compact_mock.requests();
    assert_eq!(compact_requests.len(), 1);
    assert_eq!(compact_requests[0].path(), "/v1/responses/compact");

    let response_requests = responses_log.requests();
    assert_eq!(response_requests.len(), 3);

    Ok(())
}

async fn start_thread(mcp: &mut McpProcess) -> Result<String> {
    let thread_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
    Ok(thread.id)
}

async fn send_turn_and_wait(mcp: &mut McpProcess, thread_id: &str, text: &str) -> Result<String> {
    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread_id.to_string(),
            input: vec![V2UserInput::Text {
                text: text.to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
    wait_for_turn_completed(mcp, &turn.id).await?;
    Ok(turn.id)
}

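// Reads `turn/completed` notifications until the one matching `turn_id`
// arrives; completions for any other turn are skipped.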
async fn wait_for_turn_completed(mcp: &mut McpProcess, turn_id: &str) -> Result<()> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("turn/completed"),
        )
        .await??;
        let completed: TurnCompletedNotification =
            serde_json::from_value(notification.params.clone().expect("turn/completed params"))?;
        if completed.turn.id == turn_id {
            return Ok(());
        }
    }
}

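// The two waiters below ignore every other item kind and return only once a
// `ContextCompaction` item is observed.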
async fn wait_for_context_compaction_started(
    mcp: &mut McpProcess,
) -> Result<ItemStartedNotification> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("item/started"),
        )
        .await??;
        let started: ItemStartedNotification =
            serde_json::from_value(notification.params.clone().expect("item/started params"))?;
        if let ThreadItem::ContextCompaction { .. } = started.item {
            return Ok(started);
        }
    }
}

async fn wait_for_context_compaction_completed(
    mcp: &mut McpProcess,
) -> Result<ItemCompletedNotification> {
    loop {
        let notification: JSONRPCNotification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message("item/completed"),
        )
        .await??;
        let completed: ItemCompletedNotification =
            serde_json::from_value(notification.params.clone().expect("item/completed params"))?;
        if let ThreadItem::ContextCompaction { .. } = completed.item {
            return Ok(completed);
        }
    }
}
@@ -2,11 +2,13 @@ mod account;
mod analytics;
mod app_list;
mod collaboration_mode_list;
mod compaction;
mod config_rpc;
mod dynamic_tools;
mod initialize;
mod model_list;
mod output_schema;
mod plan_item;
mod rate_limits;
mod request_user_input;
mod review;

257
codex-rs/app-server/tests/suite/v2/plan_item.rs
Normal file
@@ -0,0 +1,257 @@
use anyhow::Result;
use anyhow::anyhow;
use app_test_support::McpProcess;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::to_response;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PlanDeltaNotification;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::features::FEATURES;
use codex_core::features::Feature;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::collections::BTreeMap;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn plan_mode_uses_proposed_plan_block_for_plan_item() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let plan_block = "<proposed_plan>\n# Final plan\n- first\n- second\n</proposed_plan>\n";
    let full_message = format!("Preface\n{plan_block}Postscript");
    let responses = vec![responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_message_item_added("msg-1", ""),
        responses::ev_output_text_delta(&full_message),
        responses::ev_assistant_message("msg-1", &full_message),
        responses::ev_completed("resp-1"),
    ])];
    let server = create_mock_responses_server_sequence(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let turn = start_plan_mode_turn(&mut mcp).await?;
    let (_, completed_items, plan_deltas, turn_completed) =
        collect_turn_notifications(&mut mcp).await?;

    assert_eq!(turn_completed.turn.id, turn.id);
    assert_eq!(turn_completed.turn.status, TurnStatus::Completed);

    let expected_plan = ThreadItem::Plan {
        id: format!("{}-plan", turn.id),
        text: "# Final plan\n- first\n- second\n".to_string(),
    };
    let expected_plan_id = format!("{}-plan", turn.id);
    let streamed_plan = plan_deltas
        .iter()
        .map(|delta| delta.delta.as_str())
        .collect::<String>();
    assert_eq!(streamed_plan, "# Final plan\n- first\n- second\n");
    assert!(
        plan_deltas
            .iter()
            .all(|delta| delta.item_id == expected_plan_id)
    );
    let plan_items = completed_items
        .iter()
        .filter_map(|item| match item {
            ThreadItem::Plan { .. } => Some(item.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();
    assert_eq!(plan_items, vec![expected_plan]);
    assert!(
        completed_items
            .iter()
            .any(|item| matches!(item, ThreadItem::AgentMessage { .. })),
        "agent message items should still be emitted alongside the plan item"
    );

    Ok(())
}

#[tokio::test]
async fn plan_mode_without_proposed_plan_does_not_emit_plan_item() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let responses = vec![responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ])];
    let server = create_mock_responses_server_sequence(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let _turn = start_plan_mode_turn(&mut mcp).await?;
    let (_, completed_items, plan_deltas, _) = collect_turn_notifications(&mut mcp).await?;

    let has_plan_item = completed_items
        .iter()
        .any(|item| matches!(item, ThreadItem::Plan { .. }));
    assert!(!has_plan_item);
    assert!(plan_deltas.is_empty());

    Ok(())
}

async fn start_plan_mode_turn(mcp: &mut McpProcess) -> Result<codex_app_server_protocol::Turn> {
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let thread = to_response::<ThreadStartResponse>(thread_resp)?.thread;

    let collaboration_mode = CollaborationMode {
        mode: ModeKind::Plan,
        settings: Settings {
            model: "mock-model".to_string(),
            reasoning_effort: None,
            developer_instructions: None,
        },
    };
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id,
            input: vec![V2UserInput::Text {
                text: "Plan this".to_string(),
                text_elements: Vec::new(),
            }],
            collaboration_mode: Some(collaboration_mode),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    Ok(to_response::<TurnStartResponse>(turn_resp)?.turn)
}

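// Buffers item/started, item/completed, and item/plan/delta payloads until
// the turn/completed notification arrives, then returns them all together.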
async fn collect_turn_notifications(
    mcp: &mut McpProcess,
) -> Result<(
    Vec<ThreadItem>,
    Vec<ThreadItem>,
    Vec<PlanDeltaNotification>,
    TurnCompletedNotification,
)> {
    let mut started_items = Vec::new();
    let mut completed_items = Vec::new();
    let mut plan_deltas = Vec::new();

    loop {
        let message = timeout(DEFAULT_READ_TIMEOUT, mcp.read_next_message()).await??;
        let JSONRPCMessage::Notification(notification) = message else {
            continue;
        };
        match notification.method.as_str() {
            "item/started" => {
                let params = notification
                    .params
                    .ok_or_else(|| anyhow!("item/started notifications must include params"))?;
                let payload: ItemStartedNotification = serde_json::from_value(params)?;
                started_items.push(payload.item);
            }
            "item/completed" => {
                let params = notification
                    .params
                    .ok_or_else(|| anyhow!("item/completed notifications must include params"))?;
                let payload: ItemCompletedNotification = serde_json::from_value(params)?;
                completed_items.push(payload.item);
            }
            "item/plan/delta" => {
                let params = notification
                    .params
                    .ok_or_else(|| anyhow!("item/plan/delta notifications must include params"))?;
                let payload: PlanDeltaNotification = serde_json::from_value(params)?;
                plan_deltas.push(payload);
            }
            "turn/completed" => {
                let params = notification
                    .params
                    .ok_or_else(|| anyhow!("turn/completed notifications must include params"))?;
                let payload: TurnCompletedNotification = serde_json::from_value(params)?;
                return Ok((started_items, completed_items, plan_deltas, payload));
            }
            _ => {}
        }
    }
}

fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let features = BTreeMap::from([
        (Feature::RemoteModels, false),
        (Feature::CollaborationModes, true),
    ]);
    let feature_entries = features
        .into_iter()
        .map(|(feature, enabled)| {
            let key = FEATURES
                .iter()
                .find(|spec| spec.id == feature)
                .map(|spec| spec.key)
                .unwrap_or_else(|| panic!("missing feature key for {feature:?}"));
            format!("{key} = {enabled}")
        })
        .collect::<Vec<_>>()
        .join("\n");
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[features]
{feature_entries}

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
@@ -31,7 +31,7 @@ use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const DEFAULT_BASE_INSTRUCTIONS: &str = "You are Codex, based on GPT-5. You are running as a coding agent in the Codex CLI on a user's computer.";
const CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT: &str = "You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.";

#[tokio::test]
async fn thread_resume_returns_original_thread() -> Result<()> {
@@ -368,7 +368,7 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
}

#[tokio::test]
async fn thread_resume_accepts_personality_override_v2() -> Result<()> {
async fn thread_resume_accepts_personality_override() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
@@ -438,14 +438,14 @@ async fn thread_resume_accepts_personality_override_v2() -> Result<()> {
    let request = response_mock.single_request();
    let developer_texts = request.message_input_texts("developer");
    assert!(
        !developer_texts
        developer_texts
            .iter()
            .any(|text| text.contains("<personality_spec>")),
        "did not expect a personality update message in developer input, got {developer_texts:?}"
        "expected a personality update message in developer input, got {developer_texts:?}"
    );
    let instructions_text = request.instructions_text();
    assert!(
        instructions_text.contains(DEFAULT_BASE_INSTRUCTIONS),
        instructions_text.contains(CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT),
        "expected default base instructions from history, got {instructions_text:?}"
    );

@@ -459,7 +459,7 @@ fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io
        config_toml,
        format!(
            r#"
model = "mock-model"
model = "gpt-5.2-codex"
approval_policy = "never"
sandbox_mode = "read-only"

@@ -467,6 +467,7 @@ model_provider = "mock_provider"

[features]
remote_models = false
personality = true

[model_providers.mock_provider]
name = "Mock provider for test"

@@ -63,7 +63,7 @@ async fn turn_start_sends_originator_header() -> Result<()> {
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -138,7 +138,7 @@ async fn turn_start_emits_user_message_item_with_text_elements() -> Result<()> {
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -230,7 +230,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -425,7 +425,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::default(),
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -473,6 +473,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
    if developer_texts.is_empty() {
        eprintln!("request body: {}", request.body_json());
    }

    assert!(
        developer_texts
            .iter()

@@ -351,6 +351,7 @@ impl Client {
    fn map_plan_type(plan_type: crate::types::PlanType) -> AccountPlanType {
        match plan_type {
            crate::types::PlanType::Free => AccountPlanType::Free,
            crate::types::PlanType::Go => AccountPlanType::Go,
            crate::types::PlanType::Plus => AccountPlanType::Plus,
            crate::types::PlanType::Pro => AccountPlanType::Pro,
            crate::types::PlanType::Team => AccountPlanType::Team,
@@ -358,7 +359,6 @@ impl Client {
            crate::types::PlanType::Enterprise => AccountPlanType::Enterprise,
            crate::types::PlanType::Edu | crate::types::PlanType::Education => AccountPlanType::Edu,
            crate::types::PlanType::Guest
            | crate::types::PlanType::Go
            | crate::types::PlanType::FreeWorkspace
            | crate::types::PlanType::Quorum
            | crate::types::PlanType::K12 => AccountPlanType::Unknown,

@@ -17,6 +17,8 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
codex-git = { workspace = true }
urlencoding = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

@@ -5,13 +5,21 @@ use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

use anyhow::Context;
use serde::Serialize;
use serde::de::DeserializeOwned;
use std::time::Duration;

/// Make a GET request to the ChatGPT backend API.
pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    config: &Config,
    path: String,
) -> anyhow::Result<T> {
    chatgpt_get_request_with_timeout(config, path, None).await
}

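/// Like [`chatgpt_get_request`], but applies an optional per-request timeout
/// before sending.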
pub(crate) async fn chatgpt_get_request_with_timeout<T: DeserializeOwned>(
    config: &Config,
    path: String,
    timeout: Option<Duration>,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
@@ -28,48 +36,17 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
        anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
    });

    let response = client
    let mut request = client
        .get(&url)
        .bearer_auth(&token.access_token)
        .header("chatgpt-account-id", account_id?)
        .header("Content-Type", "application/json")
        .send()
        .await
        .context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response
            .json()
            .await
            .context("Failed to parse JSON response")?;
        Ok(result)
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {status}: {body}")
    }
}

pub(crate) async fn chatgpt_post_request<T: DeserializeOwned, P: Serialize>(
    config: &Config,
    access_token: &str,
    account_id: &str,
    path: &str,
    payload: &P,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    let client = create_client();
    let url = format!("{chatgpt_base_url}{path}");

    let response = client
        .post(&url)
        .bearer_auth(access_token)
        .header("chatgpt-account-id", account_id)
        .header("Content-Type", "application/json")
        .json(payload)
        .send()
        .await
        .context("Failed to send request")?;
        .header("Content-Type", "application/json");

    if let Some(timeout) = timeout {
        request = request.timeout(timeout);
    }

    let response = request.send().await.context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response

@@ -1,43 +1,45 @@
use std::collections::HashMap;

use codex_core::config::Config;
use codex_core::features::Feature;
use serde::Deserialize;
use serde::Serialize;
use std::time::Duration;

use crate::chatgpt_client::chatgpt_post_request;
use crate::chatgpt_client::chatgpt_get_request_with_timeout;
use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

pub use codex_core::connectors::ConnectorInfo;
pub use codex_core::connectors::AppInfo;
pub use codex_core::connectors::connector_display_label;
use codex_core::connectors::connector_install_url;
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools;
use codex_core::connectors::merge_connectors;

#[derive(Debug, Serialize)]
struct ListConnectorsRequest {
    principals: Vec<Principal>,
}

#[derive(Debug, Serialize)]
struct Principal {
    #[serde(rename = "type")]
    principal_type: PrincipalType,
    id: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum PrincipalType {
    User,
}

#[derive(Debug, Deserialize)]
struct ListConnectorsResponse {
    connectors: Vec<ConnectorInfo>,
struct DirectoryListResponse {
    apps: Vec<DirectoryApp>,
    #[serde(alias = "nextToken")]
    next_token: Option<String>,
}

pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
#[derive(Debug, Deserialize, Clone)]
struct DirectoryApp {
    id: String,
    name: String,
    description: Option<String>,
    #[serde(alias = "logoUrl")]
    logo_url: Option<String>,
    #[serde(alias = "logoUrlDark")]
    logo_url_dark: Option<String>,
    #[serde(alias = "distributionChannel")]
    distribution_channel: Option<String>,
    visibility: Option<String>,
}

const DIRECTORY_CONNECTORS_TIMEOUT: Duration = Duration::from_secs(60);

pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Ok(Vec::new());
    }
    let (connectors_result, accessible_result) = tokio::join!(
@@ -46,11 +48,12 @@ pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInf
    );
    let connectors = connectors_result?;
    let accessible = accessible_result?;
    Ok(merge_connectors(connectors, accessible))
    let merged = merge_connectors(connectors, accessible);
    Ok(filter_disallowed_connectors(merged))
}

pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<ConnectorInfo>> {
    if !config.features.enabled(Feature::Connectors) {
pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Ok(Vec::new());
    }
    init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
@@ -58,56 +61,149 @@ pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<Connecto

    let token_data =
        get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;
    let user_id = token_data
        .id_token
        .chatgpt_user_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT user ID not available, please re-run `codex login`")
        })?;
    let account_id = token_data
        .id_token
        .chatgpt_account_id
        .as_deref()
        .ok_or_else(|| {
            anyhow::anyhow!("ChatGPT account ID not available, please re-run `codex login`")
        })?;
    let principal_id = format!("{user_id}__{account_id}");
    let request = ListConnectorsRequest {
        principals: vec![Principal {
            principal_type: PrincipalType::User,
            id: principal_id,
        }],
    };
    let response: ListConnectorsResponse = chatgpt_post_request(
        config,
        token_data.access_token.as_str(),
        account_id,
        "/aip/connectors/list_accessible?skip_actions=true&external_logos=true",
        &request,
    )
    .await?;
    let mut connectors = response.connectors;
    let mut apps = list_directory_connectors(config).await?;
    if token_data.id_token.is_workspace_account() {
        apps.extend(list_workspace_connectors(config).await?);
    }
    let mut connectors = merge_directory_apps(apps)
        .into_iter()
        .map(directory_app_to_app_info)
        .collect::<Vec<_>>();
    for connector in &mut connectors {
        let install_url = match connector.install_url.take() {
            Some(install_url) => install_url,
            None => connector_install_url(&connector.connector_name, &connector.connector_id),
            None => connector_install_url(&connector.name, &connector.id),
        };
        connector.connector_name =
            normalize_connector_name(&connector.connector_name, &connector.connector_id);
        connector.connector_description =
            normalize_connector_value(connector.connector_description.as_deref());
        connector.name = normalize_connector_name(&connector.name, &connector.id);
        connector.description = normalize_connector_value(connector.description.as_deref());
        connector.install_url = Some(install_url);
        connector.is_accessible = false;
    }
    connectors.sort_by(|left, right| {
        left.connector_name
            .cmp(&right.connector_name)
            .then_with(|| left.connector_id.cmp(&right.connector_id))
        left.name
            .cmp(&right.name)
            .then_with(|| left.id.cmp(&right.id))
    });
    Ok(connectors)
}

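// Pages through the directory listing, following `next_token` until the
// server stops returning one; hidden apps are dropped as they arrive.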
async fn list_directory_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
    let mut apps = Vec::new();
    let mut next_token: Option<String> = None;
    loop {
        let path = match next_token.as_deref() {
            Some(token) => {
                let encoded_token = urlencoding::encode(token);
                format!("/connectors/directory/list?tier=categorized&token={encoded_token}")
            }
            None => "/connectors/directory/list?tier=categorized".to_string(),
        };
        let response: DirectoryListResponse =
            chatgpt_get_request_with_timeout(config, path, Some(DIRECTORY_CONNECTORS_TIMEOUT))
                .await?;
        apps.extend(
            response
                .apps
                .into_iter()
                .filter(|app| !is_hidden_directory_app(app)),
        );
        next_token = response
            .next_token
            .map(|token| token.trim().to_string())
            .filter(|token| !token.is_empty());
        if next_token.is_none() {
            break;
        }
    }
    Ok(apps)
}

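// Workspace listing is best-effort: any error is swallowed and treated as an
// empty result instead of failing the whole listing.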
async fn list_workspace_connectors(config: &Config) -> anyhow::Result<Vec<DirectoryApp>> {
    let response: anyhow::Result<DirectoryListResponse> = chatgpt_get_request_with_timeout(
        config,
        "/connectors/directory/list_workspace".to_string(),
        Some(DIRECTORY_CONNECTORS_TIMEOUT),
    )
    .await;
    match response {
        Ok(response) => Ok(response
            .apps
            .into_iter()
            .filter(|app| !is_hidden_directory_app(app))
            .collect()),
        Err(_) => Ok(Vec::new()),
    }
}

fn merge_directory_apps(apps: Vec<DirectoryApp>) -> Vec<DirectoryApp> {
    let mut merged: HashMap<String, DirectoryApp> = HashMap::new();
    for app in apps {
        if let Some(existing) = merged.get_mut(&app.id) {
            merge_directory_app(existing, app);
        } else {
            merged.insert(app.id.clone(), app);
        }
    }
    merged.into_values().collect()
}

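// Merges a duplicate directory entry into the one already kept, filling in
// only the fields the existing entry is missing (or has blank).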
fn merge_directory_app(existing: &mut DirectoryApp, incoming: DirectoryApp) {
    let DirectoryApp {
        id: _,
        name,
        description,
        logo_url,
        logo_url_dark,
        distribution_channel,
        visibility: _,
    } = incoming;

    let incoming_name_is_empty = name.trim().is_empty();
    if existing.name.trim().is_empty() && !incoming_name_is_empty {
        existing.name = name;
    }

    let incoming_description_present = description
        .as_deref()
        .map(|value| !value.trim().is_empty())
        .unwrap_or(false);
    let existing_description_present = existing
        .description
        .as_deref()
        .map(|value| !value.trim().is_empty())
        .unwrap_or(false);
    if !existing_description_present && incoming_description_present {
        existing.description = description;
    }

    if existing.logo_url.is_none() && logo_url.is_some() {
        existing.logo_url = logo_url;
    }
    if existing.logo_url_dark.is_none() && logo_url_dark.is_some() {
        existing.logo_url_dark = logo_url_dark;
    }
    if existing.distribution_channel.is_none() && distribution_channel.is_some() {
        existing.distribution_channel = distribution_channel;
    }
}

fn is_hidden_directory_app(app: &DirectoryApp) -> bool {
    matches!(app.visibility.as_deref(), Some("HIDDEN"))
}

fn directory_app_to_app_info(app: DirectoryApp) -> AppInfo {
    AppInfo {
        id: app.id,
        name: app.name,
        description: app.description,
        logo_url: app.logo_url,
        logo_url_dark: app.logo_url_dark,
        distribution_channel: app.distribution_channel,
        install_url: None,
        is_accessible: false,
    }
}

fn normalize_connector_name(name: &str, connector_id: &str) -> String {
    let trimmed = name.trim();
    if trimmed.is_empty() {
@@ -123,3 +219,91 @@ fn normalize_connector_value(value: Option<&str>) -> Option<String> {
        .filter(|value| !value.is_empty())
        .map(str::to_string)
}

const ALLOWED_APPS_SDK_APPS: &[&str] = &["asdk_app_69781557cc1481919cf5e9824fa2e792"];
const DISALLOWED_CONNECTOR_IDS: &[&str] = &[
    "asdk_app_6938a94a61d881918ef32cb999ff937c",
    "connector_2b0a9009c9c64bf9933a3dae3f2b1254",
    "connector_68de829bf7648191acd70a907364c67c",
];
const DISALLOWED_CONNECTOR_PREFIX: &str = "connector_openai_";

fn filter_disallowed_connectors(connectors: Vec<AppInfo>) -> Vec<AppInfo> {
    // TODO: Support Apps SDK connectors.
    connectors
        .into_iter()
        .filter(is_connector_allowed)
        .collect()
}

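// Allow/deny policy: the explicit deny list and the `connector_openai_`
// prefix always lose; Apps SDK ids (`asdk_app_*`) must be on the allowlist.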
fn is_connector_allowed(connector: &AppInfo) -> bool {
    let connector_id = connector.id.as_str();
    if connector_id.starts_with(DISALLOWED_CONNECTOR_PREFIX)
        || DISALLOWED_CONNECTOR_IDS.contains(&connector_id)
    {
        return false;
    }
    if connector_id.starts_with("asdk_app_") {
        return ALLOWED_APPS_SDK_APPS.contains(&connector_id);
    }
    true
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn app(id: &str) -> AppInfo {
        AppInfo {
            id: id.to_string(),
            name: id.to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
        }
    }

    #[test]
    fn filters_internal_asdk_connectors() {
        let filtered = filter_disallowed_connectors(vec![app("asdk_app_hidden"), app("alpha")]);
        assert_eq!(filtered, vec![app("alpha")]);
    }

    #[test]
    fn allows_whitelisted_asdk_connectors() {
        let filtered = filter_disallowed_connectors(vec![
            app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
            app("beta"),
        ]);
        assert_eq!(
            filtered,
            vec![
                app("asdk_app_69781557cc1481919cf5e9824fa2e792"),
                app("beta")
            ]
        );
    }

    #[test]
    fn filters_openai_connectors() {
        let filtered = filter_disallowed_connectors(vec![
            app("connector_openai_foo"),
            app("connector_openai_bar"),
            app("gamma"),
        ]);
        assert_eq!(filtered, vec![app("gamma")]);
    }

    #[test]
    fn filters_disallowed_connector_ids() {
        let filtered = filter_disallowed_connectors(vec![
            app("asdk_app_6938a94a61d881918ef32cb999ff937c"),
            app("delta"),
        ]);
        assert_eq!(filtered, vec![app("delta")]);
    }
}

@@ -136,7 +136,8 @@ async fn run_command_under_sandbox(
    if let SandboxType::Windows = sandbox_type {
        #[cfg(target_os = "windows")]
        {
            use codex_core::features::Feature;
            use codex_core::windows_sandbox::WindowsSandboxLevelExt;
            use codex_protocol::config_types::WindowsSandboxLevel;
            use codex_windows_sandbox::run_windows_sandbox_capture;
            use codex_windows_sandbox::run_windows_sandbox_capture_elevated;

@@ -147,8 +148,10 @@ async fn run_command_under_sandbox(
            let env_map = env.clone();
            let command_vec = command.clone();
            let base_dir = config.codex_home.clone();
            let use_elevated = config.features.enabled(Feature::WindowsSandbox)
                && config.features.enabled(Feature::WindowsSandboxElevated);
            let use_elevated = matches!(
                WindowsSandboxLevel::from_config(&config),
                WindowsSandboxLevel::Elevated
            );

            // Preflight audit is invoked elsewhere at the appropriate times.
            let res = tokio::task::spawn_blocking(move || {

@@ -225,7 +225,7 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;

    match CodexAuth::from_auth_storage(&config.codex_home, config.cli_auth_credentials_store_mode) {
        Ok(Some(auth)) => match auth.mode {
        Ok(Some(auth)) => match auth.api_auth_mode() {
            AuthMode::ApiKey => match auth.get_token() {
                Ok(api_key) => {
                    eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));
@@ -236,10 +236,14 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
                    std::process::exit(1);
                }
            },
            AuthMode::ChatGPT => {
            AuthMode::Chatgpt => {
                eprintln!("Logged in using ChatGPT");
                std::process::exit(0);
            }
            AuthMode::ChatgptAuthTokens => {
                eprintln!("Logged in using ChatGPT (external tokens)");
                std::process::exit(0);
            }
        },
        Ok(None) => {
            eprintln!("Not logged in");

@@ -39,6 +39,9 @@ use crate::mcp_cmd::McpCli;

use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::find_codex_home;
use codex_core::features::Stage;
use codex_core::features::is_known_feature_key;
use codex_core::terminal::TerminalName;

@@ -141,7 +144,7 @@ struct CompletionCommand {

#[derive(Debug, Parser)]
struct ResumeCommand {
    /// Conversation/session id (UUID). When provided, resumes this session.
    /// Conversation/session id (UUID) or thread name. UUIDs take precedence if it parses.
    /// If omitted, use --last to pick the most recent recorded session.
    #[arg(value_name = "SESSION_ID")]
    session_id: Option<String>,
@@ -320,6 +323,7 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<Stri
    let AppExitInfo {
        token_usage,
        thread_id: conversation_id,
        thread_name,
        ..
    } = exit_info;

@@ -332,8 +336,9 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<Stri
        codex_core::protocol::FinalOutput::from(token_usage)
    )];

    if let Some(session_id) = conversation_id {
        let resume_cmd = format!("codex resume {session_id}");
    if let Some(resume_cmd) =
        codex_core::util::resume_command(thread_name.as_deref(), conversation_id)
    {
        let command = if color_enabled {
            resume_cmd.cyan().to_string()
        } else {

@@ -448,6 +453,16 @@ struct FeaturesCli {
enum FeaturesSubcommand {
    /// List known features with their stage and effective state.
    List,
    /// Enable a feature in config.toml.
    Enable(FeatureSetArgs),
    /// Disable a feature in config.toml.
    Disable(FeatureSetArgs),
}

#[derive(Debug, Parser)]
struct FeatureSetArgs {
    /// Feature key to update (for example: unified_exec).
    feature: String,
}

fn stage_str(stage: codex_core::features::Stage) -> &'static str {
@@ -711,12 +726,69 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                    println!("{name:<name_width$} {stage:<stage_width$} {enabled}");
                }
            }
            FeaturesSubcommand::Enable(FeatureSetArgs { feature }) => {
                enable_feature_in_config(&interactive, &feature).await?;
            }
            FeaturesSubcommand::Disable(FeatureSetArgs { feature }) => {
                disable_feature_in_config(&interactive, &feature).await?;
            }
        },
    }

    Ok(())
}

async fn enable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
    FeatureToggles::validate_feature(feature)?;
    let codex_home = find_codex_home()?;
    ConfigEditsBuilder::new(&codex_home)
        .with_profile(interactive.config_profile.as_deref())
        .set_feature_enabled(feature, true)
        .apply()
        .await?;
    println!("Enabled feature `{feature}` in config.toml.");
    maybe_print_under_development_feature_warning(&codex_home, interactive, feature);
    Ok(())
}

async fn disable_feature_in_config(interactive: &TuiCli, feature: &str) -> anyhow::Result<()> {
    FeatureToggles::validate_feature(feature)?;
    let codex_home = find_codex_home()?;
    ConfigEditsBuilder::new(&codex_home)
        .with_profile(interactive.config_profile.as_deref())
        .set_feature_enabled(feature, false)
        .apply()
        .await?;
    println!("Disabled feature `{feature}` in config.toml.");
    Ok(())
}

|
||||
|
||||
fn maybe_print_under_development_feature_warning(
|
||||
codex_home: &std::path::Path,
|
||||
interactive: &TuiCli,
|
||||
feature: &str,
|
||||
) {
|
||||
if interactive.config_profile.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(spec) = codex_core::features::FEATURES
|
||||
.iter()
|
||||
.find(|spec| spec.key == feature)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
if !matches!(spec.stage, Stage::UnderDevelopment) {
|
||||
return;
|
||||
}
|
||||
|
||||
let config_path = codex_home.join(codex_core::config::CONFIG_TOML_FILE);
|
||||
eprintln!(
|
||||
"Under-development features enabled: {feature}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {}.",
|
||||
config_path.display()
|
||||
);
|
||||
}
|
||||
|
||||
/// Prepend root-level overrides so they have lower precedence than
|
||||
/// CLI-specific ones specified after the subcommand (if any).
|
||||
fn prepend_config_flags(
|
||||
@@ -958,7 +1030,7 @@ mod tests {
|
||||
app_server
|
||||
}
|
||||
|
||||
fn sample_exit_info(conversation: Option<&str>) -> AppExitInfo {
|
||||
fn sample_exit_info(conversation_id: Option<&str>, thread_name: Option<&str>) -> AppExitInfo {
|
||||
let token_usage = TokenUsage {
|
||||
output_tokens: 2,
|
||||
total_tokens: 2,
|
||||
@@ -966,7 +1038,10 @@ mod tests {
|
||||
};
|
||||
AppExitInfo {
|
||||
token_usage,
|
||||
thread_id: conversation.map(ThreadId::from_string).map(Result::unwrap),
|
||||
thread_id: conversation_id
|
||||
.map(ThreadId::from_string)
|
||||
.map(Result::unwrap),
|
||||
thread_name: thread_name.map(str::to_string),
|
||||
update_action: None,
|
||||
exit_reason: ExitReason::UserRequested,
|
||||
}
|
||||
@@ -977,6 +1052,7 @@ mod tests {
|
||||
let exit_info = AppExitInfo {
|
||||
token_usage: TokenUsage::default(),
|
||||
thread_id: None,
|
||||
thread_name: None,
|
||||
update_action: None,
|
||||
exit_reason: ExitReason::UserRequested,
|
||||
};
|
||||
@@ -986,7 +1062,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_includes_resume_hint_without_color() {
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"), None);
|
||||
let lines = format_exit_messages(exit_info, false);
|
||||
assert_eq!(
|
||||
lines,
|
||||
@@ -1000,12 +1076,28 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_applies_color_when_enabled() {
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"), None);
|
||||
let lines = format_exit_messages(exit_info, true);
|
||||
assert_eq!(lines.len(), 2);
|
||||
assert!(lines[1].contains("\u{1b}[36m"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_prefers_thread_name() {
|
||||
let exit_info = sample_exit_info(
|
||||
Some("123e4567-e89b-12d3-a456-426614174000"),
|
||||
Some("my-thread"),
|
||||
);
|
||||
let lines = format_exit_messages(exit_info, false);
|
||||
assert_eq!(
|
||||
lines,
|
||||
vec![
|
||||
"Token usage: total=2 input=0 output=2".to_string(),
|
||||
"To continue this session, run codex resume my-thread".to_string(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_model_flag_applies_when_no_root_flags() {
|
||||
let interactive =
|
||||
@@ -1171,6 +1263,32 @@ mod tests {
|
||||
assert!(app_server.analytics_default_enabled);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn features_enable_parses_feature_name() {
|
||||
let cli = MultitoolCli::try_parse_from(["codex", "features", "enable", "unified_exec"])
|
||||
.expect("parse should succeed");
|
||||
let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
|
||||
panic!("expected features subcommand");
|
||||
};
|
||||
let FeaturesSubcommand::Enable(FeatureSetArgs { feature }) = sub else {
|
||||
panic!("expected features enable");
|
||||
};
|
||||
assert_eq!(feature, "unified_exec");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn features_disable_parses_feature_name() {
|
||||
let cli = MultitoolCli::try_parse_from(["codex", "features", "disable", "shell_tool"])
|
||||
.expect("parse should succeed");
|
||||
let Some(Subcommand::Features(FeaturesCli { sub })) = cli.subcommand else {
|
||||
panic!("expected features subcommand");
|
||||
};
|
||||
let FeaturesSubcommand::Disable(FeatureSetArgs { feature }) = sub else {
|
||||
panic!("expected features disable");
|
||||
};
|
||||
assert_eq!(feature, "shell_tool");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn feature_toggles_known_features_generate_overrides() {
|
||||
let toggles = FeatureToggles {
|
||||
|
||||
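A minimal usage sketch of the builder chain these subcommands rely on; only calls that appear in the hunks above are used, and the helper name `set_feature` is illustrative:

// Sketch only: toggle a feature key in config.toml programmatically.
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::find_codex_home;

async fn set_feature(feature: &str, enabled: bool) -> anyhow::Result<()> {
    let codex_home = find_codex_home()?;
    ConfigEditsBuilder::new(&codex_home)
        .set_feature_enabled(feature, enabled)
        .apply()
        .await?;
    Ok(())
}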
@@ -13,11 +13,12 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::auth::McpOAuthLoginSupport;
use codex_core::mcp::auth::compute_auth_statuses;
use codex_core::mcp::auth::oauth_login_support;
use codex_core::protocol::McpAuthStatus;
use codex_rmcp_client::delete_oauth_tokens;
use codex_rmcp_client::perform_oauth_login;
use codex_rmcp_client::supports_oauth_login;

/// Subcommands:
/// - `list` — list configured servers (with `--json`)
@@ -260,33 +261,25 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re

println!("Added global MCP server '{name}'.");

if let McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var: None,
http_headers,
env_http_headers,
} = transport
{
match supports_oauth_login(&url).await {
Ok(true) => {
println!("Detected OAuth support. Starting OAuth flow…");
perform_oauth_login(
&name,
&url,
config.mcp_oauth_credentials_store_mode,
http_headers.clone(),
env_http_headers.clone(),
&Vec::new(),
config.mcp_oauth_callback_port,
)
.await?;
println!("Successfully logged in.");
}
Ok(false) => {}
Err(_) => println!(
"MCP server may or may not require login. Run `codex mcp login {name}` to login."
),
match oauth_login_support(&transport).await {
McpOAuthLoginSupport::Supported(oauth_config) => {
println!("Detected OAuth support. Starting OAuth flow…");
perform_oauth_login(
&name,
&oauth_config.url,
config.mcp_oauth_credentials_store_mode,
oauth_config.http_headers,
oauth_config.env_http_headers,
&Vec::new(),
config.mcp_oauth_callback_port,
)
.await?;
println!("Successfully logged in.");
}
McpOAuthLoginSupport::Unsupported => {}
McpOAuthLoginSupport::Unknown(_) => println!(
"MCP server may or may not require login. Run `codex mcp login {name}` to login."
),
}

Ok(())
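In isolation, the new `oauth_login_support` contract looks like this; a sketch mirroring the three arms handled in `run_add`, with illustrative return strings:

// Sketch only: classify OAuth support for a configured MCP transport.
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::auth::McpOAuthLoginSupport;
use codex_core::mcp::auth::oauth_login_support;

async fn describe_oauth(transport: &McpServerTransportConfig) -> &'static str {
    match oauth_login_support(transport).await {
        McpOAuthLoginSupport::Supported(_) => "supports OAuth login",
        McpOAuthLoginSupport::Unsupported => "does not support OAuth login",
        McpOAuthLoginSupport::Unknown(_) => "may or may not require login",
    }
}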
codex-rs/cli/tests/features.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use std::path::Path;

use anyhow::Result;
use predicates::str::contains;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
let mut cmd = assert_cmd::Command::new(codex_utils_cargo_bin::cargo_bin("codex")?);
cmd.env("CODEX_HOME", codex_home);
Ok(cmd)
}

#[tokio::test]
async fn features_enable_writes_feature_flag_to_config() -> Result<()> {
let codex_home = TempDir::new()?;

let mut cmd = codex_command(codex_home.path())?;
cmd.args(["features", "enable", "unified_exec"])
.assert()
.success()
.stdout(contains("Enabled feature `unified_exec` in config.toml."));

let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
assert!(config.contains("[features]"));
assert!(config.contains("unified_exec = true"));

Ok(())
}

#[tokio::test]
async fn features_disable_writes_feature_flag_to_config() -> Result<()> {
let codex_home = TempDir::new()?;

let mut cmd = codex_command(codex_home.path())?;
cmd.args(["features", "disable", "shell_tool"])
.assert()
.success()
.stdout(contains("Disabled feature `shell_tool` in config.toml."));

let config = std::fs::read_to_string(codex_home.path().join("config.toml"))?;
assert!(config.contains("[features]"));
assert!(config.contains("shell_tool = false"));

Ok(())
}

#[tokio::test]
async fn features_enable_under_development_feature_prints_warning() -> Result<()> {
let codex_home = TempDir::new()?;

let mut cmd = codex_command(codex_home.path())?;
cmd.args(["features", "enable", "sqlite"])
.assert()
.success()
.stderr(contains("Under-development features enabled: sqlite."));

Ok(())
}

codex-rs/cloud-requirements/BUILD.bazel (new file, 6 lines)
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
name = "cloud-requirements",
crate_name = "codex_cloud_requirements",
)

codex-rs/cloud-requirements/Cargo.toml (new file, 27 lines)
@@ -0,0 +1,27 @@
[package]
name = "codex-cloud-requirements"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
async-trait = { workspace = true }
codex-backend-client = { workspace = true }
codex-core = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
tokio = { workspace = true, features = ["sync", "time"] }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
base64 = { workspace = true }
pretty_assertions = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt", "test-util", "time"] }

codex-rs/cloud-requirements/src/lib.rs (new file, 459 lines)
@@ -0,0 +1,459 @@
//! Cloud-hosted config requirements for Codex.
//!
//! This crate fetches `requirements.toml` data from the backend as an alternative to loading it
//! from the local filesystem. It only applies to Enterprise ChatGPT customers.
//!
//! Enterprise ChatGPT customers must successfully fetch these requirements before Codex will run.

use async_trait::async_trait;
use codex_backend_client::Client as BackendClient;
use codex_core::AuthManager;
use codex_core::auth::CodexAuth;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_protocol::account::PlanType;
use std::io;
use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use thiserror::Error;
use tokio::time::timeout;

/// This blocks codex startup, so it must be short.
const CLOUD_REQUIREMENTS_TIMEOUT: Duration = Duration::from_secs(5);

#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsError {
#[error("cloud requirements user error: {0}")]
User(CloudRequirementsUserError),
#[error("cloud requirements network error: {0}")]
Network(CloudRequirementsNetworkError),
}

impl From<CloudRequirementsUserError> for CloudRequirementsError {
fn from(err: CloudRequirementsUserError) -> Self {
CloudRequirementsError::User(err)
}
}

impl From<CloudRequirementsNetworkError> for CloudRequirementsError {
fn from(err: CloudRequirementsNetworkError) -> Self {
CloudRequirementsError::Network(err)
}
}

impl From<CloudRequirementsError> for io::Error {
fn from(err: CloudRequirementsError) -> Self {
let kind = match &err {
CloudRequirementsError::User(_) => io::ErrorKind::InvalidData,
CloudRequirementsError::Network(CloudRequirementsNetworkError::Timeout { .. }) => {
io::ErrorKind::TimedOut
}
CloudRequirementsError::Network(_) => io::ErrorKind::Other,
};
io::Error::new(kind, err)
}
}

#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsUserError {
#[error("failed to parse requirements TOML: {message}")]
InvalidToml { message: String },
}

#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsNetworkError {
#[error("backend client initialization failed: {message}")]
BackendClient { message: String },
#[error("request failed: {message}")]
Request { message: String },
#[error("cloud requirements response missing contents")]
MissingContents,
#[error("timed out after {timeout_ms}ms")]
Timeout { timeout_ms: u64 },
#[error("cloud requirements task failed: {message}")]
Task { message: String },
}

#[async_trait]
trait RequirementsFetcher: Send + Sync {
/// Returns requirements as a TOML string.
async fn fetch_requirements(&self, auth: &CodexAuth) -> Result<String, CloudRequirementsError>;
}

struct BackendRequirementsFetcher {
base_url: String,
}

impl BackendRequirementsFetcher {
fn new(base_url: String) -> Self {
Self { base_url }
}
}

#[async_trait]
impl RequirementsFetcher for BackendRequirementsFetcher {
async fn fetch_requirements(&self, auth: &CodexAuth) -> Result<String, CloudRequirementsError> {
let client = BackendClient::from_auth(self.base_url.clone(), auth)
.inspect_err(|err| {
tracing::warn!(
error = %err,
"Failed to construct backend client for cloud requirements"
);
})
.map_err(|err| CloudRequirementsNetworkError::BackendClient {
message: err.to_string(),
})
.map_err(CloudRequirementsError::from)?;

let response = client
.get_config_requirements_file()
.await
.inspect_err(|err| tracing::warn!(error = %err, "Failed to fetch cloud requirements"))
.map_err(|err| CloudRequirementsNetworkError::Request {
message: err.to_string(),
})
.map_err(CloudRequirementsError::from)?;

let Some(contents) = response.contents else {
tracing::warn!("Cloud requirements response missing contents");
return Err(CloudRequirementsError::from(
CloudRequirementsNetworkError::MissingContents,
));
};

Ok(contents)
}
}

struct CloudRequirementsService {
auth_manager: Arc<AuthManager>,
fetcher: Arc<dyn RequirementsFetcher>,
timeout: Duration,
}

impl CloudRequirementsService {
fn new(
auth_manager: Arc<AuthManager>,
fetcher: Arc<dyn RequirementsFetcher>,
timeout: Duration,
) -> Self {
Self {
auth_manager,
fetcher,
timeout,
}
}

async fn fetch_with_timeout(
&self,
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsError> {
let _timer =
codex_otel::start_global_timer("codex.cloud_requirements.fetch.duration_ms", &[]);
let started_at = Instant::now();
let result = timeout(self.timeout, self.fetch()).await.map_err(|_| {
CloudRequirementsNetworkError::Timeout {
timeout_ms: self.timeout.as_millis() as u64,
}
})?;

let elapsed_ms = started_at.elapsed().as_millis();
match result.as_ref() {
Ok(Some(requirements)) => {
tracing::info!(
elapsed_ms,
status = "success",
requirements = ?requirements,
"Cloud requirements load completed"
);
println!(
"cloud_requirements status=success elapsed_ms={elapsed_ms} value={requirements:?}"
);
}
Ok(None) => {
tracing::info!(
elapsed_ms,
status = "none",
requirements = %"none",
"Cloud requirements load completed"
);
println!("cloud_requirements status=none elapsed_ms={elapsed_ms} value=none");
}
Err(err) => {
tracing::warn!(
elapsed_ms,
status = "error",
requirements = %"none",
error = %err,
"Cloud requirements load failed"
);
println!(
"cloud_requirements status=error elapsed_ms={elapsed_ms} value=none error={err}"
);
}
}

result
}

async fn fetch(&self) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsError> {
let auth = match self.auth_manager.auth().await {
Some(auth) => auth,
None => return Ok(None),
};
if !(auth.is_chatgpt_auth() && auth.account_plan_type() == Some(PlanType::Enterprise)) {
return Ok(None);
}

let contents = self.fetcher.fetch_requirements(&auth).await?;
parse_cloud_requirements(&contents)
.inspect_err(|err| tracing::warn!(error = %err, "Failed to parse cloud requirements"))
.map_err(CloudRequirementsError::from)
}
}

pub fn cloud_requirements_loader(
auth_manager: Arc<AuthManager>,
chatgpt_base_url: String,
) -> CloudRequirementsLoader {
let service = CloudRequirementsService::new(
auth_manager,
Arc::new(BackendRequirementsFetcher::new(chatgpt_base_url)),
CLOUD_REQUIREMENTS_TIMEOUT,
);
let task = tokio::spawn(async move { service.fetch_with_timeout().await });
CloudRequirementsLoader::new(async move {
task.await
.map_err(|err| {
CloudRequirementsError::from(CloudRequirementsNetworkError::Task {
message: err.to_string(),
})
})
.and_then(std::convert::identity)
.map_err(io::Error::from)
.inspect_err(|err| tracing::warn!(error = %err, "Cloud requirements task failed"))
})
}

fn parse_cloud_requirements(
contents: &str,
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsUserError> {
if contents.trim().is_empty() {
return Ok(None);
}

let requirements: ConfigRequirementsToml =
toml::from_str(contents).map_err(|err| CloudRequirementsUserError::InvalidToml {
message: err.to_string(),
})?;
if requirements.is_empty() {
Ok(None)
} else {
Ok(Some(requirements))
}
}

#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_protocol::protocol::AskForApproval;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::future::pending;
use std::path::Path;
use tempfile::tempdir;

fn write_auth_json(codex_home: &Path, value: serde_json::Value) -> Result<()> {
std::fs::write(codex_home.join("auth.json"), serde_json::to_string(&value)?)?;
Ok(())
}

fn auth_manager_with_api_key() -> Result<Arc<AuthManager>> {
let tmp = tempdir()?;
let auth_json = json!({
"OPENAI_API_KEY": "sk-test-key",
"tokens": null,
"last_refresh": null,
});
write_auth_json(tmp.path(), auth_json)?;
Ok(Arc::new(AuthManager::new(
tmp.path().to_path_buf(),
false,
AuthCredentialsStoreMode::File,
)))
}

fn auth_manager_with_plan(plan_type: &str) -> Result<Arc<AuthManager>> {
let tmp = tempdir()?;
let header = json!({ "alg": "none", "typ": "JWT" });
let auth_payload = json!({
"chatgpt_plan_type": plan_type,
"chatgpt_user_id": "user-12345",
"user_id": "user-12345",
});
let payload = json!({
"email": "user@example.com",
"https://api.openai.com/auth": auth_payload,
});
let header_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header)?);
let payload_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload)?);
let signature_b64 = URL_SAFE_NO_PAD.encode(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");

let auth_json = json!({
"OPENAI_API_KEY": null,
"tokens": {
"id_token": fake_jwt,
"access_token": "test-access-token",
"refresh_token": "test-refresh-token",
},
"last_refresh": null,
});
write_auth_json(tmp.path(), auth_json)?;
Ok(Arc::new(AuthManager::new(
tmp.path().to_path_buf(),
false,
AuthCredentialsStoreMode::File,
)))
}

fn parse_for_fetch(
contents: Option<&str>,
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsUserError> {
contents.map(parse_cloud_requirements).unwrap_or(Ok(None))
}

struct StaticFetcher {
result: Result<String, CloudRequirementsError>,
}

#[async_trait::async_trait]
impl RequirementsFetcher for StaticFetcher {
async fn fetch_requirements(
&self,
_auth: &CodexAuth,
) -> Result<String, CloudRequirementsError> {
self.result.clone()
}
}

struct PendingFetcher;

#[async_trait::async_trait]
impl RequirementsFetcher for PendingFetcher {
async fn fetch_requirements(
&self,
_auth: &CodexAuth,
) -> Result<String, CloudRequirementsError> {
pending::<()>().await;
Ok(String::new())
}
}

#[tokio::test]
async fn fetch_cloud_requirements_skips_non_chatgpt_auth() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager_with_api_key()?,
Arc::new(StaticFetcher {
result: Ok(String::new()),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
assert_eq!(service.fetch().await, Ok(None));
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_skips_non_enterprise_plan() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager_with_plan("pro")?,
Arc::new(StaticFetcher {
result: Ok(String::new()),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
assert_eq!(service.fetch().await, Ok(None));
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_returns_missing_contents_error() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager_with_plan("enterprise")?,
Arc::new(StaticFetcher {
result: Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::MissingContents,
)),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
assert_eq!(
service.fetch().await,
Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::MissingContents
))
);
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_handles_empty_contents() -> Result<()> {
assert_eq!(parse_for_fetch(Some(" ")), Ok(None));
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_handles_invalid_toml() -> Result<()> {
assert!(matches!(
parse_for_fetch(Some("not = [")),
Err(CloudRequirementsUserError::InvalidToml { .. })
));
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_ignores_empty_requirements() -> Result<()> {
assert_eq!(parse_for_fetch(Some("# comment")), Ok(None));
Ok(())
}

#[tokio::test]
async fn fetch_cloud_requirements_parses_valid_toml() -> Result<()> {
assert_eq!(
parse_for_fetch(Some("allowed_approval_policies = [\"never\"]")),
Ok(Some(ConfigRequirementsToml {
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
enforce_residency: None,
}))
);
Ok(())
}

#[tokio::test(start_paused = true)]
async fn fetch_cloud_requirements_times_out() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager_with_plan("enterprise")?,
Arc::new(PendingFetcher),
CLOUD_REQUIREMENTS_TIMEOUT,
);
let handle = tokio::spawn(async move { service.fetch_with_timeout().await });
tokio::time::advance(CLOUD_REQUIREMENTS_TIMEOUT + Duration::from_millis(1)).await;

assert_eq!(
handle.await?,
Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::Timeout {
timeout_ms: CLOUD_REQUIREMENTS_TIMEOUT.as_millis() as u64,
}
))
);
Ok(())
}
}
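A minimal wiring sketch for the new crate, assuming an `Arc<AuthManager>` built elsewhere; the base URL literal is illustrative, not taken from the diff:

// Sketch only: build the loader future that config loading later awaits.
use std::sync::Arc;

use codex_cloud_requirements::cloud_requirements_loader;
use codex_core::AuthManager;
use codex_core::config_loader::CloudRequirementsLoader;

fn make_loader(auth_manager: Arc<AuthManager>) -> CloudRequirementsLoader {
    // Assumed base URL for illustration only.
    cloud_requirements_loader(auth_manager, "https://chatgpt.com/backend-api".to_string())
}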
@@ -33,6 +33,7 @@ pub use crate::endpoint::responses_websocket::ResponsesWebsocketConnection;
pub use crate::error::ApiError;
pub use crate::provider::Provider;
pub use crate::provider::WireApi;
pub use crate::provider::is_azure_responses_wire_base_url;
pub use crate::requests::ChatRequest;
pub use crate::requests::ChatRequestBuilder;
pub use crate::requests::ResponsesRequest;

@@ -95,16 +95,7 @@ impl Provider {
}

pub fn is_azure_responses_endpoint(&self) -> bool {
if self.wire != WireApi::Responses {
return false;
}

if self.name.eq_ignore_ascii_case("azure") {
return true;
}

self.base_url.to_ascii_lowercase().contains("openai.azure.")
|| matches_azure_responses_base_url(&self.base_url)
is_azure_responses_wire_base_url(self.wire.clone(), &self.name, Some(&self.base_url))
}

pub fn websocket_url_for_path(&self, path: &str) -> Result<Url, url::ParseError> {
@@ -121,6 +112,23 @@ impl Provider {
}
}

pub fn is_azure_responses_wire_base_url(wire: WireApi, name: &str, base_url: Option<&str>) -> bool {
if wire != WireApi::Responses {
return false;
}

if name.eq_ignore_ascii_case("azure") {
return true;
}

let Some(base_url) = base_url else {
return false;
};

let base = base_url.to_ascii_lowercase();
base.contains("openai.azure.") || matches_azure_responses_base_url(&base)
}

fn matches_azure_responses_base_url(base_url: &str) -> bool {
const AZURE_MARKERS: [&str; 5] = [
"cognitiveservices.azure.",
@@ -129,6 +137,54 @@ fn matches_azure_responses_base_url(base_url: &str) -> bool {
"azurefd.",
"windows.net/openai",
];
let base = base_url.to_ascii_lowercase();
AZURE_MARKERS.iter().any(|marker| base.contains(marker))
AZURE_MARKERS.iter().any(|marker| base_url.contains(marker))
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn detects_azure_responses_base_urls() {
let positive_cases = [
"https://foo.openai.azure.com/openai",
"https://foo.openai.azure.us/openai/deployments/bar",
"https://foo.cognitiveservices.azure.cn/openai",
"https://foo.aoai.azure.com/openai",
"https://foo.openai.azure-api.net/openai",
"https://foo.z01.azurefd.net/",
];

for base_url in positive_cases {
assert!(
is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
"expected {base_url} to be detected as Azure"
);
}

assert!(is_azure_responses_wire_base_url(
WireApi::Responses,
"Azure",
Some("https://example.com")
));

let negative_cases = [
"https://api.openai.com/v1",
"https://example.com/openai",
"https://myproxy.azurewebsites.net/openai",
];

for base_url in negative_cases {
assert!(
!is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
"expected {base_url} not to be detected as Azure"
);
}

assert!(!is_azure_responses_wire_base_url(
WireApi::Chat,
"Azure",
Some("https://foo.openai.azure.com/openai")
));
}
}

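A quick usage sketch of the extracted helper; it mirrors the tests above, and the provider name "test" is arbitrary:

// Sketch only: detection now works without constructing a Provider.
use codex_api::WireApi;
use codex_api::is_azure_responses_wire_base_url;

fn is_azure_responses(base_url: &str) -> bool {
    is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url))
}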
@@ -77,7 +77,7 @@ async fn models_client_hits_models_endpoint() {
priority: 1,
upgrade: None,
base_instructions: "base instructions".to_string(),
model_instructions_template: None,
model_messages: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,

@@ -37,12 +37,13 @@ codex-keyring-store = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-state = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-home-dir = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
dirs = { workspace = true }
dunce = { workspace = true }
encoding_rs = { workspace = true }
env-flags = { workspace = true }
@@ -55,6 +56,7 @@ indoc = { workspace = true }
keyring = { workspace = true, features = ["crypto-rust"] }
libc = { workspace = true }
mcp-types = { workspace = true }
multimap = { workspace = true }
once_cell = { workspace = true }
os_info = { workspace = true }
rand = { workspace = true }

@@ -111,6 +111,13 @@
"auto"
],
"type": "string"
},
{
"description": "Store credentials in memory only for the current process.",
"enum": [
"ephemeral"
],
"type": "string"
}
]
},
@@ -144,6 +151,9 @@
"apply_patch_freeform": {
"type": "boolean"
},
"apps": {
"type": "boolean"
},
"child_agents_md": {
"type": "boolean"
},
@@ -180,6 +190,9 @@
"include_apply_patch_tool": {
"type": "boolean"
},
"personality": {
"type": "boolean"
},
"powershell_utf8": {
"type": "boolean"
},
@@ -189,6 +202,9 @@
"remote_models": {
"type": "boolean"
},
"request_rule": {
"type": "boolean"
},
"responses_websockets": {
"type": "boolean"
},
@@ -198,6 +214,15 @@
"shell_tool": {
"type": "boolean"
},
"skill_env_var_dependency_prompt": {
"type": "boolean"
},
"skill_mcp_dependency_install": {
"type": "boolean"
},
"sqlite": {
"type": "boolean"
},
"steer": {
"type": "boolean"
},
@@ -425,6 +450,11 @@
"minimum": 0.0,
"type": "integer"
},
"supports_websockets": {
"default": false,
"description": "Whether this provider supports the Responses API WebSocket transport.",
"type": "boolean"
},
"wire_api": {
"allOf": [
{
@@ -441,7 +471,6 @@
"type": "object"
},
"Notice": {
"additionalProperties": false,
"description": "Settings for notices we display to users via the tui and app-server clients (primarily the Codex IDE extension). NOTE: these are different from notifications - notices are warnings, NUX screens, acknowledgements, etc.",
"properties": {
"hide_full_access_warning": {
@@ -475,6 +504,14 @@
},
"type": "object"
},
"NotificationMethod": {
"enum": [
"auto",
"osc9",
"bel"
],
"type": "string"
},
"Notifications": {
"anyOf": [
{
@@ -983,6 +1020,15 @@
"default": null,
"description": "Start the TUI in the specified collaboration mode (plan/execute/etc.). Defaults to unset."
},
"notification_method": {
"allOf": [
{
"$ref": "#/definitions/NotificationMethod"
}
],
"default": "auto",
"description": "Notification method to use for unfocused terminal notifications. Defaults to `auto`."
},
"notifications": {
"allOf": [
{
@@ -1047,13 +1093,6 @@
],
"type": "string"
},
{
"description": "Experimental: Responses API over WebSocket transport.",
"enum": [
"responses_websocket"
],
"type": "string"
},
{
"description": "Regular Chat Completions compatible with `/v1/chat/completions`.",
"enum": [
@@ -1137,6 +1176,9 @@
"apply_patch_freeform": {
"type": "boolean"
},
"apps": {
"type": "boolean"
},
"child_agents_md": {
"type": "boolean"
},
@@ -1173,6 +1215,9 @@
"include_apply_patch_tool": {
"type": "boolean"
},
"personality": {
"type": "boolean"
},
"powershell_utf8": {
"type": "boolean"
},
@@ -1182,6 +1227,9 @@
"remote_models": {
"type": "boolean"
},
"request_rule": {
"type": "boolean"
},
"responses_websockets": {
"type": "boolean"
},
@@ -1191,6 +1239,15 @@
"shell_tool": {
"type": "boolean"
},
"skill_env_var_dependency_prompt": {
"type": "boolean"
},
"skill_mcp_dependency_install": {
"type": "boolean"
},
"sqlite": {
"type": "boolean"
},
"steer": {
"type": "boolean"
},

@@ -146,6 +146,7 @@ mod tests {
use crate::config::Config;
use crate::config::ConfigBuilder;
use assert_matches::assert_matches;
use codex_protocol::config_types::ModeKind;
use codex_protocol::protocol::ErrorEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::TurnAbortReason;
@@ -231,6 +232,7 @@ mod tests {
async fn on_event_updates_status_from_task_started() {
let status = agent_status_from_event(&EventMsg::TurnStarted(TurnStartedEvent {
model_context_window: None,
collaboration_mode_kind: ModeKind::Custom,
}));
assert_eq!(status, Some(AgentStatus::Running));
}

@@ -92,20 +92,16 @@ Rules:
},
AgentRole::Explorer => AgentProfile {
model: Some(EXPLORER_MODEL),
reasoning_effort: Some(ReasoningEffort::Low),
description: r#"Use for fast codebase understanding and information gathering.
`explorer` are extremely fast agents so use them as much as you can to speed up the resolution of the global task.
Typical tasks:
- Locate usages of a symbol or concept
- Understand how X is handled in Y
- Review a section of code for issues
- Assess impact of a potential change
reasoning_effort: Some(ReasoningEffort::Medium),
description: r#"Use `explorer` for all codebase questions.
Explorers are fast and authoritative.
Always prefer them over manual search or file reading.
Rules:
- Be explicit in what you are looking for. A good usage of `explorer` means you don't need to read the same code again after the explorer sends you the result.
- **Always** prefer asking explorers rather than exploring the codebase yourself.
- Spawn multiple explorers in parallel when useful and wait for all results.
- You can ask the `explorer` to return file name, lines, entire code snippets, ...
- Reuse the same explorer when it is relevant. If later in your process you have more questions on some code an explorer already covered, reuse this same explorer to be more efficient.
- Ask explorers first and precisely.
- Do not re-read or re-search code they cover.
- Trust explorer results without verification.
- Run explorers in parallel when useful.
- Reuse existing explorers for related questions.
"#,
..Default::default()
},

@@ -9,6 +9,7 @@ use serde::Deserialize;

use crate::auth::CodexAuth;
use crate::error::CodexErr;
use crate::error::ModelCapError;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use crate::error::UsageLimitReachedError;
@@ -49,6 +50,23 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
} else if status == http::StatusCode::INTERNAL_SERVER_ERROR {
CodexErr::InternalServerError
} else if status == http::StatusCode::TOO_MANY_REQUESTS {
if let Some(model) = headers
.as_ref()
.and_then(|map| map.get(MODEL_CAP_MODEL_HEADER))
.and_then(|value| value.to_str().ok())
.map(str::to_string)
{
let reset_after_seconds = headers
.as_ref()
.and_then(|map| map.get(MODEL_CAP_RESET_AFTER_HEADER))
.and_then(|value| value.to_str().ok())
.and_then(|value| value.parse::<u64>().ok());
return CodexErr::ModelCap(ModelCapError {
model,
reset_after_seconds,
});
}

if let Ok(err) = serde_json::from_str::<UsageErrorResponse>(&body_text) {
if err.error.error_type.as_deref() == Some("usage_limit_reached") {
let rate_limits = headers.as_ref().and_then(parse_rate_limit);
@@ -92,6 +110,42 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
}
}

const MODEL_CAP_MODEL_HEADER: &str = "x-codex-model-cap-model";
const MODEL_CAP_RESET_AFTER_HEADER: &str = "x-codex-model-cap-reset-after-seconds";

#[cfg(test)]
mod tests {
use super::*;
use codex_api::TransportError;
use http::HeaderMap;
use http::StatusCode;

#[test]
fn map_api_error_maps_model_cap_headers() {
let mut headers = HeaderMap::new();
headers.insert(
MODEL_CAP_MODEL_HEADER,
http::HeaderValue::from_static("boomslang"),
);
headers.insert(
MODEL_CAP_RESET_AFTER_HEADER,
http::HeaderValue::from_static("120"),
);
let err = map_api_error(ApiError::Transport(TransportError::Http {
status: StatusCode::TOO_MANY_REQUESTS,
url: Some("http://example.com/v1/responses".to_string()),
headers: Some(headers),
body: Some(String::new()),
}));

let CodexErr::ModelCap(model_cap) = err else {
panic!("expected CodexErr::ModelCap, got {err:?}");
};
assert_eq!(model_cap.model, "boomslang");
assert_eq!(model_cap.reset_after_seconds, Some(120));
}
}

fn extract_request_id(headers: Option<&HeaderMap>) -> Option<String> {
headers.and_then(|map| {
["cf-ray", "x-request-id", "x-oai-request-id"]

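A hedged sketch of consuming the new variant downstream; the `codex_core::error` paths are assumed from the `crate::error` imports above, and the backoff policy is illustrative:

// Sketch only: turn a model-cap rejection into an optional wait duration.
use std::time::Duration;

use codex_core::error::CodexErr;

fn model_cap_wait_hint(err: &CodexErr) -> Option<Duration> {
    match err {
        CodexErr::ModelCap(cap) => cap.reset_after_seconds.map(Duration::from_secs),
        _ => None,
    }
}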
@@ -1,5 +1,6 @@
mod storage;

use async_trait::async_trait;
use chrono::Utc;
use reqwest::StatusCode;
use serde::Deserialize;
@@ -12,8 +13,9 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::RwLock;

use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::AuthMode as ApiAuthMode;
use codex_protocol::config_types::ForcedLoginMethod;

pub use crate::auth::storage::AuthCredentialsStoreMode;
@@ -23,6 +25,7 @@ use crate::auth::storage::create_auth_storage;
use crate::config::Config;
use crate::error::RefreshTokenFailedError;
use crate::error::RefreshTokenFailedReason;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan as InternalKnownPlan;
use crate::token_data::PlanType as InternalPlanType;
use crate::token_data::TokenData;
@@ -33,19 +36,50 @@ use codex_protocol::account::PlanType as AccountPlanType;
use serde_json::Value;
use thiserror::Error;

#[derive(Debug, Clone)]
pub struct CodexAuth {
pub mode: AuthMode,
/// Account type for the current user.
///
/// This is used internally to determine the base URL for generating responses,
/// and to gate ChatGPT-only behaviors like rate limits and available models (as
/// opposed to API key-based auth).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AuthMode {
ApiKey,
Chatgpt,
}

pub(crate) api_key: Option<String>,
pub(crate) auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
/// Authentication mechanism used by the current user.
#[derive(Debug, Clone)]
pub enum CodexAuth {
ApiKey(ApiKeyAuth),
Chatgpt(ChatgptAuth),
ChatgptAuthTokens(ChatgptAuthTokens),
}

#[derive(Debug, Clone)]
pub struct ApiKeyAuth {
api_key: String,
}

#[derive(Debug, Clone)]
pub struct ChatgptAuth {
state: ChatgptAuthState,
storage: Arc<dyn AuthStorageBackend>,
pub(crate) client: CodexHttpClient,
}

#[derive(Debug, Clone)]
pub struct ChatgptAuthTokens {
state: ChatgptAuthState,
}

#[derive(Debug, Clone)]
struct ChatgptAuthState {
auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
client: CodexHttpClient,
}

impl PartialEq for CodexAuth {
fn eq(&self, other: &Self) -> bool {
self.mode == other.mode
self.api_auth_mode() == other.api_auth_mode()
}
}

@@ -68,6 +102,31 @@ pub enum RefreshTokenError {
Transient(#[from] std::io::Error),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalAuthTokens {
pub access_token: String,
pub id_token: String,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ExternalAuthRefreshReason {
Unauthorized,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalAuthRefreshContext {
pub reason: ExternalAuthRefreshReason,
pub previous_account_id: Option<String>,
}

#[async_trait]
pub trait ExternalAuthRefresher: Send + Sync {
async fn refresh(
&self,
context: ExternalAuthRefreshContext,
) -> std::io::Result<ExternalAuthTokens>;
}

impl RefreshTokenError {
pub fn failed_reason(&self) -> Option<RefreshTokenFailedReason> {
match self {
@@ -87,14 +146,78 @@ impl From<RefreshTokenError> for std::io::Error {
}

impl CodexAuth {
fn from_auth_dot_json(
codex_home: &Path,
auth_dot_json: AuthDotJson,
auth_credentials_store_mode: AuthCredentialsStoreMode,
client: CodexHttpClient,
) -> std::io::Result<Self> {
let auth_mode = auth_dot_json.resolved_mode();
if auth_mode == ApiAuthMode::ApiKey {
let Some(api_key) = auth_dot_json.openai_api_key.as_deref() else {
return Err(std::io::Error::other("API key auth is missing a key."));
};
return Ok(CodexAuth::from_api_key_with_client(api_key, client));
}

let storage_mode = auth_dot_json.storage_mode(auth_credentials_store_mode);
let state = ChatgptAuthState {
auth_dot_json: Arc::new(Mutex::new(Some(auth_dot_json))),
client,
};

match auth_mode {
ApiAuthMode::Chatgpt => {
let storage = create_auth_storage(codex_home.to_path_buf(), storage_mode);
Ok(Self::Chatgpt(ChatgptAuth { state, storage }))
}
ApiAuthMode::ChatgptAuthTokens => {
Ok(Self::ChatgptAuthTokens(ChatgptAuthTokens { state }))
}
ApiAuthMode::ApiKey => unreachable!("api key mode is handled above"),
}
}

/// Loads the available auth information from auth storage.
pub fn from_auth_storage(
codex_home: &Path,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<CodexAuth>> {
) -> std::io::Result<Option<Self>> {
load_auth(codex_home, false, auth_credentials_store_mode)
}

pub fn internal_auth_mode(&self) -> AuthMode {
match self {
Self::ApiKey(_) => AuthMode::ApiKey,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => AuthMode::Chatgpt,
}
}

pub fn api_auth_mode(&self) -> ApiAuthMode {
match self {
Self::ApiKey(_) => ApiAuthMode::ApiKey,
Self::Chatgpt(_) => ApiAuthMode::Chatgpt,
Self::ChatgptAuthTokens(_) => ApiAuthMode::ChatgptAuthTokens,
}
}

pub fn is_chatgpt_auth(&self) -> bool {
self.internal_auth_mode() == AuthMode::Chatgpt
}

pub fn is_external_chatgpt_tokens(&self) -> bool {
matches!(self, Self::ChatgptAuthTokens(_))
}

/// Returns `None` if `internal_auth_mode() != AuthMode::ApiKey`.
pub fn api_key(&self) -> Option<&str> {
match self {
Self::ApiKey(auth) => Some(auth.api_key.as_str()),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => None,
}
}

/// Returns `Err` if `is_chatgpt_auth()` is false.
pub fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
let auth_dot_json: Option<AuthDotJson> = self.get_current_auth_json();
match auth_dot_json {
@@ -107,20 +230,23 @@ impl CodexAuth {
}
}

/// Returns the token string used for bearer authentication.
pub fn get_token(&self) -> Result<String, std::io::Error> {
match self.mode {
AuthMode::ApiKey => Ok(self.api_key.clone().unwrap_or_default()),
AuthMode::ChatGPT => {
let id_token = self.get_token_data()?.access_token;
Ok(id_token)
match self {
Self::ApiKey(auth) => Ok(auth.api_key.clone()),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => {
let access_token = self.get_token_data()?.access_token;
Ok(access_token)
}
}
}

/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_id(&self) -> Option<String> {
self.get_current_token_data().and_then(|t| t.account_id)
}

/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_email(&self) -> Option<String> {
self.get_current_token_data().and_then(|t| t.id_token.email)
}
@@ -132,6 +258,7 @@ impl CodexAuth {
pub fn account_plan_type(&self) -> Option<AccountPlanType> {
let map_known = |kp: &InternalKnownPlan| match kp {
InternalKnownPlan::Free => AccountPlanType::Free,
InternalKnownPlan::Go => AccountPlanType::Go,
InternalKnownPlan::Plus => AccountPlanType::Plus,
InternalKnownPlan::Pro => AccountPlanType::Pro,
InternalKnownPlan::Team => AccountPlanType::Team,
@@ -148,11 +275,18 @@ impl CodexAuth {
})
}

/// Returns `None` if `is_chatgpt_auth()` is false.
fn get_current_auth_json(&self) -> Option<AuthDotJson> {
let state = match self {
Self::Chatgpt(auth) => &auth.state,
Self::ChatgptAuthTokens(auth) => &auth.state,
Self::ApiKey(_) => return None,
};
#[expect(clippy::unwrap_used)]
self.auth_dot_json.lock().unwrap().clone()
state.auth_dot_json.lock().unwrap().clone()
}

/// Returns `None` if `is_chatgpt_auth()` is false.
fn get_current_token_data(&self) -> Option<TokenData> {
self.get_current_auth_json().and_then(|t| t.tokens)
}
@@ -160,6 +294,7 @@ impl CodexAuth {
/// Consider this private to integration tests.
pub fn create_dummy_chatgpt_auth_for_testing() -> Self {
let auth_dot_json = AuthDotJson {
auth_mode: Some(ApiAuthMode::Chatgpt),
openai_api_key: None,
tokens: Some(TokenData {
id_token: Default::default(),
@@ -170,24 +305,19 @@ impl CodexAuth {
last_refresh: Some(Utc::now()),
};

let auth_dot_json = Arc::new(Mutex::new(Some(auth_dot_json)));
Self {
api_key: None,
mode: AuthMode::ChatGPT,
storage: create_auth_storage(PathBuf::new(), AuthCredentialsStoreMode::File),
auth_dot_json,
client: crate::default_client::create_client(),
}
let client = crate::default_client::create_client();
let state = ChatgptAuthState {
auth_dot_json: Arc::new(Mutex::new(Some(auth_dot_json))),
client,
};
let storage = create_auth_storage(PathBuf::new(), AuthCredentialsStoreMode::File);
Self::Chatgpt(ChatgptAuth { state, storage })
}

fn from_api_key_with_client(api_key: &str, client: CodexHttpClient) -> Self {
Self {
api_key: Some(api_key.to_owned()),
mode: AuthMode::ApiKey,
storage: create_auth_storage(PathBuf::new(), AuthCredentialsStoreMode::File),
auth_dot_json: Arc::new(Mutex::new(None)),
client,
}
fn from_api_key_with_client(api_key: &str, _client: CodexHttpClient) -> Self {
Self::ApiKey(ApiKeyAuth {
api_key: api_key.to_owned(),
})
}

pub fn from_api_key(api_key: &str) -> Self {
@@ -195,6 +325,25 @@ impl CodexAuth {
}
}

impl ChatgptAuth {
fn current_auth_json(&self) -> Option<AuthDotJson> {
#[expect(clippy::unwrap_used)]
self.state.auth_dot_json.lock().unwrap().clone()
}

fn current_token_data(&self) -> Option<TokenData> {
self.current_auth_json().and_then(|auth| auth.tokens)
}

fn storage(&self) -> &Arc<dyn AuthStorageBackend> {
&self.storage
}

fn client(&self) -> &CodexHttpClient {
&self.state.client
}
}

pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
pub const CODEX_API_KEY_ENV_VAR: &str = "CODEX_API_KEY";

@@ -229,6 +378,7 @@ pub fn login_with_api_key(
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson {
auth_mode: Some(ApiAuthMode::ApiKey),
openai_api_key: Some(api_key.to_string()),
tokens: None,
last_refresh: None,
@@ -236,6 +386,20 @@ pub fn login_with_api_key(
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}

/// Writes an in-memory auth payload for externally managed ChatGPT tokens.
pub fn login_with_chatgpt_auth_tokens(
codex_home: &Path,
id_token: &str,
access_token: &str,
) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson::from_external_token_strings(id_token, access_token)?;
save_auth(
codex_home,
&auth_dot_json,
AuthCredentialsStoreMode::Ephemeral,
)
}

/// Persist the provided auth payload using the specified backend.
pub fn save_auth(
codex_home: &Path,
@@ -270,10 +434,10 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
};

if let Some(required_method) = config.forced_login_method {
let method_violation = match (required_method, auth.mode) {
let method_violation = match (required_method, auth.internal_auth_mode()) {
(ForcedLoginMethod::Api, AuthMode::ApiKey) => None,
(ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT) => None,
(ForcedLoginMethod::Api, AuthMode::ChatGPT) => Some(
(ForcedLoginMethod::Chatgpt, AuthMode::Chatgpt) => None,
(ForcedLoginMethod::Api, AuthMode::Chatgpt) => Some(
"API key login is required, but ChatGPT is currently being used. Logging out."
.to_string(),
),
@@ -293,7 +457,7 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
}

if let Some(expected_account_id) = config.forced_chatgpt_workspace_id.as_deref() {
if auth.mode != AuthMode::ChatGPT {
if !auth.is_chatgpt_auth() {
return Ok(());
}

@@ -337,12 +501,26 @@ fn logout_with_message(
message: String,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
match logout(codex_home, auth_credentials_store_mode) {
Ok(_) => Err(std::io::Error::other(message)),
Err(err) => Err(std::io::Error::other(format!(
"{message}. Failed to remove auth.json: {err}"
))),
// External auth tokens live in the ephemeral store, but persistent auth may still exist
// from earlier logins. Clear both so a forced logout truly removes all active auth.
let removal_result = logout_all_stores(codex_home, auth_credentials_store_mode);
let error_message = match removal_result {
Ok(_) => message,
Err(err) => format!("{message}. Failed to remove auth.json: {err}"),
};
Err(std::io::Error::other(error_message))
}

fn logout_all_stores(
codex_home: &Path,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<bool> {
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
return logout(codex_home, AuthCredentialsStoreMode::Ephemeral);
}
let removed_ephemeral = logout(codex_home, AuthCredentialsStoreMode::Ephemeral)?;
let removed_managed = logout(codex_home, auth_credentials_store_mode)?;
Ok(removed_ephemeral || removed_managed)
}

fn load_auth(
@@ -350,6 +528,12 @@ fn load_auth(
enable_codex_api_key_env: bool,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<CodexAuth>> {
let build_auth = |auth_dot_json: AuthDotJson, storage_mode| {
let client = crate::default_client::create_client();
CodexAuth::from_auth_dot_json(codex_home, auth_dot_json, storage_mode, client)
};

// API key via env var takes precedence over any other auth method.
if enable_codex_api_key_env && let Some(api_key) = read_codex_api_key_from_env() {
let client = crate::default_client::create_client();
return Ok(Some(CodexAuth::from_api_key_with_client(
@@ -358,39 +542,34 @@ fn load_auth(
)));
}

let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
// External ChatGPT auth tokens live in the in-memory (ephemeral) store. Always check this
// first so external auth takes precedence over any persisted credentials.
let ephemeral_storage = create_auth_storage(
codex_home.to_path_buf(),
AuthCredentialsStoreMode::Ephemeral,
);
if let Some(auth_dot_json) = ephemeral_storage.load()? {
let auth = build_auth(auth_dot_json, AuthCredentialsStoreMode::Ephemeral)?;
return Ok(Some(auth));
}

let client = crate::default_client::create_client();
// If the caller explicitly requested ephemeral auth, there is no persisted fallback.
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
return Ok(None);
}

// Fall back to the configured persistent store (file/keyring/auto) for managed auth.
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
let auth_dot_json = match storage.load()? {
Some(auth) => auth,
None => return Ok(None),
};

let AuthDotJson {
openai_api_key: auth_json_api_key,
tokens,
last_refresh,
} = auth_dot_json;

// Prefer AuthMode.ApiKey if it's set in the auth.json.
if let Some(api_key) = &auth_json_api_key {
return Ok(Some(CodexAuth::from_api_key_with_client(api_key, client)));
}

Ok(Some(CodexAuth {
api_key: None,
mode: AuthMode::ChatGPT,
storage: storage.clone(),
auth_dot_json: Arc::new(Mutex::new(Some(AuthDotJson {
openai_api_key: None,
tokens,
last_refresh,
}))),
client,
}))
let auth = build_auth(auth_dot_json, auth_credentials_store_mode)?;
Ok(Some(auth))
}

async fn update_tokens(
fn update_tokens(
storage: &Arc<dyn AuthStorageBackend>,
id_token: Option<String>,
access_token: Option<String>,
@@ -537,17 +716,82 @@ fn refresh_token_endpoint() -> String {
.unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
}

use std::sync::RwLock;
impl AuthDotJson {
fn from_external_tokens(external: &ExternalAuthTokens, id_token: IdTokenInfo) -> Self {
let account_id = id_token.chatgpt_account_id.clone();
let tokens = TokenData {
id_token,
access_token: external.access_token.clone(),
refresh_token: String::new(),
account_id,
|
||||
};
|
||||
|
||||
Self {
|
||||
auth_mode: Some(ApiAuthMode::ChatgptAuthTokens),
|
||||
openai_api_key: None,
|
||||
tokens: Some(tokens),
|
||||
last_refresh: Some(Utc::now()),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_external_token_strings(id_token: &str, access_token: &str) -> std::io::Result<Self> {
|
||||
let id_token_info = parse_id_token(id_token).map_err(std::io::Error::other)?;
|
||||
let external = ExternalAuthTokens {
|
||||
access_token: access_token.to_string(),
|
||||
id_token: id_token.to_string(),
|
||||
};
|
||||
Ok(Self::from_external_tokens(&external, id_token_info))
|
||||
}
|
||||
|
||||
fn resolved_mode(&self) -> ApiAuthMode {
|
||||
if let Some(mode) = self.auth_mode {
|
||||
return mode;
|
||||
}
|
||||
if self.openai_api_key.is_some() {
|
||||
return ApiAuthMode::ApiKey;
|
||||
}
|
||||
ApiAuthMode::Chatgpt
|
||||
}
|
||||
|
||||
fn storage_mode(
|
||||
&self,
|
||||
auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> AuthCredentialsStoreMode {
|
||||
if self.resolved_mode() == ApiAuthMode::ChatgptAuthTokens {
|
||||
AuthCredentialsStoreMode::Ephemeral
|
||||
} else {
|
||||
auth_credentials_store_mode
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Internal cached auth state.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
struct CachedAuth {
|
||||
auth: Option<CodexAuth>,
|
||||
/// Callback used to refresh external auth by asking the parent app for new tokens.
|
||||
external_refresher: Option<Arc<dyn ExternalAuthRefresher>>,
|
||||
}
|
||||
|
||||
impl Debug for CachedAuth {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("CachedAuth")
|
||||
.field(
|
||||
"auth_mode",
|
||||
&self.auth.as_ref().map(CodexAuth::api_auth_mode),
|
||||
)
|
||||
.field(
|
||||
"external_refresher",
|
||||
&self.external_refresher.as_ref().map(|_| "present"),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
enum UnauthorizedRecoveryStep {
|
||||
Reload,
|
||||
RefreshToken,
|
||||
ExternalRefresh,
|
||||
Done,
|
||||
}
|
||||
|
||||
@@ -556,30 +800,53 @@ enum ReloadOutcome {
|
||||
Skipped,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
enum UnauthorizedRecoveryMode {
|
||||
Managed,
|
||||
External,
|
||||
}
|
||||
|
||||
// UnauthorizedRecovery is a state machine that handles an attempt to refresh the authentication when requests
|
||||
// to API fail with 401 status code.
|
||||
// The client calls next() every time it encounters a 401 error, one time per retry.
|
||||
// For API key based authentication, we don't do anything and let the error bubble to the user.
|
||||
//
|
||||
// For ChatGPT based authentication, we:
|
||||
// 1. Attempt to reload the auth data from disk. We only reload if the account id matches the one the current process is running as.
|
||||
// 2. Attempt to refresh the token using OAuth token refresh flow.
|
||||
// If after both steps the server still responds with 401 we let the error bubble to the user.
|
||||
//
|
||||
// For external ChatGPT auth tokens (chatgptAuthTokens), UnauthorizedRecovery does not touch disk or refresh
|
||||
// tokens locally. Instead it calls the ExternalAuthRefresher (account/chatgptAuthTokens/refresh) to ask the
|
||||
// parent app for new tokens, stores them in the ephemeral auth store, and retries once.
|
||||
pub struct UnauthorizedRecovery {
|
||||
manager: Arc<AuthManager>,
|
||||
step: UnauthorizedRecoveryStep,
|
||||
expected_account_id: Option<String>,
|
||||
mode: UnauthorizedRecoveryMode,
|
||||
}
|
||||
|
||||
impl UnauthorizedRecovery {
|
||||
fn new(manager: Arc<AuthManager>) -> Self {
|
||||
let expected_account_id = manager
|
||||
.auth_cached()
|
||||
let cached_auth = manager.auth_cached();
|
||||
let expected_account_id = cached_auth.as_ref().and_then(CodexAuth::get_account_id);
|
||||
let mode = if cached_auth
|
||||
.as_ref()
|
||||
.and_then(CodexAuth::get_account_id);
|
||||
.is_some_and(CodexAuth::is_external_chatgpt_tokens)
|
||||
{
|
||||
UnauthorizedRecoveryMode::External
|
||||
} else {
|
||||
UnauthorizedRecoveryMode::Managed
|
||||
};
|
||||
let step = match mode {
|
||||
UnauthorizedRecoveryMode::Managed => UnauthorizedRecoveryStep::Reload,
|
||||
UnauthorizedRecoveryMode::External => UnauthorizedRecoveryStep::ExternalRefresh,
|
||||
};
|
||||
Self {
|
||||
manager,
|
||||
step: UnauthorizedRecoveryStep::Reload,
|
||||
step,
|
||||
expected_account_id,
|
||||
mode,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -587,7 +854,14 @@ impl UnauthorizedRecovery {
|
||||
if !self
|
||||
.manager
|
||||
.auth_cached()
|
||||
.is_some_and(|auth| auth.mode == AuthMode::ChatGPT)
|
||||
.as_ref()
|
||||
.is_some_and(CodexAuth::is_chatgpt_auth)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if self.mode == UnauthorizedRecoveryMode::External
|
||||
&& !self.manager.has_external_auth_refresher()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -622,6 +896,12 @@ impl UnauthorizedRecovery {
|
||||
self.manager.refresh_token().await?;
|
||||
self.step = UnauthorizedRecoveryStep::Done;
|
||||
}
|
||||
UnauthorizedRecoveryStep::ExternalRefresh => {
|
||||
self.manager
|
||||
.refresh_external_auth(ExternalAuthRefreshReason::Unauthorized)
|
||||
.await?;
|
||||
self.step = UnauthorizedRecoveryStep::Done;
|
||||
}
|
||||
UnauthorizedRecoveryStep::Done => {}
|
||||
}
|
||||
Ok(())
|
||||
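The comment block above describes the recovery ladder precisely; a minimal standalone sketch of the same step machine follows, with local stand-in types rather than the real AuthManager-backed steps. Each 401 retry calls next() once, and external-token auth skips straight to asking the parent app:

// Hedged sketch only: `Step` and `Recovery` are stand-ins, not the crate's types.
#[derive(Clone, Copy, PartialEq)]
enum Step { Reload, RefreshToken, ExternalRefresh, Done }

struct Recovery { step: Step }

impl Recovery {
    fn new(external: bool) -> Self {
        // External-token auth has no local reload/refresh to try.
        let step = if external { Step::ExternalRefresh } else { Step::Reload };
        Self { step }
    }

    // Called once per 401 retry; returns the action to attempt, if any remain.
    fn next(&mut self) -> Option<Step> {
        let current = self.step;
        self.step = match current {
            Step::Reload => Step::RefreshToken,
            Step::RefreshToken | Step::ExternalRefresh | Step::Done => Step::Done,
        };
        (current != Step::Done).then_some(current)
    }
}
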
@@ -642,6 +922,7 @@ pub struct AuthManager {
    inner: RwLock<CachedAuth>,
    enable_codex_api_key_env: bool,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
+    forced_chatgpt_workspace_id: RwLock<Option<String>>,
}

impl AuthManager {
@@ -654,7 +935,7 @@ impl AuthManager {
        enable_codex_api_key_env: bool,
        auth_credentials_store_mode: AuthCredentialsStoreMode,
    ) -> Self {
-        let auth = load_auth(
+        let managed_auth = load_auth(
            &codex_home,
            enable_codex_api_key_env,
            auth_credentials_store_mode,
@@ -663,34 +944,46 @@ impl AuthManager {
        .flatten();
        Self {
            codex_home,
-            inner: RwLock::new(CachedAuth { auth }),
+            inner: RwLock::new(CachedAuth {
+                auth: managed_auth,
+                external_refresher: None,
+            }),
            enable_codex_api_key_env,
            auth_credentials_store_mode,
+            forced_chatgpt_workspace_id: RwLock::new(None),
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    /// Create an AuthManager with a specific CodexAuth, for testing only.
    pub fn from_auth_for_testing(auth: CodexAuth) -> Arc<Self> {
-        let cached = CachedAuth { auth: Some(auth) };
+        let cached = CachedAuth {
+            auth: Some(auth),
+            external_refresher: None,
+        };

        Arc::new(Self {
            codex_home: PathBuf::from("non-existent"),
            inner: RwLock::new(cached),
            enable_codex_api_key_env: false,
            auth_credentials_store_mode: AuthCredentialsStoreMode::File,
+            forced_chatgpt_workspace_id: RwLock::new(None),
        })
    }

    #[cfg(any(test, feature = "test-support"))]
    /// Create an AuthManager with a specific CodexAuth and codex home, for testing only.
    pub fn from_auth_for_testing_with_home(auth: CodexAuth, codex_home: PathBuf) -> Arc<Self> {
-        let cached = CachedAuth { auth: Some(auth) };
+        let cached = CachedAuth {
+            auth: Some(auth),
+            external_refresher: None,
+        };
        Arc::new(Self {
            codex_home,
            inner: RwLock::new(cached),
            enable_codex_api_key_env: false,
            auth_credentials_store_mode: AuthCredentialsStoreMode::File,
+            forced_chatgpt_workspace_id: RwLock::new(None),
        })
    }

@@ -715,7 +1008,7 @@ impl AuthManager {
    pub fn reload(&self) -> bool {
        tracing::info!("Reloading auth");
        let new_auth = self.load_auth_from_storage();
-        self.set_auth(new_auth)
+        self.set_cached_auth(new_auth)
    }

    fn reload_if_account_id_matches(&self, expected_account_id: Option<&str>) -> ReloadOutcome {
@@ -739,11 +1032,11 @@ impl AuthManager {
        }

        tracing::info!("Reloading auth for account {expected_account_id}");
-        self.set_auth(new_auth);
+        self.set_cached_auth(new_auth);
        ReloadOutcome::Reloaded
    }

-    fn auths_equal(a: &Option<CodexAuth>, b: &Option<CodexAuth>) -> bool {
+    fn auths_equal(a: Option<&CodexAuth>, b: Option<&CodexAuth>) -> bool {
        match (a, b) {
            (None, None) => true,
            (Some(a), Some(b)) => a == b,
@@ -761,9 +1054,10 @@ impl AuthManager {
            .flatten()
    }

-    fn set_auth(&self, new_auth: Option<CodexAuth>) -> bool {
+    fn set_cached_auth(&self, new_auth: Option<CodexAuth>) -> bool {
        if let Ok(mut guard) = self.inner.write() {
-            let changed = !AuthManager::auths_equal(&guard.auth, &new_auth);
+            let previous = guard.auth.as_ref();
+            let changed = !AuthManager::auths_equal(previous, new_auth.as_ref());
            tracing::info!("Reloaded auth, changed: {changed}");
            guard.auth = new_auth;
            changed
@@ -772,6 +1066,39 @@ impl AuthManager {
        }
    }

+    pub fn set_external_auth_refresher(&self, refresher: Arc<dyn ExternalAuthRefresher>) {
+        if let Ok(mut guard) = self.inner.write() {
+            guard.external_refresher = Some(refresher);
+        }
+    }
+
+    pub fn set_forced_chatgpt_workspace_id(&self, workspace_id: Option<String>) {
+        if let Ok(mut guard) = self.forced_chatgpt_workspace_id.write() {
+            *guard = workspace_id;
+        }
+    }
+
+    pub fn forced_chatgpt_workspace_id(&self) -> Option<String> {
+        self.forced_chatgpt_workspace_id
+            .read()
+            .ok()
+            .and_then(|guard| guard.clone())
+    }
+
+    pub fn has_external_auth_refresher(&self) -> bool {
+        self.inner
+            .read()
+            .ok()
+            .map(|guard| guard.external_refresher.is_some())
+            .unwrap_or(false)
+    }
+
+    pub fn is_external_auth_active(&self) -> bool {
+        self.auth_cached()
+            .as_ref()
+            .is_some_and(CodexAuth::is_external_chatgpt_tokens)
+    }
+
    /// Convenience constructor returning an `Arc` wrapper.
    pub fn shared(
        codex_home: PathBuf,
@@ -799,13 +1126,25 @@ impl AuthManager {
            Some(auth) => auth,
            None => return Ok(()),
        };
-        let token_data = auth.get_current_token_data().ok_or_else(|| {
-            RefreshTokenError::Transient(std::io::Error::other("Token data is not available."))
-        })?;
-        self.refresh_tokens(&auth, token_data.refresh_token).await?;
-        // Reload to pick up persisted changes.
-        self.reload();
-        Ok(())
+        match auth {
+            CodexAuth::ChatgptAuthTokens(_) => {
+                self.refresh_external_auth(ExternalAuthRefreshReason::Unauthorized)
+                    .await
+            }
+            CodexAuth::Chatgpt(chatgpt_auth) => {
+                let token_data = chatgpt_auth.current_token_data().ok_or_else(|| {
+                    RefreshTokenError::Transient(std::io::Error::other(
+                        "Token data is not available.",
+                    ))
+                })?;
+                self.refresh_tokens(&chatgpt_auth, token_data.refresh_token)
+                    .await?;
+                // Reload to pick up persisted changes.
+                self.reload();
+                Ok(())
+            }
+            CodexAuth::ApiKey(_) => Ok(()),
+        }
    }

    /// Log out by deleting the on‑disk auth.json (if present). Returns Ok(true)
@@ -813,22 +1152,29 @@ impl AuthManager {
    /// reloads the in‑memory auth cache so callers immediately observe the
    /// unauthenticated state.
    pub fn logout(&self) -> std::io::Result<bool> {
-        let removed = super::auth::logout(&self.codex_home, self.auth_credentials_store_mode)?;
+        let removed = logout_all_stores(&self.codex_home, self.auth_credentials_store_mode)?;
        // Always reload to clear any cached auth (even if file absent).
        self.reload();
        Ok(removed)
    }

-    pub fn get_auth_mode(&self) -> Option<AuthMode> {
-        self.auth_cached().map(|a| a.mode)
+    pub fn get_auth_mode(&self) -> Option<ApiAuthMode> {
+        self.auth_cached().as_ref().map(CodexAuth::api_auth_mode)
    }

+    pub fn get_internal_auth_mode(&self) -> Option<AuthMode> {
+        self.auth_cached()
+            .as_ref()
+            .map(CodexAuth::internal_auth_mode)
+    }
+
    async fn refresh_if_stale(&self, auth: &CodexAuth) -> Result<bool, RefreshTokenError> {
-        if auth.mode != AuthMode::ChatGPT {
-            return Ok(false);
-        }
+        let chatgpt_auth = match auth {
+            CodexAuth::Chatgpt(chatgpt_auth) => chatgpt_auth,
+            _ => return Ok(false),
+        };

-        let auth_dot_json = match auth.get_current_auth_json() {
+        let auth_dot_json = match chatgpt_auth.current_auth_json() {
            Some(auth_dot_json) => auth_dot_json,
            None => return Ok(false),
        };
@@ -843,25 +1189,78 @@ impl AuthManager {
        if last_refresh >= Utc::now() - chrono::Duration::days(TOKEN_REFRESH_INTERVAL) {
            return Ok(false);
        }
-        self.refresh_tokens(auth, tokens.refresh_token).await?;
+        self.refresh_tokens(chatgpt_auth, tokens.refresh_token)
+            .await?;
        self.reload();
        Ok(true)
    }

+    async fn refresh_external_auth(
+        &self,
+        reason: ExternalAuthRefreshReason,
+    ) -> Result<(), RefreshTokenError> {
+        let forced_chatgpt_workspace_id = self.forced_chatgpt_workspace_id();
+        let refresher = match self.inner.read() {
+            Ok(guard) => guard.external_refresher.clone(),
+            Err(_) => {
+                return Err(RefreshTokenError::Transient(std::io::Error::other(
+                    "failed to read external auth state",
+                )));
+            }
+        };
+
+        let Some(refresher) = refresher else {
+            return Err(RefreshTokenError::Transient(std::io::Error::other(
+                "external auth refresher is not configured",
+            )));
+        };
+
+        let previous_account_id = self
+            .auth_cached()
+            .as_ref()
+            .and_then(CodexAuth::get_account_id);
+        let context = ExternalAuthRefreshContext {
+            reason,
+            previous_account_id,
+        };
+
+        let refreshed = refresher.refresh(context).await?;
+        let id_token = parse_id_token(&refreshed.id_token)
+            .map_err(|err| RefreshTokenError::Transient(std::io::Error::other(err)))?;
+        if let Some(expected_workspace_id) = forced_chatgpt_workspace_id.as_deref() {
+            let actual_workspace_id = id_token.chatgpt_account_id.as_deref();
+            if actual_workspace_id != Some(expected_workspace_id) {
+                return Err(RefreshTokenError::Transient(std::io::Error::other(
+                    format!(
+                        "external auth refresh returned workspace {actual_workspace_id:?}, expected {expected_workspace_id:?}",
+                    ),
+                )));
+            }
+        }
+        let auth_dot_json = AuthDotJson::from_external_tokens(&refreshed, id_token);
+        save_auth(
+            &self.codex_home,
+            &auth_dot_json,
+            AuthCredentialsStoreMode::Ephemeral,
+        )
+        .map_err(RefreshTokenError::Transient)?;
+        self.reload();
+        Ok(())
+    }
+
    async fn refresh_tokens(
        &self,
-        auth: &CodexAuth,
+        auth: &ChatgptAuth,
        refresh_token: String,
    ) -> Result<(), RefreshTokenError> {
-        let refresh_response = try_refresh_token(refresh_token, &auth.client).await?;
+        let refresh_response = try_refresh_token(refresh_token, auth.client()).await?;

        update_tokens(
-            &auth.storage,
+            auth.storage(),
            refresh_response.id_token,
            refresh_response.access_token,
            refresh_response.refresh_token,
        )
-        .await
        .map_err(RefreshTokenError::from)?;

        Ok(())
@@ -910,7 +1309,6 @@ mod tests {
            Some("new-access-token".to_string()),
            Some("new-refresh-token".to_string()),
        )
-        .await
        .expect("update_tokens should succeed");

        let tokens = updated.tokens.expect("tokens should exist");
@@ -971,26 +1369,22 @@ mod tests {
        )
        .expect("failed to write auth file");

-        let CodexAuth {
-            api_key,
-            mode,
-            auth_dot_json,
-            storage: _,
-            ..
-        } = super::load_auth(codex_home.path(), false, AuthCredentialsStoreMode::File)
+        let auth = super::load_auth(codex_home.path(), false, AuthCredentialsStoreMode::File)
            .unwrap()
            .unwrap();
-        assert_eq!(None, api_key);
-        assert_eq!(AuthMode::ChatGPT, mode);
+        assert_eq!(None, auth.api_key());
+        assert_eq!(AuthMode::Chatgpt, auth.internal_auth_mode());

-        let guard = auth_dot_json.lock().unwrap();
-        let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
+        let auth_dot_json = auth
+            .get_current_auth_json()
+            .expect("AuthDotJson should exist");
        let last_refresh = auth_dot_json
            .last_refresh
            .expect("last_refresh should be recorded");

        assert_eq!(
-            &AuthDotJson {
+            AuthDotJson {
+                auth_mode: None,
                openai_api_key: None,
                tokens: Some(TokenData {
                    id_token: IdTokenInfo {
@@ -1024,8 +1418,8 @@ mod tests {
        let auth = super::load_auth(dir.path(), false, AuthCredentialsStoreMode::File)
            .unwrap()
            .unwrap();
-        assert_eq!(auth.mode, AuthMode::ApiKey);
-        assert_eq!(auth.api_key, Some("sk-test-key".to_string()));
+        assert_eq!(auth.internal_auth_mode(), AuthMode::ApiKey);
+        assert_eq!(auth.api_key(), Some("sk-test-key"));

        assert!(auth.get_token_data().is_err());
    }
@@ -1034,6 +1428,7 @@ mod tests {
    fn logout_removes_auth_file() -> Result<(), std::io::Error> {
        let dir = tempdir()?;
        let auth_dot_json = AuthDotJson {
+            auth_mode: Some(ApiAuthMode::ApiKey),
            openai_api_key: Some("sk-test-key".to_string()),
            tokens: None,
            last_refresh: None,
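The new load_auth above establishes a fixed lookup order: env API key, then the ephemeral store, then the configured persistent store. A minimal standalone restatement of that precedence, using closures as stand-ins for the real storage backends (not the crate's actual signatures):

// Hedged sketch of the lookup order; `String` stands in for CodexAuth.
fn resolve_auth(
    env_api_key: Option<String>,
    ephemeral: impl Fn() -> Option<String>,
    persistent: impl Fn() -> Option<String>,
    requested_ephemeral_only: bool,
) -> Option<String> {
    // 1. An API key from the environment always wins.
    if let Some(key) = env_api_key {
        return Some(key);
    }
    // 2. Externally injected ChatGPT tokens live only in memory; check next.
    if let Some(auth) = ephemeral() {
        return Some(auth);
    }
    // 3. Ephemeral-only callers have no persisted fallback.
    if requested_ephemeral_only {
        return None;
    }
    // 4. Otherwise fall back to the file/keyring/auto store.
    persistent()
}
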
@@ -5,6 +5,7 @@ use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use sha2::Sha256;
+use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::fs::OpenOptions;
@@ -15,11 +16,14 @@ use std::os::unix::fs::OpenOptionsExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
+use std::sync::Mutex;
use tracing::warn;

use crate::token_data::TokenData;
use codex_app_server_protocol::AuthMode;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
+use once_cell::sync::Lazy;

/// Determine where Codex should store CLI auth credentials.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
@@ -32,11 +36,16 @@ pub enum AuthCredentialsStoreMode {
    Keyring,
    /// Use keyring when available; otherwise, fall back to a file in CODEX_HOME.
    Auto,
+    /// Store credentials in memory only for the current process.
+    Ephemeral,
}

/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub auth_mode: Option<AuthMode>,
+
    #[serde(rename = "OPENAI_API_KEY")]
    pub openai_api_key: Option<String>,

@@ -76,8 +85,8 @@ impl FileAuthStorage {
        Self { codex_home }
    }

-    /// Attempt to read and refresh the `auth.json` file in the given `CODEX_HOME` directory.
-    /// Returns the full AuthDotJson structure after refreshing if necessary.
+    /// Attempt to read and parse the `auth.json` file in the given `CODEX_HOME` directory.
+    /// Returns the full AuthDotJson structure.
    pub(super) fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
        let mut file = File::open(auth_file)?;
        let mut contents = String::new();
@@ -256,6 +265,49 @@ impl AuthStorageBackend for AutoAuthStorage {
    }
}

+// A global in-memory store for mapping codex_home -> AuthDotJson.
+static EPHEMERAL_AUTH_STORE: Lazy<Mutex<HashMap<String, AuthDotJson>>> =
+    Lazy::new(|| Mutex::new(HashMap::new()));
+
+#[derive(Clone, Debug)]
+struct EphemeralAuthStorage {
+    codex_home: PathBuf,
+}
+
+impl EphemeralAuthStorage {
+    fn new(codex_home: PathBuf) -> Self {
+        Self { codex_home }
+    }
+
+    fn with_store<F, T>(&self, action: F) -> std::io::Result<T>
+    where
+        F: FnOnce(&mut HashMap<String, AuthDotJson>, String) -> std::io::Result<T>,
+    {
+        let key = compute_store_key(&self.codex_home)?;
+        let mut store = EPHEMERAL_AUTH_STORE
+            .lock()
+            .map_err(|_| std::io::Error::other("failed to lock ephemeral auth storage"))?;
+        action(&mut store, key)
+    }
+}
+
+impl AuthStorageBackend for EphemeralAuthStorage {
+    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
+        self.with_store(|store, key| Ok(store.get(&key).cloned()))
+    }
+
+    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
+        self.with_store(|store, key| {
+            store.insert(key, auth.clone());
+            Ok(())
+        })
+    }
+
+    fn delete(&self) -> std::io::Result<bool> {
+        self.with_store(|store, key| Ok(store.remove(&key).is_some()))
+    }
+}
+
pub(super) fn create_auth_storage(
    codex_home: PathBuf,
    mode: AuthCredentialsStoreMode,
@@ -275,6 +327,7 @@ fn create_auth_storage_with_keyring_store(
            Arc::new(KeyringAuthStorage::new(codex_home, keyring_store))
        }
        AuthCredentialsStoreMode::Auto => Arc::new(AutoAuthStorage::new(codex_home, keyring_store)),
+        AuthCredentialsStoreMode::Ephemeral => Arc::new(EphemeralAuthStorage::new(codex_home)),
    }
}

@@ -296,6 +349,7 @@ mod tests {
        let codex_home = tempdir()?;
        let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
        let auth_dot_json = AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some("test-key".to_string()),
            tokens: None,
            last_refresh: Some(Utc::now()),
@@ -315,6 +369,7 @@ mod tests {
        let codex_home = tempdir()?;
        let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
        let auth_dot_json = AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some("test-key".to_string()),
            tokens: None,
            last_refresh: Some(Utc::now()),
@@ -336,6 +391,7 @@ mod tests {
    fn file_storage_delete_removes_auth_file() -> anyhow::Result<()> {
        let dir = tempdir()?;
        let auth_dot_json = AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some("sk-test-key".to_string()),
            tokens: None,
            last_refresh: None,
@@ -350,6 +406,32 @@ mod tests {
        Ok(())
    }

+    #[test]
+    fn ephemeral_storage_save_load_delete_is_in_memory_only() -> anyhow::Result<()> {
+        let dir = tempdir()?;
+        let storage = create_auth_storage(
+            dir.path().to_path_buf(),
+            AuthCredentialsStoreMode::Ephemeral,
+        );
+        let auth_dot_json = AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
+            openai_api_key: Some("sk-ephemeral".to_string()),
+            tokens: None,
+            last_refresh: Some(Utc::now()),
+        };
+
+        storage.save(&auth_dot_json)?;
+        let loaded = storage.load()?;
+        assert_eq!(Some(auth_dot_json), loaded);
+
+        let removed = storage.delete()?;
+        assert!(removed);
+        let loaded = storage.load()?;
+        assert_eq!(None, loaded);
+        assert!(!get_auth_file(dir.path()).exists());
+        Ok(())
+    }
+
    fn seed_keyring_and_fallback_auth_file_for_delete<F>(
        mock_keyring: &MockKeyringStore,
        codex_home: &Path,
@@ -425,6 +507,7 @@ mod tests {

    fn auth_with_prefix(prefix: &str) -> AuthDotJson {
        AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some(format!("{prefix}-api-key")),
            tokens: Some(TokenData {
                id_token: id_token_with_prefix(prefix),
@@ -445,6 +528,7 @@ mod tests {
            Arc::new(mock_keyring.clone()),
        );
        let expected = AuthDotJson {
+            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some("sk-test".to_string()),
            tokens: None,
            last_refresh: None,
@@ -481,6 +565,7 @@ mod tests {
        let auth_file = get_auth_file(codex_home.path());
        std::fs::write(&auth_file, "stale")?;
        let auth = AuthDotJson {
+            auth_mode: Some(AuthMode::Chatgpt),
            openai_api_key: None,
            tokens: Some(TokenData {
                id_token: Default::default(),
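The EphemeralAuthStorage added above is a process-wide map keyed per codex_home. The same pattern in a self-contained sketch, using std's LazyLock instead of once_cell and a plain path string where the real code derives a hashed key via compute_store_key (a simplifying assumption here):

use std::collections::HashMap;
use std::path::Path;
use std::sync::{LazyLock, Mutex};

// Process-wide map, mirroring the EPHEMERAL_AUTH_STORE pattern above.
static STORE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));

// Stand-in for compute_store_key: just the lossy path string here.
fn store_key(home: &Path) -> String {
    home.to_string_lossy().into_owned()
}

fn save(home: &Path, payload: String) {
    STORE.lock().expect("store poisoned").insert(store_key(home), payload);
}

fn load(home: &Path) -> Option<String> {
    STORE.lock().expect("store poisoned").get(&store_key(home)).cloned()
}

fn delete(home: &Path) -> bool {
    STORE.lock().expect("store poisoned").remove(&store_key(home)).is_some()
}

Keying by codex_home rather than using a single global slot means tests with separate temp homes, or embedders managing several profiles in one process, cannot clobber each other's in-memory credentials.
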
@@ -27,7 +27,6 @@ use codex_api::common::ResponsesWsRequest;
use codex_api::create_text_param_for_request;
use codex_api::error::ApiError;
use codex_api::requests::responses::Compression;
-use codex_app_server_protocol::AuthMode;
use codex_otel::OtelManager;

use codex_protocol::ThreadId;
@@ -50,6 +49,7 @@ use tokio::sync::mpsc;
use tracing::warn;

use crate::AuthManager;
+use crate::auth::CodexAuth;
use crate::auth::RefreshTokenError;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
@@ -65,6 +65,7 @@ use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;
use crate::tools::spec::create_tools_json_for_chat_completions_api;
use crate::tools::spec::create_tools_json_for_responses_api;
+use crate::transport_manager::TransportManager;

pub const WEB_SEARCH_ELIGIBLE_HEADER: &str = "x-oai-web-search-eligible";
pub const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
@@ -80,6 +81,7 @@ struct ModelClientState {
    effort: Option<ReasoningEffortConfig>,
    summary: ReasoningSummaryConfig,
    session_source: SessionSource,
+    transport_manager: TransportManager,
}

#[derive(Debug, Clone)]
@@ -91,6 +93,7 @@ pub struct ModelClientSession {
    state: Arc<ModelClientState>,
    connection: Option<ApiWebSocketConnection>,
    websocket_last_items: Vec<ResponseItem>,
+    transport_manager: TransportManager,
    /// Turn state for sticky routing.
    ///
    /// This is an `OnceLock` that stores the turn state value received from the server
@@ -116,6 +119,7 @@ impl ModelClient {
        summary: ReasoningSummaryConfig,
        conversation_id: ThreadId,
        session_source: SessionSource,
+        transport_manager: TransportManager,
    ) -> Self {
        Self {
            state: Arc::new(ModelClientState {
@@ -128,6 +132,7 @@ impl ModelClient {
                effort,
                summary,
                session_source,
+                transport_manager,
            }),
        }
    }
@@ -137,6 +142,7 @@ impl ModelClient {
            state: Arc::clone(&self.state),
            connection: None,
            websocket_last_items: Vec::new(),
+            transport_manager: self.state.transport_manager.clone(),
            turn_state: Arc::new(OnceLock::new()),
        }
    }
@@ -171,6 +177,10 @@ impl ModelClient {
        self.state.session_source.clone()
    }

+    pub(crate) fn transport_manager(&self) -> TransportManager {
+        self.state.transport_manager.clone()
+    }
+
    /// Returns the currently configured model slug.
    pub fn get_model(&self) -> String {
        self.state.model_info.slug.clone()
@@ -210,7 +220,7 @@ impl ModelClient {
        let api_provider = self
            .state
            .provider
-            .to_api_provider(auth.as_ref().map(|a| a.mode))?;
+            .to_api_provider(auth.as_ref().map(CodexAuth::internal_auth_mode))?;
        let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
        let transport = ReqwestTransport::new(build_reqwest_client());
        let request_telemetry = self.build_request_telemetry();
@@ -250,9 +260,18 @@ impl ModelClientSession {
    /// For Chat providers, the underlying stream is optionally aggregated
    /// based on the `show_raw_agent_reasoning` flag in the config.
    pub async fn stream(&mut self, prompt: &Prompt) -> Result<ResponseStream> {
-        match self.state.provider.wire_api {
-            WireApi::Responses => self.stream_responses_api(prompt).await,
-            WireApi::ResponsesWebsocket => self.stream_responses_websocket(prompt).await,
+        let wire_api = self.state.provider.wire_api;
+        match wire_api {
+            WireApi::Responses => {
+                let websocket_enabled = self.responses_websocket_enabled()
+                    && !self.transport_manager.disable_websockets();
+
+                if websocket_enabled {
+                    self.stream_responses_websocket(prompt).await
+                } else {
+                    self.stream_responses_api(prompt).await
+                }
+            }
            WireApi::Chat => {
                let api_stream = self.stream_chat_completions(prompt).await?;

@@ -271,6 +290,34 @@ impl ModelClientSession {
        }
    }

+    pub(crate) fn try_switch_fallback_transport(&mut self) -> bool {
+        let websocket_enabled = self.responses_websocket_enabled();
+        let activated = self
+            .transport_manager
+            .activate_http_fallback(websocket_enabled);
+        if activated {
+            warn!("falling back to HTTP");
+            self.state.otel_manager.counter(
+                "codex.transport.fallback_to_http",
+                1,
+                &[("from_wire_api", "responses_websocket")],
+            );
+
+            self.connection = None;
+            self.websocket_last_items.clear();
+        }
+        activated
+    }
+
+    fn responses_websocket_enabled(&self) -> bool {
+        self.state.provider.supports_websockets
+            && self
+                .state
+                .config
+                .features
+                .enabled(Feature::ResponsesWebsockets)
+    }
+
    fn build_responses_request(&self, prompt: &Prompt) -> Result<ApiPrompt> {
        let instructions = prompt.base_instructions.text.clone();
        let tools_json: Vec<Value> = create_tools_json_for_responses_api(&prompt.tools)?;
@@ -422,7 +469,7 @@ impl ModelClientSession {
            .config
            .features
            .enabled(Feature::EnableRequestCompression)
-            && auth.is_some_and(|auth| auth.mode == AuthMode::ChatGPT)
+            && auth.is_some_and(CodexAuth::is_chatgpt_auth)
            && self.state.provider.is_openai()
        {
            Compression::Zstd
@@ -460,7 +507,7 @@ impl ModelClientSession {
        let api_provider = self
            .state
            .provider
-            .to_api_provider(auth.as_ref().map(|a| a.mode))?;
+            .to_api_provider(auth.as_ref().map(CodexAuth::internal_auth_mode))?;
        let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
        let transport = ReqwestTransport::new(build_reqwest_client());
        let (request_telemetry, sse_telemetry) = self.build_streaming_telemetry();
@@ -516,7 +563,7 @@ impl ModelClientSession {
        let api_provider = self
            .state
            .provider
-            .to_api_provider(auth.as_ref().map(|a| a.mode))?;
+            .to_api_provider(auth.as_ref().map(CodexAuth::internal_auth_mode))?;
        let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
        let transport = ReqwestTransport::new(build_reqwest_client());
        let (request_telemetry, sse_telemetry) = self.build_streaming_telemetry();
@@ -562,7 +609,7 @@ impl ModelClientSession {
        let api_provider = self
            .state
            .provider
-            .to_api_provider(auth.as_ref().map(|a| a.mode))?;
+            .to_api_provider(auth.as_ref().map(CodexAuth::internal_auth_mode))?;
        let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
        let compression = self.responses_request_compression(auth.as_ref());

@@ -655,11 +702,13 @@ fn build_responses_headers(
    let mut headers = experimental_feature_headers(config);
    headers.insert(
        WEB_SEARCH_ELIGIBLE_HEADER,
-        HeaderValue::from_static(if config.web_search_mode == WebSearchMode::Disabled {
-            "false"
-        } else {
-            "true"
-        }),
+        HeaderValue::from_static(
+            if matches!(config.web_search_mode, Some(WebSearchMode::Disabled)) {
+                "false"
+            } else {
+                "true"
+            },
+        ),
    );
    if let Some(turn_state) = turn_state
        && let Some(state) = turn_state.get()
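The TransportManager used above is only visible here through its call sites, so the following is an assumption-laden sketch of the fallback contract those call sites imply: activate_http_fallback should return true only on the transition into fallback, and only if websockets were eligible, so the caller logs and drops its websocket state exactly once. An AtomicBool stands in for whatever state the real type holds:

use std::sync::atomic::{AtomicBool, Ordering};

// Hypothetical stand-in for TransportManager's fallback state.
struct TransportState {
    http_fallback: AtomicBool,
}

impl TransportState {
    // swap(true) returns the previous value, so this is true exactly once
    // per transition, and never when websockets were not enabled anyway.
    fn activate_http_fallback(&self, websocket_enabled: bool) -> bool {
        websocket_enabled && !self.http_fallback.swap(true, Ordering::SeqCst)
    }

    // Once fallback is active, stream() routes Responses traffic over HTTP.
    fn disable_websockets(&self) -> bool {
        self.http_fallback.load(Ordering::SeqCst)
    }
}
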
File diff suppressed because it is too large
@@ -208,6 +208,10 @@ async fn forward_events(
                id: _,
                msg: EventMsg::SessionConfigured(_),
            } => {}
+            Event {
+                id: _,
+                msg: EventMsg::ThreadNameUpdated(_),
+            } => {}
            Event {
                id,
                msg: EventMsg::ExecApprovalRequest(event),
@@ -12,6 +12,8 @@ use codex_protocol::protocol::SessionSource;
use std::path::PathBuf;
use tokio::sync::watch;

+use crate::state_db::StateDbHandle;
+
#[derive(Clone, Debug)]
pub struct ThreadConfigSnapshot {
    pub model: String,
@@ -64,6 +66,10 @@ impl CodexThread {
        self.rollout_path.clone()
    }

+    pub fn state_db(&self) -> Option<StateDbHandle> {
+        self.codex.state_db()
+    }
+
    pub async fn config_snapshot(&self) -> ThreadConfigSnapshot {
        self.codex.thread_config_snapshot().await
    }
@@ -10,8 +10,6 @@ use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::features::Feature;
use crate::protocol::CompactedItem;
-use crate::protocol::ContextCompactionEndedEvent;
-use crate::protocol::ContextCompactionStartedEvent;
use crate::protocol::EventMsg;
use crate::protocol::TurnContextItem;
use crate::protocol::TurnStartedEvent;
@@ -30,7 +28,6 @@ use codex_protocol::protocol::RolloutItem;
use codex_protocol::user_input::UserInput;
use futures::prelude::*;
use tracing::error;
-use uuid::Uuid;

pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
pub const SUMMARY_PREFIX: &str = include_str!("../templates/compact/summary_prefix.md");
@@ -64,6 +61,7 @@ pub(crate) async fn run_compact_task(
) {
    let start_event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
+        collaboration_mode_kind: turn_context.collaboration_mode_kind,
    });
    sess.send_event(&turn_context, start_event).await;
    run_compact_task_inner(sess.clone(), turn_context, input).await;
@@ -74,9 +72,9 @@ async fn run_compact_task_inner(
    turn_context: Arc<TurnContext>,
    input: Vec<UserInput>,
) {
-    let compaction_item = compaction_turn_item();
-    emit_compaction_started(&sess, &turn_context, &compaction_item).await;
-
+    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
+    sess.emit_turn_item_started(&turn_context, &compaction_item)
+        .await;
    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);

    let mut history = sess.clone_history().await;
@@ -137,7 +135,6 @@ async fn run_compact_task_inner(
                break;
            }
            Err(CodexErr::Interrupted) => {
-                emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                return;
            }
            Err(e @ CodexErr::ContextWindowExceeded) => {
@@ -154,7 +151,6 @@ async fn run_compact_task_inner(
                sess.set_total_tokens_full(turn_context.as_ref()).await;
                let event = EventMsg::Error(e.to_error_event(None));
                sess.send_event(&turn_context, event).await;
-                emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                return;
            }
            Err(e) => {
@@ -172,7 +168,6 @@ async fn run_compact_task_inner(
                } else {
                    let event = EventMsg::Error(e.to_error_event(None));
                    sess.send_event(&turn_context, event).await;
-                    emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
                    return;
                }
            }
@@ -202,46 +197,14 @@ async fn run_compact_task_inner(
    });
    sess.persist_rollout_items(&[rollout_item]).await;

-    emit_compaction_ended(&sess, &turn_context, compaction_item).await;
-
+    sess.emit_turn_item_completed(&turn_context, compaction_item)
+        .await;
    let warning = EventMsg::Warning(WarningEvent {
        message: "Heads up: Long threads and multiple compactions can cause the model to be less accurate. Start a new thread when possible to keep threads small and targeted.".to_string(),
    });
    sess.send_event(&turn_context, warning).await;
}

-fn compaction_turn_item() -> TurnItem {
-    TurnItem::ContextCompaction(ContextCompactionItem {
-        id: Uuid::new_v4().to_string(),
-    })
-}
-
-pub(crate) async fn emit_compaction_started(
-    sess: &Session,
-    turn_context: &TurnContext,
-    item: &TurnItem,
-) {
-    sess.send_event(
-        turn_context,
-        EventMsg::ContextCompactionStarted(ContextCompactionStartedEvent {}),
-    )
-    .await;
-    sess.emit_turn_item_started(turn_context, item).await;
-}
-
-pub(crate) async fn emit_compaction_ended(
-    sess: &Session,
-    turn_context: &TurnContext,
-    item: TurnItem,
-) {
-    sess.emit_turn_item_completed(turn_context, item).await;
-    sess.send_event(
-        turn_context,
-        EventMsg::ContextCompactionEnded(ContextCompactionEndedEvent {}),
-    )
-    .await;
-}
-
pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
    let mut pieces = Vec::new();
    for item in content {
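The net effect of the compaction changes above: the dedicated ContextCompactionStarted/Ended events are gone, and compaction is just a TurnItem bracketed by the generic started/completed item events, with error paths returning early without a completed event. A compact sketch of that lifecycle, with a hypothetical Emitter trait standing in for Session's emit_turn_item_* methods:

// Hedged sketch; `Emitter` is not the real Session API.
trait Emitter {
    fn item_started(&self, item_id: &str);
    fn item_completed(&self, item_id: &str);
}

fn run_compaction(emitter: &dyn Emitter, do_compact: impl FnOnce() -> Result<(), String>) {
    // Real code builds TurnItem::ContextCompaction(ContextCompactionItem::new()).
    let item_id = "context-compaction";
    emitter.item_started(item_id);
    if do_compact().is_err() {
        // Mirrors the early returns in run_compact_task_inner: no
        // `completed` event is emitted when compaction is interrupted or fails.
        return;
    }
    emitter.item_completed(item_id);
}
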
@@ -3,8 +3,6 @@ use std::sync::Arc;
use crate::Prompt;
use crate::codex::Session;
use crate::codex::TurnContext;
-use crate::compact::emit_compaction_ended;
-use crate::compact::emit_compaction_started;
use crate::error::Result as CodexResult;
use crate::protocol::CompactedItem;
use crate::protocol::EventMsg;
@@ -13,7 +11,6 @@ use crate::protocol::TurnStartedEvent;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
-use uuid::Uuid;

pub(crate) async fn run_inline_remote_auto_compact_task(
    sess: Arc<Session>,
@@ -25,6 +22,7 @@ pub(crate) async fn run_inline_remote_auto_compact_task(
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
    let start_event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
+        collaboration_mode_kind: turn_context.collaboration_mode_kind,
    });
    sess.send_event(&turn_context, start_event).await;

@@ -32,25 +30,21 @@ pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Ar
}

async fn run_remote_compact_task_inner(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
-    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
-        id: Uuid::new_v4().to_string(),
-    });
-    emit_compaction_started(sess, turn_context, &compaction_item).await;
-
    if let Err(err) = run_remote_compact_task_inner_impl(sess, turn_context).await {
        let event = EventMsg::Error(
            err.to_error_event(Some("Error running remote compact task".to_string())),
        );
        sess.send_event(turn_context, event).await;
    }
-
-    emit_compaction_ended(sess, turn_context, compaction_item).await;
}

async fn run_remote_compact_task_inner_impl(
    sess: &Arc<Session>,
    turn_context: &Arc<TurnContext>,
) -> CodexResult<()> {
+    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
+    sess.emit_turn_item_started(turn_context, &compaction_item)
+        .await;
    let history = sess.clone_history().await;

    // Required to keep `/undo` available after compaction
@@ -88,5 +82,7 @@ async fn run_remote_compact_task_inner_impl(
    sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
        .await;

+    sess.emit_turn_item_completed(turn_context, compaction_item)
+        .await;
    Ok(())
}
@@ -18,6 +18,12 @@ pub enum ConstraintError {

    #[error("field `{field_name}` cannot be empty")]
    EmptyField { field_name: String },
+
+    #[error("invalid rules in requirements (set by {requirement_source}): {reason}")]
+    ExecPolicyParse {
+        requirement_source: RequirementSource,
+        reason: String,
+    },
}

impl ConstraintError {
@@ -7,6 +7,7 @@ use crate::config::types::McpServerConfig;
use crate::config::types::McpServerDisabledReason;
use crate::config::types::McpServerTransportConfig;
use crate::config::types::Notice;
+use crate::config::types::NotificationMethod;
use crate::config::types::Notifications;
use crate::config::types::OtelConfig;
use crate::config::types::OtelConfigToml;
@@ -17,11 +18,13 @@ use crate::config::types::ShellEnvironmentPolicyToml;
use crate::config::types::SkillsConfig;
use crate::config::types::Tui;
use crate::config::types::UriBasedFileOpener;
+use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigRequirements;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::McpServerIdentity;
use crate::config_loader::McpServerRequirement;
+use crate::config_loader::ResidencyRequirement;
use crate::config_loader::Sourced;
use crate::config_loader::load_config_layers_state;
use crate::features::Feature;
@@ -55,7 +58,6 @@ use codex_protocol::openai_models::ReasoningEffort;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
-use dirs::home_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
@@ -139,6 +141,11 @@ pub struct Config {

    pub sandbox_policy: Constrained<SandboxPolicy>,

+    /// enforce_residency means web traffic cannot be routed outside of a
+    /// particular geography. HTTP clients should direct their requests
+    /// using backend-specific headers or URLs to enforce this.
+    pub enforce_residency: Constrained<Option<ResidencyRequirement>>,
+
    /// True if the user passed in an override or set a value in config.toml
    /// for either of approval_policy or sandbox_mode.
    pub did_user_set_custom_approval_policy_or_sandbox_mode: bool,
@@ -192,10 +199,13 @@ pub struct Config {
    /// If unset the feature is disabled.
    pub notify: Option<Vec<String>>,

-    /// TUI notifications preference. When set, the TUI will send OSC 9 notifications on approvals
-    /// and turn completions when not focused.
+    /// TUI notifications preference. When set, the TUI will send terminal notifications on
+    /// approvals and turn completions when not focused.
    pub tui_notifications: Notifications,

+    /// Notification method for terminal notifications (osc9 or bel).
+    pub tui_notification_method: NotificationMethod,
+
    /// Enable ASCII animations and shimmer effects in the TUI.
    pub animations: bool,

@@ -306,8 +316,8 @@ pub struct Config {
    /// model info's default preference.
    pub include_apply_patch_tool: bool,

-    /// Explicit or feature-derived web search mode. Defaults to cached.
-    pub web_search_mode: WebSearchMode,
+    /// Explicit or feature-derived web search mode.
+    pub web_search_mode: Option<WebSearchMode>,

    /// If set to `true`, used only the experimental unified exec tool.
    pub use_experimental_unified_exec_tool: bool,
@@ -362,6 +372,7 @@ pub struct ConfigBuilder {
    cli_overrides: Option<Vec<(String, TomlValue)>>,
    harness_overrides: Option<ConfigOverrides>,
    loader_overrides: Option<LoaderOverrides>,
+    cloud_requirements: CloudRequirementsLoader,
    fallback_cwd: Option<PathBuf>,
}

@@ -386,6 +397,11 @@ impl ConfigBuilder {
        self
    }

+    pub fn cloud_requirements(mut self, cloud_requirements: CloudRequirementsLoader) -> Self {
+        self.cloud_requirements = cloud_requirements;
+        self
+    }
+
    pub fn fallback_cwd(mut self, fallback_cwd: Option<PathBuf>) -> Self {
        self.fallback_cwd = fallback_cwd;
        self
@@ -397,6 +413,7 @@ impl ConfigBuilder {
            cli_overrides,
            harness_overrides,
            loader_overrides,
+            cloud_requirements,
            fallback_cwd,
        } = self;
        let codex_home = codex_home.map_or_else(find_codex_home, std::io::Result::Ok)?;
@@ -409,9 +426,14 @@ impl ConfigBuilder {
            None => AbsolutePathBuf::current_dir()?,
        };
        harness_overrides.cwd = Some(cwd.to_path_buf());
-        let config_layer_stack =
-            load_config_layers_state(&codex_home, Some(cwd), &cli_overrides, loader_overrides)
-                .await?;
+        let config_layer_stack = load_config_layers_state(
+            &codex_home,
+            Some(cwd),
+            &cli_overrides,
+            loader_overrides,
+            cloud_requirements,
+        )
+        .await?;
        let merged_toml = config_layer_stack.effective_config();

        // Note that each layer in ConfigLayerStack should have resolved
@@ -507,6 +529,7 @@ pub async fn load_config_as_toml_with_cli_overrides(
        Some(cwd.clone()),
        &cli_overrides,
        LoaderOverrides::default(),
+        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -605,9 +628,14 @@ pub async fn load_global_mcp_servers(
    // There is no cwd/project context for this query, so this will not include
    // MCP servers defined in in-repo .codex/ folders.
    let cwd: Option<AbsolutePathBuf> = None;
-    let config_layer_stack =
-        load_config_layers_state(codex_home, cwd, &cli_overrides, LoaderOverrides::default())
-            .await?;
+    let config_layer_stack = load_config_layers_state(
+        codex_home,
+        cwd,
+        &cli_overrides,
+        LoaderOverrides::default(),
+        CloudRequirementsLoader::default(),
+    )
+    .await?;
    let merged_toml = config_layer_stack.effective_config();
    let Some(servers_value) = merged_toml.get("mcp_servers") else {
        return Ok(BTreeMap::new());
@@ -1203,27 +1231,40 @@ pub fn resolve_oss_provider(
    }
}

-/// Resolve the web search mode from explicit config, feature flags, and sandbox policy.
-/// Live search is auto-enabled when sandbox policy is `DangerFullAccess`
+/// Resolve the web search mode from explicit config and feature flags.
fn resolve_web_search_mode(
    config_toml: &ConfigToml,
    config_profile: &ConfigProfile,
    features: &Features,
-    sandbox_policy: &SandboxPolicy,
-) -> WebSearchMode {
+) -> Option<WebSearchMode> {
    if let Some(mode) = config_profile.web_search.or(config_toml.web_search) {
-        return mode;
+        return Some(mode);
    }
    if features.enabled(Feature::WebSearchCached) {
-        return WebSearchMode::Cached;
+        return Some(WebSearchMode::Cached);
    }
    if features.enabled(Feature::WebSearchRequest) {
-        return WebSearchMode::Live;
+        return Some(WebSearchMode::Live);
    }
+    None
+}
+
+pub(crate) fn resolve_web_search_mode_for_turn(
+    explicit_mode: Option<WebSearchMode>,
+    is_azure_responses_endpoint: bool,
+    sandbox_policy: &SandboxPolicy,
+) -> WebSearchMode {
+    if let Some(mode) = explicit_mode {
+        return mode;
+    }
+    if is_azure_responses_endpoint {
+        return WebSearchMode::Disabled;
+    }
    if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
-        return WebSearchMode::Live;
+        WebSearchMode::Live
+    } else {
+        WebSearchMode::Cached
    }
-    WebSearchMode::Cached
}

impl Config {
@@ -1347,8 +1388,7 @@ impl Config {
                AskForApproval::default()
            }
        });
-        let web_search_mode =
-            resolve_web_search_mode(&cfg, &config_profile, &features, &sandbox_policy);
+        let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
        // TODO(dylan): We should be able to leverage ConfigLayerStack so that
        // we can reliably check this at every config level.
        let did_user_set_custom_approval_policy_or_sandbox_mode = approval_policy_override
@@ -1360,12 +1400,6 @@ impl Config {
            || cfg.sandbox_mode.is_some();

        let mut model_providers = built_in_model_providers();
-        if features.enabled(Feature::ResponsesWebsockets)
-            && let Some(provider) = model_providers.get_mut("openai")
-            && provider.is_openai()
-        {
-            provider.wire_api = crate::model_provider_info::WireApi::ResponsesWebsocket;
-        }
        // Merge user-defined providers into the built-in list.
        for (key, provider) in cfg.model_providers.into_iter() {
            model_providers.entry(key).or_insert(provider);
@@ -1487,6 +1521,8 @@ impl Config {
            approval_policy: mut constrained_approval_policy,
            sandbox_policy: mut constrained_sandbox_policy,
            mcp_servers,
+            exec_policy: _,
+            enforce_residency,
        } = requirements;

        constrained_approval_policy
@@ -1509,6 +1545,7 @@ impl Config {
            cwd: resolved_cwd,
            approval_policy: constrained_approval_policy,
            sandbox_policy: constrained_sandbox_policy,
+            enforce_residency,
            did_user_set_custom_approval_policy_or_sandbox_mode,
            forced_auto_mode_downgraded_on_windows,
            shell_environment_policy,
@@ -1599,6 +1636,11 @@ impl Config {
                .as_ref()
                .map(|t| t.notifications.clone())
                .unwrap_or_default(),
+            tui_notification_method: cfg
+                .tui
+                .as_ref()
+                .map(|t| t.notification_method)
+                .unwrap_or_default(),
            animations: cfg.tui.as_ref().map(|t| t.animations).unwrap_or(true),
            show_tooltips: cfg.tui.as_ref().map(|t| t.show_tooltips).unwrap_or(true),
            experimental_mode: cfg.tui.as_ref().and_then(|t| t.experimental_mode),
@@ -1671,18 +1713,19 @@ impl Config {
        }
    }

-    pub fn set_windows_sandbox_globally(&mut self, value: bool) {
+    pub fn set_windows_sandbox_enabled(&mut self, value: bool) {
        if value {
            self.features.enable(Feature::WindowsSandbox);
-            self.forced_auto_mode_downgraded_on_windows = false;
        } else {
            self.features.disable(Feature::WindowsSandbox);
        }
+        self.forced_auto_mode_downgraded_on_windows = !value;
    }

-    pub fn set_windows_elevated_sandbox_globally(&mut self, value: bool) {
+    pub fn set_windows_elevated_sandbox_enabled(&mut self, value: bool) {
        if value {
            self.features.enable(Feature::WindowsSandboxElevated);
            self.forced_auto_mode_downgraded_on_windows = false;
        } else {
            self.features.disable(Feature::WindowsSandboxElevated);
        }
@@ -1717,27 +1760,12 @@ fn toml_uses_deprecated_instructions_file(value: &TomlValue) -> bool {
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
-/// - If `CODEX_HOME` is set, the value will be canonicalized and this
-///   function will Err if the path does not exist.
+/// - If `CODEX_HOME` is set, the value must exist and be a directory. The
+///   value will be canonicalized and this function will Err otherwise.
/// - If `CODEX_HOME` is not set, this function does not verify that the
///   directory exists.
pub fn find_codex_home() -> std::io::Result<PathBuf> {
-    // Honor the `CODEX_HOME` environment variable when it is set to allow users
-    // (and tests) to override the default location.
-    if let Ok(val) = std::env::var("CODEX_HOME")
-        && !val.is_empty()
-    {
-        return PathBuf::from(val).canonicalize();
-    }
-
-    let mut p = home_dir().ok_or_else(|| {
-        std::io::Error::new(
-            std::io::ErrorKind::NotFound,
-            "Could not find home directory",
-        )
-    })?;
-    p.push(".codex");
-    Ok(p)
+    codex_utils_home_dir::find_codex_home()
}

/// Returns the path to the folder where Codex logs are stored. Does not verify
@@ -1756,6 +1784,7 @@ mod tests {
    use crate::config::types::FeedbackConfigToml;
    use crate::config::types::HistoryPersistence;
    use crate::config::types::McpServerTransportConfig;
+    use crate::config::types::NotificationMethod;
    use crate::config::types::Notifications;
    use crate::config_loader::RequirementSource;
    use crate::features::Feature;
@@ -1852,6 +1881,7 @@ persistence = "none"
            tui,
            Tui {
                notifications: Notifications::Enabled(true),
+                notification_method: NotificationMethod::Auto,
                animations: true,
                show_tooltips: true,
                experimental_mode: None,
@@ -2271,15 +2301,12 @@ trust_level = "trusted"
    }

    #[test]
-    fn web_search_mode_defaults_to_cached_if_unset() {
+    fn web_search_mode_defaults_to_none_if_unset() {
        let cfg = ConfigToml::default();
        let profile = ConfigProfile::default();
        let features = Features::with_defaults();

-        assert_eq!(
-            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
-            WebSearchMode::Cached
-        );
+        assert_eq!(resolve_web_search_mode(&cfg, &profile, &features), None);
    }

    #[test]
@@ -2293,8 +2320,8 @@ trust_level = "trusted"
        features.enable(Feature::WebSearchCached);

        assert_eq!(
-            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
-            WebSearchMode::Live
+            resolve_web_search_mode(&cfg, &profile, &features),
+            Some(WebSearchMode::Live)
        );
    }

@@ -2309,48 +2336,41 @@ trust_level = "trusted"
        features.enable(Feature::WebSearchRequest);

        assert_eq!(
-            resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
-            WebSearchMode::Disabled
+            resolve_web_search_mode(&cfg, &profile, &features),
+            Some(WebSearchMode::Disabled)
        );
    }

    #[test]
-    fn danger_full_access_defaults_web_search_live_when_unset() -> std::io::Result<()> {
-        let codex_home = TempDir::new()?;
-        let cfg = ConfigToml {
-            sandbox_mode: Some(SandboxMode::DangerFullAccess),
-            ..Default::default()
-        };
+    fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
+        let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::ReadOnly);

-        let config = Config::load_from_base_config_with_overrides(
-            cfg,
-            ConfigOverrides::default(),
-            codex_home.path().to_path_buf(),
-        )?;
-
-        assert_eq!(config.web_search_mode, WebSearchMode::Live);
-
-        Ok(())
+        assert_eq!(mode, WebSearchMode::Cached);
    }

    #[test]
-    fn explicit_web_search_mode_wins_in_danger_full_access() -> std::io::Result<()> {
-        let codex_home = TempDir::new()?;
-        let cfg = ConfigToml {
-            sandbox_mode: Some(SandboxMode::DangerFullAccess),
-            web_search: Some(WebSearchMode::Cached),
-            ..Default::default()
-        };
+    fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
+        let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::DangerFullAccess);

-        let config = Config::load_from_base_config_with_overrides(
-            cfg,
-            ConfigOverrides::default(),
-            codex_home.path().to_path_buf(),
||||
)?;
|
||||
assert_eq!(mode, WebSearchMode::Live);
|
||||
}
|
||||
|
||||
assert_eq!(config.web_search_mode, WebSearchMode::Cached);
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_prefers_explicit_value() {
|
||||
let mode = resolve_web_search_mode_for_turn(
|
||||
Some(WebSearchMode::Cached),
|
||||
false,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
assert_eq!(mode, WebSearchMode::Cached);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_disables_for_azure_responses_endpoint() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, true, &SandboxPolicy::DangerFullAccess);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Disabled);
|
||||
}
|
||||
|
||||
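For orientation, the four `web_search_mode_for_turn` tests above pin down a per-turn precedence: an explicit value first, then the Azure Responses restriction, then a sandbox-based default. A minimal sketch of that logic (illustrative only, not part of the diff; in particular, the ordering of the explicit check against the Azure check is an assumption the tests do not exercise):

    fn resolve_for_turn_sketch(
        explicit: Option<WebSearchMode>,
        is_azure_responses_endpoint: bool,
        sandbox_policy: &SandboxPolicy,
    ) -> WebSearchMode {
        if let Some(mode) = explicit {
            return mode; // an explicit configuration value wins
        }
        if is_azure_responses_endpoint {
            return WebSearchMode::Disabled; // web search is unavailable there
        }
        match sandbox_policy {
            SandboxPolicy::DangerFullAccess => WebSearchMode::Live,
            _ => WebSearchMode::Cached, // other sandboxes default to cached
        }
    }
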
    #[test]
@@ -2553,7 +2573,7 @@ profile = "project"
    }

    #[test]
    fn responses_websockets_feature_updates_openai_provider() -> std::io::Result<()> {
    fn responses_websockets_feature_does_not_change_wire_api() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let mut entries = BTreeMap::new();
        entries.insert("responses_websockets".to_string(), true);
@@ -2570,7 +2590,7 @@ profile = "project"

        assert_eq!(
            config.model_provider.wire_api,
            crate::model_provider_info::WireApi::ResponsesWebsocket
            crate::model_provider_info::WireApi::Responses
        );

        Ok(())
@@ -2615,8 +2635,14 @@ profile = "project"
        };

        let cwd = AbsolutePathBuf::try_from(codex_home.path())?;
        let config_layer_stack =
            load_config_layers_state(codex_home.path(), Some(cwd), &Vec::new(), overrides).await?;
        let config_layer_stack = load_config_layers_state(
            codex_home.path(),
            Some(cwd),
            &Vec::new(),
            overrides,
            CloudRequirementsLoader::default(),
        )
        .await?;
        let cfg = deserialize_config_toml_with_base(
            config_layer_stack.effective_config(),
            codex_home.path(),
@@ -2743,6 +2769,7 @@ profile = "project"
            Some(cwd),
            &[("model".to_string(), TomlValue::String("cli".to_string()))],
            overrides,
            CloudRequirementsLoader::default(),
        )
        .await?;

@@ -3690,6 +3717,7 @@ model_verbosity = "high"
            stream_max_retries: Some(10),
            stream_idle_timeout_ms: Some(300_000),
            requires_openai_auth: false,
            supports_websockets: false,
        };
        let model_provider_map = {
            let mut model_provider_map = built_in_model_providers();
@@ -3751,6 +3779,7 @@ model_verbosity = "high"
            model_provider: fixture.openai_provider.clone(),
            approval_policy: Constrained::allow_any(AskForApproval::Never),
            sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
            enforce_residency: Constrained::allow_any(None),
            did_user_set_custom_approval_policy_or_sandbox_mode: true,
            forced_auto_mode_downgraded_on_windows: false,
            shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -3786,7 +3815,7 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: WebSearchMode::Cached,
            web_search_mode: None,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
@@ -3798,6 +3827,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -3834,6 +3864,7 @@ model_verbosity = "high"
            model_provider: fixture.openai_chat_completions_provider.clone(),
            approval_policy: Constrained::allow_any(AskForApproval::UnlessTrusted),
            sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
            enforce_residency: Constrained::allow_any(None),
            did_user_set_custom_approval_policy_or_sandbox_mode: true,
            forced_auto_mode_downgraded_on_windows: false,
            shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -3869,7 +3900,7 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: WebSearchMode::Cached,
            web_search_mode: None,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
@@ -3881,6 +3912,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -3932,6 +3964,7 @@ model_verbosity = "high"
            model_provider: fixture.openai_provider.clone(),
            approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
            sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
            enforce_residency: Constrained::allow_any(None),
            did_user_set_custom_approval_policy_or_sandbox_mode: true,
            forced_auto_mode_downgraded_on_windows: false,
            shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -3967,7 +4000,7 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: WebSearchMode::Cached,
            web_search_mode: None,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
@@ -3979,6 +4012,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -4016,6 +4050,7 @@ model_verbosity = "high"
            model_provider: fixture.openai_provider.clone(),
            approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
            sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
            enforce_residency: Constrained::allow_any(None),
            did_user_set_custom_approval_policy_or_sandbox_mode: true,
            forced_auto_mode_downgraded_on_windows: false,
            shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -4051,7 +4086,7 @@ model_verbosity = "high"
            forced_chatgpt_workspace_id: None,
            forced_login_method: None,
            include_apply_patch_tool: false,
            web_search_mode: WebSearchMode::Cached,
            web_search_mode: None,
            use_experimental_unified_exec_tool: false,
            ghost_snapshot: GhostSnapshotConfig::default(),
            features: Features::with_defaults(),
@@ -4063,6 +4098,7 @@ model_verbosity = "high"
            check_for_update_on_startup: true,
            disable_paste_burst: false,
            tui_notifications: Default::default(),
            tui_notification_method: Default::default(),
            animations: true,
            show_tooltips: true,
            experimental_mode: None,
@@ -4420,13 +4456,17 @@ mcp_oauth_callback_port = 5678

#[cfg(test)]
mod notifications_tests {
    use crate::config::types::NotificationMethod;
    use crate::config::types::Notifications;
    use assert_matches::assert_matches;
    use serde::Deserialize;

    #[derive(Deserialize, Debug, PartialEq)]
    struct TuiTomlTest {
        #[serde(default)]
        notifications: Notifications,
        #[serde(default)]
        notification_method: NotificationMethod,
    }

    #[derive(Deserialize, Debug, PartialEq)]
@@ -4457,4 +4497,15 @@ mod notifications_tests {
            Notifications::Custom(ref v) if v == &vec!["foo".to_string()]
        );
    }

    #[test]
    fn test_tui_notification_method() {
        let toml = r#"
[tui]
notification_method = "bel"
"#;
        let parsed: RootTomlTest =
            toml::from_str(toml).expect("deserialize notification_method=\"bel\"");
        assert_eq!(parsed.tui.notification_method, NotificationMethod::Bel);
    }
}

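As a companion to `test_tui_notification_method` above, a sketch covering the other non-default value (illustrative, not part of the diff; when the key is omitted, `auto` is the default per the `#[default]` attribute on `NotificationMethod`):

    let toml = r#"
[tui]
notification_method = "osc9"
"#;
    let parsed: RootTomlTest =
        toml::from_str(toml).expect("deserialize notification_method=\"osc9\"");
    assert_eq!(parsed.tui.notification_method, NotificationMethod::Osc9);
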
@@ -2,6 +2,7 @@ use super::CONFIG_TOML_FILE;
use super::ConfigToml;
use crate::config::edit::ConfigEdit;
use crate::config::edit::ConfigEditsBuilder;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
@@ -109,6 +110,7 @@ pub struct ConfigService {
    codex_home: PathBuf,
    cli_overrides: Vec<(String, TomlValue)>,
    loader_overrides: LoaderOverrides,
    cloud_requirements: CloudRequirementsLoader,
}

impl ConfigService {
@@ -116,11 +118,13 @@ impl ConfigService {
        codex_home: PathBuf,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        cloud_requirements: CloudRequirementsLoader,
    ) -> Self {
        Self {
            codex_home,
            cli_overrides,
            loader_overrides,
            cloud_requirements,
        }
    }

@@ -129,6 +133,7 @@ impl ConfigService {
            codex_home,
            cli_overrides: Vec::new(),
            loader_overrides: LoaderOverrides::default(),
            cloud_requirements: CloudRequirementsLoader::default(),
        }
    }

@@ -146,6 +151,7 @@ impl ConfigService {
            .cli_overrides(self.cli_overrides.clone())
            .loader_overrides(self.loader_overrides.clone())
            .fallback_cwd(Some(cwd.to_path_buf()))
            .cloud_requirements(self.cloud_requirements.clone())
            .build()
            .await
            .map_err(|err| {
@@ -376,6 +382,7 @@ impl ConfigService {
            cwd,
            &self.cli_overrides,
            self.loader_overrides.clone(),
            self.cloud_requirements.clone(),
        )
        .await
    }
@@ -813,6 +820,7 @@ remote_compaction = true
                managed_preferences_base64: None,
                macos_managed_config_requirements_base64: None,
            },
            CloudRequirementsLoader::default(),
        );

        let response = service
@@ -895,6 +903,7 @@ remote_compaction = true
                managed_preferences_base64: None,
                macos_managed_config_requirements_base64: None,
            },
            CloudRequirementsLoader::default(),
        );

        let result = service
@@ -999,6 +1008,7 @@ remote_compaction = true
                managed_preferences_base64: None,
                macos_managed_config_requirements_base64: None,
            },
            CloudRequirementsLoader::default(),
        );

        let error = service
@@ -1047,6 +1057,7 @@ remote_compaction = true
                managed_preferences_base64: None,
                macos_managed_config_requirements_base64: None,
            },
            CloudRequirementsLoader::default(),
        );

        let response = service
@@ -1094,6 +1105,7 @@ remote_compaction = true
                managed_preferences_base64: None,
                macos_managed_config_requirements_base64: None,
            },
            CloudRequirementsLoader::default(),
        );

        let result = service

@@ -428,6 +428,25 @@ impl Default for Notifications {
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)]
#[serde(rename_all = "lowercase")]
pub enum NotificationMethod {
    #[default]
    Auto,
    Osc9,
    Bel,
}

impl fmt::Display for NotificationMethod {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            NotificationMethod::Auto => write!(f, "auto"),
            NotificationMethod::Osc9 => write!(f, "osc9"),
            NotificationMethod::Bel => write!(f, "bel"),
        }
    }
}

/// Collection of settings that are specific to the TUI.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
@@ -437,6 +456,11 @@ pub struct Tui {
    #[serde(default)]
    pub notifications: Notifications,

    /// Notification method to use for unfocused terminal notifications.
    /// Defaults to `auto`.
    #[serde(default)]
    pub notification_method: NotificationMethod,

    /// Enable animations (welcome screen, shimmer effects, spinners).
    /// Defaults to `true`.
    #[serde(default = "default_true")]
@@ -472,7 +496,6 @@ const fn default_true() -> bool {
/// (primarily the Codex IDE extension). NOTE: these are different from
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct Notice {
    /// Tracks whether the user has acknowledged the full access warning prompt.
    pub hide_full_access_warning: Option<bool>,

@@ -10,7 +10,7 @@ This module is the canonical place to **load and describe Codex configuration la

Exported from `codex_core::config_loader`:

- `load_config_layers_state(codex_home, cwd_opt, cli_overrides, overrides) -> ConfigLayerStack`
- `load_config_layers_state(codex_home, cwd_opt, cli_overrides, overrides, cloud_requirements) -> ConfigLayerStack`
- `ConfigLayerStack`
  - `effective_config() -> toml::Value`
  - `origins() -> HashMap<String, ConfigLayerMetadata>`
@@ -49,6 +49,7 @@ let layers = load_config_layers_state(
    Some(cwd),
    &cli_overrides,
    LoaderOverrides::default(),
    None,
).await?;

let effective = layers.effective_config();

codex-rs/core/src/config_loader/cloud_requirements.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use crate::config_loader::ConfigRequirementsToml;
use futures::future::BoxFuture;
use futures::future::FutureExt;
use futures::future::Shared;
use std::fmt;
use std::future::Future;
use std::io;
use std::sync::Arc;

#[derive(Clone)]
pub struct CloudRequirementsLoader {
    fut: Shared<BoxFuture<'static, Arc<io::Result<Option<ConfigRequirementsToml>>>>>,
}

impl CloudRequirementsLoader {
    pub fn new<F>(fut: F) -> Self
    where
        F: Future<Output = io::Result<Option<ConfigRequirementsToml>>> + Send + 'static,
    {
        Self {
            fut: fut.map(Arc::new).boxed().shared(),
        }
    }

    pub async fn get(&self) -> io::Result<Option<ConfigRequirementsToml>> {
        match self.fut.clone().await.as_ref() {
            Ok(requirements) => Ok(requirements.clone()),
            Err(err) => Err(io::Error::new(err.kind(), err.to_string())),
        }
    }
}

impl fmt::Debug for CloudRequirementsLoader {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("CloudRequirementsLoader").finish()
    }
}

impl Default for CloudRequirementsLoader {
    fn default() -> Self {
        Self::new(async { Ok(None) })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::sync::Arc;
    use std::sync::atomic::AtomicUsize;
    use std::sync::atomic::Ordering;

    #[tokio::test]
    async fn shared_future_runs_once() {
        let counter = Arc::new(AtomicUsize::new(0));
        let counter_clone = Arc::clone(&counter);
        let loader = CloudRequirementsLoader::new(async move {
            counter_clone.fetch_add(1, Ordering::SeqCst);
            Ok(Some(ConfigRequirementsToml::default()))
        });

        let (first, second) = tokio::join!(loader.get(), loader.get());
        assert_eq!(first.as_ref().ok(), second.as_ref().ok());
        assert_eq!(counter.load(Ordering::SeqCst), 1);
    }
}
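A minimal usage sketch for the loader introduced above (illustrative, not part of the diff): because the wrapped future is `Shared`, every clone of the loader and every `get()` call observes the result of one underlying execution, which is exactly what `shared_future_runs_once` asserts.

    let loader = CloudRequirementsLoader::new(async {
        // A hypothetical fetch; real callers supply their own future here.
        Ok(Some(ConfigRequirementsToml::default()))
    });
    let (first, second) = tokio::join!(loader.get(), loader.get());
    // Both awaits see the same outcome; the future body ran once.
    assert_eq!(first.ok(), second.ok());
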
@@ -6,6 +6,8 @@ use serde::Deserialize;
use std::collections::BTreeMap;
use std::fmt;

use super::requirements_exec_policy::RequirementsExecPolicy;
use super::requirements_exec_policy::RequirementsExecPolicyToml;
use crate::config::Constrained;
use crate::config::ConstraintError;

@@ -13,6 +15,7 @@ use crate::config::ConstraintError;
pub enum RequirementSource {
    Unknown,
    MdmManagedPreferences { domain: String, key: String },
    CloudRequirements,
    SystemRequirementsToml { file: AbsolutePathBuf },
    LegacyManagedConfigTomlFromFile { file: AbsolutePathBuf },
    LegacyManagedConfigTomlFromMdm,
@@ -25,6 +28,9 @@ impl fmt::Display for RequirementSource {
            RequirementSource::MdmManagedPreferences { domain, key } => {
                write!(f, "MDM {domain}:{key}")
            }
            RequirementSource::CloudRequirements => {
                write!(f, "cloud requirements")
            }
            RequirementSource::SystemRequirementsToml { file } => {
                write!(f, "{}", file.as_path().display())
            }
@@ -45,6 +51,8 @@ pub struct ConfigRequirements {
    pub approval_policy: Constrained<AskForApproval>,
    pub sandbox_policy: Constrained<SandboxPolicy>,
    pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
    pub(crate) exec_policy: Option<Sourced<RequirementsExecPolicy>>,
    pub enforce_residency: Constrained<Option<ResidencyRequirement>>,
}

impl Default for ConfigRequirements {
@@ -53,6 +61,8 @@ impl Default for ConfigRequirements {
            approval_policy: Constrained::allow_any_from_default(),
            sandbox_policy: Constrained::allow_any(SandboxPolicy::ReadOnly),
            mcp_servers: None,
            exec_policy: None,
            enforce_residency: Constrained::allow_any(None),
        }
    }
}
@@ -75,6 +85,8 @@ pub struct ConfigRequirementsToml {
    pub allowed_approval_policies: Option<Vec<AskForApproval>>,
    pub allowed_sandbox_modes: Option<Vec<SandboxModeRequirement>>,
    pub mcp_servers: Option<BTreeMap<String, McpServerRequirement>>,
    pub rules: Option<RequirementsExecPolicyToml>,
    pub enforce_residency: Option<ResidencyRequirement>,
}

/// Value paired with the requirement source it came from, for better error
@@ -104,6 +116,8 @@ pub struct ConfigRequirementsWithSources {
    pub allowed_approval_policies: Option<Sourced<Vec<AskForApproval>>>,
    pub allowed_sandbox_modes: Option<Sourced<Vec<SandboxModeRequirement>>>,
    pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
    pub rules: Option<Sourced<RequirementsExecPolicyToml>>,
    pub enforce_residency: Option<Sourced<ResidencyRequirement>>,
}

impl ConfigRequirementsWithSources {
@@ -135,6 +149,8 @@ impl ConfigRequirementsWithSources {
                allowed_approval_policies,
                allowed_sandbox_modes,
                mcp_servers,
                rules,
                enforce_residency,
            }
        );
    }
@@ -144,11 +160,15 @@
            allowed_approval_policies,
            allowed_sandbox_modes,
            mcp_servers,
            rules,
            enforce_residency,
        } = self;
        ConfigRequirementsToml {
            allowed_approval_policies: allowed_approval_policies.map(|sourced| sourced.value),
            allowed_sandbox_modes: allowed_sandbox_modes.map(|sourced| sourced.value),
            mcp_servers: mcp_servers.map(|sourced| sourced.value),
            rules: rules.map(|sourced| sourced.value),
            enforce_residency: enforce_residency.map(|sourced| sourced.value),
        }
    }
}
@@ -180,11 +200,19 @@ impl From<SandboxMode> for SandboxModeRequirement {
    }
}

#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ResidencyRequirement {
    Us,
}

impl ConfigRequirementsToml {
    pub fn is_empty(&self) -> bool {
        self.allowed_approval_policies.is_none()
            && self.allowed_sandbox_modes.is_none()
            && self.mcp_servers.is_none()
            && self.rules.is_none()
            && self.enforce_residency.is_none()
    }
}

@@ -196,6 +224,8 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
            allowed_approval_policies,
            allowed_sandbox_modes,
            mcp_servers,
            rules,
            enforce_residency,
        } = toml;

        let approval_policy: Constrained<AskForApproval> = match allowed_approval_policies {
@@ -270,10 +300,46 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
            }
            None => Constrained::allow_any(default_sandbox_policy),
        };
        let exec_policy = match rules {
            Some(Sourced { value, source }) => {
                let policy = value.to_requirements_policy().map_err(|err| {
                    ConstraintError::ExecPolicyParse {
                        requirement_source: source.clone(),
                        reason: err.to_string(),
                    }
                })?;
                Some(Sourced::new(policy, source))
            }
            None => None,
        };

        let enforce_residency: Constrained<Option<ResidencyRequirement>> = match enforce_residency {
            Some(Sourced {
                value: residency,
                source: requirement_source,
            }) => {
                let required = Some(residency);
                Constrained::new(required, move |candidate| {
                    if candidate == &required {
                        Ok(())
                    } else {
                        Err(ConstraintError::InvalidValue {
                            field_name: "enforce_residency",
                            candidate: format!("{candidate:?}"),
                            allowed: format!("{required:?}"),
                            requirement_source: requirement_source.clone(),
                        })
                    }
                })?
            }
            None => Constrained::allow_any(None),
        };
        Ok(ConfigRequirements {
            approval_policy,
            sandbox_policy,
            mcp_servers,
            exec_policy,
            enforce_residency,
        })
    }
}
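To make the shape of the two new `ConfigRequirementsToml` fields concrete, a sketch of a requirements TOML that exercises them alongside the existing ones (values illustrative; parsed the same way the fixtures in the tests below are):

    let source: ConfigRequirementsToml = from_str(
        r#"
allowed_approval_policies = ["on-request"]
allowed_sandbox_modes = ["read-only"]
enforce_residency = "us"

[rules]
prefix_rules = [
    { pattern = [{ token = "rm" }], decision = "forbidden" },
]
"#,
    )?;
    assert!(!source.is_empty());
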
@@ -282,16 +348,25 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_execpolicy::Decision;
    use codex_execpolicy::Evaluation;
    use codex_execpolicy::RuleMatch;
    use codex_protocol::protocol::NetworkAccess;
    use codex_utils_absolute_path::AbsolutePathBuf;
    use pretty_assertions::assert_eq;
    use toml::from_str;

    fn tokens(cmd: &[&str]) -> Vec<String> {
        cmd.iter().map(std::string::ToString::to_string).collect()
    }

    fn with_unknown_source(toml: ConfigRequirementsToml) -> ConfigRequirementsWithSources {
        let ConfigRequirementsToml {
            allowed_approval_policies,
            allowed_sandbox_modes,
            mcp_servers,
            rules,
            enforce_residency,
        } = toml;
        ConfigRequirementsWithSources {
            allowed_approval_policies: allowed_approval_policies
@@ -299,6 +374,9 @@ mod tests {
            allowed_sandbox_modes: allowed_sandbox_modes
                .map(|value| Sourced::new(value, RequirementSource::Unknown)),
            mcp_servers: mcp_servers.map(|value| Sourced::new(value, RequirementSource::Unknown)),
            rules: rules.map(|value| Sourced::new(value, RequirementSource::Unknown)),
            enforce_residency: enforce_residency
                .map(|value| Sourced::new(value, RequirementSource::Unknown)),
        }
    }

@@ -312,6 +390,8 @@ mod tests {
            SandboxModeRequirement::WorkspaceWrite,
            SandboxModeRequirement::DangerFullAccess,
        ];
        let enforce_residency = ResidencyRequirement::Us;
        let enforce_source = source.clone();

        // Intentionally constructed without `..Default::default()` so adding a new field to
        // `ConfigRequirementsToml` forces this test to be updated.
@@ -319,6 +399,8 @@ mod tests {
            allowed_approval_policies: Some(allowed_approval_policies.clone()),
            allowed_sandbox_modes: Some(allowed_sandbox_modes.clone()),
            mcp_servers: None,
            rules: None,
            enforce_residency: Some(enforce_residency),
        };

        target.merge_unset_fields(source.clone(), other);
@@ -332,6 +414,8 @@ mod tests {
                )),
                allowed_sandbox_modes: Some(Sourced::new(allowed_sandbox_modes, source)),
                mcp_servers: None,
                rules: None,
                enforce_residency: Some(Sourced::new(enforce_residency, enforce_source)),
            }
        );
    }
@@ -360,6 +444,8 @@ mod tests {
                )),
                allowed_sandbox_modes: None,
                mcp_servers: None,
                rules: None,
                enforce_residency: None,
            }
        );
        Ok(())
@@ -396,6 +482,8 @@ mod tests {
                )),
                allowed_sandbox_modes: None,
                mcp_servers: None,
                rules: None,
                enforce_residency: None,
            }
        );
        Ok(())
@@ -448,6 +536,33 @@ mod tests {
        Ok(())
    }

    #[test]
    fn constraint_error_includes_cloud_requirements_source() -> Result<()> {
        let source: ConfigRequirementsToml = from_str(
            r#"
allowed_approval_policies = ["on-request"]
"#,
        )?;

        let source_location = RequirementSource::CloudRequirements;

        let mut target = ConfigRequirementsWithSources::default();
        target.merge_unset_fields(source_location.clone(), source);
        let requirements = ConfigRequirements::try_from(target)?;

        assert_eq!(
            requirements.approval_policy.can_set(&AskForApproval::Never),
            Err(ConstraintError::InvalidValue {
                field_name: "approval_policy",
                candidate: "Never".into(),
                allowed: "[OnRequest]".into(),
                requirement_source: source_location,
            })
        );

        Ok(())
    }

    #[test]
    fn deserialize_allowed_approval_policies() -> Result<()> {
        let toml_str = r#"
@@ -595,4 +710,64 @@ mod tests {
        );
        Ok(())
    }

    #[test]
    fn deserialize_exec_policy_requirements() -> Result<()> {
        let toml_str = r#"
[rules]
prefix_rules = [
    { pattern = [{ token = "rm" }], decision = "forbidden" },
]
"#;
        let config: ConfigRequirementsToml = from_str(toml_str)?;
        let requirements: ConfigRequirements = with_unknown_source(config).try_into()?;
        let policy = requirements.exec_policy.expect("exec policy").value;

        assert_eq!(
            policy.as_ref().check(&tokens(&["rm", "-rf"]), &|_| {
                panic!("rule should match so heuristic should not be called");
            }),
            Evaluation {
                decision: Decision::Forbidden,
                matched_rules: vec![RuleMatch::PrefixRuleMatch {
                    matched_prefix: tokens(&["rm"]),
                    decision: Decision::Forbidden,
                    justification: None,
                }],
            }
        );

        Ok(())
    }

    #[test]
    fn exec_policy_error_includes_requirement_source() -> Result<()> {
        let toml_str = r#"
[rules]
prefix_rules = [
    { pattern = [{ token = "rm" }] },
]
"#;
        let config: ConfigRequirementsToml = from_str(toml_str)?;
        let requirements_toml_file =
            AbsolutePathBuf::from_absolute_path("/etc/codex/requirements.toml")?;
        let source_location = RequirementSource::SystemRequirementsToml {
            file: requirements_toml_file,
        };

        let mut requirements_with_sources = ConfigRequirementsWithSources::default();
        requirements_with_sources.merge_unset_fields(source_location.clone(), config);
        let err = ConfigRequirements::try_from(requirements_with_sources)
            .expect_err("invalid exec policy");

        assert_eq!(
            err,
            ConstraintError::ExecPolicyParse {
                requirement_source: source_location,
                reason: "rules prefix_rule at index 0 is missing a decision".to_string(),
            }
        );

        Ok(())
    }
}

@@ -1,3 +1,4 @@
mod cloud_requirements;
mod config_requirements;
mod diagnostics;
mod fingerprint;
@@ -6,6 +7,7 @@ mod layer_io;
mod macos;
mod merge;
mod overrides;
mod requirements_exec_policy;
mod state;

#[cfg(test)]
@@ -23,16 +25,19 @@ use codex_protocol::config_types::TrustLevel;
use codex_protocol::protocol::AskForApproval;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dunce::canonicalize as normalize_path;
use serde::Deserialize;
use std::io;
use std::path::Path;
use toml::Value as TomlValue;

pub use cloud_requirements::CloudRequirementsLoader;
pub use config_requirements::ConfigRequirements;
pub use config_requirements::ConfigRequirementsToml;
pub use config_requirements::McpServerIdentity;
pub use config_requirements::McpServerRequirement;
pub use config_requirements::RequirementSource;
pub use config_requirements::ResidencyRequirement;
pub use config_requirements::SandboxModeRequirement;
pub use config_requirements::Sourced;
pub use diagnostics::ConfigError;
@@ -67,6 +72,7 @@ const DEFAULT_PROJECT_ROOT_MARKERS: &[&str] = &[".git"];
/// earlier layer cannot be overridden by a later layer:
///
/// - admin: managed preferences (*)
/// - cloud: managed cloud requirements
/// - system `/etc/codex/requirements.toml`
///
/// For backwards compatibility, we also load from
@@ -96,6 +102,7 @@ pub async fn load_config_layers_state(
    cwd: Option<AbsolutePathBuf>,
    cli_overrides: &[(String, TomlValue)],
    overrides: LoaderOverrides,
    cloud_requirements: CloudRequirementsLoader,
) -> io::Result<ConfigLayerStack> {
    let mut config_requirements_toml = ConfigRequirementsWithSources::default();

@@ -108,6 +115,11 @@ pub async fn load_config_layers_state(
    )
    .await?;

    if let Some(requirements) = cloud_requirements.get().await? {
        config_requirements_toml
            .merge_unset_fields(RequirementSource::CloudRequirements, requirements);
    }

    // Honor /etc/codex/requirements.toml.
    if cfg!(unix) {
        load_requirements_toml(
@@ -224,6 +236,7 @@ pub async fn load_config_layers_state(
        &cwd,
        &project_trust_context.project_root,
        &project_trust_context,
        codex_home,
    )
    .await?;
    layers.extend(project_layers);
@@ -659,7 +672,11 @@ async fn load_project_layers(
    cwd: &AbsolutePathBuf,
    project_root: &AbsolutePathBuf,
    trust_context: &ProjectTrustContext,
    codex_home: &Path,
) -> io::Result<Vec<ConfigLayerEntry>> {
    let codex_home_abs = AbsolutePathBuf::from_absolute_path(codex_home)?;
    let codex_home_normalized =
        normalize_path(codex_home_abs.as_path()).unwrap_or_else(|_| codex_home_abs.to_path_buf());
    let mut dirs = cwd
        .as_path()
        .ancestors()
@@ -690,6 +707,11 @@ async fn load_project_layers(
        let layer_dir = AbsolutePathBuf::from_absolute_path(dir)?;
        let decision = trust_context.decision_for_dir(&layer_dir);
        let dot_codex_abs = AbsolutePathBuf::from_absolute_path(&dot_codex)?;
        let dot_codex_normalized =
            normalize_path(dot_codex_abs.as_path()).unwrap_or_else(|_| dot_codex_abs.to_path_buf());
        if dot_codex_abs == codex_home_abs || dot_codex_normalized == codex_home_normalized {
            continue;
        }
        let config_file = dot_codex_abs.join(CONFIG_TOML_FILE)?;
        match tokio::fs::read_to_string(&config_file).await {
            Ok(contents) => {

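A sketch of the merge order implemented above (bindings such as `cloud_toml`, `system_toml`, and `file` are assumed for illustration): sources are applied highest-precedence first, and `merge_unset_fields` only fills fields that are still unset, so a cloud-provided value can never be overwritten by the system file.

    let mut reqs = ConfigRequirementsWithSources::default();
    // (1) MDM/managed preferences are merged first (highest precedence).
    // (2) Cloud requirements come next.
    reqs.merge_unset_fields(RequirementSource::CloudRequirements, cloud_toml);
    // (3) /etc/codex/requirements.toml is merged last and only fills gaps.
    reqs.merge_unset_fields(RequirementSource::SystemRequirementsToml { file }, system_toml);
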
codex-rs/core/src/config_loader/requirements_exec_policy.rs (new file, 236 lines)
@@ -0,0 +1,236 @@
use codex_execpolicy::Decision;
use codex_execpolicy::Policy;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use codex_execpolicy::rule::RuleRef;
use multimap::MultiMap;
use serde::Deserialize;
use std::sync::Arc;
use thiserror::Error;

#[derive(Debug, Clone)]
pub(crate) struct RequirementsExecPolicy {
    policy: Policy,
}

impl RequirementsExecPolicy {
    pub fn new(policy: Policy) -> Self {
        Self { policy }
    }
}

impl PartialEq for RequirementsExecPolicy {
    fn eq(&self, other: &Self) -> bool {
        policy_fingerprint(&self.policy) == policy_fingerprint(&other.policy)
    }
}

impl Eq for RequirementsExecPolicy {}

impl AsRef<Policy> for RequirementsExecPolicy {
    fn as_ref(&self) -> &Policy {
        &self.policy
    }
}

fn policy_fingerprint(policy: &Policy) -> Vec<String> {
    let mut entries = Vec::new();
    for (program, rules) in policy.rules().iter_all() {
        for rule in rules {
            entries.push(format!("{program}:{rule:?}"));
        }
    }
    entries.sort();
    entries
}

/// TOML representation of `[rules]` within `requirements.toml`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyToml {
    pub prefix_rules: Vec<RequirementsExecPolicyPrefixRuleToml>,
}

/// A TOML representation of the `prefix_rule(...)` Starlark builtin.
///
/// This mirrors the builtin defined in `execpolicy/src/parser.rs`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPrefixRuleToml {
    pub pattern: Vec<RequirementsExecPolicyPatternTokenToml>,
    pub decision: Option<RequirementsExecPolicyDecisionToml>,
    pub justification: Option<String>,
}

/// TOML-friendly representation of a pattern token.
///
/// Starlark supports either a string token or a list of alternative tokens at
/// each position, but TOML arrays cannot mix strings and arrays. Using an
/// array of tables sidesteps that restriction.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPatternTokenToml {
    pub token: Option<String>,
    pub any_of: Option<Vec<String>>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum RequirementsExecPolicyDecisionToml {
    Allow,
    Prompt,
    Forbidden,
}

impl RequirementsExecPolicyDecisionToml {
    fn as_decision(self) -> Decision {
        match self {
            Self::Allow => Decision::Allow,
            Self::Prompt => Decision::Prompt,
            Self::Forbidden => Decision::Forbidden,
        }
    }
}

#[derive(Debug, Error)]
pub enum RequirementsExecPolicyParseError {
    #[error("rules prefix_rules cannot be empty")]
    EmptyPrefixRules,

    #[error("rules prefix_rule at index {rule_index} has an empty pattern")]
    EmptyPattern { rule_index: usize },

    #[error(
        "rules prefix_rule at index {rule_index} has an invalid pattern token at index {token_index}: {reason}"
    )]
    InvalidPatternToken {
        rule_index: usize,
        token_index: usize,
        reason: String,
    },

    #[error("rules prefix_rule at index {rule_index} has an empty justification")]
    EmptyJustification { rule_index: usize },

    #[error("rules prefix_rule at index {rule_index} is missing a decision")]
    MissingDecision { rule_index: usize },

    #[error(
        "rules prefix_rule at index {rule_index} has decision 'allow', which is not permitted in requirements.toml: Codex merges these rules with other config and uses the most restrictive result (use 'prompt' or 'forbidden')"
    )]
    AllowDecisionNotAllowed { rule_index: usize },
}

impl RequirementsExecPolicyToml {
    /// Convert requirements TOML rules into the internal `.rules`
    /// representation used by `codex-execpolicy`.
    pub fn to_policy(&self) -> Result<Policy, RequirementsExecPolicyParseError> {
        if self.prefix_rules.is_empty() {
            return Err(RequirementsExecPolicyParseError::EmptyPrefixRules);
        }

        let mut rules_by_program: MultiMap<String, RuleRef> = MultiMap::new();

        for (rule_index, rule) in self.prefix_rules.iter().enumerate() {
            if let Some(justification) = &rule.justification
                && justification.trim().is_empty()
            {
                return Err(RequirementsExecPolicyParseError::EmptyJustification { rule_index });
            }

            if rule.pattern.is_empty() {
                return Err(RequirementsExecPolicyParseError::EmptyPattern { rule_index });
            }

            let pattern_tokens = rule
                .pattern
                .iter()
                .enumerate()
                .map(|(token_index, token)| parse_pattern_token(token, rule_index, token_index))
                .collect::<Result<Vec<_>, _>>()?;

            let decision = match rule.decision {
                Some(RequirementsExecPolicyDecisionToml::Allow) => {
                    return Err(RequirementsExecPolicyParseError::AllowDecisionNotAllowed {
                        rule_index,
                    });
                }
                Some(decision) => decision.as_decision(),
                None => {
                    return Err(RequirementsExecPolicyParseError::MissingDecision { rule_index });
                }
            };
            let justification = rule.justification.clone();

            let (first_token, remaining_tokens) = pattern_tokens
                .split_first()
                .ok_or(RequirementsExecPolicyParseError::EmptyPattern { rule_index })?;

            let rest: Arc<[PatternToken]> = remaining_tokens.to_vec().into();

            for head in first_token.alternatives() {
                let rule: RuleRef = Arc::new(PrefixRule {
                    pattern: PrefixPattern {
                        first: Arc::from(head.as_str()),
                        rest: rest.clone(),
                    },
                    decision,
                    justification: justification.clone(),
                });
                rules_by_program.insert(head.clone(), rule);
            }
        }

        Ok(Policy::new(rules_by_program))
    }

    pub(crate) fn to_requirements_policy(
        &self,
    ) -> Result<RequirementsExecPolicy, RequirementsExecPolicyParseError> {
        self.to_policy().map(RequirementsExecPolicy::new)
    }
}

fn parse_pattern_token(
    token: &RequirementsExecPolicyPatternTokenToml,
    rule_index: usize,
    token_index: usize,
) -> Result<PatternToken, RequirementsExecPolicyParseError> {
    match (&token.token, &token.any_of) {
        (Some(single), None) => {
            if single.trim().is_empty() {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "token cannot be empty".to_string(),
                });
            }
            Ok(PatternToken::Single(single.clone()))
        }
        (None, Some(alternatives)) => {
            if alternatives.is_empty() {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "any_of cannot be empty".to_string(),
                });
            }
            if alternatives.iter().any(|alt| alt.trim().is_empty()) {
                return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
                    rule_index,
                    token_index,
                    reason: "any_of cannot include empty tokens".to_string(),
                });
            }
            Ok(PatternToken::Alts(alternatives.clone()))
        }
        (Some(_), Some(_)) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
            rule_index,
            token_index,
            reason: "set either token or any_of, not both".to_string(),
        }),
        (None, None) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
            rule_index,
            token_index,
            reason: "set either token or any_of".to_string(),
        }),
    }
}
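One detail worth calling out in `to_policy()` above: when the first pattern token is an `any_of`, the rule is indexed once per alternative head, so lookups keyed by the program name keep working. A sketch (illustrative, assuming the multimap accessor used by `policy_fingerprint`):

    let rules: RequirementsExecPolicyToml = toml::from_str(
        r#"
prefix_rules = [
    { pattern = [{ any_of = ["rm", "rmdir"] }], decision = "prompt" },
]
"#,
    )?;
    let policy = rules.to_policy()?;
    // One TOML rule, but two per-program entries: "rm" and "rmdir".
    assert_eq!(policy.rules().iter_all().count(), 2);
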
@@ -4,11 +4,15 @@ use crate::config::CONFIG_TOML_FILE;
use crate::config::ConfigBuilder;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConstraintError;
use crate::config::ProjectConfig;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLoadError;
use crate::config_loader::ConfigRequirements;
use crate::config_loader::ConfigRequirementsToml;
use crate::config_loader::config_requirements::ConfigRequirementsWithSources;
use crate::config_loader::config_requirements::RequirementSource;
use crate::config_loader::fingerprint::version_for_toml;
use crate::config_loader::load_requirements_toml;
use codex_protocol::config_types::TrustLevel;
@@ -65,6 +69,7 @@ async fn returns_config_error_for_invalid_user_config_toml() {
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await
    .expect_err("expected error");
@@ -94,6 +99,7 @@ async fn returns_config_error_for_invalid_managed_config_toml() {
        Some(cwd),
        &[] as &[(String, TomlValue)],
        overrides,
        CloudRequirementsLoader::default(),
    )
    .await
    .expect_err("expected error");
@@ -182,6 +188,7 @@ extra = true
        Some(cwd),
        &[] as &[(String, TomlValue)],
        overrides,
        CloudRequirementsLoader::default(),
    )
    .await
    .expect("load config");
@@ -218,6 +225,7 @@ async fn returns_empty_when_all_layers_missing() {
        Some(cwd),
        &[] as &[(String, TomlValue)],
        overrides,
        CloudRequirementsLoader::default(),
    )
    .await
    .expect("load layers");
@@ -315,6 +323,7 @@ flag = false
        Some(cwd),
        &[] as &[(String, TomlValue)],
        overrides,
        CloudRequirementsLoader::default(),
    )
    .await
    .expect("load config");
@@ -354,6 +363,7 @@ allowed_sandbox_modes = ["read-only"]
                ),
            ),
        },
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -414,6 +424,7 @@ allowed_approval_policies = ["never"]
                ),
            ),
        },
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -440,6 +451,7 @@ async fn load_requirements_toml_produces_expected_constraints() -> anyhow::Resul
        &requirements_file,
        r#"
allowed_approval_policies = ["never", "on-request"]
enforce_residency = "us"
"#,
    )
    .await?;
@@ -454,7 +466,6 @@ allowed_approval_policies = ["never", "on-request"]
            .cloned(),
        Some(vec![AskForApproval::Never, AskForApproval::OnRequest])
    );

    let config_requirements: ConfigRequirements = config_requirements_toml.try_into()?;
    assert_eq!(
        config_requirements.approval_policy.value(),
@@ -469,6 +480,99 @@ allowed_approval_policies = ["never", "on-request"]
            .can_set(&AskForApproval::OnFailure)
            .is_err()
    );
    assert_eq!(
        config_requirements.enforce_residency.value(),
        Some(crate::config_loader::ResidencyRequirement::Us)
    );
    Ok(())
}

#[tokio::test(flavor = "current_thread")]
async fn cloud_requirements_are_not_overwritten_by_system_requirements() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let requirements_file = tmp.path().join("requirements.toml");
    tokio::fs::write(
        &requirements_file,
        r#"
allowed_approval_policies = ["on-request"]
"#,
    )
    .await?;

    let mut config_requirements_toml = ConfigRequirementsWithSources::default();
    config_requirements_toml.merge_unset_fields(
        RequirementSource::CloudRequirements,
        ConfigRequirementsToml {
            allowed_approval_policies: Some(vec![AskForApproval::Never]),
            allowed_sandbox_modes: None,
            mcp_servers: None,
            rules: None,
            enforce_residency: None,
        },
    );
    load_requirements_toml(&mut config_requirements_toml, &requirements_file).await?;

    assert_eq!(
        config_requirements_toml
            .allowed_approval_policies
            .as_ref()
            .map(|sourced| sourced.value.clone()),
        Some(vec![AskForApproval::Never])
    );
    assert_eq!(
        config_requirements_toml
            .allowed_approval_policies
            .as_ref()
            .map(|sourced| sourced.source.clone()),
        Some(RequirementSource::CloudRequirements)
    );

    Ok(())
}

#[tokio::test]
async fn load_config_layers_includes_cloud_requirements() -> anyhow::Result<()> {
    let tmp = tempdir()?;
    let codex_home = tmp.path().join("home");
    tokio::fs::create_dir_all(&codex_home).await?;
    let cwd = AbsolutePathBuf::from_absolute_path(tmp.path())?;

    let requirements = ConfigRequirementsToml {
        allowed_approval_policies: Some(vec![AskForApproval::Never]),
        allowed_sandbox_modes: None,
        mcp_servers: None,
        rules: None,
        enforce_residency: None,
    };
    let expected = requirements.clone();
    let cloud_requirements = CloudRequirementsLoader::new(async move { Ok(Some(requirements)) });

    let layers = load_config_layers_state(
        &codex_home,
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        cloud_requirements,
    )
    .await?;

    assert_eq!(
        layers.requirements_toml().allowed_approval_policies,
        expected.allowed_approval_policies
    );
    assert_eq!(
        layers
            .requirements()
            .approval_policy
            .can_set(&AskForApproval::OnRequest),
        Err(ConstraintError::InvalidValue {
            field_name: "approval_policy",
            candidate: "OnRequest".into(),
            allowed: "[Never]".into(),
            requirement_source: RequirementSource::CloudRequirements,
        })
    );

    Ok(())
}

@@ -501,6 +605,7 @@ async fn project_layers_prefer_closest_cwd() -> std::io::Result<()> {
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -632,6 +737,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -655,6 +761,103 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
    Ok(())
}

#[tokio::test]
async fn codex_home_is_not_loaded_as_project_layer_from_home_dir() -> std::io::Result<()> {
    let tmp = tempdir()?;
    let home_dir = tmp.path().join("home");
    let codex_home = home_dir.join(".codex");
    tokio::fs::create_dir_all(&codex_home).await?;
    tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), "foo = \"user\"\n").await?;

    let cwd = AbsolutePathBuf::from_absolute_path(&home_dir)?;
    let layers = load_config_layers_state(
        &codex_home,
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

    let project_layers: Vec<_> = layers
        .get_layers(
            super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
            true,
        )
        .into_iter()
        .filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
        .collect();
    let expected: Vec<&ConfigLayerEntry> = Vec::new();
    assert_eq!(expected, project_layers);
    assert_eq!(
        layers.effective_config().get("foo"),
        Some(&TomlValue::String("user".to_string()))
    );

    Ok(())
}

#[tokio::test]
async fn codex_home_within_project_tree_is_not_double_loaded() -> std::io::Result<()> {
    let tmp = tempdir()?;
    let project_root = tmp.path().join("project");
    let nested = project_root.join("child");
    let project_dot_codex = project_root.join(".codex");
    let nested_dot_codex = nested.join(".codex");

    tokio::fs::create_dir_all(&nested_dot_codex).await?;
    tokio::fs::create_dir_all(project_root.join(".git")).await?;
    tokio::fs::write(nested_dot_codex.join(CONFIG_TOML_FILE), "foo = \"child\"\n").await?;

    tokio::fs::create_dir_all(&project_dot_codex).await?;
    make_config_for_test(&project_dot_codex, &project_root, TrustLevel::Trusted, None).await?;
    let user_config_path = project_dot_codex.join(CONFIG_TOML_FILE);
    let user_config_contents = tokio::fs::read_to_string(&user_config_path).await?;
    tokio::fs::write(
        &user_config_path,
        format!("foo = \"user\"\n{user_config_contents}"),
    )
    .await?;

    let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
    let layers = load_config_layers_state(
        &project_dot_codex,
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

    let project_layers: Vec<_> = layers
        .get_layers(
            super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
            true,
        )
        .into_iter()
        .filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
        .collect();

    let child_config: TomlValue = toml::from_str("foo = \"child\"\n").expect("parse child config");
    assert_eq!(
        vec![&ConfigLayerEntry {
            name: super::ConfigLayerSource::Project {
                dot_codex_folder: AbsolutePathBuf::from_absolute_path(&nested_dot_codex)?,
            },
            config: child_config.clone(),
            version: version_for_toml(&child_config),
            disabled_reason: None,
        }],
        project_layers
    );
    assert_eq!(
        layers.effective_config().get("foo"),
        Some(&TomlValue::String("child".to_string()))
    );

    Ok(())
}

#[tokio::test]
async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<()> {
    let tmp = tempdir()?;
@@ -691,6 +894,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
        Some(cwd.clone()),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;
    let project_layers_untrusted: Vec<_> = layers_untrusted
@@ -728,6 +932,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;
    let project_layers_unknown: Vec<_> = layers_unknown
@@ -788,6 +993,7 @@ async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::
        Some(cwd.clone()),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;
    let project_layers: Vec<_> = layers
@@ -843,6 +1049,7 @@ async fn cli_overrides_with_relative_paths_do_not_break_trust_check() -> std::io
        Some(cwd),
        &cli_overrides,
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -884,6 +1091,7 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
        Some(cwd),
        &[] as &[(String, TomlValue)],
        LoaderOverrides::default(),
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -911,3 +1119,325 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()

    Ok(())
}

mod requirements_exec_policy_tests {
    use super::super::config_requirements::ConfigRequirementsWithSources;
    use super::super::requirements_exec_policy::RequirementsExecPolicyDecisionToml;
    use super::super::requirements_exec_policy::RequirementsExecPolicyParseError;
    use super::super::requirements_exec_policy::RequirementsExecPolicyPatternTokenToml;
    use super::super::requirements_exec_policy::RequirementsExecPolicyPrefixRuleToml;
    use super::super::requirements_exec_policy::RequirementsExecPolicyToml;
    use crate::config_loader::ConfigLayerEntry;
    use crate::config_loader::ConfigLayerStack;
    use crate::config_loader::ConfigRequirements;
    use crate::config_loader::ConfigRequirementsToml;
    use crate::config_loader::RequirementSource;
    use crate::exec_policy::load_exec_policy;
    use codex_app_server_protocol::ConfigLayerSource;
    use codex_execpolicy::Decision;
    use codex_execpolicy::Evaluation;
    use codex_execpolicy::RuleMatch;
    use codex_utils_absolute_path::AbsolutePathBuf;
    use pretty_assertions::assert_eq;
    use std::path::Path;
    use tempfile::tempdir;
    use toml::Value as TomlValue;
    use toml::from_str;

    fn tokens(cmd: &[&str]) -> Vec<String> {
        cmd.iter().map(std::string::ToString::to_string).collect()
    }

    fn panic_if_called(_: &[String]) -> Decision {
        panic!("rule should match so heuristic should not be called");
    }

    fn config_stack_for_dot_codex_folder_with_requirements(
        dot_codex_folder: &Path,
        requirements: ConfigRequirements,
    ) -> ConfigLayerStack {
        let dot_codex_folder = AbsolutePathBuf::from_absolute_path(dot_codex_folder)
            .expect("absolute dot_codex_folder");
        let layer = ConfigLayerEntry::new(
            ConfigLayerSource::Project { dot_codex_folder },
            TomlValue::Table(Default::default()),
        );
        ConfigLayerStack::new(vec![layer], requirements, ConfigRequirementsToml::default())
            .expect("ConfigLayerStack")
    }

    fn requirements_from_toml(toml_str: &str) -> ConfigRequirements {
        let config: ConfigRequirementsToml = from_str(toml_str).expect("parse requirements toml");
        let mut with_sources = ConfigRequirementsWithSources::default();
        with_sources.merge_unset_fields(RequirementSource::Unknown, config);
        ConfigRequirements::try_from(with_sources).expect("requirements")
    }

    #[test]
    fn parses_single_prefix_rule_from_raw_toml() -> anyhow::Result<()> {
        let toml_str = r#"
prefix_rules = [
    { pattern = [{ token = "rm" }], decision = "forbidden" },
]
"#;

        let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;

        assert_eq!(
            parsed,
            RequirementsExecPolicyToml {
                prefix_rules: vec![RequirementsExecPolicyPrefixRuleToml {
                    pattern: vec![RequirementsExecPolicyPatternTokenToml {
                        token: Some("rm".to_string()),
                        any_of: None,
                    }],
                    decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
                    justification: None,
                }],
            }
        );

        Ok(())
    }

    #[test]
    fn parses_multiple_prefix_rules_from_raw_toml() -> anyhow::Result<()> {
        let toml_str = r#"
prefix_rules = [
    { pattern = [{ token = "rm" }], decision = "forbidden" },
    { pattern = [{ token = "git" }, { any_of = ["push", "commit"] }], decision = "prompt", justification = "review changes before push or commit" },
]
"#;

        let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;

        assert_eq!(
            parsed,
            RequirementsExecPolicyToml {
                prefix_rules: vec![
                    RequirementsExecPolicyPrefixRuleToml {
                        pattern: vec![RequirementsExecPolicyPatternTokenToml {
                            token: Some("rm".to_string()),
                            any_of: None,
                        }],
                        decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
|
||||
justification: None,
|
||||
},
|
||||
RequirementsExecPolicyPrefixRuleToml {
|
||||
pattern: vec![
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: Some("git".to_string()),
|
||||
any_of: None,
|
||||
},
|
||||
RequirementsExecPolicyPatternTokenToml {
|
||||
token: None,
|
||||
any_of: Some(vec!["push".to_string(), "commit".to_string()]),
|
||||
},
|
||||
],
|
||||
decision: Some(RequirementsExecPolicyDecisionToml::Prompt),
|
||||
justification: Some("review changes before push or commit".to_string()),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn converts_rules_toml_into_internal_policy_representation() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;
|
||||
let policy = parsed.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["rm", "-rf", "/tmp"]), &panic_if_called),
|
||||
Evaluation {
|
||||
decision: Decision::Forbidden,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["rm"]),
|
||||
decision: Decision::Forbidden,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn head_any_of_expands_into_multiple_program_rules() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
prefix_rules = [
|
||||
{ pattern = [{ any_of = ["git", "hg"] }, { token = "status" }], decision = "prompt" },
|
||||
]
|
||||
"#;
|
||||
let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;
|
||||
let policy = parsed.to_policy()?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["git", "status"]), &panic_if_called),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["git", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
policy.check(&tokens(&["hg", "status"]), &panic_if_called),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: tokens(&["hg", "status"]),
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_decision_is_rejected() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }] },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;
|
||||
let err = parsed.to_policy().expect_err("missing decision");
|
||||
|
||||
assert!(matches!(
|
||||
err,
|
||||
RequirementsExecPolicyParseError::MissingDecision { rule_index: 0 }
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allow_decision_is_rejected() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "allow" },
|
||||
]
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;
|
||||
let err = parsed.to_policy().expect_err("allow decision not allowed");
|
||||
|
||||
assert!(matches!(
|
||||
err,
|
||||
RequirementsExecPolicyParseError::AllowDecisionNotAllowed { rule_index: 0 }
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_prefix_rules_is_rejected() -> anyhow::Result<()> {
|
||||
let toml_str = r#"
|
||||
prefix_rules = []
|
||||
"#;
|
||||
|
||||
let parsed: RequirementsExecPolicyToml = from_str(toml_str)?;
|
||||
let err = parsed.to_policy().expect_err("empty prefix rules");
|
||||
|
||||
assert!(matches!(
|
||||
err,
|
||||
RequirementsExecPolicyParseError::EmptyPrefixRules
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn loads_requirements_exec_policy_without_rules_files() -> anyhow::Result<()> {
|
||||
let temp_dir = tempdir()?;
|
||||
let requirements = requirements_from_toml(
|
||||
r#"
|
||||
[rules]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "rm" }], decision = "forbidden" },
|
||||
]
|
||||
"#,
|
||||
);
|
||||
let config_stack =
|
||||
config_stack_for_dot_codex_folder_with_requirements(temp_dir.path(), requirements);
|
||||
|
||||
let policy = load_exec_policy(&config_stack).await?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check_multiple([vec!["rm".to_string()]].iter(), &panic_if_called),
|
||||
Evaluation {
|
||||
decision: Decision::Forbidden,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: vec!["rm".to_string()],
|
||||
decision: Decision::Forbidden,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn merges_requirements_exec_policy_with_file_rules() -> anyhow::Result<()> {
|
||||
let temp_dir = tempdir()?;
|
||||
let policy_dir = temp_dir.path().join("rules");
|
||||
std::fs::create_dir_all(&policy_dir)?;
|
||||
std::fs::write(
|
||||
policy_dir.join("deny.rules"),
|
||||
r#"prefix_rule(pattern=["rm"], decision="forbidden")"#,
|
||||
)?;
|
||||
|
||||
let requirements = requirements_from_toml(
|
||||
r#"
|
||||
[rules]
|
||||
prefix_rules = [
|
||||
{ pattern = [{ token = "git" }, { token = "push" }], decision = "prompt" },
|
||||
]
|
||||
"#,
|
||||
);
|
||||
let config_stack =
|
||||
config_stack_for_dot_codex_folder_with_requirements(temp_dir.path(), requirements);
|
||||
|
||||
let policy = load_exec_policy(&config_stack).await?;
|
||||
|
||||
assert_eq!(
|
||||
policy.check_multiple([vec!["rm".to_string()]].iter(), &panic_if_called),
|
||||
Evaluation {
|
||||
decision: Decision::Forbidden,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: vec!["rm".to_string()],
|
||||
decision: Decision::Forbidden,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
policy.check_multiple(
|
||||
[vec!["git".to_string(), "push".to_string()]].iter(),
|
||||
&panic_if_called
|
||||
),
|
||||
Evaluation {
|
||||
decision: Decision::Prompt,
|
||||
matched_rules: vec![RuleMatch::PrefixRuleMatch {
|
||||
matched_prefix: vec!["git".to_string(), "push".to_string()],
|
||||
decision: Decision::Prompt,
|
||||
justification: None,
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,9 +3,8 @@ use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use async_channel::unbounded;
|
||||
pub use codex_app_server_protocol::AppInfo;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::AuthManager;
|
||||
@@ -15,28 +14,13 @@ use crate::features::Feature;
|
||||
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
|
||||
use crate::mcp::auth::compute_auth_statuses;
|
||||
use crate::mcp::with_codex_apps_mcp;
|
||||
use crate::mcp_connection_manager::DEFAULT_STARTUP_TIMEOUT;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ConnectorInfo {
|
||||
#[serde(rename = "id")]
|
||||
pub connector_id: String,
|
||||
#[serde(rename = "name")]
|
||||
pub connector_name: String,
|
||||
#[serde(default, rename = "description")]
|
||||
pub connector_description: Option<String>,
|
||||
#[serde(default, rename = "logo_url")]
|
||||
pub logo_url: Option<String>,
|
||||
#[serde(default, rename = "install_url")]
|
||||
pub install_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub is_accessible: bool,
|
||||
}
|
||||
|
||||
pub async fn list_accessible_connectors_from_mcp_tools(
|
||||
config: &Config,
|
||||
) -> anyhow::Result<Vec<ConnectorInfo>> {
|
||||
if !config.features.enabled(Feature::Connectors) {
|
||||
) -> anyhow::Result<Vec<AppInfo>> {
|
||||
if !config.features.enabled(Feature::Apps) {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
@@ -72,6 +56,13 @@ pub async fn list_accessible_connectors_from_mcp_tools(
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Some(cfg) = mcp_servers.get(CODEX_APPS_MCP_SERVER_NAME) {
|
||||
let timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
|
||||
mcp_connection_manager
|
||||
.wait_for_server_ready(CODEX_APPS_MCP_SERVER_NAME, timeout)
|
||||
.await;
|
||||
}
|
||||
|
||||
let tools = mcp_connection_manager.list_all_tools().await;
|
||||
cancel_token.cancel();
|
||||
|
||||
@@ -86,13 +77,17 @@ fn auth_manager_from_config(config: &Config) -> std::sync::Arc<AuthManager> {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn connector_display_label(connector: &ConnectorInfo) -> String {
|
||||
format_connector_label(&connector.connector_name, &connector.connector_id)
|
||||
pub fn connector_display_label(connector: &AppInfo) -> String {
|
||||
format_connector_label(&connector.name, &connector.id)
|
||||
}
|
||||
|
||||
pub fn connector_mention_slug(connector: &AppInfo) -> String {
|
||||
connector_name_slug(&connector_display_label(connector))
|
||||
}
|
||||
|
||||
pub(crate) fn accessible_connectors_from_mcp_tools(
|
||||
mcp_tools: &HashMap<String, crate::mcp_connection_manager::ToolInfo>,
|
||||
) -> Vec<ConnectorInfo> {
|
||||
) -> Vec<AppInfo> {
|
||||
let tools = mcp_tools.values().filter_map(|tool| {
|
||||
if tool.server_name != CODEX_APPS_MCP_SERVER_NAME {
|
||||
return None;
|
||||
@@ -105,34 +100,37 @@ pub(crate) fn accessible_connectors_from_mcp_tools(
|
||||
}
|
||||
|
||||
pub fn merge_connectors(
|
||||
connectors: Vec<ConnectorInfo>,
|
||||
accessible_connectors: Vec<ConnectorInfo>,
|
||||
) -> Vec<ConnectorInfo> {
|
||||
let mut merged: HashMap<String, ConnectorInfo> = connectors
|
||||
connectors: Vec<AppInfo>,
|
||||
accessible_connectors: Vec<AppInfo>,
|
||||
) -> Vec<AppInfo> {
|
||||
let mut merged: HashMap<String, AppInfo> = connectors
|
||||
.into_iter()
|
||||
.map(|mut connector| {
|
||||
connector.is_accessible = false;
|
||||
(connector.connector_id.clone(), connector)
|
||||
(connector.id.clone(), connector)
|
||||
})
|
||||
.collect();
|
||||
|
||||
for mut connector in accessible_connectors {
|
||||
connector.is_accessible = true;
|
||||
let connector_id = connector.connector_id.clone();
|
||||
let connector_id = connector.id.clone();
|
||||
if let Some(existing) = merged.get_mut(&connector_id) {
|
||||
existing.is_accessible = true;
|
||||
if existing.connector_name == existing.connector_id
|
||||
&& connector.connector_name != connector.connector_id
|
||||
{
|
||||
existing.connector_name = connector.connector_name;
|
||||
if existing.name == existing.id && connector.name != connector.id {
|
||||
existing.name = connector.name;
|
||||
}
|
||||
if existing.connector_description.is_none() && connector.connector_description.is_some()
|
||||
{
|
||||
existing.connector_description = connector.connector_description;
|
||||
if existing.description.is_none() && connector.description.is_some() {
|
||||
existing.description = connector.description;
|
||||
}
|
||||
if existing.logo_url.is_none() && connector.logo_url.is_some() {
|
||||
existing.logo_url = connector.logo_url;
|
||||
}
|
||||
if existing.logo_url_dark.is_none() && connector.logo_url_dark.is_some() {
|
||||
existing.logo_url_dark = connector.logo_url_dark;
|
||||
}
|
||||
if existing.distribution_channel.is_none() && connector.distribution_channel.is_some() {
|
||||
existing.distribution_channel = connector.distribution_channel;
|
||||
}
|
||||
} else {
|
||||
merged.insert(connector_id, connector);
|
||||
}
|
||||
@@ -141,23 +139,20 @@ pub fn merge_connectors(
|
||||
let mut merged = merged.into_values().collect::<Vec<_>>();
|
||||
for connector in &mut merged {
|
||||
if connector.install_url.is_none() {
|
||||
connector.install_url = Some(connector_install_url(
|
||||
&connector.connector_name,
|
||||
&connector.connector_id,
|
||||
));
|
||||
connector.install_url = Some(connector_install_url(&connector.name, &connector.id));
|
||||
}
|
||||
}
|
||||
merged.sort_by(|left, right| {
|
||||
right
|
||||
.is_accessible
|
||||
.cmp(&left.is_accessible)
|
||||
.then_with(|| left.connector_name.cmp(&right.connector_name))
|
||||
.then_with(|| left.connector_id.cmp(&right.connector_id))
|
||||
.then_with(|| left.name.cmp(&right.name))
|
||||
.then_with(|| left.id.cmp(&right.id))
|
||||
});
|
||||
merged
|
||||
}
|
||||
|
||||
fn collect_accessible_connectors<I>(tools: I) -> Vec<ConnectorInfo>
|
||||
fn collect_accessible_connectors<I>(tools: I) -> Vec<AppInfo>
|
||||
where
|
||||
I: IntoIterator<Item = (String, Option<String>)>,
|
||||
{
|
||||
@@ -172,14 +167,16 @@ where
|
||||
connectors.insert(connector_id, connector_name);
|
||||
}
|
||||
}
|
||||
let mut accessible: Vec<ConnectorInfo> = connectors
|
||||
let mut accessible: Vec<AppInfo> = connectors
|
||||
.into_iter()
|
||||
.map(|(connector_id, connector_name)| ConnectorInfo {
|
||||
install_url: Some(connector_install_url(&connector_name, &connector_id)),
|
||||
connector_id,
|
||||
connector_name,
|
||||
connector_description: None,
|
||||
.map(|(connector_id, connector_name)| AppInfo {
|
||||
id: connector_id.clone(),
|
||||
name: connector_name.clone(),
|
||||
description: None,
|
||||
logo_url: None,
|
||||
logo_url_dark: None,
|
||||
distribution_channel: None,
|
||||
install_url: Some(connector_install_url(&connector_name, &connector_id)),
|
||||
is_accessible: true,
|
||||
})
|
||||
.collect();
|
||||
@@ -187,8 +184,8 @@ where
|
||||
right
|
||||
.is_accessible
|
||||
.cmp(&left.is_accessible)
|
||||
.then_with(|| left.connector_name.cmp(&right.connector_name))
|
||||
.then_with(|| left.connector_id.cmp(&right.connector_id))
|
||||
.then_with(|| left.name.cmp(&right.name))
|
||||
.then_with(|| left.id.cmp(&right.id))
|
||||
});
|
||||
accessible
|
||||
}
|
||||
@@ -205,7 +202,7 @@ pub fn connector_install_url(name: &str, connector_id: &str) -> String {
|
||||
format!("https://chatgpt.com/apps/{slug}/{connector_id}")
|
||||
}
|
||||
|
||||
fn connector_name_slug(name: &str) -> String {
|
||||
pub fn connector_name_slug(name: &str) -> String {
|
||||
let mut normalized = String::with_capacity(name.len());
|
||||
for character in name.chars() {
|
||||
if character.is_ascii_alphanumeric() {
|
||||
|
||||
@@ -266,7 +266,7 @@ impl ContextManager {
|
||||
}
|
||||
|
||||
fn process_item(&self, item: &ResponseItem, policy: TruncationPolicy) -> ResponseItem {
|
||||
let policy_with_serialization_budget = policy.mul(1.2);
|
||||
let policy_with_serialization_budget = policy * 1.2;
|
||||
match item {
|
||||
ResponseItem::FunctionCallOutput { call_id, output } => {
|
||||
let truncated =
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use crate::config_loader::ResidencyRequirement;
|
||||
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
|
||||
use codex_client::CodexHttpClient;
|
||||
pub use codex_client::CodexRequestBuilder;
|
||||
use reqwest::header::HeaderMap;
|
||||
use reqwest::header::HeaderValue;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::Mutex;
|
||||
@@ -24,6 +26,7 @@ use std::sync::RwLock;
|
||||
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));
|
||||
pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
|
||||
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
|
||||
pub const RESIDENCY_HEADER_NAME: &str = "x-openai-internal-codex-residency";
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Originator {
|
||||
@@ -31,6 +34,8 @@ pub struct Originator {
|
||||
pub header_value: HeaderValue,
|
||||
}
|
||||
static ORIGINATOR: LazyLock<RwLock<Option<Originator>>> = LazyLock::new(|| RwLock::new(None));
|
||||
static REQUIREMENTS_RESIDENCY: LazyLock<RwLock<Option<ResidencyRequirement>>> =
|
||||
LazyLock::new(|| RwLock::new(None));
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetOriginatorError {
|
||||
@@ -74,6 +79,14 @@ pub fn set_default_originator(value: String) -> Result<(), SetOriginatorError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_default_client_residency_requirement(enforce_residency: Option<ResidencyRequirement>) {
|
||||
let Ok(mut guard) = REQUIREMENTS_RESIDENCY.write() else {
|
||||
tracing::warn!("Failed to acquire requirements residency lock");
|
||||
return;
|
||||
};
|
||||
*guard = enforce_residency;
|
||||
}
|
||||
|
||||
pub fn originator() -> Originator {
|
||||
if let Ok(guard) = ORIGINATOR.read()
|
||||
&& let Some(originator) = guard.as_ref()
|
||||
@@ -95,6 +108,12 @@ pub fn originator() -> Originator {
|
||||
get_originator_value(None)
|
||||
}
|
||||
|
||||
pub fn is_first_party_originator(originator_value: &str) -> bool {
|
||||
originator_value == DEFAULT_ORIGINATOR
|
||||
|| originator_value == "codex_vscode"
|
||||
|| originator_value.starts_with("Codex ")
|
||||
}
|
||||
|
||||
pub fn get_codex_user_agent() -> String {
|
||||
let build_version = env!("CARGO_PKG_VERSION");
|
||||
let os_info = os_info::get();
|
||||
@@ -160,10 +179,17 @@ pub fn create_client() -> CodexHttpClient {
|
||||
}
|
||||
|
||||
pub fn build_reqwest_client() -> reqwest::Client {
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert("originator", originator().header_value);
|
||||
if let Ok(guard) = REQUIREMENTS_RESIDENCY.read()
|
||||
&& let Some(requirement) = guard.as_ref()
|
||||
&& !headers.contains_key(RESIDENCY_HEADER_NAME)
|
||||
{
|
||||
let value = match requirement {
|
||||
ResidencyRequirement::Us => HeaderValue::from_static("us"),
|
||||
};
|
||||
headers.insert(RESIDENCY_HEADER_NAME, value);
|
||||
}
|
||||
let ua = get_codex_user_agent();
|
||||
|
||||
let mut builder = reqwest::Client::builder()
|
||||
@@ -185,6 +211,7 @@ fn is_sandboxed() -> bool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_get_codex_user_agent() {
|
||||
@@ -194,10 +221,21 @@ mod tests {
|
||||
assert!(user_agent.starts_with(&prefix));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn is_first_party_originator_matches_known_values() {
|
||||
assert_eq!(is_first_party_originator(DEFAULT_ORIGINATOR), true);
|
||||
assert_eq!(is_first_party_originator("codex_vscode"), true);
|
||||
assert_eq!(is_first_party_originator("Codex Something Else"), true);
|
||||
assert_eq!(is_first_party_originator("codex_cli"), false);
|
||||
assert_eq!(is_first_party_originator("Other"), false);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_create_client_sets_default_headers() {
|
||||
skip_if_no_network!();
|
||||
|
||||
set_default_client_residency_requirement(Some(ResidencyRequirement::Us));
|
||||
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
@@ -240,6 +278,13 @@ mod tests {
|
||||
.get("user-agent")
|
||||
.expect("user-agent header missing");
|
||||
assert_eq!(ua_header.to_str().unwrap(), expected_ua);
|
||||
|
||||
let residency_header = headers
|
||||
.get(RESIDENCY_HEADER_NAME)
|
||||
.expect("residency header missing");
|
||||
assert_eq!(residency_header.to_str().unwrap(), "us");
|
||||
|
||||
set_default_client_residency_requirement(None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -113,6 +113,9 @@ pub enum CodexErr {
|
||||
#[error("{0}")]
|
||||
UsageLimitReached(UsageLimitReachedError),
|
||||
|
||||
#[error("{0}")]
|
||||
ModelCap(ModelCapError),
|
||||
|
||||
#[error("{0}")]
|
||||
ResponseStreamFailed(ResponseStreamFailed),
|
||||
|
||||
@@ -205,7 +208,8 @@ impl CodexErr {
|
||||
| CodexErr::AgentLimitReached { .. }
|
||||
| CodexErr::Spawn
|
||||
| CodexErr::SessionConfiguredNotFirstEvent
|
||||
| CodexErr::UsageLimitReached(_) => false,
|
||||
| CodexErr::UsageLimitReached(_)
|
||||
| CodexErr::ModelCap(_) => false,
|
||||
CodexErr::Stream(..)
|
||||
| CodexErr::Timeout
|
||||
| CodexErr::UnexpectedStatus(_)
|
||||
@@ -371,9 +375,11 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
)
|
||||
}
|
||||
Some(PlanType::Known(KnownPlan::Free)) => {
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing)."
|
||||
.to_string()
|
||||
Some(PlanType::Known(KnownPlan::Free)) | Some(PlanType::Known(KnownPlan::Go)) => {
|
||||
format!(
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing),{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
)
|
||||
}
|
||||
Some(PlanType::Known(KnownPlan::Pro)) => format!(
|
||||
"You've hit your usage limit. Visit https://chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
@@ -394,6 +400,30 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ModelCapError {
|
||||
pub(crate) model: String,
|
||||
pub(crate) reset_after_seconds: Option<u64>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ModelCapError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut message = format!(
|
||||
"Model {} is at capacity. Please try a different model.",
|
||||
self.model
|
||||
);
|
||||
if let Some(seconds) = self.reset_after_seconds {
|
||||
message.push_str(&format!(
|
||||
" Try again in {}.",
|
||||
format_duration_short(seconds)
|
||||
));
|
||||
} else {
|
||||
message.push_str(" Try again later.");
|
||||
}
|
||||
write!(f, "{message}")
|
||||
}
|
||||
}
|
||||
|
||||
fn retry_suffix(resets_at: Option<&DateTime<Utc>>) -> String {
|
||||
if let Some(resets_at) = resets_at {
|
||||
let formatted = format_retry_timestamp(resets_at);
|
||||
@@ -425,6 +455,18 @@ fn format_retry_timestamp(resets_at: &DateTime<Utc>) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn format_duration_short(seconds: u64) -> String {
|
||||
if seconds < 60 {
|
||||
"less than a minute".to_string()
|
||||
} else if seconds < 3600 {
|
||||
format!("{}m", seconds / 60)
|
||||
} else if seconds < 86_400 {
|
||||
format!("{}h", seconds / 3600)
|
||||
} else {
|
||||
format!("{}d", seconds / 86_400)
|
||||
}
|
||||
}
|
||||
|
||||
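As a reading aid, not part of the diff: `format_duration_short` buckets by integer division, so values round down to the containing unit. A minimal sketch of the expected outputs, mirroring the branches shown above:

    assert_eq!(format_duration_short(59), "less than a minute");
    assert_eq!(format_duration_short(3599), "59m");   // 3599 / 60 = 59
    assert_eq!(format_duration_short(7200), "2h");    // 7200 / 3600 = 2
    assert_eq!(format_duration_short(172_800), "2d"); // 172_800 / 86_400 = 2
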
fn day_suffix(day: u32) -> &'static str {
    match day {
        11..=13 => "th",
@@ -488,6 +530,10 @@ impl CodexErr {
            CodexErr::UsageLimitReached(_)
            | CodexErr::QuotaExceeded
            | CodexErr::UsageNotIncluded => CodexErrorInfo::UsageLimitExceeded,
            CodexErr::ModelCap(err) => CodexErrorInfo::ModelCap {
                model: err.model.clone(),
                reset_after_seconds: err.reset_after_seconds,
            },
            CodexErr::RetryLimit(_) => CodexErrorInfo::ResponseTooManyFailedAttempts {
                http_status_code: self.http_status_code_value(),
            },
@@ -631,6 +677,45 @@ mod tests {
        );
    }

    #[test]
    fn model_cap_error_formats_message() {
        let err = ModelCapError {
            model: "boomslang".to_string(),
            reset_after_seconds: Some(120),
        };
        assert_eq!(
            err.to_string(),
            "Model boomslang is at capacity. Please try a different model. Try again in 2m."
        );
    }

    #[test]
    fn model_cap_error_formats_message_without_reset() {
        let err = ModelCapError {
            model: "boomslang".to_string(),
            reset_after_seconds: None,
        };
        assert_eq!(
            err.to_string(),
            "Model boomslang is at capacity. Please try a different model. Try again later."
        );
    }

    #[test]
    fn model_cap_error_maps_to_protocol() {
        let err = CodexErr::ModelCap(ModelCapError {
            model: "boomslang".to_string(),
            reset_after_seconds: Some(30),
        });
        assert_eq!(
            err.to_codex_protocol_error(),
            CodexErrorInfo::ModelCap {
                model: "boomslang".to_string(),
                reset_after_seconds: Some(30),
            }
        );
    }

    #[test]
    fn sandbox_denied_uses_aggregated_output_when_stderr_empty() {
        let output = ExecToolCallOutput {
@@ -734,7 +819,20 @@ mod tests {
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing)."
            "You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing), or try again later."
        );
    }

    #[test]
    fn usage_limit_reached_error_formats_go_plan() {
        let err = UsageLimitReachedError {
            plan_type: Some(PlanType::Known(KnownPlan::Go)),
            resets_at: None,
            rate_limits: Some(rate_limit_snapshot()),
        };
        assert_eq!(
            err.to_string(),
            "You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing), or try again later."
        );
    }


@@ -232,6 +232,72 @@ pub(crate) async fn execute_exec_env(
    finalize_exec_result(raw_output_result, sandbox, duration)
}

#[cfg(target_os = "windows")]
fn extract_create_process_as_user_error_code(err: &str) -> Option<String> {
    let marker = "CreateProcessAsUserW failed: ";
    let start = err.find(marker)? + marker.len();
    let tail = &err[start..];
    let digits: String = tail.chars().take_while(char::is_ascii_digit).collect();
    if digits.is_empty() {
        None
    } else {
        Some(digits)
    }
}

#[cfg(target_os = "windows")]
fn windowsapps_path_kind(path: &str) -> &'static str {
    let lower = path.to_ascii_lowercase();
    if lower.contains("\\program files\\windowsapps\\") {
        return "windowsapps_package";
    }
    if lower.contains("\\appdata\\local\\microsoft\\windowsapps\\") {
        return "windowsapps_alias";
    }
    if lower.contains("\\windowsapps\\") {
        return "windowsapps_other";
    }
    "other"
}
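For orientation, not part of the diff: the two helpers above are pure string functions, so their behavior can be sketched with illustrative inputs (the paths below are made up for the example):

    // The marker parser keeps only the leading digits after the marker.
    assert_eq!(
        extract_create_process_as_user_error_code("CreateProcessAsUserW failed: 1314 (access denied)"),
        Some("1314".to_string())
    );
    assert_eq!(extract_create_process_as_user_error_code("some other error"), None);

    // Path classification is case-insensitive and checks the most specific segment first.
    assert_eq!(windowsapps_path_kind("C:\\Program Files\\WindowsApps\\pkg\\tool.exe"), "windowsapps_package");
    assert_eq!(windowsapps_path_kind("C:\\Users\\a\\AppData\\Local\\Microsoft\\WindowsApps\\python.exe"), "windowsapps_alias");
    assert_eq!(windowsapps_path_kind("C:\\Windows\\System32\\cmd.exe"), "other");
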
#[cfg(target_os = "windows")]
fn record_windows_sandbox_spawn_failure(
    command_path: Option<&str>,
    windows_sandbox_level: codex_protocol::config_types::WindowsSandboxLevel,
    err: &str,
) {
    let Some(error_code) = extract_create_process_as_user_error_code(err) else {
        return;
    };
    let path = command_path.unwrap_or("unknown");
    let exe = Path::new(path)
        .file_name()
        .and_then(|name| name.to_str())
        .unwrap_or("unknown")
        .to_ascii_lowercase();
    let path_kind = windowsapps_path_kind(path);
    let level = if matches!(
        windows_sandbox_level,
        codex_protocol::config_types::WindowsSandboxLevel::Elevated
    ) {
        "elevated"
    } else {
        "legacy"
    };
    if let Some(metrics) = codex_otel::metrics::global() {
        let _ = metrics.counter(
            "codex.windows_sandbox.createprocessasuserw_failed",
            1,
            &[
                ("error_code", error_code.as_str()),
                ("path_kind", path_kind),
                ("exe", exe.as_str()),
                ("level", level),
            ],
        );
    }
}

#[cfg(target_os = "windows")]
async fn exec_windows_sandbox(
    params: ExecParams,
@@ -265,7 +331,9 @@ async fn exec_windows_sandbox(
            "windows sandbox: failed to resolve codex_home: {err}"
        )))
    })?;
    let use_elevated = matches!(windows_sandbox_level, WindowsSandboxLevel::Elevated);
    let command_path = command.first().cloned();
    let sandbox_level = windows_sandbox_level;
    let use_elevated = matches!(sandbox_level, WindowsSandboxLevel::Elevated);
    let spawn_res = tokio::task::spawn_blocking(move || {
        if use_elevated {
            run_windows_sandbox_capture_elevated(
@@ -294,6 +362,11 @@ async fn exec_windows_sandbox(
    let capture = match spawn_res {
        Ok(Ok(v)) => v,
        Ok(Err(err)) => {
            record_windows_sandbox_spawn_failure(
                command_path.as_deref(),
                sandbox_level,
                &err.to_string(),
            );
            return Err(CodexErr::Io(io::Error::other(format!(
                "windows sandbox: {err}"
            ))));

@@ -87,6 +87,15 @@ pub(crate) struct ExecPolicyManager {
    policy: ArcSwap<Policy>,
}

pub(crate) struct ExecApprovalRequest<'a> {
    pub(crate) features: &'a Features,
    pub(crate) command: &'a [String],
    pub(crate) approval_policy: AskForApproval,
    pub(crate) sandbox_policy: &'a SandboxPolicy,
    pub(crate) sandbox_permissions: SandboxPermissions,
    pub(crate) prefix_rule: Option<Vec<String>>,
}

impl ExecPolicyManager {
    pub(crate) fn new(policy: Arc<Policy>) -> Self {
        Self {
@@ -112,12 +121,16 @@ impl ExecPolicyManager {

    pub(crate) async fn create_exec_approval_requirement_for_command(
        &self,
        features: &Features,
        command: &[String],
        approval_policy: AskForApproval,
        sandbox_policy: &SandboxPolicy,
        sandbox_permissions: SandboxPermissions,
        req: ExecApprovalRequest<'_>,
    ) -> ExecApprovalRequirement {
        let ExecApprovalRequest {
            features,
            command,
            approval_policy,
            sandbox_policy,
            sandbox_permissions,
            prefix_rule,
        } = req;
        let exec_policy = self.current();
        let commands =
            parse_shell_lc_plain_commands(command).unwrap_or_else(|| vec![command.to_vec()]);
@@ -131,6 +144,12 @@ impl ExecPolicyManager {
        };
        let evaluation = exec_policy.check_multiple(commands.iter(), &exec_policy_fallback);

        let requested_amendment = derive_requested_execpolicy_amendment(
            features,
            prefix_rule.as_ref(),
            &evaluation.matched_rules,
        );

        match evaluation.decision {
            Decision::Forbidden => ExecApprovalRequirement::Forbidden {
                reason: derive_forbidden_reason(command, &evaluation),
@@ -144,9 +163,11 @@ impl ExecPolicyManager {
                ExecApprovalRequirement::NeedsApproval {
                    reason: derive_prompt_reason(command, &evaluation),
                    proposed_execpolicy_amendment: if features.enabled(Feature::ExecPolicy) {
                        try_derive_execpolicy_amendment_for_prompt_rules(
                            &evaluation.matched_rules,
                        )
                        requested_amendment.or_else(|| {
                            try_derive_execpolicy_amendment_for_prompt_rules(
                                &evaluation.matched_rules,
                            )
                        })
                    } else {
                        None
                    },
@@ -259,7 +280,18 @@ pub async fn load_exec_policy(config_stack: &ConfigLayerStack) -> Result<Policy,
    let policy = parser.build();
    tracing::debug!("loaded rules from {} files", policy_paths.len());

    Ok(policy)
    let Some(requirements_policy) = config_stack.requirements().exec_policy.as_deref() else {
        return Ok(policy);
    };

    let mut combined_rules = policy.rules().clone();
    for (program, rules) in requirements_policy.as_ref().rules().iter_all() {
        for rule in rules {
            combined_rules.insert(program.clone(), rule.clone());
        }
    }

    Ok(Policy::new(combined_rules))
}

/// If a command is not matched by any execpolicy rule, derive a [`Decision`].
@@ -382,6 +414,30 @@ fn try_derive_execpolicy_amendment_for_allow_rules(
    })
}

fn derive_requested_execpolicy_amendment(
    features: &Features,
    prefix_rule: Option<&Vec<String>>,
    matched_rules: &[RuleMatch],
) -> Option<ExecPolicyAmendment> {
    if !features.enabled(Feature::ExecPolicy) {
        return None;
    }

    let prefix_rule = prefix_rule?;
    if prefix_rule.is_empty() {
        return None;
    }

    if matched_rules
        .iter()
        .any(|rule_match| is_policy_match(rule_match) && rule_match.decision() == Decision::Prompt)
    {
        return None;
    }

    Some(ExecPolicyAmendment::new(prefix_rule.clone()))
}

/// Only return a reason when a policy rule drove the prompt decision.
fn derive_prompt_reason(command_args: &[String], evaluation: &Evaluation) -> Option<String> {
    let command = render_shlex_command(command_args);
@@ -756,13 +812,14 @@ prefix_rule(pattern=["rm"], decision="forbidden")

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &forbidden_script,
                AskForApproval::OnRequest,
                &SandboxPolicy::DangerFullAccess,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &forbidden_script,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::DangerFullAccess,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -790,17 +847,18 @@ prefix_rule(

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &[
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &[
                    "rm".to_string(),
                    "-rf".to_string(),
                    "/some/important/folder".to_string(),
                ],
                AskForApproval::OnRequest,
                &SandboxPolicy::DangerFullAccess,
                SandboxPermissions::UseDefault,
            )
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::DangerFullAccess,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -823,13 +881,14 @@ prefix_rule(

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::OnRequest,
                &SandboxPolicy::DangerFullAccess,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::DangerFullAccess,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -853,13 +912,14 @@ prefix_rule(

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::Never,
                &SandboxPolicy::DangerFullAccess,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::Never,
                sandbox_policy: &SandboxPolicy::DangerFullAccess,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -876,13 +936,14 @@ prefix_rule(

        let manager = ExecPolicyManager::default();
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::UnlessTrusted,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::UnlessTrusted,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -894,6 +955,40 @@ prefix_rule(
        );
    }

    #[tokio::test]
    async fn request_rule_uses_prefix_rule() {
        let command = vec![
            "cargo".to_string(),
            "install".to_string(),
            "cargo-insta".to_string(),
        ];
        let manager = ExecPolicyManager::default();
        let mut features = Features::with_defaults();
        features.enable(Feature::RequestRule);

        let requirement = manager
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &features,
                command: &command,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::RequireEscalated,
                prefix_rule: Some(vec!["cargo".to_string(), "install".to_string()]),
            })
            .await;

        assert_eq!(
            requirement,
            ExecApprovalRequirement::NeedsApproval {
                reason: None,
                proposed_execpolicy_amendment: Some(ExecPolicyAmendment::new(vec![
                    "cargo".to_string(),
                    "install".to_string(),
                ])),
            }
        );
    }

    #[tokio::test]
    async fn heuristics_apply_when_other_commands_match_policy() {
        let policy_src = r#"prefix_rule(pattern=["apple"], decision="allow")"#;
@@ -910,13 +1005,14 @@ prefix_rule(

        assert_eq!(
            ExecPolicyManager::new(policy)
                .create_exec_approval_requirement_for_command(
                    &Features::with_defaults(),
                    &command,
                    AskForApproval::UnlessTrusted,
                    &SandboxPolicy::DangerFullAccess,
                    SandboxPermissions::UseDefault,
                )
                .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                    features: &Features::with_defaults(),
                    command: &command,
                    approval_policy: AskForApproval::UnlessTrusted,
                    sandbox_policy: &SandboxPolicy::DangerFullAccess,
                    sandbox_permissions: SandboxPermissions::UseDefault,
                    prefix_rule: None,
                })
                .await,
            ExecApprovalRequirement::NeedsApproval {
                reason: None,
@@ -984,13 +1080,14 @@ prefix_rule(

        let manager = ExecPolicyManager::default();
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::UnlessTrusted,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::UnlessTrusted,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1011,13 +1108,14 @@ prefix_rule(

        let manager = ExecPolicyManager::default();
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &features,
                &command,
                AskForApproval::UnlessTrusted,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &features,
                command: &command,
                approval_policy: AskForApproval::UnlessTrusted,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1041,13 +1139,14 @@ prefix_rule(

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::OnRequest,
                &SandboxPolicy::DangerFullAccess,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::DangerFullAccess,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1068,13 +1167,14 @@ prefix_rule(
        ];
        let manager = ExecPolicyManager::default();
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::UnlessTrusted,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::UnlessTrusted,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1106,13 +1206,14 @@ prefix_rule(

        assert_eq!(
            ExecPolicyManager::new(policy)
                .create_exec_approval_requirement_for_command(
                    &Features::with_defaults(),
                    &command,
                    AskForApproval::UnlessTrusted,
                    &SandboxPolicy::ReadOnly,
                    SandboxPermissions::UseDefault,
                )
                .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                    features: &Features::with_defaults(),
                    command: &command,
                    approval_policy: AskForApproval::UnlessTrusted,
                    sandbox_policy: &SandboxPolicy::ReadOnly,
                    sandbox_permissions: SandboxPermissions::UseDefault,
                    prefix_rule: None,
                })
                .await,
            ExecApprovalRequirement::NeedsApproval {
                reason: None,
@@ -1129,13 +1230,14 @@ prefix_rule(

        let manager = ExecPolicyManager::default();
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::OnRequest,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1159,13 +1261,14 @@ prefix_rule(

        let manager = ExecPolicyManager::new(policy);
        let requirement = manager
            .create_exec_approval_requirement_for_command(
                &Features::with_defaults(),
                &command,
                AskForApproval::OnRequest,
                &SandboxPolicy::ReadOnly,
                SandboxPermissions::UseDefault,
            )
            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                features: &Features::with_defaults(),
                command: &command,
                approval_policy: AskForApproval::OnRequest,
                sandbox_policy: &SandboxPolicy::ReadOnly,
                sandbox_permissions: SandboxPermissions::UseDefault,
                prefix_rule: None,
            })
            .await;

        assert_eq!(
@@ -1226,13 +1329,14 @@ prefix_rule(
        assert_eq!(
            expected_req,
            policy
                .create_exec_approval_requirement_for_command(
                    &features,
                    &sneaky_command,
                    AskForApproval::OnRequest,
                    &SandboxPolicy::ReadOnly,
                    permissions,
                )
                .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                    features: &features,
                    command: &sneaky_command,
                    approval_policy: AskForApproval::OnRequest,
                    sandbox_policy: &SandboxPolicy::ReadOnly,
                    sandbox_permissions: permissions,
                    prefix_rule: None,
                })
                .await,
            "{pwsh_approval_reason}"
        );
@@ -1249,13 +1353,14 @@ prefix_rule(
            ]))),
            },
            policy
                .create_exec_approval_requirement_for_command(
                    &features,
                    &dangerous_command,
                    AskForApproval::OnRequest,
                    &SandboxPolicy::ReadOnly,
                    permissions,
                )
                .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                    features: &features,
                    command: &dangerous_command,
                    approval_policy: AskForApproval::OnRequest,
                    sandbox_policy: &SandboxPolicy::ReadOnly,
                    sandbox_permissions: permissions,
                    prefix_rule: None,
                })
                .await,
            r#"On all platforms, a forbidden command should require approval
(unless AskForApproval::Never is specified)."#
@@ -1268,13 +1373,14 @@ prefix_rule(
            reason: "`rm -rf /important/data` rejected: blocked by policy".to_string(),
            },
            policy
                .create_exec_approval_requirement_for_command(
                    &features,
                    &dangerous_command,
                    AskForApproval::Never,
                    &SandboxPolicy::ReadOnly,
                    permissions,
                )
                .create_exec_approval_requirement_for_command(ExecApprovalRequest {
                    features: &features,
                    command: &dangerous_command,
                    approval_policy: AskForApproval::Never,
                    sandbox_policy: &SandboxPolicy::ReadOnly,
                    sandbox_permissions: permissions,
                    prefix_rule: None,
                })
                .await,
            r#"On all platforms, a forbidden command should require approval
(unless AskForApproval::Never is specified)."#

@@ -89,6 +89,8 @@ pub enum Feature {
    WebSearchCached,
    /// Gate the execpolicy enforcement for shell/unified exec.
    ExecPolicy,
    /// Allow the model to request approval and propose exec rules.
    RequestRule,
    /// Enable Windows sandbox (restricted token) on Windows.
    WindowsSandbox,
    /// Use the elevated Windows sandbox pipeline (setup + runner).
@@ -99,6 +101,8 @@ pub enum Feature {
    RemoteModels,
    /// Experimental shell snapshotting.
    ShellSnapshot,
    /// Persist rollout metadata to a local SQLite database.
    Sqlite,
    /// Append additional AGENTS.md guidance to user instructions.
    ChildAgentsMd,
    /// Enforce UTF8 output in Powershell.
@@ -107,12 +111,18 @@ pub enum Feature {
    EnableRequestCompression,
    /// Enable collab tools.
    Collab,
    /// Enable connectors (apps).
    Connectors,
    /// Enable apps.
    Apps,
    /// Allow prompting and installing missing MCP dependencies.
    SkillMcpDependencyInstall,
    /// Prompt for missing skill env var dependencies.
    SkillEnvVarDependencyPrompt,
    /// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
    Steer,
    /// Enable collaboration modes (Plan, Code, Pair Programming, Execute).
    CollaborationModes,
    /// Enable personality selection in the TUI.
    Personality,
    /// Use the Responses API WebSocket transport for OpenAI by default.
    ResponsesWebsockets,
}
@@ -142,6 +152,8 @@ impl Feature {
pub struct LegacyFeatureUsage {
    pub alias: String,
    pub feature: Feature,
    pub summary: String,
    pub details: Option<String>,
}

/// Holds the effective set of enabled features.
@@ -198,9 +210,12 @@ impl Features {
    }

    pub fn record_legacy_usage_force(&mut self, alias: &str, feature: Feature) {
        let (summary, details) = legacy_usage_notice(alias, feature);
        self.legacy_usages.insert(LegacyFeatureUsage {
            alias: alias.to_string(),
            feature,
            summary,
            details,
        });
    }

@@ -211,10 +226,8 @@ impl Features {
        self.record_legacy_usage_force(alias, feature);
    }

    pub fn legacy_feature_usages(&self) -> impl Iterator<Item = (&str, Feature)> + '_ {
        self.legacy_usages
            .iter()
            .map(|usage| (usage.alias.as_str(), usage.feature))
    pub fn legacy_feature_usages(&self) -> impl Iterator<Item = &LegacyFeatureUsage> + '_ {
        self.legacy_usages.iter()
    }

    pub fn emit_metrics(&self, otel: &OtelManager) {
@@ -235,6 +248,21 @@ impl Features {
    /// Apply a table of key -> bool toggles (e.g. from TOML).
    pub fn apply_map(&mut self, m: &BTreeMap<String, bool>) {
        for (k, v) in m {
            match k.as_str() {
                "web_search_request" => {
                    self.record_legacy_usage_force(
                        "features.web_search_request",
                        Feature::WebSearchRequest,
                    );
                }
                "web_search_cached" => {
                    self.record_legacy_usage_force(
                        "features.web_search_cached",
                        Feature::WebSearchCached,
                    );
                }
                _ => {}
            }
            match feature_for_key(k) {
                Some(feat) => {
                    if k != feat.key() {
@@ -295,6 +323,42 @@ impl Features {
    }
}

fn legacy_usage_notice(alias: &str, feature: Feature) -> (String, Option<String>) {
    let canonical = feature.key();
    match feature {
        Feature::WebSearchRequest | Feature::WebSearchCached => {
            let label = match alias {
                "web_search" => "[features].web_search",
                "tools.web_search" => "[tools].web_search",
                "features.web_search_request" | "web_search_request" => {
                    "[features].web_search_request"
                }
                "features.web_search_cached" | "web_search_cached" => {
                    "[features].web_search_cached"
                }
                _ => alias,
            };
            let summary = format!("`{label}` is deprecated. Use `web_search` instead.");
            (summary, Some(web_search_details().to_string()))
        }
        _ => {
            let summary = format!("`{alias}` is deprecated. Use `[features].{canonical}` instead.");
            let details = if alias == canonical {
                None
            } else {
                Some(format!(
                    "Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
                ))
            };
            (summary, details)
        }
    }
}

fn web_search_details() -> &'static str {
    "Set `web_search` to `\"live\"`, `\"cached\"`, or `\"disabled\"` in config.toml."
}
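To make the mapping above concrete, here is roughly what the notice pairs look like for the two branches (illustrative only, derived from the format strings shown; the alias/feature pairings assume the aliases table later in this diff):

    // Web-search aliases all funnel to the same summary plus the shared details text.
    let (summary, details) = legacy_usage_notice("tools.web_search", Feature::WebSearchRequest);
    assert_eq!(summary, "`[tools].web_search` is deprecated. Use `web_search` instead.");
    assert!(details.is_some());

    // Other features point at the canonical `[features]` key; details are attached
    // only when the alias differs from the canonical key.
    let (summary, details) = legacy_usage_notice("connectors", Feature::Apps);
    assert_eq!(summary, "`connectors` is deprecated. Use `[features].apps` instead.");
    assert!(details.is_some()); // "connectors" != "apps", so the enable hint is included
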
/// Keys accepted in `[features]` tables.
|
||||
fn feature_for_key(key: &str) -> Option<Feature> {
|
||||
for spec in FEATURES {
|
||||
@@ -343,13 +407,13 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::WebSearchRequest,
|
||||
key: "web_search_request",
|
||||
stage: Stage::Stable,
|
||||
stage: Stage::Deprecated,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WebSearchCached,
|
||||
key: "web_search_cached",
|
||||
stage: Stage::UnderDevelopment,
|
||||
stage: Stage::Deprecated,
|
||||
default_enabled: false,
|
||||
},
|
||||
// Experimental program. Rendered in the `/experimental` menu for users.
|
||||
@@ -373,6 +437,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Sqlite,
|
||||
key: "sqlite",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ChildAgentsMd,
|
||||
key: "child_agents_md",
|
||||
@@ -391,6 +461,16 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::RequestRule,
|
||||
key: "request_rule",
|
||||
stage: Stage::Experimental {
|
||||
name: "Smart approvals",
|
||||
menu_description: "Get smarter \"Don't ask again\" rule requests.",
|
||||
announcement: "NEW: Try Smart approvals to get smarter \"Don't ask again\" requests. Enable in /experimental!",
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WindowsSandbox,
|
||||
key: "experimental_windows_sandbox",
|
||||
@@ -434,8 +514,8 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::EnableRequestCompression,
|
||||
key: "enable_request_compression",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
stage: Stage::Stable,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Collab,
|
||||
@@ -444,8 +524,24 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Connectors,
|
||||
key: "connectors",
|
||||
id: Feature::Apps,
|
||||
key: "apps",
|
||||
stage: Stage::Experimental {
|
||||
name: "Apps",
|
||||
menu_description: "Use a connected ChatGPT App using \"$\". Install Apps via /apps command. Restart Codex after enabling.",
|
||||
announcement: "NEW: Use ChatGPT Apps (Connectors) in Codex via $ mentions. Enable in /experimental and restart Codex!",
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::SkillMcpDependencyInstall,
|
||||
key: "skill_mcp_dependency_install",
|
||||
stage: Stage::Stable,
|
||||
default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::SkillEnvVarDependencyPrompt,
|
||||
key: "skill_env_var_dependency_prompt",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
@@ -465,6 +561,16 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Personality,
|
||||
key: "personality",
|
||||
stage: Stage::Experimental {
|
||||
name: "Personality",
|
||||
menu_description: "Choose a communication style for Codex.",
|
||||
announcement: "NEW: Pick a personality for Codex. Enable in /experimental!",
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ResponsesWebsockets,
|
||||
key: "responses_websockets",
|
||||
|
||||
@@ -9,6 +9,10 @@ struct Alias {
}

const ALIASES: &[Alias] = &[
    Alias {
        legacy_key: "connectors",
        feature: Feature::Apps,
    },
    Alias {
        legacy_key: "enable_experimental_windows_sandbox",
        feature: Feature::WindowsSandbox,

@@ -38,18 +38,22 @@ pub mod landlock;
pub mod mcp;
mod mcp_connection_manager;
pub mod models_manager;
mod transport_manager;
pub use mcp_connection_manager::MCP_SANDBOX_STATE_CAPABILITY;
pub use mcp_connection_manager::MCP_SANDBOX_STATE_METHOD;
pub use mcp_connection_manager::SandboxState;
mod mcp_tool_call;
mod mentions;
mod message_history;
mod model_provider_info;
pub mod parse_command;
pub mod path_utils;
pub mod powershell;
mod proposed_plan_parser;
pub mod sandboxing;
mod session_prefix;
mod stream_events_utils;
mod tagged_block_parser;
mod text_encoding;
pub mod token_data;
mod truncate;
@@ -91,18 +95,21 @@ pub mod shell;
pub mod shell_snapshot;
pub mod skills;
pub mod spawn;
pub mod state_db;
pub mod terminal;
mod tools;
pub mod turn_diff_tracker;
pub use rollout::ARCHIVED_SESSIONS_SUBDIR;
pub use rollout::INTERACTIVE_SESSION_SOURCES;
pub use rollout::RolloutRecorder;
pub use rollout::RolloutRecorderParams;
pub use rollout::SESSIONS_SUBDIR;
pub use rollout::SessionMeta;
pub use rollout::find_archived_thread_path_by_id_str;
#[deprecated(note = "use find_thread_path_by_id_str")]
pub use rollout::find_conversation_path_by_id_str;
pub use rollout::find_thread_path_by_id_str;
pub use rollout::find_thread_path_by_name_str;
pub use rollout::list::Cursor;
pub use rollout::list::ThreadItem;
pub use rollout::list::ThreadSortKey;
@@ -111,6 +118,7 @@ pub use rollout::list::parse_cursor;
pub use rollout::list::read_head_for_summary;
pub use rollout::list::read_session_meta_line;
pub use rollout::rollout_date_parts;
pub use transport_manager::TransportManager;
mod function_tool;
mod state;
mod tasks;

@@ -4,12 +4,53 @@ use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::determine_streamable_http_auth_status;
use codex_rmcp_client::supports_oauth_login;
use futures::future::join_all;
use tracing::warn;

use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;

#[derive(Debug, Clone)]
pub struct McpOAuthLoginConfig {
    pub url: String,
    pub http_headers: Option<HashMap<String, String>>,
    pub env_http_headers: Option<HashMap<String, String>>,
}

#[derive(Debug)]
pub enum McpOAuthLoginSupport {
    Supported(McpOAuthLoginConfig),
    Unsupported,
    Unknown(anyhow::Error),
}

pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
    let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var,
        http_headers,
        env_http_headers,
    } = transport
    else {
        return McpOAuthLoginSupport::Unsupported;
    };

    if bearer_token_env_var.is_some() {
        return McpOAuthLoginSupport::Unsupported;
    }

    match supports_oauth_login(url).await {
        Ok(true) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
            url: url.clone(),
            http_headers: http_headers.clone(),
            env_http_headers: env_http_headers.clone(),
        }),
        Ok(false) => McpOAuthLoginSupport::Unsupported,
        Err(err) => McpOAuthLoginSupport::Unknown(err),
    }
}

#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
    pub config: McpServerConfig,

@@ -1,7 +1,12 @@
pub mod auth;
mod skill_dependencies;

pub(crate) use skill_dependencies::maybe_prompt_and_install_mcp_dependencies;

use std::collections::HashMap;
use std::env;
use std::path::PathBuf;
use std::time::Duration;

use async_channel::unbounded;
use codex_protocol::protocol::McpListToolsResponseEvent;
@@ -21,7 +26,7 @@ use crate::mcp_connection_manager::SandboxState;

const MCP_TOOL_NAME_PREFIX: &str = "mcp";
const MCP_TOOL_NAME_DELIMITER: &str = "__";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps_mcp";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";

fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
@@ -93,7 +98,7 @@ fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> Mc
        },
        enabled: true,
        disabled_reason: None,
        startup_timeout_sec: None,
        startup_timeout_sec: Some(Duration::from_secs(30)),
        tool_timeout_sec: None,
        enabled_tools: None,
        disabled_tools: None,
@@ -124,7 +129,7 @@ pub(crate) fn effective_mcp_servers(
) -> HashMap<String, McpServerConfig> {
    with_codex_apps_mcp(
        config.mcp_servers.get().clone(),
        config.features.enabled(Feature::Connectors),
        config.features.enabled(Feature::Apps),
        auth,
        config,
    )

codex-rs/core/src/mcp/skill_dependencies.rs (new file, 519 lines)
@@ -0,0 +1,519 @@
use std::collections::HashMap;
use std::collections::HashSet;

use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_protocol::request_user_input::RequestUserInputQuestion;
use codex_protocol::request_user_input::RequestUserInputQuestionOption;
use codex_protocol::request_user_input::RequestUserInputResponse;
use codex_rmcp_client::perform_oauth_login;
use tokio_util::sync::CancellationToken;
use tracing::warn;

use super::auth::McpOAuthLoginSupport;
use super::auth::oauth_login_support;
use super::effective_mcp_servers;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::config::Config;
use crate::config::edit::ConfigEditsBuilder;
use crate::config::load_global_mcp_servers;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
use crate::default_client::is_first_party_originator;
use crate::default_client::originator;
use crate::features::Feature;
use crate::skills::SkillMetadata;
use crate::skills::model::SkillToolDependency;

const SKILL_MCP_DEPENDENCY_PROMPT_ID: &str = "skill_mcp_dependency_install";
const MCP_DEPENDENCY_OPTION_INSTALL: &str = "Install";
const MCP_DEPENDENCY_OPTION_SKIP: &str = "Continue anyway";

fn is_full_access_mode(turn_context: &TurnContext) -> bool {
    matches!(turn_context.approval_policy, AskForApproval::Never)
        && matches!(
            turn_context.sandbox_policy,
            SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. }
        )
}

fn format_missing_mcp_dependencies(missing: &HashMap<String, McpServerConfig>) -> String {
    let mut names = missing.keys().cloned().collect::<Vec<_>>();
    names.sort();
    names.join(", ")
}

async fn filter_prompted_mcp_dependencies(
    sess: &Session,
    missing: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let prompted = sess.mcp_dependency_prompted().await;
    if prompted.is_empty() {
        return missing.clone();
    }

    missing
        .iter()
        .filter(|(name, config)| !prompted.contains(&canonical_mcp_server_key(name, config)))
        .map(|(name, config)| (name.clone(), config.clone()))
        .collect()
}

async fn should_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    missing: &HashMap<String, McpServerConfig>,
    cancellation_token: &CancellationToken,
) -> bool {
    if is_full_access_mode(turn_context) {
        return true;
    }

    let server_list = format_missing_mcp_dependencies(missing);
    let question = RequestUserInputQuestion {
        id: SKILL_MCP_DEPENDENCY_PROMPT_ID.to_string(),
        header: "Install MCP servers?".to_string(),
        question: format!(
            "The following MCP servers are required by the selected skills but are not installed yet: {server_list}. Install them now?"
        ),
        is_other: false,
        is_secret: false,
        options: Some(vec![
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_INSTALL.to_string(),
                description:
                    "Install and enable the missing MCP servers in your global config."
                        .to_string(),
            },
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_SKIP.to_string(),
                description: "Skip installation for now and do not show again for these MCP servers in this session."
                    .to_string(),
            },
        ]),
    };
    let args = RequestUserInputArgs {
        questions: vec![question],
    };
    let sub_id = &turn_context.sub_id;
    let call_id = format!("mcp-deps-{sub_id}");
    let response_fut = sess.request_user_input(turn_context, call_id, args);
    let response = tokio::select! {
        biased;
        _ = cancellation_token.cancelled() => {
            let empty = RequestUserInputResponse {
                answers: HashMap::new(),
            };
            sess.notify_user_input_response(sub_id, empty.clone()).await;
            empty
        }
        response = response_fut => response.unwrap_or_else(|| RequestUserInputResponse {
            answers: HashMap::new(),
        }),
    };

    let install = response
        .answers
        .get(SKILL_MCP_DEPENDENCY_PROMPT_ID)
        .is_some_and(|answer| {
            answer
                .answers
                .iter()
                .any(|entry| entry == MCP_DEPENDENCY_OPTION_INSTALL)
        });

    let prompted_keys = missing
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config));
    sess.record_mcp_dependency_prompted(prompted_keys).await;

    install
}

pub(crate) async fn maybe_prompt_and_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    cancellation_token: &CancellationToken,
    mentioned_skills: &[SkillMetadata],
) {
    let originator_value = originator().value;
    if !is_first_party_originator(originator_value.as_str()) {
        // Only support first-party clients for now.
        return;
    }

    let config = turn_context.client.config();
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let unprompted_missing = filter_prompted_mcp_dependencies(sess, &missing).await;
    if unprompted_missing.is_empty() {
        return;
    }

    if should_install_mcp_dependencies(sess, turn_context, &unprompted_missing, cancellation_token)
        .await
    {
        maybe_install_mcp_dependencies(sess, turn_context, config.as_ref(), mentioned_skills).await;
    }
}

pub(crate) async fn maybe_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    config: &Config,
    mentioned_skills: &[SkillMetadata],
) {
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let codex_home = config.codex_home.clone();
    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let mut servers = match load_global_mcp_servers(&codex_home).await {
        Ok(servers) => servers,
        Err(err) => {
            warn!("failed to load MCP servers while installing skill dependencies: {err}");
            return;
        }
    };

    let mut updated = false;
    let mut added = Vec::new();
    for (name, config) in missing {
        if servers.contains_key(&name) {
            continue;
        }
        servers.insert(name.clone(), config.clone());
        added.push((name, config));
        updated = true;
    }

    if !updated {
        return;
    }

    if let Err(err) = ConfigEditsBuilder::new(&codex_home)
        .replace_mcp_servers(&servers)
        .apply()
        .await
    {
        warn!("failed to persist MCP dependencies for mentioned skills: {err}");
        return;
    }

    for (name, server_config) in added {
        let oauth_config = match oauth_login_support(&server_config.transport).await {
            McpOAuthLoginSupport::Supported(config) => config,
            McpOAuthLoginSupport::Unsupported => continue,
            McpOAuthLoginSupport::Unknown(err) => {
                warn!("MCP server may or may not require login for dependency {name}: {err}");
                continue;
            }
        };

        sess.notify_background_event(
            turn_context,
            format!(
                "Authenticating MCP {name}... Follow instructions in your browser if prompted."
            ),
        )
        .await;

        if let Err(err) = perform_oauth_login(
            &name,
            &oauth_config.url,
            config.mcp_oauth_credentials_store_mode,
            oauth_config.http_headers,
            oauth_config.env_http_headers,
            &[],
            config.mcp_oauth_callback_port,
        )
        .await
        {
            warn!("failed to login to MCP dependency {name}: {err}");
        }
    }

    // Refresh from the effective merged MCP map (global + repo + managed) and
    // overlay the updated global servers so we don't drop repo-scoped servers.
    let auth = sess.services.auth_manager.auth().await;
    let mut refresh_servers = effective_mcp_servers(config, auth.as_ref());
    for (name, server_config) in &servers {
        refresh_servers
            .entry(name.clone())
            .or_insert_with(|| server_config.clone());
    }
    sess.refresh_mcp_servers_now(
        turn_context,
        refresh_servers,
        config.mcp_oauth_credentials_store_mode,
    )
    .await;
}

fn canonical_mcp_key(transport: &str, identifier: &str, fallback: &str) -> String {
    let identifier = identifier.trim();
    if identifier.is_empty() {
        fallback.to_string()
    } else {
        format!("mcp__{transport}__{identifier}")
    }
}

fn canonical_mcp_server_key(name: &str, config: &McpServerConfig) -> String {
    match &config.transport {
        McpServerTransportConfig::Stdio { command, .. } => {
            canonical_mcp_key("stdio", command, name)
        }
        McpServerTransportConfig::StreamableHttp { url, .. } => {
            canonical_mcp_key("streamable_http", url, name)
        }
    }
}

fn canonical_mcp_dependency_key(dependency: &SkillToolDependency) -> Result<String, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(canonical_mcp_key("streamable_http", url, &dependency.value));
    }
    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(canonical_mcp_key("stdio", command, &dependency.value));
    }
    Err(format!("unsupported transport {transport}"))
}

pub(crate) fn collect_missing_mcp_dependencies(
    mentioned_skills: &[SkillMetadata],
    installed: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let mut missing = HashMap::new();
    let installed_keys: HashSet<String> = installed
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config))
        .collect();
    let mut seen_canonical_keys = HashSet::new();

    for skill in mentioned_skills {
        let Some(dependencies) = skill.dependencies.as_ref() else {
            continue;
        };

        for tool in &dependencies.tools {
            if !tool.r#type.eq_ignore_ascii_case("mcp") {
                continue;
            }
            let dependency_key = match canonical_mcp_dependency_key(tool) {
                Ok(key) => key,
                Err(err) => {
                    let dependency = tool.value.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };
            if installed_keys.contains(&dependency_key)
                || seen_canonical_keys.contains(&dependency_key)
            {
                continue;
            }

            let config = match mcp_dependency_to_server_config(tool) {
                Ok(config) => config,
                Err(err) => {
                    let dependency = dependency_key.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };

            missing.insert(tool.value.clone(), config);
            seen_canonical_keys.insert(dependency_key);
        }
    }

    missing
}

fn mcp_dependency_to_server_config(
    dependency: &SkillToolDependency,
) -> Result<McpServerConfig, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::StreamableHttp {
                url: url.clone(),
                bearer_token_env_var: None,
                http_headers: None,
                env_http_headers: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::Stdio {
                command: command.clone(),
                args: Vec::new(),
                env: None,
                env_vars: Vec::new(),
                cwd: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    Err(format!("unsupported transport {transport}"))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::skills::model::SkillDependencies;
    use codex_protocol::protocol::SkillScope;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    fn skill_with_tools(tools: Vec<SkillToolDependency>) -> SkillMetadata {
        SkillMetadata {
            name: "skill".to_string(),
            description: "skill".to_string(),
            short_description: None,
            interface: None,
            dependencies: Some(SkillDependencies { tools }),
            path: PathBuf::from("skill"),
            scope: SkillScope::User,
        }
    }

    #[test]
    fn collect_missing_respects_canonical_installed_key() {
        let url = "https://example.com/mcp".to_string();
        let skills = vec![skill_with_tools(vec![SkillToolDependency {
            r#type: "mcp".to_string(),
            value: "github".to_string(),
            description: None,
            transport: Some("streamable_http".to_string()),
            command: None,
            url: Some(url.clone()),
        }])];
        let installed = HashMap::from([(
            "alias".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &installed),
            HashMap::new()
        );
    }

    #[test]
    fn collect_missing_dedupes_by_canonical_key_but_preserves_original_name() {
        let url = "https://example.com/one".to_string();
        let skills = vec![skill_with_tools(vec![
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-one".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-two".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
        ])];

        let expected = HashMap::from([(
            "alias-one".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &HashMap::new()),
            expected
        );
    }
}
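
For orientation, a minimal sketch of the key shapes the `canonical_mcp_key` helper above produces; the concrete values are hypothetical illustrations, not assertions taken from this diff's tests:

    // Illustration only (not part of the diff): streamable-HTTP servers are
    // keyed by URL, stdio servers by command, and a blank identifier falls
    // back to the configured server name.
    assert_eq!(
        canonical_mcp_key("streamable_http", "https://example.com/mcp", "github"),
        "mcp__streamable_http__https://example.com/mcp"
    );
    assert_eq!(canonical_mcp_key("stdio", "   ", "my-server"), "my-server");

This is why the tests above treat a server installed under the name "alias" as satisfying a dependency declared as "github": both normalize to the same URL-based key.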

@@ -14,6 +14,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;

use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::McpAuthStatusEntry;
use anyhow::Context;
use anyhow::Result;
@@ -436,13 +437,33 @@ impl McpConnectionManager {
        .await
    }

    pub(crate) async fn wait_for_server_ready(&self, server_name: &str, timeout: Duration) -> bool {
        let Some(async_managed_client) = self.clients.get(server_name) else {
            return false;
        };

        match tokio::time::timeout(timeout, async_managed_client.client()).await {
            Ok(Ok(_)) => true,
            Ok(Err(_)) | Err(_) => false,
        }
    }

    /// Returns a single map that contains all tools. Each key is the
    /// fully-qualified name for the tool.
    #[instrument(level = "trace", skip_all)]
    pub async fn list_all_tools(&self) -> HashMap<String, ToolInfo> {
        let mut tools = HashMap::new();
        for managed_client in self.clients.values() {
            if let Ok(client) = managed_client.client().await {
        for (server_name, managed_client) in &self.clients {
            let client = if server_name == CODEX_APPS_MCP_SERVER_NAME {
                // Avoid blocking on codex_apps_mcp startup; use tools only when ready.
                match managed_client.client.clone().now_or_never() {
                    Some(Ok(client)) => Some(client),
                    _ => None,
                }
            } else {
                managed_client.client().await.ok()
            };
            if let Some(client) = client {
                tools.extend(qualify_tools(filter_tools(
                    client.tools,
                    client.tool_filter,
@@ -748,6 +769,32 @@ fn filter_tools(tools: Vec<ToolInfo>, filter: ToolFilter) -> Vec<ToolInfo> {
        .collect()
}

fn normalize_codex_apps_tool_title(
    server_name: &str,
    connector_name: Option<&str>,
    value: &str,
) -> String {
    if server_name != CODEX_APPS_MCP_SERVER_NAME {
        return value.to_string();
    }

    let Some(connector_name) = connector_name
        .map(str::trim)
        .filter(|name| !name.is_empty())
    else {
        return value.to_string();
    };

    let prefix = format!("{connector_name}_");
    if let Some(stripped) = value.strip_prefix(&prefix)
        && !stripped.is_empty()
    {
        return stripped.to_string();
    }

    value.to_string()
}

fn resolve_bearer_token(
    server_name: &str,
    bearer_token_env_var: Option<&str>,
@@ -905,12 +952,23 @@ async fn list_tools_for_client(
    Ok(resp
        .tools
        .into_iter()
        .map(|tool| ToolInfo {
            server_name: server_name.to_owned(),
            tool_name: tool.tool.name.clone(),
            tool: tool.tool,
            connector_id: tool.connector_id,
            connector_name: tool.connector_name,
        .map(|tool| {
            let connector_name = tool.connector_name;
            let mut tool_def = tool.tool;
            if let Some(title) = tool_def.title.as_deref() {
                let normalized_title =
                    normalize_codex_apps_tool_title(server_name, connector_name.as_deref(), title);
                if tool_def.title.as_deref() != Some(normalized_title.as_str()) {
                    tool_def.title = Some(normalized_title);
                }
            }
            ToolInfo {
                server_name: server_name.to_owned(),
                tool_name: tool_def.name.clone(),
                tool: tool_def,
                connector_id: tool.connector_id,
                connector_name,
            }
        })
        .collect())
}

@@ -1,20 +1,30 @@
use std::time::Duration;
use std::time::Instant;

use tracing::error;

use crate::codex::Session;
use crate::codex::TurnContext;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::protocol::EventMsg;
use crate::protocol::McpInvocation;
use crate::protocol::McpToolCallBeginEvent;
use crate::protocol::McpToolCallEndEvent;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_protocol::request_user_input::RequestUserInputQuestion;
use codex_protocol::request_user_input::RequestUserInputQuestionOption;
use codex_protocol::request_user_input::RequestUserInputResponse;
use mcp_types::ToolAnnotations;
use std::sync::Arc;

/// Handles the specified tool call and dispatches the appropriate
/// `McpToolCallBegin` and `McpToolCallEnd` events to the `Session`.
pub(crate) async fn handle_mcp_tool_call(
    sess: &Session,
    sess: Arc<Session>,
    turn_context: &TurnContext,
    call_id: String,
    server: String,
@@ -48,11 +58,79 @@ pub(crate) async fn handle_mcp_tool_call(
        arguments: arguments_value.clone(),
    };

    if let Some(decision) =
        maybe_request_mcp_tool_approval(sess.as_ref(), turn_context, &call_id, &server, &tool_name)
            .await
    {
        let result = match decision {
            McpToolApprovalDecision::Accept => {
                let tool_call_begin_event = EventMsg::McpToolCallBegin(McpToolCallBeginEvent {
                    call_id: call_id.clone(),
                    invocation: invocation.clone(),
                });
                notify_mcp_tool_call_event(sess.as_ref(), turn_context, tool_call_begin_event)
                    .await;

                let start = Instant::now();
                let result = sess
                    .call_tool(&server, &tool_name, arguments_value.clone())
                    .await
                    .map_err(|e| format!("tool call error: {e:?}"));
                if let Err(e) = &result {
                    tracing::warn!("MCP tool call error: {e:?}");
                }
                let tool_call_end_event = EventMsg::McpToolCallEnd(McpToolCallEndEvent {
                    call_id: call_id.clone(),
                    invocation,
                    duration: start.elapsed(),
                    result: result.clone(),
                });
                notify_mcp_tool_call_event(
                    sess.as_ref(),
                    turn_context,
                    tool_call_end_event.clone(),
                )
                .await;
                result
            }
            McpToolApprovalDecision::Decline => {
                let message = "user rejected MCP tool call".to_string();
                notify_mcp_tool_call_skip(
                    sess.as_ref(),
                    turn_context,
                    &call_id,
                    invocation,
                    message,
                )
                .await
            }
            McpToolApprovalDecision::Cancel => {
                let message = "user cancelled MCP tool call".to_string();
                notify_mcp_tool_call_skip(
                    sess.as_ref(),
                    turn_context,
                    &call_id,
                    invocation,
                    message,
                )
                .await
            }
        };

        let status = if result.is_ok() { "ok" } else { "error" };
        turn_context
            .client
            .get_otel_manager()
            .counter("codex.mcp.call", 1, &[("status", status)]);

        return ResponseInputItem::McpToolCallOutput { call_id, result };
    }

    let tool_call_begin_event = EventMsg::McpToolCallBegin(McpToolCallBeginEvent {
        call_id: call_id.clone(),
        invocation: invocation.clone(),
    });
    notify_mcp_tool_call_event(sess, turn_context, tool_call_begin_event).await;
    notify_mcp_tool_call_event(sess.as_ref(), turn_context, tool_call_begin_event).await;

    let start = Instant::now();
    // Perform the tool call.
@@ -70,7 +148,7 @@ pub(crate) async fn handle_mcp_tool_call(
        result: result.clone(),
    });

    notify_mcp_tool_call_event(sess, turn_context, tool_call_end_event.clone()).await;
    notify_mcp_tool_call_event(sess.as_ref(), turn_context, tool_call_end_event.clone()).await;

    let status = if result.is_ok() { "ok" } else { "error" };
    turn_context
@@ -84,3 +162,236 @@ pub(crate) async fn handle_mcp_tool_call(
async fn notify_mcp_tool_call_event(sess: &Session, turn_context: &TurnContext, event: EventMsg) {
    sess.send_event(turn_context, event).await;
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum McpToolApprovalDecision {
    Accept,
    Decline,
    Cancel,
}

struct McpToolApprovalMetadata {
    annotations: ToolAnnotations,
    connector_name: Option<String>,
    tool_title: Option<String>,
}

const MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX: &str = "mcp_tool_call_approval";
const MCP_TOOL_APPROVAL_ACCEPT: &str = "Accept";
const MCP_TOOL_APPROVAL_DECLINE: &str = "Decline";
const MCP_TOOL_APPROVAL_CANCEL: &str = "Cancel";

async fn maybe_request_mcp_tool_approval(
    sess: &Session,
    turn_context: &TurnContext,
    call_id: &str,
    server: &str,
    tool_name: &str,
) -> Option<McpToolApprovalDecision> {
    if is_full_access_mode(turn_context) {
        return None;
    }
    if server != CODEX_APPS_MCP_SERVER_NAME {
        return None;
    }

    let metadata = lookup_mcp_tool_metadata(sess, server, tool_name).await?;
    if !requires_mcp_tool_approval(&metadata.annotations) {
        return None;
    }

    let question_id = format!("{MCP_TOOL_APPROVAL_QUESTION_ID_PREFIX}_{call_id}");
    let question = build_mcp_tool_approval_question(
        question_id.clone(),
        tool_name,
        metadata.tool_title.as_deref(),
        metadata.connector_name.as_deref(),
        &metadata.annotations,
    );
    let args = RequestUserInputArgs {
        questions: vec![question],
    };
    let response = sess
        .request_user_input(turn_context, call_id.to_string(), args)
        .await;
    Some(parse_mcp_tool_approval_response(response, &question_id))
}

fn is_full_access_mode(turn_context: &TurnContext) -> bool {
    matches!(turn_context.approval_policy, AskForApproval::Never)
        && matches!(
            turn_context.sandbox_policy,
            SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. }
        )
}

async fn lookup_mcp_tool_metadata(
    sess: &Session,
    server: &str,
    tool_name: &str,
) -> Option<McpToolApprovalMetadata> {
    let tools = sess
        .services
        .mcp_connection_manager
        .read()
        .await
        .list_all_tools()
        .await;

    tools.into_values().find_map(|tool_info| {
        if tool_info.server_name == server && tool_info.tool_name == tool_name {
            tool_info
                .tool
                .annotations
                .map(|annotations| McpToolApprovalMetadata {
                    annotations,
                    connector_name: tool_info.connector_name,
                    tool_title: tool_info.tool.title,
                })
        } else {
            None
        }
    })
}

fn build_mcp_tool_approval_question(
    question_id: String,
    tool_name: &str,
    tool_title: Option<&str>,
    connector_name: Option<&str>,
    annotations: &ToolAnnotations,
) -> RequestUserInputQuestion {
    let destructive = annotations.destructive_hint == Some(true);
    let open_world = annotations.open_world_hint == Some(true);
    let reason = match (destructive, open_world) {
        (true, true) => "may modify data and access external systems",
        (true, false) => "may modify or delete data",
        (false, true) => "may access external systems",
        (false, false) => "may have side effects",
    };

    let tool_label = tool_title.unwrap_or(tool_name);
    let app_label = connector_name
        .map(|name| format!("The {name} app"))
        .unwrap_or_else(|| "This app".to_string());
    let question = format!(
        "{app_label} wants to run the tool \"{tool_label}\", which {reason}. Allow this action?"
    );

    RequestUserInputQuestion {
        id: question_id,
        header: "Approve app tool call?".to_string(),
        question,
        is_other: false,
        is_secret: false,
        options: Some(vec![
            RequestUserInputQuestionOption {
                label: MCP_TOOL_APPROVAL_ACCEPT.to_string(),
                description: "Run the tool and continue.".to_string(),
            },
            RequestUserInputQuestionOption {
                label: MCP_TOOL_APPROVAL_DECLINE.to_string(),
                description: "Decline this tool call and continue.".to_string(),
            },
            RequestUserInputQuestionOption {
                label: MCP_TOOL_APPROVAL_CANCEL.to_string(),
                description: "Cancel this tool call".to_string(),
            },
        ]),
    }
}

fn parse_mcp_tool_approval_response(
    response: Option<RequestUserInputResponse>,
    question_id: &str,
) -> McpToolApprovalDecision {
    let Some(response) = response else {
        return McpToolApprovalDecision::Cancel;
    };
    let answers = response
        .answers
        .get(question_id)
        .map(|answer| answer.answers.as_slice());
    let Some(answers) = answers else {
        return McpToolApprovalDecision::Cancel;
    };
    if answers
        .iter()
        .any(|answer| answer == MCP_TOOL_APPROVAL_ACCEPT)
    {
        McpToolApprovalDecision::Accept
    } else if answers
        .iter()
        .any(|answer| answer == MCP_TOOL_APPROVAL_CANCEL)
    {
        McpToolApprovalDecision::Cancel
    } else {
        McpToolApprovalDecision::Decline
    }
}

fn requires_mcp_tool_approval(annotations: &ToolAnnotations) -> bool {
    annotations.read_only_hint == Some(false)
        && (annotations.destructive_hint == Some(true) || annotations.open_world_hint == Some(true))
}

async fn notify_mcp_tool_call_skip(
    sess: &Session,
    turn_context: &TurnContext,
    call_id: &str,
    invocation: McpInvocation,
    message: String,
) -> Result<mcp_types::CallToolResult, String> {
    let tool_call_begin_event = EventMsg::McpToolCallBegin(McpToolCallBeginEvent {
        call_id: call_id.to_string(),
        invocation: invocation.clone(),
    });
    notify_mcp_tool_call_event(sess, turn_context, tool_call_begin_event).await;

    let tool_call_end_event = EventMsg::McpToolCallEnd(McpToolCallEndEvent {
        call_id: call_id.to_string(),
        invocation,
        duration: Duration::ZERO,
        result: Err(message.clone()),
    });
    notify_mcp_tool_call_event(sess, turn_context, tool_call_end_event).await;
    Err(message)
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn annotations(
        read_only: Option<bool>,
        destructive: Option<bool>,
        open_world: Option<bool>,
    ) -> ToolAnnotations {
        ToolAnnotations {
            destructive_hint: destructive,
            idempotent_hint: None,
            open_world_hint: open_world,
            read_only_hint: read_only,
            title: None,
        }
    }

    #[test]
    fn approval_required_when_read_only_false_and_destructive() {
        let annotations = annotations(Some(false), Some(true), None);
        assert_eq!(requires_mcp_tool_approval(&annotations), true);
    }

    #[test]
    fn approval_required_when_read_only_false_and_open_world() {
        let annotations = annotations(Some(false), None, Some(true));
        assert_eq!(requires_mcp_tool_approval(&annotations), true);
    }

    #[test]
    fn approval_not_required_when_read_only_true() {
        let annotations = annotations(Some(true), Some(true), Some(true));
        assert_eq!(requires_mcp_tool_approval(&annotations), false);
    }
}
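
Taken together, the checks above reduce to a single gate; the sketch below restates it for readability and is an illustration, not code from the diff (`"codex_apps"` is the renamed `CODEX_APPS_MCP_SERVER_NAME`):

    // Illustration only: a tool call prompts the user exactly when the
    // session is not in full-access mode, the call targets the codex_apps
    // server, and the tool declares itself non-read-only and either
    // destructive or open-world.
    fn would_prompt(full_access: bool, server: &str, a: &ToolAnnotations) -> bool {
        !full_access
            && server == "codex_apps"
            && a.read_only_hint == Some(false)
            && (a.destructive_hint == Some(true) || a.open_world_hint == Some(true))
    }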

codex-rs/core/src/mentions.rs (new file, 64 lines)
@@ -0,0 +1,64 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;

use codex_protocol::user_input::UserInput;

use crate::connectors;
use crate::skills::SkillMetadata;
use crate::skills::injection::extract_tool_mentions;

pub(crate) struct CollectedToolMentions {
    pub(crate) plain_names: HashSet<String>,
    pub(crate) paths: HashSet<String>,
}

pub(crate) fn collect_tool_mentions_from_messages(messages: &[String]) -> CollectedToolMentions {
    let mut plain_names = HashSet::new();
    let mut paths = HashSet::new();
    for message in messages {
        let mentions = extract_tool_mentions(message);
        plain_names.extend(mentions.plain_names().map(str::to_string));
        paths.extend(mentions.paths().map(str::to_string));
    }
    CollectedToolMentions { plain_names, paths }
}

pub(crate) fn collect_explicit_app_paths(input: &[UserInput]) -> Vec<String> {
    input
        .iter()
        .filter_map(|item| match item {
            UserInput::Mention { path, .. } => Some(path.clone()),
            _ => None,
        })
        .collect()
}

pub(crate) fn build_skill_name_counts(
    skills: &[SkillMetadata],
    disabled_paths: &HashSet<PathBuf>,
) -> (HashMap<String, usize>, HashMap<String, usize>) {
    let mut exact_counts: HashMap<String, usize> = HashMap::new();
    let mut lower_counts: HashMap<String, usize> = HashMap::new();
    for skill in skills {
        if disabled_paths.contains(&skill.path) {
            continue;
        }
        *exact_counts.entry(skill.name.clone()).or_insert(0) += 1;
        *lower_counts
            .entry(skill.name.to_ascii_lowercase())
            .or_insert(0) += 1;
    }
    (exact_counts, lower_counts)
}

pub(crate) fn build_connector_slug_counts(
    connectors: &[connectors::AppInfo],
) -> HashMap<String, usize> {
    let mut counts: HashMap<String, usize> = HashMap::new();
    for connector in connectors {
        let slug = connectors::connector_mention_slug(connector);
        *counts.entry(slug).or_insert(0) += 1;
    }
    counts
}
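
A brief usage sketch for the mention collectors above; it assumes `extract_tool_mentions` recognizes `$name` and path-style mentions, which is not itself shown in this diff:

    // Hypothetical usage (the mention syntax is an assumption):
    let messages = vec!["please run $github and $./skills/deploy".to_string()];
    let mentions = collect_tool_mentions_from_messages(&messages);
    // mentions.plain_names and mentions.paths now hold the deduplicated
    // names and paths found across all messages.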

@@ -5,10 +5,12 @@
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
//!    key. These override or extend the defaults at runtime.

use crate::auth::AuthMode;
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::WireApi as ApiWireApi;
use codex_api::is_azure_responses_wire_base_url;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use codex_app_server_protocol::AuthMode;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
@@ -19,7 +21,6 @@ use std::collections::HashMap;
use std::env::VarError;
use std::time::Duration;

use crate::error::EnvVarError;
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
@@ -43,10 +44,6 @@ pub enum WireApi {
    /// The Responses API exposed by OpenAI at `/v1/responses`.
    Responses,

    /// Experimental: Responses API over WebSocket transport.
    #[serde(rename = "responses_websocket")]
    ResponsesWebsocket,

    /// Regular Chat Completions compatible with `/v1/chat/completions`.
    #[default]
    Chat,
@@ -105,6 +102,10 @@ pub struct ModelProviderInfo {
    /// and API key (if needed) comes from the "env_key" environment variable.
    #[serde(default)]
    pub requires_openai_auth: bool,

    /// Whether this provider supports the Responses API WebSocket transport.
    #[serde(default)]
    pub supports_websockets: bool,
}

impl ModelProviderInfo {
@@ -137,7 +138,7 @@ impl ModelProviderInfo {
        &self,
        auth_mode: Option<AuthMode>,
    ) -> crate::error::Result<ApiProvider> {
        let default_base_url = if matches!(auth_mode, Some(AuthMode::ChatGPT)) {
        let default_base_url = if matches!(auth_mode, Some(AuthMode::Chatgpt)) {
            "https://chatgpt.com/backend-api/codex"
        } else {
            "https://api.openai.com/v1"
@@ -162,7 +163,6 @@ impl ModelProviderInfo {
            query_params: self.query_params.clone(),
            wire: match self.wire_api {
                WireApi::Responses => ApiWireApi::Responses,
                WireApi::ResponsesWebsocket => ApiWireApi::Responses,
                WireApi::Chat => ApiWireApi::Chat,
            },
            headers,
@@ -171,6 +171,15 @@ impl ModelProviderInfo {
        })
    }

    pub(crate) fn is_azure_responses_endpoint(&self) -> bool {
        let wire = match self.wire_api {
            WireApi::Responses => ApiWireApi::Responses,
            WireApi::Chat => ApiWireApi::Chat,
        };

        is_azure_responses_wire_base_url(wire, &self.name, self.base_url.as_deref())
    }

    /// If `env_key` is Some, returns the API key for this provider if present
    /// (and non-empty) in the environment. If `env_key` is required but
    /// cannot be found, returns an error.
@@ -254,6 +263,7 @@ impl ModelProviderInfo {
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }

@@ -332,6 +342,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> M
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}

@@ -360,6 +371,7 @@ base_url = "http://localhost:11434/v1"
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -390,6 +402,7 @@ query_params = { api-version = "2025-04-01-preview" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
@@ -423,89 +436,10 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
    assert_eq!(expected_provider, provider);
}

#[test]
fn detects_azure_responses_base_urls() {
    let positive_cases = [
        "https://foo.openai.azure.com/openai",
        "https://foo.openai.azure.us/openai/deployments/bar",
        "https://foo.cognitiveservices.azure.cn/openai",
        "https://foo.aoai.azure.com/openai",
        "https://foo.openai.azure-api.net/openai",
        "https://foo.z01.azurefd.net/",
    ];
    for base_url in positive_cases {
        let provider = ModelProviderInfo {
            name: "test".into(),
            base_url: Some(base_url.into()),
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
            env_http_headers: None,
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            requires_openai_auth: false,
        };
        let api = provider.to_api_provider(None).expect("api provider");
        assert!(
            api.is_azure_responses_endpoint(),
            "expected {base_url} to be detected as Azure"
        );
    }

    let named_provider = ModelProviderInfo {
        name: "Azure".into(),
        base_url: Some("https://example.com".into()),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api: WireApi::Responses,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: None,
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        requires_openai_auth: false,
    };
    let named_api = named_provider.to_api_provider(None).expect("api provider");
    assert!(named_api.is_azure_responses_endpoint());

    let negative_cases = [
        "https://api.openai.com/v1",
        "https://example.com/openai",
        "https://myproxy.azurewebsites.net/openai",
    ];
    for base_url in negative_cases {
        let provider = ModelProviderInfo {
            name: "test".into(),
            base_url: Some(base_url.into()),
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
            env_http_headers: None,
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            requires_openai_auth: false,
        };
        let api = provider.to_api_provider(None).expect("api provider");
        assert!(
            !api.is_azure_responses_endpoint(),
            "expected {base_url} not to be detected as Azure"
        );
    }
}
}

@@ -2,6 +2,7 @@ use super::cache::ModelsCacheManager;
use crate::api_bridge::auth_provider_from_auth;
use crate::api_bridge::map_api_error;
use crate::auth::AuthManager;
use crate::auth::AuthMode;
use crate::config::Config;
use crate::default_client::build_reqwest_client;
use crate::error::CodexErr;
@@ -13,7 +14,6 @@ use crate::models_manager::model_info;
use crate::models_manager::model_presets::builtin_model_presets;
use codex_api::ModelsClient;
use codex_api::ReqwestTransport;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
@@ -61,7 +61,7 @@ impl ModelsManager {
        let cache_path = codex_home.join(MODEL_CACHE_FILE);
        let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
        Self {
            local_models: builtin_model_presets(auth_manager.get_auth_mode()),
            local_models: builtin_model_presets(auth_manager.get_internal_auth_mode()),
            remote_models: RwLock::new(Self::load_remote_models_from_file().unwrap_or_default()),
            auth_manager,
            etag: RwLock::new(None),
@@ -175,7 +175,7 @@ impl ModelsManager {
        refresh_strategy: RefreshStrategy,
    ) -> CoreResult<()> {
        if !config.features.enabled(Feature::RemoteModels)
            || self.auth_manager.get_auth_mode() == Some(AuthMode::ApiKey)
            || self.auth_manager.get_internal_auth_mode() == Some(AuthMode::ApiKey)
        {
            return Ok(());
        }
@@ -204,7 +204,8 @@ impl ModelsManager {
        let _timer =
            codex_otel::start_global_timer("codex.remote_models.fetch_update.duration_ms", &[]);
        let auth = self.auth_manager.auth().await;
        let api_provider = self.provider.to_api_provider(Some(AuthMode::ChatGPT))?;
        let auth_mode = self.auth_manager.get_internal_auth_mode();
        let api_provider = self.provider.to_api_provider(auth_mode)?;
        let api_auth = auth_provider_from_auth(auth.clone(), &self.provider)?;
        let transport = ReqwestTransport::new(build_reqwest_client());
        let client = ModelsClient::new(transport, api_provider, api_auth);
@@ -271,7 +272,10 @@ impl ModelsManager {
        let remote_presets: Vec<ModelPreset> = remote_models.into_iter().map(Into::into).collect();
        let existing_presets = self.local_models.clone();
        let mut merged_presets = ModelPreset::merge(remote_presets, existing_presets);
        let chatgpt_mode = self.auth_manager.get_auth_mode() == Some(AuthMode::ChatGPT);
        let chatgpt_mode = matches!(
            self.auth_manager.get_internal_auth_mode(),
            Some(AuthMode::Chatgpt)
        );
        merged_presets = ModelPreset::filter_by_auth(merged_presets, chatgpt_mode);

        for preset in &mut merged_presets {
@@ -315,7 +319,7 @@ impl ModelsManager {
        let cache_path = codex_home.join(MODEL_CACHE_FILE);
        let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
        Self {
            local_models: builtin_model_presets(auth_manager.get_auth_mode()),
            local_models: builtin_model_presets(auth_manager.get_internal_auth_mode()),
            remote_models: RwLock::new(Self::load_remote_models_from_file().unwrap_or_default()),
            auth_manager,
            etag: RwLock::new(None),
@@ -432,6 +436,7 @@ mod tests {
            stream_max_retries: Some(0),
            stream_idle_timeout_ms: Some(5_000),
            requires_openai_auth: false,
            supports_websockets: false,
        }
    }

@@ -1,19 +1,17 @@
use std::collections::BTreeMap;

use codex_protocol::config_types::Personality;
use codex_protocol::config_types::Verbosity;
use codex_protocol::openai_models::ApplyPatchToolType;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelInstructionsTemplate;
use codex_protocol::openai_models::ModelInstructionsVariables;
use codex_protocol::openai_models::ModelMessages;
use codex_protocol::openai_models::ModelVisibility;
use codex_protocol::openai_models::PersonalityMessages;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_protocol::openai_models::TruncationMode;
use codex_protocol::openai_models::TruncationPolicyConfig;

use crate::config::Config;
use crate::features::Feature;
use crate::truncate::approx_bytes_for_tokens;
use tracing::warn;

@@ -29,8 +27,11 @@ const GPT_5_1_CODEX_MAX_INSTRUCTIONS: &str = include_str!("../../gpt-5.1-codex-m
const GPT_5_2_CODEX_INSTRUCTIONS: &str = include_str!("../../gpt-5.2-codex_prompt.md");
const GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE: &str =
    include_str!("../../templates/model_instructions/gpt-5.2-codex_instructions_template.md");
const PERSONALITY_FRIENDLY: &str = include_str!("../../templates/personalities/friendly.md");
const PERSONALITY_PRAGMATIC: &str = include_str!("../../templates/personalities/pragmatic.md");

const GPT_5_2_CODEX_PERSONALITY_FRIENDLY: &str =
    include_str!("../../templates/personalities/gpt-5.2-codex_friendly.md");
const GPT_5_2_CODEX_PERSONALITY_PRAGMATIC: &str =
    include_str!("../../templates/personalities/gpt-5.2-codex_pragmatic.md");

pub(crate) const CONTEXT_WINDOW_272K: i64 = 272_000;

@@ -54,7 +55,7 @@ macro_rules! model_info {
            priority: 99,
            upgrade: None,
            base_instructions: BASE_INSTRUCTIONS.to_string(),
            model_instructions_template: None,
            model_messages: None,
            supports_reasoning_summaries: false,
            support_verbosity: false,
            default_verbosity: None,
@@ -100,8 +101,11 @@ pub(crate) fn with_config_overrides(mut model: ModelInfo, config: &Config) -> Mo

    if let Some(base_instructions) = &config.base_instructions {
        model.base_instructions = base_instructions.clone();
        model.model_instructions_template = None;
        model.model_messages = None;
    } else if !config.features.enabled(Feature::Personality) {
        model.model_messages = None;
    }

    model
}

@@ -169,15 +173,13 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
        model_info!(
            slug,
            base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
            model_instructions_template: Some(ModelInstructionsTemplate {
                template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
                personality_messages: Some(PersonalityMessages(BTreeMap::from([(
                    Personality::Friendly,
                    PERSONALITY_FRIENDLY.to_string(),
                ), (
                    Personality::Pragmatic,
                    PERSONALITY_PRAGMATIC.to_string(),
                )]))),
            model_messages: Some(ModelMessages {
                instructions_template: Some(GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string()),
                instructions_variables: Some(ModelInstructionsVariables {
                    personality_default: Some("".to_string()),
                    personality_friendly: Some(GPT_5_2_CODEX_PERSONALITY_FRIENDLY.to_string()),
                    personality_pragmatic: Some(GPT_5_2_CODEX_PERSONALITY_PRAGMATIC.to_string()),
                }),
            }),
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
            shell_type: ConfigShellToolType::ShellCommand,
@@ -213,6 +215,15 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
            truncation_policy: TruncationPolicyConfig::tokens(10_000),
            context_window: Some(CONTEXT_WINDOW_272K),
            supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
            base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
            model_messages: Some(ModelMessages {
                instructions_template: Some(GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string()),
                instructions_variables: Some(ModelInstructionsVariables {
                    personality_default: Some("".to_string()),
                    personality_friendly: Some(GPT_5_2_CODEX_PERSONALITY_FRIENDLY.to_string()),
                    personality_pragmatic: Some(GPT_5_2_CODEX_PERSONALITY_PRAGMATIC.to_string()),
                }),
            }),
        )
    } else if slug.starts_with("gpt-5.1-codex-max") {
        model_info!(
@@ -259,9 +270,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
            truncation_policy: TruncationPolicyConfig::tokens(10_000),
            context_window: Some(CONTEXT_WINDOW_272K),
        )
    } else if (slug.starts_with("gpt-5.2") || slug.starts_with("boomslang"))
        && !slug.contains("codex")
    {
    } else if slug.starts_with("gpt-5.2") || slug.starts_with("boomslang") {
        model_info!(
            slug,
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
@@ -276,7 +285,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
            context_window: Some(CONTEXT_WINDOW_272K),
            supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh_non_codex(),
        )
    } else if slug.starts_with("gpt-5.1") && !slug.contains("codex") {
    } else if slug.starts_with("gpt-5.1") {
        model_info!(
            slug,
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),

@@ -1,4 +1,4 @@
use codex_app_server_protocol::AuthMode;
use crate::auth::AuthMode;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use codex_protocol::openai_models::ReasoningEffort;

Some files were not shown because too many files have changed in this diff.