mirror of https://github.com/openai/codex.git
synced 2026-02-03 15:33:41 +00:00

Compare commits
14 Commits
compaction ... maxj/threa

| Author | SHA1 | Date |
|---|---|---|
|  | 338e90a836 |  |
|  | 231406bd04 |  |
|  | 3878c3dc7c |  |
|  | dabafe204a |  |
|  | 71b8d937ed |  |
|  | 996e09ca24 |  |
|  | 9f79365691 |  |
|  | fef3e36f67 |  |
|  | 3bb8e69dd3 |  |
|  | add648df82 |  |
|  | 1609f6aa81 |  |
|  | a90ab789c2 |  |
|  | 3f3916e595 |  |
|  | 19d8f71a98 |  |
88 .github/workflows/rust-ci.yml vendored
@@ -177,11 +177,31 @@ jobs:

    steps:
      - uses: actions/checkout@v6
      - name: Install UBSan runtime (musl)
        if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
        shell: bash
        run: |
          set -euo pipefail
          if command -v apt-get >/dev/null 2>&1; then
            sudo apt-get update -y
            sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
          fi
      - uses: dtolnay/rust-toolchain@1.92
        with:
          targets: ${{ matrix.target }}
          components: clippy

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Use hermetic Cargo home (musl)
        shell: bash
        run: |
          set -euo pipefail
          cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
          mkdir -p "${cargo_home}/bin"
          echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
          echo "${cargo_home}/bin" >> "$GITHUB_PATH"
          : > "${cargo_home}/config.toml"

      - name: Compute lockfile hash
        id: lockhash
        working-directory: codex-rs
@@ -202,6 +222,10 @@ jobs:
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ${{ github.workspace }}/.cargo-home/bin/
            ${{ github.workspace }}/.cargo-home/registry/index/
            ${{ github.workspace }}/.cargo-home/registry/cache/
            ${{ github.workspace }}/.cargo-home/git/db/
          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
          restore-keys: |
            cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -244,6 +268,14 @@ jobs:
            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Disable sccache wrapper (musl)
        shell: bash
        run: |
          set -euo pipefail
          echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Prepare APT cache directories (musl)
        shell: bash
@@ -277,6 +309,58 @@ jobs:
        shell: bash
        run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Configure rustc UBSan wrapper (musl host)
        shell: bash
        run: |
          set -euo pipefail
          ubsan=""
          if command -v ldconfig >/dev/null 2>&1; then
            ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
          fi
          wrapper_root="${RUNNER_TEMP:-/tmp}"
          wrapper="${wrapper_root}/rustc-ubsan-wrapper"
          cat > "${wrapper}" <<EOF
          #!/usr/bin/env bash
          set -euo pipefail
          if [[ -n "${ubsan}" ]]; then
            export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
          fi
          exec "\$1" "\${@:2}"
          EOF
          chmod +x "${wrapper}"
          echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Clear sanitizer flags (musl)
        shell: bash
        run: |
          set -euo pipefail
          # Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
          echo "RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
          # Override any runner-level Cargo config rustflags as well.
          echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

          sanitize_flags() {
            local input="$1"
            input="${input//-fsanitize=undefined/}"
            input="${input//-fno-sanitize-recover=undefined/}"
            input="${input//-fno-sanitize-trap=undefined/}"
            echo "$input"
          }

          cflags="$(sanitize_flags "${CFLAGS-}")"
          cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
          echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
          echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"

      - name: Install cargo-chef
        if: ${{ matrix.profile == 'release' }}
        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -322,6 +406,10 @@ jobs:
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ${{ github.workspace }}/.cargo-home/bin/
            ${{ github.workspace }}/.cargo-home/registry/index/
            ${{ github.workspace }}/.cargo-home/registry/cache/
            ${{ github.workspace }}/.cargo-home/git/db/
          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}

      - name: Save sccache cache (fallback)

77 .github/workflows/rust-release.yml vendored
@@ -21,7 +21,6 @@ jobs:
    steps:
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@1.92

      - name: Validate tag matches Cargo.toml version
        shell: bash
        run: |
@@ -90,10 +89,30 @@ jobs:

    steps:
      - uses: actions/checkout@v6
      - name: Install UBSan runtime (musl)
        if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
        shell: bash
        run: |
          set -euo pipefail
          if command -v apt-get >/dev/null 2>&1; then
            sudo apt-get update -y
            sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
          fi
      - uses: dtolnay/rust-toolchain@1.92
        with:
          targets: ${{ matrix.target }}

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Use hermetic Cargo home (musl)
        shell: bash
        run: |
          set -euo pipefail
          cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
          mkdir -p "${cargo_home}/bin"
          echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
          echo "${cargo_home}/bin" >> "$GITHUB_PATH"
          : > "${cargo_home}/config.toml"

      - uses: actions/cache@v5
        with:
          path: |
@@ -101,6 +120,10 @@ jobs:
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ${{ github.workspace }}/.cargo-home/bin/
            ${{ github.workspace }}/.cargo-home/registry/index/
            ${{ github.workspace }}/.cargo-home/registry/cache/
            ${{ github.workspace }}/.cargo-home/git/db/
            ${{ github.workspace }}/codex-rs/target/
          key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

@@ -116,6 +139,58 @@ jobs:
          TARGET: ${{ matrix.target }}
        run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Configure rustc UBSan wrapper (musl host)
        shell: bash
        run: |
          set -euo pipefail
          ubsan=""
          if command -v ldconfig >/dev/null 2>&1; then
            ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
          fi
          wrapper_root="${RUNNER_TEMP:-/tmp}"
          wrapper="${wrapper_root}/rustc-ubsan-wrapper"
          cat > "${wrapper}" <<EOF
          #!/usr/bin/env bash
          set -euo pipefail
          if [[ -n "${ubsan}" ]]; then
            export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
          fi
          exec "\$1" "\${@:2}"
          EOF
          chmod +x "${wrapper}"
          echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
          echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Clear sanitizer flags (musl)
        shell: bash
        run: |
          set -euo pipefail
          # Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
          echo "RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
          # Override any runner-level Cargo config rustflags as well.
          echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
          echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

          sanitize_flags() {
            local input="$1"
            input="${input//-fsanitize=undefined/}"
            input="${input//-fno-sanitize-recover=undefined/}"
            input="${input//-fno-sanitize-trap=undefined/}"
            echo "$input"
          }

          cflags="$(sanitize_flags "${CFLAGS-}")"
          cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
          echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
          echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"

      - name: Cargo build
        shell: bash
        run: |

506 codex-rs/Cargo.lock generated
@@ -361,7 +361,7 @@ dependencies = [
 "objc2-foundation",
 "parking_lot",
 "percent-encoding",
 "windows-sys 0.52.0",
 "windows-sys 0.60.2",
 "wl-clipboard-rs",
 "x11rb",
]
@@ -602,6 +602,15 @@ dependencies = [
 "pin-project-lite",
]

[[package]]
name = "atoi"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
dependencies = [
 "num-traits",
]

[[package]]
name = "atomic-waker"
version = "1.1.2"
@@ -739,6 +748,9 @@ name = "bitflags"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
dependencies = [
 "serde_core",
]

[[package]]
name = "block-buffer"
@@ -1077,6 +1089,7 @@ dependencies = [
 "codex-chatgpt",
 "codex-common",
 "codex-core",
 "codex-execpolicy",
 "codex-feedback",
 "codex-file-search",
 "codex-login",
@@ -1363,6 +1376,7 @@ dependencies = [
 "codex-otel",
 "codex-protocol",
 "codex-rmcp-client",
 "codex-state",
 "codex-utils-absolute-path",
 "codex-utils-cargo-bin",
 "codex-utils-pty",
@@ -1388,6 +1402,7 @@ dependencies = [
 "libc",
 "maplit",
 "mcp-types",
 "multimap",
 "once_cell",
 "openssl-sys",
 "os_info",
@@ -1756,6 +1771,7 @@ name = "codex-protocol"
version = "0.0.0"
dependencies = [
 "anyhow",
 "codex-execpolicy",
 "codex-git",
 "codex-utils-absolute-path",
 "codex-utils-image",
@@ -1825,6 +1841,23 @@ dependencies = [
 "which",
]

[[package]]
name = "codex-state"
version = "0.0.0"
dependencies = [
 "anyhow",
 "chrono",
 "codex-otel",
 "codex-protocol",
 "pretty_assertions",
 "serde",
 "serde_json",
 "sqlx",
 "tokio",
 "tracing",
 "uuid",
]

[[package]]
name = "codex-stdio-to-uds"
version = "0.0.0"
@@ -2108,6 +2141,12 @@ dependencies = [
 "serde_core",
]

[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"

[[package]]
name = "const_format"
version = "0.2.35"
@@ -2206,6 +2245,21 @@ dependencies = [
 "libc",
]

[[package]]
name = "crc"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
dependencies = [
 "crc-catalog",
]

[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"

[[package]]
name = "crc32fast"
version = "1.5.0"
@@ -2249,6 +2303,15 @@ dependencies = [
 "crossbeam-utils",
]

[[package]]
name = "crossbeam-queue"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
dependencies = [
 "crossbeam-utils",
]

[[package]]
name = "crossbeam-utils"
version = "0.8.21"
@@ -2527,6 +2590,7 @@ version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
 "const-oid",
 "pem-rfc7468",
 "zeroize",
]
@@ -2625,6 +2689,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
 "block-buffer",
 "const-oid",
 "crypto-common",
 "subtle",
]
@@ -2772,6 +2837,9 @@ name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
dependencies = [
 "serde",
]

[[package]]
name = "ena"
@@ -2905,7 +2973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
dependencies = [
 "libc",
 "windows-sys 0.52.0",
 "windows-sys 0.60.2",
]

[[package]]
@@ -2914,6 +2982,17 @@ version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"

[[package]]
name = "etcetera"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
dependencies = [
 "cfg-if",
 "home",
 "windows-sys 0.48.0",
]

[[package]]
name = "event-listener"
version = "5.4.0"
@@ -3005,7 +3084,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
dependencies = [
 "cfg-if",
 "rustix 1.0.8",
 "windows-sys 0.52.0",
 "windows-sys 0.59.0",
]

[[package]]
@@ -3082,6 +3161,17 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "flume"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
dependencies = [
 "futures-core",
 "futures-sink",
 "spin",
]

[[package]]
name = "flume"
version = "0.12.0"
@@ -3230,6 +3320,17 @@ dependencies = [
 "futures-util",
]

[[package]]
name = "futures-intrusive"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
dependencies = [
 "futures-core",
 "lock_api",
 "parking_lot",
]

[[package]]
name = "futures-io"
version = "0.3.31"
@@ -3310,7 +3411,7 @@ dependencies = [
 "libc",
 "log",
 "rustversion",
 "windows-link 0.1.3",
 "windows-link 0.2.0",
 "windows-result 0.3.4",
]

@@ -3462,6 +3563,15 @@ dependencies = [
 "foldhash 0.2.0",
]

[[package]]
name = "hashlink"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
 "hashbrown 0.15.4",
]

[[package]]
name = "heck"
version = "0.5.0"
@@ -3665,7 +3775,7 @@ dependencies = [
 "tokio",
 "tokio-rustls",
 "tower-service",
 "webpki-roots",
 "webpki-roots 1.0.2",
]

[[package]]
@@ -4092,7 +4202,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
 "hermit-abi",
 "libc",
 "windows-sys 0.52.0",
 "windows-sys 0.59.0",
]

[[package]]
@@ -4297,6 +4407,9 @@ name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
 "spin",
]

[[package]]
name = "libc"
@@ -4323,6 +4436,12 @@ dependencies = [
 "windows-link 0.2.0",
]

[[package]]
name = "libm"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981"

[[package]]
name = "libredox"
version = "0.1.6"
@@ -4331,6 +4450,18 @@ checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0"
dependencies = [
 "bitflags 2.10.0",
 "libc",
 "redox_syscall",
]

[[package]]
name = "libsqlite3-sys"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
dependencies = [
 "cc",
 "pkg-config",
 "vcpkg",
]

[[package]]
@@ -4515,6 +4646,16 @@ dependencies = [
 "wiremock",
]

[[package]]
name = "md-5"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
dependencies = [
 "cfg-if",
 "digest",
]

[[package]]
name = "md5"
version = "0.8.0"
@@ -4792,6 +4933,22 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "num-bigint-dig"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
dependencies = [
 "lazy_static",
 "libm",
 "num-integer",
 "num-iter",
 "num-traits",
 "rand 0.8.5",
 "smallvec",
 "zeroize",
]

[[package]]
name = "num-complex"
version = "0.4.6"
@@ -4845,6 +5002,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
 "autocfg",
 "libm",
]

[[package]]
@@ -5327,6 +5485,27 @@ dependencies = [
 "futures-io",
]

[[package]]
name = "pkcs1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
dependencies = [
 "der",
 "pkcs8",
 "spki",
]

[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
 "der",
 "spki",
]

[[package]]
name = "pkg-config"
version = "0.3.32"
@@ -5670,7 +5849,7 @@ dependencies = [
 "once_cell",
 "socket2 0.6.1",
 "tracing",
 "windows-sys 0.52.0",
 "windows-sys 0.60.2",
]

[[package]]
@@ -5940,7 +6119,7 @@ checksum = "b28ee9e1e5d39264414b71f5c33e7fbb66b382c3fac456fe0daad39cf5509933"
dependencies = [
 "ahash",
 "const_format",
 "flume",
 "flume 0.12.0",
 "hex",
 "ipnet",
 "itertools 0.14.0",
@@ -5998,7 +6177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "def3d5d06d3ca3a2d2e4376cf93de0555cd9c7960f085bf77be9562f5c9ace8f"
dependencies = [
 "ahash",
 "flume",
 "flume 0.12.0",
 "itertools 0.14.0",
 "moka",
 "parking_lot",
@@ -6290,7 +6469,7 @@ dependencies = [
 "wasm-bindgen-futures",
 "wasm-streams",
 "web-sys",
 "webpki-roots",
 "webpki-roots 1.0.2",
]

[[package]]
@@ -6361,6 +6540,26 @@ dependencies = [
 "syn 2.0.104",
]

[[package]]
name = "rsa"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d"
dependencies = [
 "const-oid",
 "digest",
 "num-bigint-dig",
 "num-integer",
 "num-traits",
 "pkcs1",
 "pkcs8",
 "rand_core 0.6.4",
 "signature",
 "spki",
 "subtle",
 "zeroize",
]

[[package]]
name = "rustc-demangle"
version = "0.1.25"
@@ -6392,7 +6591,7 @@ dependencies = [
 "errno",
 "libc",
 "linux-raw-sys 0.4.15",
 "windows-sys 0.52.0",
 "windows-sys 0.59.0",
]

[[package]]
@@ -6405,7 +6604,7 @@ dependencies = [
 "errno",
 "libc",
 "linux-raw-sys 0.9.4",
 "windows-sys 0.52.0",
 "windows-sys 0.60.2",
]

[[package]]
@@ -7110,6 +7309,16 @@ dependencies = [
 "libc",
]

[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
 "digest",
 "rand_core 0.6.4",
]

[[package]]
name = "simd-adler32"
version = "0.3.7"
@@ -7194,6 +7403,218 @@ dependencies = [
 "lock_api",
]

[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
 "base64ct",
 "der",
]

[[package]]
name = "sqlx"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
dependencies = [
 "sqlx-core",
 "sqlx-macros",
 "sqlx-mysql",
 "sqlx-postgres",
 "sqlx-sqlite",
]

[[package]]
name = "sqlx-core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
dependencies = [
 "base64",
 "bytes",
 "chrono",
 "crc",
 "crossbeam-queue",
 "either",
 "event-listener",
 "futures-core",
 "futures-intrusive",
 "futures-io",
 "futures-util",
 "hashbrown 0.15.4",
 "hashlink",
 "indexmap 2.12.0",
 "log",
 "memchr",
 "once_cell",
 "percent-encoding",
 "rustls",
 "serde",
 "serde_json",
 "sha2",
 "smallvec",
 "thiserror 2.0.17",
 "time",
 "tokio",
 "tokio-stream",
 "tracing",
 "url",
 "uuid",
 "webpki-roots 0.26.11",
]

[[package]]
name = "sqlx-macros"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
dependencies = [
 "proc-macro2",
 "quote",
 "sqlx-core",
 "sqlx-macros-core",
 "syn 2.0.104",
]

[[package]]
name = "sqlx-macros-core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
dependencies = [
 "dotenvy",
 "either",
 "heck",
 "hex",
 "once_cell",
 "proc-macro2",
 "quote",
 "serde",
 "serde_json",
 "sha2",
 "sqlx-core",
 "sqlx-mysql",
 "sqlx-postgres",
 "sqlx-sqlite",
 "syn 2.0.104",
 "tokio",
 "url",
]

[[package]]
name = "sqlx-mysql"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
dependencies = [
 "atoi",
 "base64",
 "bitflags 2.10.0",
 "byteorder",
 "bytes",
 "chrono",
 "crc",
 "digest",
 "dotenvy",
 "either",
 "futures-channel",
 "futures-core",
 "futures-io",
 "futures-util",
 "generic-array",
 "hex",
 "hkdf",
 "hmac",
 "itoa",
 "log",
 "md-5",
 "memchr",
 "once_cell",
 "percent-encoding",
 "rand 0.8.5",
 "rsa",
 "serde",
 "sha1",
 "sha2",
 "smallvec",
 "sqlx-core",
 "stringprep",
 "thiserror 2.0.17",
 "time",
 "tracing",
 "uuid",
 "whoami",
]

[[package]]
name = "sqlx-postgres"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
dependencies = [
 "atoi",
 "base64",
 "bitflags 2.10.0",
 "byteorder",
 "chrono",
 "crc",
 "dotenvy",
 "etcetera",
 "futures-channel",
 "futures-core",
 "futures-util",
 "hex",
 "hkdf",
 "hmac",
 "home",
 "itoa",
 "log",
 "md-5",
 "memchr",
 "once_cell",
 "rand 0.8.5",
 "serde",
 "serde_json",
 "sha2",
 "smallvec",
 "sqlx-core",
 "stringprep",
 "thiserror 2.0.17",
 "time",
 "tracing",
 "uuid",
 "whoami",
]

[[package]]
name = "sqlx-sqlite"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
dependencies = [
 "atoi",
 "chrono",
 "flume 0.11.1",
 "futures-channel",
 "futures-core",
 "futures-executor",
 "futures-intrusive",
 "futures-util",
 "libsqlite3-sys",
 "log",
 "percent-encoding",
 "serde",
 "serde_urlencoded",
 "sqlx-core",
 "thiserror 2.0.17",
 "time",
 "tracing",
 "url",
 "uuid",
]

[[package]]
name = "sse-stream"
version = "0.2.1"
@@ -7327,6 +7748,17 @@ dependencies = [
 "precomputed-hash",
]

[[package]]
name = "stringprep"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
dependencies = [
 "unicode-bidi",
 "unicode-normalization",
 "unicode-properties",
]

[[package]]
name = "strsim"
version = "0.10.0"
@@ -7492,7 +7924,7 @@ dependencies = [
 "getrandom 0.3.3",
 "once_cell",
 "rustix 1.0.8",
 "windows-sys 0.52.0",
 "windows-sys 0.61.1",
]

[[package]]
@@ -8287,6 +8719,12 @@ version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"

[[package]]
name = "unicode-bidi"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"

[[package]]
name = "unicode-ident"
version = "1.0.18"
@@ -8299,6 +8737,21 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"

[[package]]
name = "unicode-normalization"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
dependencies = [
 "tinyvec",
]

[[package]]
name = "unicode-properties"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"

[[package]]
name = "unicode-segmentation"
version = "1.12.0"
@@ -8506,6 +8959,12 @@ dependencies = [
 "wit-bindgen-rt",
]

[[package]]
name = "wasite"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"

[[package]]
name = "wasm-bindgen"
version = "0.2.100"
@@ -8705,6 +9164,15 @@ dependencies = [
 "rustls-pki-types",
]

[[package]]
name = "webpki-roots"
version = "0.26.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
dependencies = [
 "webpki-roots 1.0.2",
]

[[package]]
name = "webpki-roots"
version = "1.0.2"
@@ -8731,6 +9199,16 @@ dependencies = [
 "winsafe",
]

[[package]]
name = "whoami"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
dependencies = [
 "libredox",
 "wasite",
]

[[package]]
name = "widestring"
version = "1.2.1"
@@ -8774,7 +9252,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
 "windows-sys 0.48.0",
 "windows-sys 0.59.0",
]

[[package]]

@@ -47,6 +47,7 @@ members = [
    "utils/string",
    "codex-client",
    "codex-api",
    "state",
]
resolver = "2"

@@ -91,6 +92,7 @@ codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-state = { path = "state" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-utils-absolute-path = { path = "utils/absolute-path" }
@@ -198,6 +200,7 @@ semver = "1.0"
shlex = "1.3.0"
similar = "2.7.0"
socket2 = "0.6.1"
sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"

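The workspace hunk above adds sqlx with the `sqlite` and `runtime-tokio-rustls` features for the new `codex-state` crate. A minimal sketch of opening a SQLite pool with exactly that feature set — the file path, pool size, and table are illustrative, not taken from the repo:

```rust
use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions};
use std::str::FromStr;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Illustrative path; codex-state's real database location is not shown in this diff.
    let opts = SqliteConnectOptions::from_str("sqlite://state.db")?
        .create_if_missing(true);
    let pool = SqlitePoolOptions::new()
        .max_connections(4)
        .connect_with(opts)
        .await?;
    // The `macros` and `migrate` features additionally enable compile-checked
    // queries and `sqlx::migrate!`; plain runtime queries look like this:
    sqlx::query("CREATE TABLE IF NOT EXISTS threads (id TEXT PRIMARY KEY)")
        .execute(&pool)
        .await?;
    Ok(())
}
```
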
@@ -598,7 +598,6 @@ server_notification_definitions! {
    ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
    ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
    ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
    ContextCompactionStarted => "thread/compaction/started" (v2::ContextCompactionStartedNotification),
    ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
    DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
    ConfigWarning => "configWarning" (v2::ConfigWarningNotification),

@@ -56,8 +56,6 @@ impl ThreadHistoryBuilder {
                self.handle_agent_reasoning_raw_content(payload)
            }
            EventMsg::TokenCount(_) => {}
            EventMsg::ContextCompactionStarted(_) => {}
            EventMsg::ContextCompactionEnded(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),

@@ -27,10 +27,12 @@ use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SkillDependencies as CoreSkillDependencies;
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
use codex_protocol::protocol::SkillInterface as CoreSkillInterface;
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
use codex_protocol::protocol::SkillScope as CoreSkillScope;
use codex_protocol::protocol::SkillToolDependency as CoreSkillToolDependency;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
@@ -1395,11 +1397,14 @@ pub struct SkillMetadata {
    pub description: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    /// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
    /// Legacy short_description from SKILL.md. Prefer SKILL.json interface.short_description.
    pub short_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub interface: Option<SkillInterface>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub dependencies: Option<SkillDependencies>,
    pub path: PathBuf,
    pub scope: SkillScope,
    pub enabled: bool,
@@ -1423,6 +1428,35 @@ pub struct SkillInterface {
    pub default_prompt: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillToolDependency {
    #[serde(rename = "type")]
    #[ts(rename = "type")]
    pub r#type: String,
    pub value: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub transport: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub command: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -1462,6 +1496,7 @@ impl From<CoreSkillMetadata> for SkillMetadata {
            description: value.description,
            short_description: value.short_description,
            interface: value.interface.map(SkillInterface::from),
            dependencies: value.dependencies.map(SkillDependencies::from),
            path: value.path,
            scope: value.scope.into(),
            enabled: true,
@@ -1482,6 +1517,31 @@ impl From<CoreSkillInterface> for SkillInterface {
    }
}

impl From<CoreSkillDependencies> for SkillDependencies {
    fn from(value: CoreSkillDependencies) -> Self {
        Self {
            tools: value
                .tools
                .into_iter()
                .map(SkillToolDependency::from)
                .collect(),
        }
    }
}

impl From<CoreSkillToolDependency> for SkillToolDependency {
    fn from(value: CoreSkillToolDependency) -> Self {
        Self {
            r#type: value.r#type,
            value: value.value,
            description: value.description,
            transport: value.transport,
            command: value.command,
            url: value.url,
        }
    }
}

impl From<CoreSkillScope> for SkillScope {
    fn from(value: CoreSkillScope) -> Self {
        match value {
@@ -1969,9 +2029,6 @@ pub enum ThreadItem {
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    ExitedReviewMode { id: String, review: String },
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    ContextCompaction { id: String },
}

impl From<CoreTurnItem> for ThreadItem {
@@ -2000,9 +2057,6 @@ impl From<CoreTurnItem> for ThreadItem {
                id: search.id,
                query: search.query,
            },
            CoreTurnItem::ContextCompaction(compaction) => {
                ThreadItem::ContextCompaction { id: compaction.id }
            }
        }
    }
}
@@ -2373,14 +2427,6 @@ pub struct ContextCompactedNotification {
    pub turn_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ContextCompactionStartedNotification {
    pub thread_id: String,
    pub turn_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2631,7 +2677,6 @@ mod tests {
    use super::*;
    use codex_protocol::items::AgentMessageContent;
    use codex_protocol::items::AgentMessageItem;
    use codex_protocol::items::ContextCompactionItem;
    use codex_protocol::items::ReasoningItem;
    use codex_protocol::items::TurnItem;
    use codex_protocol::items::UserMessageItem;
@@ -2756,17 +2801,6 @@ mod tests {
                query: "docs".to_string(),
            }
        );

        let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
            id: "compact-1".to_string(),
        });

        assert_eq!(
            ThreadItem::from(compaction_item),
            ThreadItem::ContextCompaction {
                id: "compact-1".to_string(),
            }
        );
    }

    #[test]

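Given the serde attributes on `SkillToolDependency` above (`rename_all = "camelCase"`, the `type` rename, and `skip_serializing_if` on the optionals), here is a round-trip sketch of the JSON shape it produces. The struct is copied standalone for illustration (the real one also derives `JsonSchema` and `TS`), and the sample values are invented:

```rust
#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
struct SkillToolDependency {
    #[serde(rename = "type")]
    r#type: String,
    value: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    transport: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    command: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    url: Option<String>,
}

fn main() -> serde_json::Result<()> {
    let dep = SkillToolDependency {
        r#type: "mcp".into(),            // invented sample value
        value: "github".into(),          // invented sample value
        description: None,
        transport: Some("stdio".into()), // invented sample value
        command: None,
        url: None,
    };
    // None fields are skipped, so only type/value/transport appear on the wire.
    assert_eq!(
        serde_json::to_value(&dep)?,
        serde_json::json!({"type": "mcp", "value": "github", "transport": "stdio"})
    );
    Ok(())
}
```
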
@@ -56,6 +56,7 @@ axum = { workspace = true, default-features = false, features = [
    "tokio",
] }
base64 = { workspace = true }
codex-execpolicy = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }

@@ -24,7 +24,6 @@ use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::ContextCompactedNotification;
use codex_app_server_protocol::ContextCompactionStartedNotification;
use codex_app_server_protocol::DeprecationNoticeNotification;
use codex_app_server_protocol::DynamicToolCallParams;
use codex_app_server_protocol::ErrorNotification;
@@ -602,18 +601,7 @@ pub(crate) async fn apply_bespoke_event_handling(
                .send_server_notification(ServerNotification::AgentMessageDelta(notification))
                .await;
        }
        EventMsg::ContextCompactionStarted(..) => {
            let notification = ContextCompactionStartedNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),
            };
            outgoing
                .send_server_notification(ServerNotification::ContextCompactionStarted(
                    notification,
                ))
                .await;
        }
        EventMsg::ContextCompactionEnded(..) => {
        EventMsg::ContextCompacted(..) => {
            let notification = ContextCompactedNotification {
                thread_id: conversation_id.to_string(),
                turn_id: event_turn_id.clone(),

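For reference, the surviving `ContextCompactedNotification` above serializes with camelCase keys thanks to `rename_all`. A self-contained sketch of the wire shape (the id values are invented):

```rust
// Mirrors the two pub fields shown in the protocol diff above.
#[derive(serde::Serialize)]
#[serde(rename_all = "camelCase")]
struct ContextCompactedNotification {
    thread_id: String,
    turn_id: String,
}

fn main() {
    let n = ContextCompactedNotification {
        thread_id: "thread-123".into(), // invented sample id
        turn_id: "turn-7".into(),       // invented sample id
    };
    // Sent as the params of the "thread/compacted" server notification.
    assert_eq!(
        serde_json::to_string(&n).unwrap(),
        r#"{"threadId":"thread-123","turnId":"turn-7"}"#
    );
}
```
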
@@ -169,6 +169,7 @@ use codex_core::read_head_for_summary;
use codex_core::read_session_meta_line;
use codex_core::rollout_date_parts;
use codex_core::sandboxing::SandboxPermissions;
use codex_core::state_db::{self};
use codex_core::windows_sandbox::WindowsSandboxLevelExt;
use codex_feedback::CodexFeedback;
use codex_login::ServerOptions as LoginServerOptions;
@@ -1257,7 +1258,7 @@ impl CodexMessageProcessor {
        }

        let cwd = params.cwd.unwrap_or_else(|| self.config.cwd.clone());
        let env = create_env(&self.config.shell_environment_policy);
        let env = create_env(&self.config.shell_environment_policy, None);
        let timeout_ms = params
            .timeout_ms
            .and_then(|timeout_ms| u64::try_from(timeout_ms).ok());
@@ -1609,6 +1610,7 @@ impl CodexMessageProcessor {
    }

    async fn thread_archive(&mut self, request_id: RequestId, params: ThreadArchiveParams) {
        // TODO(jif) mostly rewrite this using sqlite after phase 1
        let thread_id = match ThreadId::from_string(&params.thread_id) {
            Ok(id) => id,
            Err(err) => {
@@ -1658,6 +1660,7 @@ impl CodexMessageProcessor {
    }

    async fn thread_unarchive(&mut self, request_id: RequestId, params: ThreadUnarchiveParams) {
        // TODO(jif) mostly rewrite this using sqlite after phase 1
        let thread_id = match ThreadId::from_string(&params.thread_id) {
            Ok(id) => id,
            Err(err) => {
@@ -1700,6 +1703,7 @@ impl CodexMessageProcessor {

        let rollout_path_display = archived_path.display().to_string();
        let fallback_provider = self.config.model_provider_id.clone();
        let state_db_ctx = state_db::init_if_enabled(&self.config, None).await;
        let archived_folder = self
            .config
            .codex_home
@@ -1778,6 +1782,11 @@ impl CodexMessageProcessor {
                message: format!("failed to unarchive thread: {err}"),
                data: None,
            })?;
        if let Some(ctx) = state_db_ctx {
            let _ = ctx
                .mark_unarchived(thread_id, restored_path.as_path())
                .await;
        }
        let summary =
            read_summary_from_rollout(restored_path.as_path(), fallback_provider.as_str())
                .await
@@ -2507,7 +2516,6 @@ impl CodexMessageProcessor {
        };

        let fallback_provider = self.config.model_provider_id.as_str();

        match read_summary_from_rollout(&path, fallback_provider).await {
            Ok(summary) => {
                let response = GetConversationSummaryResponse { summary };
@@ -3530,8 +3538,13 @@ impl CodexMessageProcessor {
            });
        }

        let mut state_db_ctx = None;

        // If the thread is active, request shutdown and wait briefly.
        if let Some(conversation) = self.thread_manager.remove_thread(&thread_id).await {
            if let Some(ctx) = conversation.state_db() {
                state_db_ctx = Some(ctx);
            }
            info!("thread {thread_id} was active; shutting down");
            // Request shutdown.
            match conversation.submit(Op::Shutdown).await {
@@ -3558,14 +3571,24 @@ impl CodexMessageProcessor {
            }
        }

        if state_db_ctx.is_none() {
            state_db_ctx = state_db::init_if_enabled(&self.config, None).await;
        }

        // Move the rollout file to archived.
        let result: std::io::Result<()> = async {
        let result: std::io::Result<()> = async move {
            let archive_folder = self
                .config
                .codex_home
                .join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
            tokio::fs::create_dir_all(&archive_folder).await?;
            tokio::fs::rename(&canonical_rollout_path, &archive_folder.join(&file_name)).await?;
            let archived_path = archive_folder.join(&file_name);
            tokio::fs::rename(&canonical_rollout_path, &archived_path).await?;
            if let Some(ctx) = state_db_ctx {
                let _ = ctx
                    .mark_archived(thread_id, archived_path.as_path(), Utc::now())
                    .await;
            }
            Ok(())
        }
        .await;
@@ -4522,6 +4545,22 @@ fn skills_to_info(
                default_prompt: interface.default_prompt,
            }
        }),
        dependencies: skill.dependencies.clone().map(|dependencies| {
            codex_app_server_protocol::SkillDependencies {
                tools: dependencies
                    .tools
                    .into_iter()
                    .map(|tool| codex_app_server_protocol::SkillToolDependency {
                        r#type: tool.r#type,
                        value: tool.value,
                        description: tool.description,
                        transport: tool.transport,
                        command: tool.command,
                        url: tool.url,
                    })
                    .collect(),
            }
        }),
        path: skill.path.clone(),
        scope: skill.scope.into(),
        enabled,

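Condensed, the archive path in the hunks above is: move the rollout file into the archived-sessions folder, then best-effort record the move in the state DB so a DB failure never undoes the rename. A sketch with stub types — `ThreadId` and the state-db handle are stand-ins, and the `mark_archived` signature is assumed from the call shape in the diff:

```rust
use std::path::{Path, PathBuf};

// Stubs for illustration; the real types live in codex-protocol / codex-core.
#[derive(Clone, Copy)]
struct ThreadId(u64);

struct StateDb;
impl StateDb {
    async fn mark_archived(
        &self,
        _thread: ThreadId,
        _new_path: &Path,
        _at: chrono::DateTime<chrono::Utc>,
    ) -> anyhow::Result<()> {
        Ok(()) // the real handle writes a row to SQLite
    }
}

async fn archive_rollout(
    db: Option<StateDb>,
    thread_id: ThreadId,
    rollout: PathBuf,
    archive_folder: PathBuf,
) -> std::io::Result<()> {
    tokio::fs::create_dir_all(&archive_folder).await?;
    let file_name = rollout.file_name().expect("rollout file has a name");
    let archived_path = archive_folder.join(file_name);
    tokio::fs::rename(&rollout, &archived_path).await?;
    if let Some(db) = db {
        // Best-effort, as in the diff: a DB error must not fail the archive.
        let _ = db.mark_archived(thread_id, &archived_path, chrono::Utc::now()).await;
    }
    Ok(())
}
```
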
@@ -11,6 +11,7 @@ use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_execpolicy::Policy;
use codex_protocol::ThreadId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
@@ -358,6 +359,8 @@ fn assert_permissions_message(item: &ResponseItem) {
    let expected = DeveloperInstructions::from_policy(
        &SandboxPolicy::DangerFullAccess,
        AskForApproval::Never,
        &Policy::empty(),
        false,
        &PathBuf::from("/tmp"),
    )
    .into_text();

@@ -130,7 +130,7 @@ async fn run_command_under_sandbox(
    let sandbox_policy_cwd = cwd.clone();

    let stdio_policy = StdioPolicy::Inherit;
    let env = create_env(&config.shell_environment_policy);
    let env = create_env(&config.shell_environment_policy, None);

    // Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
    if let SandboxType::Windows = sandbox_type {

@@ -13,11 +13,12 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::auth::McpOAuthLoginSupport;
use codex_core::mcp::auth::compute_auth_statuses;
use codex_core::mcp::auth::oauth_login_support;
use codex_core::protocol::McpAuthStatus;
use codex_rmcp_client::delete_oauth_tokens;
use codex_rmcp_client::perform_oauth_login;
use codex_rmcp_client::supports_oauth_login;

/// Subcommands:
/// - `list` — list configured servers (with `--json`)
@@ -260,33 +261,25 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re

    println!("Added global MCP server '{name}'.");

    if let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var: None,
        http_headers,
        env_http_headers,
    } = transport
    {
        match supports_oauth_login(&url).await {
            Ok(true) => {
                println!("Detected OAuth support. Starting OAuth flow…");
                perform_oauth_login(
                    &name,
                    &url,
                    config.mcp_oauth_credentials_store_mode,
                    http_headers.clone(),
                    env_http_headers.clone(),
                    &Vec::new(),
                    config.mcp_oauth_callback_port,
                )
                .await?;
                println!("Successfully logged in.");
            }
            Ok(false) => {}
            Err(_) => println!(
                "MCP server may or may not require login. Run `codex mcp login {name}` to login."
            ),
    match oauth_login_support(&transport).await {
        McpOAuthLoginSupport::Supported(oauth_config) => {
            println!("Detected OAuth support. Starting OAuth flow…");
            perform_oauth_login(
                &name,
                &oauth_config.url,
                config.mcp_oauth_credentials_store_mode,
                oauth_config.http_headers,
                oauth_config.env_http_headers,
                &Vec::new(),
                config.mcp_oauth_callback_port,
            )
            .await?;
            println!("Successfully logged in.");
        }
        McpOAuthLoginSupport::Unsupported => {}
        McpOAuthLoginSupport::Unknown(_) => println!(
            "MCP server may or may not require login. Run `codex mcp login {name}` to login."
        ),
    }

    Ok(())

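The refactor above replaces an ad-hoc `supports_oauth_login` probe with a three-way `McpOAuthLoginSupport` result. A hypothetical shape inferred purely from the match arms — the real definition lives in `codex_core::mcp::auth`, and the field types here are assumptions:

```rust
use std::collections::HashMap;

struct OAuthConfig {
    url: String,
    http_headers: Option<HashMap<String, String>>,     // assumed type
    env_http_headers: Option<HashMap<String, String>>, // assumed type
}

enum McpOAuthLoginSupport {
    Supported(OAuthConfig), // server advertises OAuth; start the login flow
    Unsupported,            // no OAuth; nothing to do
    Unknown(anyhow::Error), // probe failed; login may or may not be needed
}

fn describe(support: &McpOAuthLoginSupport) -> &'static str {
    match support {
        McpOAuthLoginSupport::Supported(_) => "Detected OAuth support. Starting OAuth flow…",
        McpOAuthLoginSupport::Unsupported => "",
        McpOAuthLoginSupport::Unknown(_) => "MCP server may or may not require login.",
    }
}
```
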
@@ -37,6 +37,7 @@ codex-keyring-store = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-state = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
@@ -55,6 +56,7 @@ indoc = { workspace = true }
keyring = { workspace = true, features = ["crypto-rust"] }
libc = { workspace = true }
mcp-types = { workspace = true }
multimap = { workspace = true }
once_cell = { workspace = true }
os_info = { workspace = true }
rand = { workspace = true }

@@ -189,6 +189,9 @@
    "remote_models": {
      "type": "boolean"
    },
    "request_rule": {
      "type": "boolean"
    },
    "responses_websockets": {
      "type": "boolean"
    },
@@ -198,6 +201,12 @@
    "shell_tool": {
      "type": "boolean"
    },
    "skill_mcp_dependency_install": {
      "type": "boolean"
    },
    "sqlite": {
      "type": "boolean"
    },
    "steer": {
      "type": "boolean"
    },
@@ -441,7 +450,6 @@
      "type": "object"
    },
    "Notice": {
      "additionalProperties": false,
      "description": "Settings for notices we display to users via the tui and app-server clients (primarily the Codex IDE extension). NOTE: these are different from notifications - notices are warnings, NUX screens, acknowledgements, etc.",
      "properties": {
        "hide_full_access_warning": {
@@ -1182,6 +1190,9 @@
    "remote_models": {
      "type": "boolean"
    },
    "request_rule": {
      "type": "boolean"
    },
    "responses_websockets": {
      "type": "boolean"
    },
@@ -1191,6 +1202,12 @@
    "shell_tool": {
      "type": "boolean"
    },
    "skill_mcp_dependency_install": {
      "type": "boolean"
    },
    "sqlite": {
      "type": "boolean"
    },
    "steer": {
      "type": "boolean"
    },

@@ -655,11 +655,13 @@ fn build_responses_headers(
    let mut headers = experimental_feature_headers(config);
    headers.insert(
        WEB_SEARCH_ELIGIBLE_HEADER,
        HeaderValue::from_static(if config.web_search_mode == WebSearchMode::Disabled {
            "false"
        } else {
            "true"
        }),
        HeaderValue::from_static(
            if matches!(config.web_search_mode, Some(WebSearchMode::Disabled)) {
                "false"
            } else {
                "true"
            },
        ),
    );
    if let Some(turn_state) = turn_state
        && let Some(state) = turn_state.get()

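The hunk above changes `config.web_search_mode` from a plain `WebSearchMode` to an `Option<WebSearchMode>`: only an explicit `Some(WebSearchMode::Disabled)` sends `"false"`, so an unset `None` now counts as eligible. A self-contained sketch of that truth table — the enum is a minimal stand-in, and the real one may have more variants:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum WebSearchMode { Disabled, Enabled } // minimal stand-in for illustration

fn web_search_eligible(mode: Option<WebSearchMode>) -> &'static str {
    // Matches the new header logic: only an explicit Disabled opts out.
    if matches!(mode, Some(WebSearchMode::Disabled)) { "false" } else { "true" }
}

fn main() {
    assert_eq!(web_search_eligible(Some(WebSearchMode::Disabled)), "false");
    assert_eq!(web_search_eligible(Some(WebSearchMode::Enabled)), "true");
    assert_eq!(web_search_eligible(None), "true"); // unset defaults to eligible
}
```
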
@@ -100,6 +100,7 @@ use crate::config::Config;
use crate::config::Constrained;
use crate::config::ConstraintResult;
use crate::config::GhostSnapshotConfig;
use crate::config::resolve_web_search_mode_for_turn;
use crate::config::types::McpServerConfig;
use crate::config::types::ShellEnvironmentPolicy;
use crate::context_manager::ContextManager;
@@ -114,6 +115,7 @@ use crate::instructions::UserInstructions;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp::effective_mcp_servers;
use crate::mcp::maybe_prompt_and_install_mcp_dependencies;
use crate::mcp::with_codex_apps_mcp;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::model_provider_info::CHAT_WIRE_API_DEPRECATION_SUMMARY;
@@ -137,9 +139,11 @@ use crate::protocol::RequestUserInputEvent;
use crate::protocol::ReviewDecision;
use crate::protocol::SandboxPolicy;
use crate::protocol::SessionConfiguredEvent;
use crate::protocol::SkillDependencies as ProtocolSkillDependencies;
use crate::protocol::SkillErrorInfo;
use crate::protocol::SkillInterface as ProtocolSkillInterface;
use crate::protocol::SkillMetadata as ProtocolSkillMetadata;
use crate::protocol::SkillToolDependency as ProtocolSkillToolDependency;
use crate::protocol::StreamErrorEvent;
use crate::protocol::Submission;
use crate::protocol::TokenCountEvent;
@@ -150,6 +154,7 @@ use crate::protocol::WarningEvent;
use crate::rollout::RolloutRecorder;
use crate::rollout::RolloutRecorderParams;
use crate::rollout::map_session_init_error;
use crate::rollout::metadata;
use crate::shell;
use crate::shell_snapshot::ShellSnapshot;
use crate::skills::SkillError;
@@ -157,9 +162,11 @@ use crate::skills::SkillInjections;
use crate::skills::SkillMetadata;
use crate::skills::SkillsManager;
use crate::skills::build_skill_injections;
use crate::skills::collect_explicit_skill_mentions;
use crate::state::ActiveTurn;
use crate::state::SessionServices;
use crate::state::SessionState;
use crate::state_db;
use crate::tasks::GhostSnapshotTask;
use crate::tasks::ReviewTask;
use crate::tasks::SessionTask;
@@ -185,6 +192,7 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::render_command_prefix_list;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::user_input::UserInput;
@@ -414,6 +422,10 @@ impl Codex {
let state = self.session.state.lock().await;
state.session_configuration.thread_config_snapshot()
}

pub(crate) fn state_db(&self) -> Option<state_db::StateDbHandle> {
self.session.state_db()
}
}

/// Context for an initialized model agent
@@ -581,6 +593,10 @@ impl Session {
session_configuration.collaboration_mode.reasoning_effort();
per_turn_config.model_reasoning_summary = session_configuration.model_reasoning_summary;
per_turn_config.model_personality = session_configuration.personality;
per_turn_config.web_search_mode = Some(resolve_web_search_mode_for_turn(
per_turn_config.web_search_mode,
session_configuration.sandbox_policy.get(),
));
per_turn_config.features = config.features.clone();
per_turn_config
}
@@ -689,6 +705,13 @@ impl Session {
RolloutRecorderParams::resume(resumed_history.rollout_path.clone()),
),
};
let state_builder = match &initial_history {
InitialHistory::Resumed(resumed) => metadata::builder_from_items(
resumed.history.as_slice(),
resumed.rollout_path.as_path(),
),
InitialHistory::New | InitialHistory::Forked(_) => None,
};

// Kick off independent async setup tasks in parallel to reduce startup latency.
//
@@ -697,11 +720,17 @@ impl Session {
// - load history metadata
let rollout_fut = async {
if config.ephemeral {
Ok(None)
Ok::<_, anyhow::Error>((None, None))
} else {
RolloutRecorder::new(&config, rollout_params)
.await
.map(Some)
let state_db_ctx = state_db::init_if_enabled(&config, None).await;
let rollout_recorder = RolloutRecorder::new(
&config,
rollout_params,
state_db_ctx.clone(),
state_builder.clone(),
)
.await?;
Ok((Some(rollout_recorder), state_db_ctx))
}
};

@@ -721,14 +750,14 @@ impl Session {

// Join all independent futures.
let (
rollout_recorder,
rollout_recorder_and_state_db,
(history_log_id, history_entry_count),
(auth, mcp_servers, auth_statuses),
) = tokio::join!(rollout_fut, history_meta_fut, auth_and_mcp_fut);

let rollout_recorder = rollout_recorder.map_err(|e| {
let (rollout_recorder, state_db_ctx) = rollout_recorder_and_state_db.map_err(|e| {
error!("failed to initialize rollout recorder: {e:#}");
anyhow::Error::from(e)
e
})?;
let rollout_path = rollout_recorder
.as_ref()
@@ -832,6 +861,7 @@ impl Session {
tool_approvals: Mutex::new(ApprovalStore::default()),
skills_manager,
agent_control,
state_db: state_db_ctx.clone(),
};

let sess = Arc::new(Session {
@@ -904,6 +934,10 @@ impl Session {
self.tx_event.clone()
}

pub(crate) fn state_db(&self) -> Option<state_db::StateDbHandle> {
self.services.state_db.clone()
}

/// Ensure all rollout writes are durably flushed.
pub(crate) async fn flush_rollout(&self) {
let recorder = {
@@ -1217,6 +1251,8 @@ impl Session {
DeveloperInstructions::from_policy(
&next.sandbox_policy,
next.approval_policy,
self.services.exec_policy.current().as_ref(),
self.features.enabled(Feature::RequestRule),
&next.cwd,
)
.into(),
@@ -1407,6 +1443,44 @@ impl Session {
Ok(())
}

async fn turn_context_for_sub_id(&self, sub_id: &str) -> Option<Arc<TurnContext>> {
let active = self.active_turn.lock().await;
active
.as_ref()
.and_then(|turn| turn.tasks.get(sub_id))
.map(|task| Arc::clone(&task.turn_context))
}

pub(crate) async fn record_execpolicy_amendment_message(
&self,
sub_id: &str,
amendment: &ExecPolicyAmendment,
) {
let Some(prefixes) = render_command_prefix_list([amendment.command.as_slice()]) else {
warn!("execpolicy amendment for {sub_id} had no command prefix");
return;
};
let text = format!("Approved command prefix saved:\n{prefixes}");
let message: ResponseItem = DeveloperInstructions::new(text.clone()).into();

if let Some(turn_context) = self.turn_context_for_sub_id(sub_id).await {
self.record_conversation_items(&turn_context, std::slice::from_ref(&message))
.await;
return;
}

if self
.inject_response_items(vec![ResponseInputItem::Message {
role: "developer".to_string(),
content: vec![ContentItem::InputText { text }],
}])
.await
.is_err()
{
warn!("no active turn found to record execpolicy amendment message for {sub_id}");
}
}

/// Emit an exec approval request event and await the user's decision.
///
/// The request is keyed by `sub_id`/`call_id` so matching responses are delivered
@@ -1740,6 +1814,8 @@ impl Session {
DeveloperInstructions::from_policy(
&turn_context.sandbox_policy,
turn_context.approval_policy,
self.services.exec_policy.current().as_ref(),
self.features.enabled(Feature::RequestRule),
&turn_context.cwd,
)
.into(),
@@ -1852,6 +1928,19 @@ impl Session {
self.send_token_count_event(turn_context).await;
}

pub(crate) async fn mcp_dependency_prompted(&self) -> HashSet<String> {
let state = self.state.lock().await;
state.mcp_dependency_prompted()
}

pub(crate) async fn record_mcp_dependency_prompted<I>(&self, names: I)
where
I: IntoIterator<Item = String>,
{
let mut state = self.state.lock().await;
state.record_mcp_dependency_prompted(names);
}

pub(crate) async fn set_server_reasoning_included(&self, included: bool) {
let mut state = self.state.lock().await;
state.set_server_reasoning_included(included);
@@ -2096,35 +2185,12 @@ impl Session {
Arc::clone(&self.services.user_shell)
}

async fn refresh_mcp_servers_if_requested(&self, turn_context: &TurnContext) {
let refresh_config = { self.pending_mcp_server_refresh_config.lock().await.take() };
let Some(refresh_config) = refresh_config else {
return;
};

let McpServerRefreshConfig {
mcp_servers,
mcp_oauth_credentials_store_mode,
} = refresh_config;

let mcp_servers =
match serde_json::from_value::<HashMap<String, McpServerConfig>>(mcp_servers) {
Ok(servers) => servers,
Err(err) => {
warn!("failed to parse MCP server refresh config: {err}");
return;
}
};
let store_mode = match serde_json::from_value::<OAuthCredentialsStoreMode>(
mcp_oauth_credentials_store_mode,
) {
Ok(mode) => mode,
Err(err) => {
warn!("failed to parse MCP OAuth refresh config: {err}");
return;
}
};

async fn refresh_mcp_servers_inner(
&self,
turn_context: &TurnContext,
mcp_servers: HashMap<String, McpServerConfig>,
store_mode: OAuthCredentialsStoreMode,
) {
let auth = self.services.auth_manager.auth().await;
let config = self.get_config().await;
let mcp_servers = with_codex_apps_mcp(
@@ -2157,6 +2223,49 @@ impl Session {
*manager = refreshed_manager;
}

async fn refresh_mcp_servers_if_requested(&self, turn_context: &TurnContext) {
let refresh_config = { self.pending_mcp_server_refresh_config.lock().await.take() };
let Some(refresh_config) = refresh_config else {
return;
};

let McpServerRefreshConfig {
mcp_servers,
mcp_oauth_credentials_store_mode,
} = refresh_config;

let mcp_servers =
match serde_json::from_value::<HashMap<String, McpServerConfig>>(mcp_servers) {
Ok(servers) => servers,
Err(err) => {
warn!("failed to parse MCP server refresh config: {err}");
return;
}
};
let store_mode = match serde_json::from_value::<OAuthCredentialsStoreMode>(
mcp_oauth_credentials_store_mode,
) {
Ok(mode) => mode,
Err(err) => {
warn!("failed to parse MCP OAuth refresh config: {err}");
return;
}
};

self.refresh_mcp_servers_inner(turn_context, mcp_servers, store_mode)
.await;
}

pub(crate) async fn refresh_mcp_servers_now(
&self,
turn_context: &TurnContext,
mcp_servers: HashMap<String, McpServerConfig>,
store_mode: OAuthCredentialsStoreMode,
) {
self.refresh_mcp_servers_inner(turn_context, mcp_servers, store_mode)
.await;
}

async fn mcp_startup_cancellation_token(&self) -> CancellationToken {
self.services
.mcp_startup_cancellation_token
@@ -2553,18 +2662,26 @@ mod handlers {
if let ReviewDecision::ApprovedExecpolicyAmendment {
proposed_execpolicy_amendment,
} = &decision
&& let Err(err) = sess
{
match sess
.persist_execpolicy_amendment(proposed_execpolicy_amendment)
.await
{
let message = format!("Failed to apply execpolicy amendment: {err}");
tracing::warn!("{message}");
let warning = EventMsg::Warning(WarningEvent { message });
sess.send_event_raw(Event {
id: id.clone(),
msg: warning,
})
.await;
{
Ok(()) => {
sess.record_execpolicy_amendment_message(&id, proposed_execpolicy_amendment)
.await;
}
Err(err) => {
let message = format!("Failed to apply execpolicy amendment: {err}");
tracing::warn!("{message}");
let warning = EventMsg::Warning(WarningEvent { message });
sess.send_event_raw(Event {
id: id.clone(),
msg: warning,
})
.await;
}
}
}
match decision {
ReviewDecision::Abort => {
@@ -2888,7 +3005,7 @@ async fn spawn_review_thread(
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &review_model_info,
features: &review_features,
web_search_mode: review_web_search_mode,
web_search_mode: Some(review_web_search_mode),
});

let review_prompt = resolved.prompt.clone();
@@ -2900,7 +3017,7 @@ async fn spawn_review_thread(
let mut per_turn_config = (*config).clone();
per_turn_config.model = Some(model.clone());
per_turn_config.features = review_features.clone();
per_turn_config.web_search_mode = review_web_search_mode;
per_turn_config.web_search_mode = Some(review_web_search_mode);

let otel_manager = parent_turn_context
.client
@@ -2980,6 +3097,22 @@ fn skills_to_info(
brand_color: interface.brand_color,
default_prompt: interface.default_prompt,
}),
dependencies: skill.dependencies.clone().map(|dependencies| {
ProtocolSkillDependencies {
tools: dependencies
.tools
.into_iter()
.map(|tool| ProtocolSkillToolDependency {
r#type: tool.r#type,
value: tool.value,
description: tool.description,
transport: tool.transport,
command: tool.command,
url: tool.url,
})
.collect(),
}
}),
path: skill.path.clone(),
scope: skill.scope,
enabled: !disabled_paths.contains(&skill.path),
@@ -3039,11 +3172,23 @@ pub(crate) async fn run_turn(
.await,
);

let mentioned_skills = skills_outcome.as_ref().map_or_else(Vec::new, |outcome| {
collect_explicit_skill_mentions(&input, &outcome.skills, &outcome.disabled_paths)
});

maybe_prompt_and_install_mcp_dependencies(
sess.as_ref(),
turn_context.as_ref(),
&cancellation_token,
&mentioned_skills,
)
.await;

let otel_manager = turn_context.client.get_otel_manager();
let SkillInjections {
items: skill_items,
warnings: skill_warnings,
} = build_skill_injections(&input, skills_outcome.as_ref(), Some(&otel_manager)).await;
} = build_skill_injections(&mentioned_skills, Some(&otel_manager)).await;

for message in skill_warnings {
sess.send_event(&turn_context, EventMsg::Warning(WarningEvent { message }))
@@ -4459,6 +4604,7 @@ mod tests {
tool_approvals: Mutex::new(ApprovalStore::default()),
skills_manager,
agent_control,
state_db: None,
};

let turn_context = Session::make_turn_context(
@@ -4570,6 +4716,7 @@ mod tests {
tool_approvals: Mutex::new(ApprovalStore::default()),
skills_manager,
agent_control,
state_db: None,
};

let turn_context = Arc::new(Session::make_turn_context(

@@ -12,6 +12,8 @@ use codex_protocol::protocol::SessionSource;
use std::path::PathBuf;
use tokio::sync::watch;

use crate::state_db::StateDbHandle;

#[derive(Clone, Debug)]
pub struct ThreadConfigSnapshot {
pub model: String,
@@ -64,6 +66,10 @@ impl CodexThread {
self.rollout_path.clone()
}

pub fn state_db(&self) -> Option<StateDbHandle> {
self.codex.state_db()
}

pub async fn config_snapshot(&self) -> ThreadConfigSnapshot {
self.codex.thread_config_snapshot().await
}

@@ -10,8 +10,7 @@ use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::features::Feature;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactionEndedEvent;
use crate::protocol::ContextCompactionStartedEvent;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::EventMsg;
use crate::protocol::TurnContextItem;
use crate::protocol::TurnStartedEvent;
@@ -21,7 +20,6 @@ use crate::truncate::TruncationPolicy;
use crate::truncate::approx_token_count;
use crate::truncate::truncate_text;
use crate::util::backoff;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
@@ -30,7 +28,6 @@ use codex_protocol::protocol::RolloutItem;
use codex_protocol::user_input::UserInput;
use futures::prelude::*;
use tracing::error;
use uuid::Uuid;

pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
pub const SUMMARY_PREFIX: &str = include_str!("../templates/compact/summary_prefix.md");
@@ -74,9 +71,6 @@ async fn run_compact_task_inner(
turn_context: Arc<TurnContext>,
input: Vec<UserInput>,
) {
let compaction_item = compaction_turn_item();
emit_compaction_started(&sess, &turn_context, &compaction_item).await;

let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);

let mut history = sess.clone_history().await;
@@ -137,7 +131,6 @@ async fn run_compact_task_inner(
break;
}
Err(CodexErr::Interrupted) => {
emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
return;
}
Err(e @ CodexErr::ContextWindowExceeded) => {
@@ -154,7 +147,6 @@ async fn run_compact_task_inner(
sess.set_total_tokens_full(turn_context.as_ref()).await;
let event = EventMsg::Error(e.to_error_event(None));
sess.send_event(&turn_context, event).await;
emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
return;
}
Err(e) => {
@@ -172,7 +164,6 @@ async fn run_compact_task_inner(
} else {
let event = EventMsg::Error(e.to_error_event(None));
sess.send_event(&turn_context, event).await;
emit_compaction_ended(&sess, &turn_context, compaction_item.clone()).await;
return;
}
}
@@ -202,7 +193,8 @@ async fn run_compact_task_inner(
});
sess.persist_rollout_items(&[rollout_item]).await;

emit_compaction_ended(&sess, &turn_context, compaction_item).await;
let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
sess.send_event(&turn_context, event).await;

let warning = EventMsg::Warning(WarningEvent {
message: "Heads up: Long threads and multiple compactions can cause the model to be less accurate. Start a new thread when possible to keep threads small and targeted.".to_string(),
@@ -210,38 +202,6 @@ async fn run_compact_task_inner(
sess.send_event(&turn_context, warning).await;
}

fn compaction_turn_item() -> TurnItem {
TurnItem::ContextCompaction(ContextCompactionItem {
id: Uuid::new_v4().to_string(),
})
}

pub(crate) async fn emit_compaction_started(
sess: &Session,
turn_context: &TurnContext,
item: &TurnItem,
) {
sess.send_event(
turn_context,
EventMsg::ContextCompactionStarted(ContextCompactionStartedEvent {}),
)
.await;
sess.emit_turn_item_started(turn_context, item).await;
}

pub(crate) async fn emit_compaction_ended(
sess: &Session,
turn_context: &TurnContext,
item: TurnItem,
) {
sess.emit_turn_item_completed(turn_context, item).await;
sess.send_event(
turn_context,
EventMsg::ContextCompactionEnded(ContextCompactionEndedEvent {}),
)
.await;
}

pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
let mut pieces = Vec::new();
for item in content {

@@ -3,17 +3,13 @@ use std::sync::Arc;
use crate::Prompt;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::compact::emit_compaction_ended;
use crate::compact::emit_compaction_started;
use crate::error::Result as CodexResult;
use crate::protocol::CompactedItem;
use crate::protocol::ContextCompactedEvent;
use crate::protocol::EventMsg;
use crate::protocol::RolloutItem;
use crate::protocol::TurnStartedEvent;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ResponseItem;
use uuid::Uuid;

pub(crate) async fn run_inline_remote_auto_compact_task(
sess: Arc<Session>,
@@ -32,19 +28,12 @@ pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Ar
}

async fn run_remote_compact_task_inner(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem {
id: Uuid::new_v4().to_string(),
});
emit_compaction_started(sess, turn_context, &compaction_item).await;

if let Err(err) = run_remote_compact_task_inner_impl(sess, turn_context).await {
let event = EventMsg::Error(
err.to_error_event(Some("Error running remote compact task".to_string())),
);
sess.send_event(turn_context, event).await;
}

emit_compaction_ended(sess, turn_context, compaction_item).await;
}

async fn run_remote_compact_task_inner_impl(
@@ -88,5 +77,8 @@ async fn run_remote_compact_task_inner_impl(
sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
.await;

let event = EventMsg::ContextCompacted(ContextCompactedEvent {});
sess.send_event(turn_context, event).await;

Ok(())
}

@@ -306,8 +306,8 @@ pub struct Config {
/// model info's default preference.
pub include_apply_patch_tool: bool,

/// Explicit or feature-derived web search mode. Defaults to cached.
pub web_search_mode: WebSearchMode,
/// Explicit or feature-derived web search mode.
pub web_search_mode: Option<WebSearchMode>,

/// If set to `true`, use only the experimental unified exec tool.
pub use_experimental_unified_exec_tool: bool,
@@ -1203,27 +1203,36 @@ pub fn resolve_oss_provider(
}
}

/// Resolve the web search mode from explicit config, feature flags, and sandbox policy.
/// Live search is auto-enabled when sandbox policy is `DangerFullAccess`
/// Resolve the web search mode from explicit config and feature flags.
fn resolve_web_search_mode(
config_toml: &ConfigToml,
config_profile: &ConfigProfile,
features: &Features,
sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
) -> Option<WebSearchMode> {
if let Some(mode) = config_profile.web_search.or(config_toml.web_search) {
return mode;
return Some(mode);
}
if features.enabled(Feature::WebSearchCached) {
return WebSearchMode::Cached;
return Some(WebSearchMode::Cached);
}
if features.enabled(Feature::WebSearchRequest) {
return WebSearchMode::Live;
return Some(WebSearchMode::Live);
}
None
}

pub(crate) fn resolve_web_search_mode_for_turn(
explicit_mode: Option<WebSearchMode>,
sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
if let Some(mode) = explicit_mode {
return mode;
}
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
return WebSearchMode::Live;
WebSearchMode::Live
} else {
WebSearchMode::Cached
}
WebSearchMode::Cached
}

impl Config {
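Taken together, resolution now happens in two stages: config load keeps `None` when nothing is set, and each turn applies the sandbox-dependent default. A minimal standalone sketch of the turn-time step (enums redeclared locally for illustration; not the crate's real definitions):

#[derive(Clone, Copy, PartialEq, Debug)]
enum WebSearchMode { Disabled, Cached, Live }

enum SandboxPolicy { ReadOnly, DangerFullAccess }

// Mirrors resolve_web_search_mode_for_turn: an explicit mode always wins;
// otherwise DangerFullAccess defaults to Live and everything else to Cached.
fn resolve_for_turn(explicit: Option<WebSearchMode>, sandbox: &SandboxPolicy) -> WebSearchMode {
    if let Some(mode) = explicit {
        return mode;
    }
    if matches!(sandbox, SandboxPolicy::DangerFullAccess) {
        WebSearchMode::Live
    } else {
        WebSearchMode::Cached
    }
}

fn main() {
    assert_eq!(resolve_for_turn(None, &SandboxPolicy::ReadOnly), WebSearchMode::Cached);
    assert_eq!(resolve_for_turn(None, &SandboxPolicy::DangerFullAccess), WebSearchMode::Live);
    assert_eq!(
        resolve_for_turn(Some(WebSearchMode::Disabled), &SandboxPolicy::DangerFullAccess),
        WebSearchMode::Disabled
    );
}
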
@@ -1347,8 +1356,7 @@ impl Config {
AskForApproval::default()
}
});
let web_search_mode =
resolve_web_search_mode(&cfg, &config_profile, &features, &sandbox_policy);
let web_search_mode = resolve_web_search_mode(&cfg, &config_profile, &features);
// TODO(dylan): We should be able to leverage ConfigLayerStack so that
// we can reliably check this at every config level.
let did_user_set_custom_approval_policy_or_sandbox_mode = approval_policy_override
@@ -2271,15 +2279,12 @@ trust_level = "trusted"
}

#[test]
fn web_search_mode_defaults_to_cached_if_unset() {
fn web_search_mode_defaults_to_none_if_unset() {
let cfg = ConfigToml::default();
let profile = ConfigProfile::default();
let features = Features::with_defaults();

assert_eq!(
resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
WebSearchMode::Cached
);
assert_eq!(resolve_web_search_mode(&cfg, &profile, &features), None);
}

#[test]
@@ -2293,8 +2298,8 @@ trust_level = "trusted"
features.enable(Feature::WebSearchCached);

assert_eq!(
resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
WebSearchMode::Live
resolve_web_search_mode(&cfg, &profile, &features),
Some(WebSearchMode::Live)
);
}

@@ -2309,48 +2314,33 @@ trust_level = "trusted"
features.enable(Feature::WebSearchRequest);

assert_eq!(
resolve_web_search_mode(&cfg, &profile, &features, &SandboxPolicy::ReadOnly),
WebSearchMode::Disabled
resolve_web_search_mode(&cfg, &profile, &features),
Some(WebSearchMode::Disabled)
);
}

#[test]
fn danger_full_access_defaults_web_search_live_when_unset() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let cfg = ConfigToml {
sandbox_mode: Some(SandboxMode::DangerFullAccess),
..Default::default()
};
fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::ReadOnly);

let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;

assert_eq!(config.web_search_mode, WebSearchMode::Live);

Ok(())
assert_eq!(mode, WebSearchMode::Cached);
}

#[test]
fn explicit_web_search_mode_wins_in_danger_full_access() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let cfg = ConfigToml {
sandbox_mode: Some(SandboxMode::DangerFullAccess),
web_search: Some(WebSearchMode::Cached),
..Default::default()
};
fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::DangerFullAccess);

let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;
assert_eq!(mode, WebSearchMode::Live);
}

assert_eq!(config.web_search_mode, WebSearchMode::Cached);
#[test]
fn web_search_mode_for_turn_prefers_explicit_value() {
let mode = resolve_web_search_mode_for_turn(
Some(WebSearchMode::Cached),
&SandboxPolicy::DangerFullAccess,
);

Ok(())
assert_eq!(mode, WebSearchMode::Cached);
}

#[test]
@@ -3786,7 +3776,7 @@ model_verbosity = "high"
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_apply_patch_tool: false,
web_search_mode: WebSearchMode::Cached,
web_search_mode: None,
use_experimental_unified_exec_tool: false,
ghost_snapshot: GhostSnapshotConfig::default(),
features: Features::with_defaults(),
@@ -3869,7 +3859,7 @@ model_verbosity = "high"
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_apply_patch_tool: false,
web_search_mode: WebSearchMode::Cached,
web_search_mode: None,
use_experimental_unified_exec_tool: false,
ghost_snapshot: GhostSnapshotConfig::default(),
features: Features::with_defaults(),
@@ -3967,7 +3957,7 @@ model_verbosity = "high"
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_apply_patch_tool: false,
web_search_mode: WebSearchMode::Cached,
web_search_mode: None,
use_experimental_unified_exec_tool: false,
ghost_snapshot: GhostSnapshotConfig::default(),
features: Features::with_defaults(),
@@ -4051,7 +4041,7 @@ model_verbosity = "high"
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_apply_patch_tool: false,
web_search_mode: WebSearchMode::Cached,
web_search_mode: None,
use_experimental_unified_exec_tool: false,
ghost_snapshot: GhostSnapshotConfig::default(),
features: Features::with_defaults(),

@@ -472,7 +472,6 @@ const fn default_true() -> bool {
/// (primarily the Codex IDE extension). NOTE: these are different from
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct Notice {
/// Tracks whether the user has acknowledged the full access warning prompt.
pub hide_full_access_warning: Option<bool>,

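One observable effect of dropping `#[schemars(deny_unknown_fields)]`: the generated JSON schema no longer forbids unknown notice keys, while serde deserialization was already lenient. A small sketch of that serde default, using a simplified struct with `serde_json` standing in for the real settings format:

use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
struct Notice {
    hide_full_access_warning: Option<bool>,
}

fn main() {
    // Unknown keys are ignored by serde unless #[serde(deny_unknown_fields)]
    // is present; the removed schemars attribute only tightened the schema.
    let parsed: Notice = serde_json::from_str(
        r#"{"hide_full_access_warning": true, "some_future_notice": 1}"#,
    ).unwrap();
    assert_eq!(parsed.hide_full_access_warning, Some(true));
}
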
@@ -6,6 +6,8 @@ mod layer_io;
mod macos;
mod merge;
mod overrides;
#[cfg(test)]
mod requirements_exec_policy;
mod state;

#[cfg(test)]

188
codex-rs/core/src/config_loader/requirements_exec_policy.rs
Normal file
@@ -0,0 +1,188 @@
use codex_execpolicy::Decision;
use codex_execpolicy::Policy;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use codex_execpolicy::rule::RuleRef;
use multimap::MultiMap;
use serde::Deserialize;
use std::sync::Arc;
use thiserror::Error;

/// TOML types for expressing exec policy requirements.
///
/// These types are kept separate from `ConfigRequirementsToml` and are
/// converted into `codex-execpolicy` rules.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyTomlRoot {
pub exec_policy: RequirementsExecPolicyToml,
}

/// TOML representation of `[exec_policy]` within `requirements.toml`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyToml {
pub prefix_rules: Vec<RequirementsExecPolicyPrefixRuleToml>,
}

/// A TOML representation of the `prefix_rule(...)` Starlark builtin.
///
/// This mirrors the builtin defined in `execpolicy/src/parser.rs`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPrefixRuleToml {
pub pattern: Vec<RequirementsExecPolicyPatternTokenToml>,
pub decision: Option<RequirementsExecPolicyDecisionToml>,
pub justification: Option<String>,
}

/// TOML-friendly representation of a pattern token.
///
/// Starlark supports either a string token or a list of alternative tokens at
/// each position, but TOML arrays cannot mix strings and arrays. Using an
/// array of tables sidesteps that restriction.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub struct RequirementsExecPolicyPatternTokenToml {
pub token: Option<String>,
pub any_of: Option<Vec<String>>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum RequirementsExecPolicyDecisionToml {
Allow,
Prompt,
Forbidden,
}

impl RequirementsExecPolicyDecisionToml {
fn as_decision(self) -> Decision {
match self {
Self::Allow => Decision::Allow,
Self::Prompt => Decision::Prompt,
Self::Forbidden => Decision::Forbidden,
}
}
}

#[derive(Debug, Error)]
pub enum RequirementsExecPolicyParseError {
#[error("exec policy prefix_rules cannot be empty")]
EmptyPrefixRules,

#[error("exec policy prefix_rule at index {rule_index} has an empty pattern")]
EmptyPattern { rule_index: usize },

#[error(
"exec policy prefix_rule at index {rule_index} has an invalid pattern token at index {token_index}: {reason}"
)]
InvalidPatternToken {
rule_index: usize,
token_index: usize,
reason: String,
},

#[error("exec policy prefix_rule at index {rule_index} has an empty justification")]
EmptyJustification { rule_index: usize },
}

impl RequirementsExecPolicyToml {
/// Convert requirements TOML exec policy rules into the internal `.rules`
/// representation used by `codex-execpolicy`.
pub fn to_policy(&self) -> Result<Policy, RequirementsExecPolicyParseError> {
if self.prefix_rules.is_empty() {
return Err(RequirementsExecPolicyParseError::EmptyPrefixRules);
}

let mut rules_by_program: MultiMap<String, RuleRef> = MultiMap::new();

for (rule_index, rule) in self.prefix_rules.iter().enumerate() {
if let Some(justification) = &rule.justification
&& justification.trim().is_empty()
{
return Err(RequirementsExecPolicyParseError::EmptyJustification { rule_index });
}

if rule.pattern.is_empty() {
return Err(RequirementsExecPolicyParseError::EmptyPattern { rule_index });
}

let pattern_tokens = rule
.pattern
.iter()
.enumerate()
.map(|(token_index, token)| parse_pattern_token(token, rule_index, token_index))
.collect::<Result<Vec<_>, _>>()?;

let decision = rule
.decision
.map(RequirementsExecPolicyDecisionToml::as_decision)
.unwrap_or(Decision::Allow);
let justification = rule.justification.clone();

let (first_token, remaining_tokens) = pattern_tokens
.split_first()
.ok_or(RequirementsExecPolicyParseError::EmptyPattern { rule_index })?;

let rest: Arc<[PatternToken]> = remaining_tokens.to_vec().into();

for head in first_token.alternatives() {
let rule: RuleRef = Arc::new(PrefixRule {
pattern: PrefixPattern {
first: Arc::from(head.as_str()),
rest: rest.clone(),
},
decision,
justification: justification.clone(),
});
rules_by_program.insert(head.clone(), rule);
}
}

Ok(Policy::new(rules_by_program))
}
}

fn parse_pattern_token(
token: &RequirementsExecPolicyPatternTokenToml,
rule_index: usize,
token_index: usize,
) -> Result<PatternToken, RequirementsExecPolicyParseError> {
match (&token.token, &token.any_of) {
(Some(single), None) => {
if single.trim().is_empty() {
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
rule_index,
token_index,
reason: "token cannot be empty".to_string(),
});
}
Ok(PatternToken::Single(single.clone()))
}
(None, Some(alternatives)) => {
if alternatives.is_empty() {
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
rule_index,
token_index,
reason: "any_of cannot be empty".to_string(),
});
}
if alternatives.iter().any(|alt| alt.trim().is_empty()) {
return Err(RequirementsExecPolicyParseError::InvalidPatternToken {
rule_index,
token_index,
reason: "any_of cannot include empty tokens".to_string(),
});
}
Ok(PatternToken::Alts(alternatives.clone()))
}
(Some(_), Some(_)) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
rule_index,
token_index,
reason: "set either token or any_of, not both".to_string(),
}),
(None, None) => Err(RequirementsExecPolicyParseError::InvalidPatternToken {
rule_index,
token_index,
reason: "set either token or any_of".to_string(),
}),
}
}

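The `for head in first_token.alternatives()` loop above is the interesting step: a rule whose first pattern token is an `any_of` gets indexed once per program name. A standalone sketch of that bucketing, with a plain `HashMap<_, Vec<_>>` standing in for `MultiMap` and rule ids standing in for `RuleRef`:

use std::collections::HashMap;

// One entry per alternative head, so lookup by program name stays cheap.
fn bucket_rule_by_heads(heads: &[&str], rule_id: usize, by_program: &mut HashMap<String, Vec<usize>>) {
    for head in heads {
        by_program.entry((*head).to_string()).or_default().push(rule_id);
    }
}

fn main() {
    let mut by_program = HashMap::new();
    // e.g. pattern = [{ any_of = ["git", "hg"] }, { token = "status" }]
    bucket_rule_by_heads(&["git", "hg"], 0, &mut by_program);
    assert_eq!(by_program["git"], vec![0]);
    assert_eq!(by_program["hg"], vec![0]);
}
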
@@ -911,3 +911,165 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()

Ok(())
}

mod requirements_exec_policy_tests {
use super::super::requirements_exec_policy::RequirementsExecPolicyDecisionToml;
use super::super::requirements_exec_policy::RequirementsExecPolicyPatternTokenToml;
use super::super::requirements_exec_policy::RequirementsExecPolicyPrefixRuleToml;
use super::super::requirements_exec_policy::RequirementsExecPolicyToml;
use super::super::requirements_exec_policy::RequirementsExecPolicyTomlRoot;
use codex_execpolicy::Decision;
use codex_execpolicy::Evaluation;
use codex_execpolicy::RuleMatch;
use pretty_assertions::assert_eq;
use toml::from_str;

fn tokens(cmd: &[&str]) -> Vec<String> {
cmd.iter().map(std::string::ToString::to_string).collect()
}

fn allow_all(_: &[String]) -> Decision {
Decision::Allow
}

#[test]
fn parses_single_prefix_rule_from_raw_toml() -> anyhow::Result<()> {
let toml_str = r#"
[exec_policy]
prefix_rules = [
{ pattern = [{ token = "rm" }], decision = "forbidden" },
]
"#;

let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;

assert_eq!(
parsed,
RequirementsExecPolicyTomlRoot {
exec_policy: RequirementsExecPolicyToml {
prefix_rules: vec![RequirementsExecPolicyPrefixRuleToml {
pattern: vec![RequirementsExecPolicyPatternTokenToml {
token: Some("rm".to_string()),
any_of: None,
}],
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
justification: None,
}],
},
}
);

Ok(())
}

#[test]
fn parses_multiple_prefix_rules_from_raw_toml() -> anyhow::Result<()> {
let toml_str = r#"
[exec_policy]
prefix_rules = [
{ pattern = [{ token = "rm" }], decision = "forbidden" },
{ pattern = [{ token = "git" }, { any_of = ["push", "commit"] }], decision = "prompt", justification = "review changes before push or commit" },
]
"#;

let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;

assert_eq!(
parsed,
RequirementsExecPolicyTomlRoot {
exec_policy: RequirementsExecPolicyToml {
prefix_rules: vec![
RequirementsExecPolicyPrefixRuleToml {
pattern: vec![RequirementsExecPolicyPatternTokenToml {
token: Some("rm".to_string()),
any_of: None,
}],
decision: Some(RequirementsExecPolicyDecisionToml::Forbidden),
justification: None,
},
RequirementsExecPolicyPrefixRuleToml {
pattern: vec![
RequirementsExecPolicyPatternTokenToml {
token: Some("git".to_string()),
any_of: None,
},
RequirementsExecPolicyPatternTokenToml {
token: None,
any_of: Some(vec!["push".to_string(), "commit".to_string()]),
},
],
decision: Some(RequirementsExecPolicyDecisionToml::Prompt),
justification: Some("review changes before push or commit".to_string()),
},
],
},
}
);

Ok(())
}

#[test]
fn converts_rules_toml_into_internal_policy_representation() -> anyhow::Result<()> {
let toml_str = r#"
[exec_policy]
prefix_rules = [
{ pattern = [{ token = "rm" }], decision = "forbidden" },
]
"#;

let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
let policy = parsed.exec_policy.to_policy()?;

assert_eq!(
policy.check(&tokens(&["rm", "-rf", "/tmp"]), &allow_all),
Evaluation {
decision: Decision::Forbidden,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["rm"]),
decision: Decision::Forbidden,
justification: None,
}],
}
);

Ok(())
}

#[test]
fn head_any_of_expands_into_multiple_program_rules() -> anyhow::Result<()> {
let toml_str = r#"
[exec_policy]
prefix_rules = [
{ pattern = [{ any_of = ["git", "hg"] }, { token = "status" }], decision = "prompt" },
]
"#;
let parsed: RequirementsExecPolicyTomlRoot = from_str(toml_str)?;
let policy = parsed.exec_policy.to_policy()?;

assert_eq!(
policy.check(&tokens(&["git", "status"]), &allow_all),
Evaluation {
decision: Decision::Prompt,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Prompt,
justification: None,
}],
}
);
assert_eq!(
policy.check(&tokens(&["hg", "status"]), &allow_all),
Evaluation {
decision: Decision::Prompt,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["hg", "status"]),
decision: Decision::Prompt,
justification: None,
}],
}
);

Ok(())
}
}

@@ -95,6 +95,12 @@ pub fn originator() -> Originator {
get_originator_value(None)
}

pub fn is_first_party_originator(originator_value: &str) -> bool {
originator_value == DEFAULT_ORIGINATOR
|| originator_value == "codex_vscode"
|| originator_value.starts_with("Codex ")
}

pub fn get_codex_user_agent() -> String {
let build_version = env!("CARGO_PKG_VERSION");
let os_info = os_info::get();
@@ -185,6 +191,7 @@ fn is_sandboxed() -> bool {
mod tests {
use super::*;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;

#[test]
fn test_get_codex_user_agent() {
@@ -194,6 +201,15 @@ mod tests {
assert!(user_agent.starts_with(&prefix));
}

#[test]
fn is_first_party_originator_matches_known_values() {
assert_eq!(is_first_party_originator(DEFAULT_ORIGINATOR), true);
assert_eq!(is_first_party_originator("codex_vscode"), true);
assert_eq!(is_first_party_originator("Codex Something Else"), true);
assert_eq!(is_first_party_originator("codex_cli"), false);
assert_eq!(is_first_party_originator("Other"), false);
}

#[tokio::test]
async fn test_create_client_sets_default_headers() {
skip_if_no_network!();

@@ -1,9 +1,12 @@
use crate::config::types::EnvironmentVariablePattern;
use crate::config::types::ShellEnvironmentPolicy;
use crate::config::types::ShellEnvironmentPolicyInherit;
use codex_protocol::ThreadId;
use std::collections::HashMap;
use std::collections::HashSet;

pub const CODEX_THREAD_ID_ENV_VAR: &str = "CODEX_THREAD_ID";

/// Construct an environment map based on the rules in the specified policy. The
/// resulting map can be passed directly to `Command::envs()` after calling
/// `env_clear()` to ensure no unintended variables are leaked to the spawned
@@ -11,11 +14,21 @@ use std::collections::HashSet;
///
/// The derivation follows the algorithm documented in the struct-level comment
/// for [`ShellEnvironmentPolicy`].
pub fn create_env(policy: &ShellEnvironmentPolicy) -> HashMap<String, String> {
populate_env(std::env::vars(), policy)
///
/// `CODEX_THREAD_ID` is injected when a thread id is provided, even when
/// `include_only` is set.
pub fn create_env(
policy: &ShellEnvironmentPolicy,
thread_id: Option<ThreadId>,
) -> HashMap<String, String> {
populate_env(std::env::vars(), policy, thread_id)
}

fn populate_env<I>(vars: I, policy: &ShellEnvironmentPolicy) -> HashMap<String, String>
fn populate_env<I>(
vars: I,
policy: &ShellEnvironmentPolicy,
thread_id: Option<ThreadId>,
) -> HashMap<String, String>
where
I: IntoIterator<Item = (String, String)>,
{
@@ -72,6 +85,11 @@ where
env_map.retain(|k, _| matches_any(k, &policy.include_only));
}

// Step 6 – Populate the thread ID environment variable when provided.
if let Some(thread_id) = thread_id {
env_map.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());
}

env_map
}

@@ -98,14 +116,16 @@ mod tests {
]);

let policy = ShellEnvironmentPolicy::default(); // inherit All, default excludes ignored
let result = populate_env(vars, &policy);
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));

let expected: HashMap<String, String> = hashmap! {
let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
"HOME".to_string() => "/home/user".to_string(),
"API_KEY".to_string() => "secret".to_string(),
"SECRET_TOKEN".to_string() => "t".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}
@@ -123,12 +143,14 @@ mod tests {
ignore_default_excludes: false, // apply KEY/SECRET/TOKEN filter
..Default::default()
};
let result = populate_env(vars, &policy);
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));

let expected: HashMap<String, String> = hashmap! {
let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
"HOME".to_string() => "/home/user".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}
@@ -144,11 +166,13 @@ mod tests {
..Default::default()
};

let result = populate_env(vars, &policy);
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));

let expected: HashMap<String, String> = hashmap! {
let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}
@@ -163,11 +187,41 @@ mod tests {
};
policy.r#set.insert("NEW_VAR".to_string(), "42".to_string());

let result = populate_env(vars, &policy);
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));

let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
"NEW_VAR".to_string() => "42".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}

#[test]
fn populate_env_inserts_thread_id() {
let vars = make_vars(&[("PATH", "/usr/bin")]);
let policy = ShellEnvironmentPolicy::default();
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));

let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}

#[test]
fn populate_env_omits_thread_id_when_missing() {
let vars = make_vars(&[("PATH", "/usr/bin")]);
let policy = ShellEnvironmentPolicy::default();
let result = populate_env(vars, &policy, None);

let expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
"NEW_VAR".to_string() => "42".to_string(),
};

assert_eq!(result, expected);
@@ -183,8 +237,10 @@ mod tests {
..Default::default()
};

let result = populate_env(vars.clone(), &policy);
let expected: HashMap<String, String> = vars.into_iter().collect();
let thread_id = ThreadId::new();
let result = populate_env(vars.clone(), &policy, Some(thread_id));
let mut expected: HashMap<String, String> = vars.into_iter().collect();
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());
assert_eq!(result, expected);
}

@@ -198,10 +254,12 @@ mod tests {
..Default::default()
};

let result = populate_env(vars, &policy);
let expected: HashMap<String, String> = hashmap! {
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));
let mut expected: HashMap<String, String> = hashmap! {
"PATH".to_string() => "/usr/bin".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());
assert_eq!(result, expected);
}

@@ -220,11 +278,13 @@ mod tests {
..Default::default()
};

let result = populate_env(vars, &policy);
let expected: HashMap<String, String> = hashmap! {
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));
let mut expected: HashMap<String, String> = hashmap! {
"Path".to_string() => "C:\\Windows\\System32".to_string(),
"TEMP".to_string() => "C:\\Temp".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());

assert_eq!(result, expected);
}
@@ -242,10 +302,12 @@ mod tests {
.r#set
.insert("ONLY_VAR".to_string(), "yes".to_string());

let result = populate_env(vars, &policy);
let expected: HashMap<String, String> = hashmap! {
let thread_id = ThreadId::new();
let result = populate_env(vars, &policy, Some(thread_id));
let mut expected: HashMap<String, String> = hashmap! {
"ONLY_VAR".to_string() => "yes".to_string(),
};
expected.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), thread_id.to_string());
assert_eq!(result, expected);
}
}

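A standalone sketch of the `CODEX_THREAD_ID` injection step above, with a plain string standing in for `ThreadId` and a bare `HashMap` standing in for the policy machinery:

use std::collections::HashMap;

const CODEX_THREAD_ID_ENV_VAR: &str = "CODEX_THREAD_ID";

// Mirrors step 6 of populate_env: the thread id is inserted after all
// filtering, which is why it survives even a restrictive include_only list.
fn inject_thread_id(mut env: HashMap<String, String>, thread_id: Option<&str>) -> HashMap<String, String> {
    if let Some(id) = thread_id {
        env.insert(CODEX_THREAD_ID_ENV_VAR.to_string(), id.to_string());
    }
    env
}

fn main() {
    let env = HashMap::from([("PATH".to_string(), "/usr/bin".to_string())]);
    let env = inject_thread_id(env, Some("example-thread-id"));
    assert_eq!(env.get(CODEX_THREAD_ID_ENV_VAR).map(String::as_str), Some("example-thread-id"));
    assert_eq!(env.get("PATH").map(String::as_str), Some("/usr/bin"));
}
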
@@ -87,6 +87,15 @@ pub(crate) struct ExecPolicyManager {
policy: ArcSwap<Policy>,
}

pub(crate) struct ExecApprovalRequest<'a> {
pub(crate) features: &'a Features,
pub(crate) command: &'a [String],
pub(crate) approval_policy: AskForApproval,
pub(crate) sandbox_policy: &'a SandboxPolicy,
pub(crate) sandbox_permissions: SandboxPermissions,
pub(crate) prefix_rule: Option<Vec<String>>,
}

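This is a parameter-object refactor: five positional arguments collapse into one struct with named fields, so new options like `prefix_rule` arrive as named fields rather than yet another positional argument. A minimal standalone sketch of the pattern (simplified types, not the crate's real ones):

struct Request<'a> {
    command: &'a [String],
    escalated: bool,
    prefix_rule: Option<Vec<String>>,
}

fn check(req: Request<'_>) -> bool {
    // Destructure once at the top, then use plain locals, as the real
    // create_exec_approval_requirement_for_command does with ExecApprovalRequest.
    let Request { command, escalated, prefix_rule } = req;
    !command.is_empty() && (escalated || prefix_rule.is_some())
}

fn main() {
    let command = vec!["cargo".to_string(), "install".to_string(), "cargo-insta".to_string()];
    let needs_review = check(Request {
        command: &command,
        escalated: false,
        prefix_rule: Some(vec!["cargo".to_string(), "install".to_string()]),
    });
    assert!(needs_review);
}
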
impl ExecPolicyManager {
pub(crate) fn new(policy: Arc<Policy>) -> Self {
Self {
@@ -112,12 +121,16 @@ impl ExecPolicyManager {

pub(crate) async fn create_exec_approval_requirement_for_command(
&self,
features: &Features,
command: &[String],
approval_policy: AskForApproval,
sandbox_policy: &SandboxPolicy,
sandbox_permissions: SandboxPermissions,
req: ExecApprovalRequest<'_>,
) -> ExecApprovalRequirement {
let ExecApprovalRequest {
features,
command,
approval_policy,
sandbox_policy,
sandbox_permissions,
prefix_rule,
} = req;
let exec_policy = self.current();
let commands =
parse_shell_lc_plain_commands(command).unwrap_or_else(|| vec![command.to_vec()]);
@@ -131,6 +144,12 @@ impl ExecPolicyManager {
};
let evaluation = exec_policy.check_multiple(commands.iter(), &exec_policy_fallback);

let requested_amendment = derive_requested_execpolicy_amendment(
features,
prefix_rule.as_ref(),
&evaluation.matched_rules,
);

match evaluation.decision {
Decision::Forbidden => ExecApprovalRequirement::Forbidden {
reason: derive_forbidden_reason(command, &evaluation),
@@ -144,9 +163,11 @@ impl ExecPolicyManager {
ExecApprovalRequirement::NeedsApproval {
reason: derive_prompt_reason(command, &evaluation),
proposed_execpolicy_amendment: if features.enabled(Feature::ExecPolicy) {
try_derive_execpolicy_amendment_for_prompt_rules(
&evaluation.matched_rules,
)
requested_amendment.or_else(|| {
try_derive_execpolicy_amendment_for_prompt_rules(
&evaluation.matched_rules,
)
})
} else {
None
},
@@ -382,6 +403,30 @@ fn try_derive_execpolicy_amendment_for_allow_rules(
})
}

fn derive_requested_execpolicy_amendment(
features: &Features,
prefix_rule: Option<&Vec<String>>,
matched_rules: &[RuleMatch],
) -> Option<ExecPolicyAmendment> {
if !features.enabled(Feature::ExecPolicy) {
return None;
}

let prefix_rule = prefix_rule?;
if prefix_rule.is_empty() {
return None;
}

if matched_rules
.iter()
.any(|rule_match| is_policy_match(rule_match) && rule_match.decision() == Decision::Prompt)
{
return None;
}

Some(ExecPolicyAmendment::new(prefix_rule.clone()))
}

/// Only return a reason when a policy rule drove the prompt decision.
fn derive_prompt_reason(command_args: &[String], evaluation: &Evaluation) -> Option<String> {
let command = render_shlex_command(command_args);
@@ -756,13 +801,14 @@ prefix_rule(pattern=["rm"], decision="forbidden")

let manager = ExecPolicyManager::new(policy);
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&forbidden_script,
AskForApproval::OnRequest,
&SandboxPolicy::DangerFullAccess,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &forbidden_script,
approval_policy: AskForApproval::OnRequest,
sandbox_policy: &SandboxPolicy::DangerFullAccess,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await;

assert_eq!(
@@ -790,17 +836,18 @@ prefix_rule(

let manager = ExecPolicyManager::new(policy);
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&[
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &[
"rm".to_string(),
"-rf".to_string(),
"/some/important/folder".to_string(),
],
AskForApproval::OnRequest,
&SandboxPolicy::DangerFullAccess,
SandboxPermissions::UseDefault,
)
approval_policy: AskForApproval::OnRequest,
sandbox_policy: &SandboxPolicy::DangerFullAccess,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await;

assert_eq!(
@@ -823,13 +870,14 @@ prefix_rule(

let manager = ExecPolicyManager::new(policy);
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&command,
AskForApproval::OnRequest,
&SandboxPolicy::DangerFullAccess,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &command,
approval_policy: AskForApproval::OnRequest,
sandbox_policy: &SandboxPolicy::DangerFullAccess,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await;

assert_eq!(
@@ -853,13 +901,14 @@ prefix_rule(

let manager = ExecPolicyManager::new(policy);
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&command,
AskForApproval::Never,
&SandboxPolicy::DangerFullAccess,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &command,
approval_policy: AskForApproval::Never,
sandbox_policy: &SandboxPolicy::DangerFullAccess,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await;

assert_eq!(
@@ -876,13 +925,14 @@ prefix_rule(

let manager = ExecPolicyManager::default();
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&command,
AskForApproval::UnlessTrusted,
&SandboxPolicy::ReadOnly,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &command,
approval_policy: AskForApproval::UnlessTrusted,
sandbox_policy: &SandboxPolicy::ReadOnly,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await;

assert_eq!(
@@ -894,6 +944,40 @@ prefix_rule(
);
}

#[tokio::test]
async fn request_rule_uses_prefix_rule() {
let command = vec![
"cargo".to_string(),
"install".to_string(),
"cargo-insta".to_string(),
];
let manager = ExecPolicyManager::default();
let mut features = Features::with_defaults();
features.enable(Feature::RequestRule);

let requirement = manager
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &features,
command: &command,
approval_policy: AskForApproval::OnRequest,
sandbox_policy: &SandboxPolicy::ReadOnly,
sandbox_permissions: SandboxPermissions::RequireEscalated,
prefix_rule: Some(vec!["cargo".to_string(), "install".to_string()]),
})
.await;

assert_eq!(
requirement,
ExecApprovalRequirement::NeedsApproval {
reason: None,
proposed_execpolicy_amendment: Some(ExecPolicyAmendment::new(vec![
"cargo".to_string(),
"install".to_string(),
])),
}
);
}

#[tokio::test]
async fn heuristics_apply_when_other_commands_match_policy() {
let policy_src = r#"prefix_rule(pattern=["apple"], decision="allow")"#;
@@ -910,13 +994,14 @@ prefix_rule(

assert_eq!(
ExecPolicyManager::new(policy)
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&command,
AskForApproval::UnlessTrusted,
&SandboxPolicy::DangerFullAccess,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &command,
approval_policy: AskForApproval::UnlessTrusted,
sandbox_policy: &SandboxPolicy::DangerFullAccess,
sandbox_permissions: SandboxPermissions::UseDefault,
prefix_rule: None,
})
.await,
ExecApprovalRequirement::NeedsApproval {
reason: None,
@@ -984,13 +1069,14 @@ prefix_rule(

let manager = ExecPolicyManager::default();
let requirement = manager
.create_exec_approval_requirement_for_command(
&Features::with_defaults(),
&command,
AskForApproval::UnlessTrusted,
&SandboxPolicy::ReadOnly,
SandboxPermissions::UseDefault,
)
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
features: &Features::with_defaults(),
command: &command,
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1011,13 +1097,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1041,13 +1128,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::DangerFullAccess,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1068,13 +1156,14 @@ prefix_rule(
|
||||
];
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1106,13 +1195,14 @@ prefix_rule(
|
||||
|
||||
assert_eq!(
|
||||
ExecPolicyManager::new(policy)
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::UnlessTrusted,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::UnlessTrusted,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
ExecApprovalRequirement::NeedsApproval {
|
||||
reason: None,
|
||||
@@ -1129,13 +1219,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::default();
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1159,13 +1250,14 @@ prefix_rule(
|
||||
|
||||
let manager = ExecPolicyManager::new(policy);
|
||||
let requirement = manager
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&Features::with_defaults(),
|
||||
&command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
SandboxPermissions::UseDefault,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &Features::with_defaults(),
|
||||
command: &command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: SandboxPermissions::UseDefault,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
assert_eq!(
|
||||
@@ -1226,13 +1318,14 @@ prefix_rule(
|
||||
assert_eq!(
|
||||
expected_req,
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&sneaky_command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &sneaky_command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
"{pwsh_approval_reason}"
|
||||
);
|
||||
@@ -1249,13 +1342,14 @@ prefix_rule(
|
||||
]))),
|
||||
},
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&dangerous_command,
|
||||
AskForApproval::OnRequest,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &dangerous_command,
|
||||
approval_policy: AskForApproval::OnRequest,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
r#"On all platforms, a forbidden command should require approval
|
||||
(unless AskForApproval::Never is specified)."#
|
||||
@@ -1268,13 +1362,14 @@ prefix_rule(
|
||||
reason: "`rm -rf /important/data` rejected: blocked by policy".to_string(),
|
||||
},
|
||||
policy
|
||||
.create_exec_approval_requirement_for_command(
|
||||
&features,
|
||||
&dangerous_command,
|
||||
AskForApproval::Never,
|
||||
&SandboxPolicy::ReadOnly,
|
||||
permissions,
|
||||
)
|
||||
.create_exec_approval_requirement_for_command(ExecApprovalRequest {
|
||||
features: &features,
|
||||
command: &dangerous_command,
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: &SandboxPolicy::ReadOnly,
|
||||
sandbox_permissions: permissions,
|
||||
prefix_rule: None,
|
||||
})
|
||||
.await,
|
||||
r#"On all platforms, a forbidden command should require approval
|
||||
(unless AskForApproval::Never is specified)."#
|
||||
|
||||
@@ -89,6 +89,8 @@ pub enum Feature {
    WebSearchCached,
    /// Gate the execpolicy enforcement for shell/unified exec.
    ExecPolicy,
    /// Allow the model to request approval and propose exec rules.
    RequestRule,
    /// Enable Windows sandbox (restricted token) on Windows.
    WindowsSandbox,
    /// Use the elevated Windows sandbox pipeline (setup + runner).
@@ -99,6 +101,8 @@ pub enum Feature {
    RemoteModels,
    /// Experimental shell snapshotting.
    ShellSnapshot,
    /// Persist rollout metadata to a local SQLite database.
    Sqlite,
    /// Append additional AGENTS.md guidance to user instructions.
    ChildAgentsMd,
    /// Enforce UTF8 output in Powershell.
@@ -109,6 +113,8 @@ pub enum Feature {
    Collab,
    /// Enable connectors (apps).
    Connectors,
    /// Allow prompting and installing missing MCP dependencies.
    SkillMcpDependencyInstall,
    /// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
    Steer,
    /// Enable collaboration modes (Plan, Code, Pair Programming, Execute).
@@ -373,6 +379,12 @@ pub const FEATURES: &[FeatureSpec] = &[
        },
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::Sqlite,
        key: "sqlite",
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ChildAgentsMd,
        key: "child_agents_md",
@@ -391,6 +403,12 @@ pub const FEATURES: &[FeatureSpec] = &[
        stage: Stage::UnderDevelopment,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::RequestRule,
        key: "request_rule",
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::WindowsSandbox,
        key: "experimental_windows_sandbox",
@@ -449,6 +467,12 @@ pub const FEATURES: &[FeatureSpec] = &[
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::SkillMcpDependencyInstall,
        key: "skill_mcp_dependency_install",
        stage: Stage::Stable,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::Steer,
        key: "steer",

@@ -91,6 +91,7 @@ pub mod shell;
pub mod shell_snapshot;
pub mod skills;
pub mod spawn;
pub mod state_db;
pub mod terminal;
mod tools;
pub mod turn_diff_tracker;

@@ -4,12 +4,53 @@ use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::determine_streamable_http_auth_status;
use codex_rmcp_client::supports_oauth_login;
use futures::future::join_all;
use tracing::warn;

use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;

#[derive(Debug, Clone)]
pub struct McpOAuthLoginConfig {
    pub url: String,
    pub http_headers: Option<HashMap<String, String>>,
    pub env_http_headers: Option<HashMap<String, String>>,
}

#[derive(Debug)]
pub enum McpOAuthLoginSupport {
    Supported(McpOAuthLoginConfig),
    Unsupported,
    Unknown(anyhow::Error),
}

pub async fn oauth_login_support(transport: &McpServerTransportConfig) -> McpOAuthLoginSupport {
    let McpServerTransportConfig::StreamableHttp {
        url,
        bearer_token_env_var,
        http_headers,
        env_http_headers,
    } = transport
    else {
        return McpOAuthLoginSupport::Unsupported;
    };

    if bearer_token_env_var.is_some() {
        return McpOAuthLoginSupport::Unsupported;
    }

    match supports_oauth_login(url).await {
        Ok(true) => McpOAuthLoginSupport::Supported(McpOAuthLoginConfig {
            url: url.clone(),
            http_headers: http_headers.clone(),
            env_http_headers: env_http_headers.clone(),
        }),
        Ok(false) => McpOAuthLoginSupport::Unsupported,
        Err(err) => McpOAuthLoginSupport::Unknown(err),
    }
}

#[derive(Debug, Clone)]
pub struct McpAuthStatusEntry {
    pub config: McpServerConfig,

@@ -1,4 +1,8 @@
pub mod auth;
mod skill_dependencies;

pub(crate) use skill_dependencies::maybe_prompt_and_install_mcp_dependencies;

use std::collections::HashMap;
use std::env;
use std::path::PathBuf;

codex-rs/core/src/mcp/skill_dependencies.rs (new file, 518 lines)
@@ -0,0 +1,518 @@
use std::collections::HashMap;
use std::collections::HashSet;

use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_protocol::request_user_input::RequestUserInputQuestion;
use codex_protocol::request_user_input::RequestUserInputQuestionOption;
use codex_protocol::request_user_input::RequestUserInputResponse;
use codex_rmcp_client::perform_oauth_login;
use tokio_util::sync::CancellationToken;
use tracing::warn;

use super::auth::McpOAuthLoginSupport;
use super::auth::oauth_login_support;
use super::effective_mcp_servers;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::config::Config;
use crate::config::edit::ConfigEditsBuilder;
use crate::config::load_global_mcp_servers;
use crate::config::types::McpServerConfig;
use crate::config::types::McpServerTransportConfig;
use crate::default_client::is_first_party_originator;
use crate::default_client::originator;
use crate::features::Feature;
use crate::skills::SkillMetadata;
use crate::skills::model::SkillToolDependency;

const SKILL_MCP_DEPENDENCY_PROMPT_ID: &str = "skill_mcp_dependency_install";
const MCP_DEPENDENCY_OPTION_INSTALL: &str = "Install";
const MCP_DEPENDENCY_OPTION_SKIP: &str = "Continue anyway";

fn is_full_access_mode(turn_context: &TurnContext) -> bool {
    matches!(turn_context.approval_policy, AskForApproval::Never)
        && matches!(
            turn_context.sandbox_policy,
            SandboxPolicy::DangerFullAccess | SandboxPolicy::ExternalSandbox { .. }
        )
}

fn format_missing_mcp_dependencies(missing: &HashMap<String, McpServerConfig>) -> String {
    let mut names = missing.keys().cloned().collect::<Vec<_>>();
    names.sort();
    names.join(", ")
}

async fn filter_prompted_mcp_dependencies(
    sess: &Session,
    missing: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let prompted = sess.mcp_dependency_prompted().await;
    if prompted.is_empty() {
        return missing.clone();
    }

    missing
        .iter()
        .filter(|(name, config)| !prompted.contains(&canonical_mcp_server_key(name, config)))
        .map(|(name, config)| (name.clone(), config.clone()))
        .collect()
}

async fn should_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    missing: &HashMap<String, McpServerConfig>,
    cancellation_token: &CancellationToken,
) -> bool {
    if is_full_access_mode(turn_context) {
        return true;
    }

    let server_list = format_missing_mcp_dependencies(missing);
    let question = RequestUserInputQuestion {
        id: SKILL_MCP_DEPENDENCY_PROMPT_ID.to_string(),
        header: "Install MCP servers?".to_string(),
        question: format!(
            "The following MCP servers are required by the selected skills but are not installed yet: {server_list}. Install them now?"
        ),
        is_other: false,
        options: Some(vec![
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_INSTALL.to_string(),
                description:
                    "Install and enable the missing MCP servers in your global config."
                        .to_string(),
            },
            RequestUserInputQuestionOption {
                label: MCP_DEPENDENCY_OPTION_SKIP.to_string(),
                description: "Skip installation for now and do not show again for these MCP servers in this session."
                    .to_string(),
            },
        ]),
    };
    let args = RequestUserInputArgs {
        questions: vec![question],
    };
    let sub_id = &turn_context.sub_id;
    let call_id = format!("mcp-deps-{sub_id}");
    let response_fut = sess.request_user_input(turn_context, call_id, args);
    let response = tokio::select! {
        biased;
        _ = cancellation_token.cancelled() => {
            let empty = RequestUserInputResponse {
                answers: HashMap::new(),
            };
            sess.notify_user_input_response(sub_id, empty.clone()).await;
            empty
        }
        response = response_fut => response.unwrap_or_else(|| RequestUserInputResponse {
            answers: HashMap::new(),
        }),
    };

    let install = response
        .answers
        .get(SKILL_MCP_DEPENDENCY_PROMPT_ID)
        .is_some_and(|answer| {
            answer
                .answers
                .iter()
                .any(|entry| entry == MCP_DEPENDENCY_OPTION_INSTALL)
        });

    let prompted_keys = missing
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config));
    sess.record_mcp_dependency_prompted(prompted_keys).await;

    install
}

pub(crate) async fn maybe_prompt_and_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    cancellation_token: &CancellationToken,
    mentioned_skills: &[SkillMetadata],
) {
    let originator_value = originator().value;
    if !is_first_party_originator(originator_value.as_str()) {
        // Only support first-party clients for now.
        return;
    }

    let config = turn_context.client.config();
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let unprompted_missing = filter_prompted_mcp_dependencies(sess, &missing).await;
    if unprompted_missing.is_empty() {
        return;
    }

    if should_install_mcp_dependencies(sess, turn_context, &unprompted_missing, cancellation_token)
        .await
    {
        maybe_install_mcp_dependencies(sess, turn_context, config.as_ref(), mentioned_skills).await;
    }
}

pub(crate) async fn maybe_install_mcp_dependencies(
    sess: &Session,
    turn_context: &TurnContext,
    config: &Config,
    mentioned_skills: &[SkillMetadata],
) {
    if mentioned_skills.is_empty() || !config.features.enabled(Feature::SkillMcpDependencyInstall) {
        return;
    }

    let codex_home = config.codex_home.clone();
    let installed = config.mcp_servers.get().clone();
    let missing = collect_missing_mcp_dependencies(mentioned_skills, &installed);
    if missing.is_empty() {
        return;
    }

    let mut servers = match load_global_mcp_servers(&codex_home).await {
        Ok(servers) => servers,
        Err(err) => {
            warn!("failed to load MCP servers while installing skill dependencies: {err}");
            return;
        }
    };

    let mut updated = false;
    let mut added = Vec::new();
    for (name, config) in missing {
        if servers.contains_key(&name) {
            continue;
        }
        servers.insert(name.clone(), config.clone());
        added.push((name, config));
        updated = true;
    }

    if !updated {
        return;
    }

    if let Err(err) = ConfigEditsBuilder::new(&codex_home)
        .replace_mcp_servers(&servers)
        .apply()
        .await
    {
        warn!("failed to persist MCP dependencies for mentioned skills: {err}");
        return;
    }

    for (name, server_config) in added {
        let oauth_config = match oauth_login_support(&server_config.transport).await {
            McpOAuthLoginSupport::Supported(config) => config,
            McpOAuthLoginSupport::Unsupported => continue,
            McpOAuthLoginSupport::Unknown(err) => {
                warn!("MCP server may or may not require login for dependency {name}: {err}");
                continue;
            }
        };

        sess.notify_background_event(
            turn_context,
            format!(
                "Authenticating MCP {name}... Follow instructions in your browser if prompted."
            ),
        )
        .await;

        if let Err(err) = perform_oauth_login(
            &name,
            &oauth_config.url,
            config.mcp_oauth_credentials_store_mode,
            oauth_config.http_headers,
            oauth_config.env_http_headers,
            &[],
            config.mcp_oauth_callback_port,
        )
        .await
        {
            warn!("failed to login to MCP dependency {name}: {err}");
        }
    }

    // Refresh from the effective merged MCP map (global + repo + managed) and
    // overlay the updated global servers so we don't drop repo-scoped servers.
    let auth = sess.services.auth_manager.auth().await;
    let mut refresh_servers = effective_mcp_servers(config, auth.as_ref());
    for (name, server_config) in &servers {
        refresh_servers
            .entry(name.clone())
            .or_insert_with(|| server_config.clone());
    }
    sess.refresh_mcp_servers_now(
        turn_context,
        refresh_servers,
        config.mcp_oauth_credentials_store_mode,
    )
    .await;
}

fn canonical_mcp_key(transport: &str, identifier: &str, fallback: &str) -> String {
    let identifier = identifier.trim();
    if identifier.is_empty() {
        fallback.to_string()
    } else {
        format!("mcp__{transport}__{identifier}")
    }
}

fn canonical_mcp_server_key(name: &str, config: &McpServerConfig) -> String {
    match &config.transport {
        McpServerTransportConfig::Stdio { command, .. } => {
            canonical_mcp_key("stdio", command, name)
        }
        McpServerTransportConfig::StreamableHttp { url, .. } => {
            canonical_mcp_key("streamable_http", url, name)
        }
    }
}

fn canonical_mcp_dependency_key(dependency: &SkillToolDependency) -> Result<String, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(canonical_mcp_key("streamable_http", url, &dependency.value));
    }
    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(canonical_mcp_key("stdio", command, &dependency.value));
    }
    Err(format!("unsupported transport {transport}"))
}

pub(crate) fn collect_missing_mcp_dependencies(
    mentioned_skills: &[SkillMetadata],
    installed: &HashMap<String, McpServerConfig>,
) -> HashMap<String, McpServerConfig> {
    let mut missing = HashMap::new();
    let installed_keys: HashSet<String> = installed
        .iter()
        .map(|(name, config)| canonical_mcp_server_key(name, config))
        .collect();
    let mut seen_canonical_keys = HashSet::new();

    for skill in mentioned_skills {
        let Some(dependencies) = skill.dependencies.as_ref() else {
            continue;
        };

        for tool in &dependencies.tools {
            if !tool.r#type.eq_ignore_ascii_case("mcp") {
                continue;
            }
            let dependency_key = match canonical_mcp_dependency_key(tool) {
                Ok(key) => key,
                Err(err) => {
                    let dependency = tool.value.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };
            if installed_keys.contains(&dependency_key)
                || seen_canonical_keys.contains(&dependency_key)
            {
                continue;
            }

            let config = match mcp_dependency_to_server_config(tool) {
                Ok(config) => config,
                Err(err) => {
                    let dependency = dependency_key.as_str();
                    let skill_name = skill.name.as_str();
                    warn!(
                        "unable to auto-install MCP dependency {dependency} for skill {skill_name}: {err}",
                    );
                    continue;
                }
            };

            missing.insert(tool.value.clone(), config);
            seen_canonical_keys.insert(dependency_key);
        }
    }

    missing
}

fn mcp_dependency_to_server_config(
    dependency: &SkillToolDependency,
) -> Result<McpServerConfig, String> {
    let transport = dependency.transport.as_deref().unwrap_or("streamable_http");
    if transport.eq_ignore_ascii_case("streamable_http") {
        let url = dependency
            .url
            .as_ref()
            .ok_or_else(|| "missing url for streamable_http dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::StreamableHttp {
                url: url.clone(),
                bearer_token_env_var: None,
                http_headers: None,
                env_http_headers: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    if transport.eq_ignore_ascii_case("stdio") {
        let command = dependency
            .command
            .as_ref()
            .ok_or_else(|| "missing command for stdio dependency".to_string())?;
        return Ok(McpServerConfig {
            transport: McpServerTransportConfig::Stdio {
                command: command.clone(),
                args: Vec::new(),
                env: None,
                env_vars: Vec::new(),
                cwd: None,
            },
            enabled: true,
            disabled_reason: None,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
            enabled_tools: None,
            disabled_tools: None,
            scopes: None,
        });
    }

    Err(format!("unsupported transport {transport}"))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::skills::model::SkillDependencies;
    use codex_protocol::protocol::SkillScope;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    fn skill_with_tools(tools: Vec<SkillToolDependency>) -> SkillMetadata {
        SkillMetadata {
            name: "skill".to_string(),
            description: "skill".to_string(),
            short_description: None,
            interface: None,
            dependencies: Some(SkillDependencies { tools }),
            path: PathBuf::from("skill"),
            scope: SkillScope::User,
        }
    }

    #[test]
    fn collect_missing_respects_canonical_installed_key() {
        let url = "https://example.com/mcp".to_string();
        let skills = vec![skill_with_tools(vec![SkillToolDependency {
            r#type: "mcp".to_string(),
            value: "github".to_string(),
            description: None,
            transport: Some("streamable_http".to_string()),
            command: None,
            url: Some(url.clone()),
        }])];
        let installed = HashMap::from([(
            "alias".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &installed),
            HashMap::new()
        );
    }

    #[test]
    fn collect_missing_dedupes_by_canonical_key_but_preserves_original_name() {
        let url = "https://example.com/one".to_string();
        let skills = vec![skill_with_tools(vec![
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-one".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
            SkillToolDependency {
                r#type: "mcp".to_string(),
                value: "alias-two".to_string(),
                description: None,
                transport: Some("streamable_http".to_string()),
                command: None,
                url: Some(url.clone()),
            },
        ])];

        let expected = HashMap::from([(
            "alias-one".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var: None,
                    http_headers: None,
                    env_http_headers: None,
                },
                enabled: true,
                disabled_reason: None,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
                enabled_tools: None,
                disabled_tools: None,
                scopes: None,
            },
        )]);

        assert_eq!(
            collect_missing_mcp_dependencies(&skills, &HashMap::new()),
            expected
        );
    }
}

@@ -259,9 +259,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
            truncation_policy: TruncationPolicyConfig::tokens(10_000),
            context_window: Some(CONTEXT_WINDOW_272K),
        )
    } else if (slug.starts_with("gpt-5.2") || slug.starts_with("boomslang"))
        && !slug.contains("codex")
    {
    } else if slug.starts_with("gpt-5.2") || slug.starts_with("boomslang") {
        model_info!(
            slug,
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
@@ -276,7 +274,7 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
            context_window: Some(CONTEXT_WINDOW_272K),
            supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh_non_codex(),
        )
    } else if slug.starts_with("gpt-5.1") && !slug.contains("codex") {
    } else if slug.starts_with("gpt-5.1") {
        model_info!(
            slug,
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),

@@ -19,7 +19,9 @@ use uuid::Uuid;
use super::ARCHIVED_SESSIONS_SUBDIR;
use super::SESSIONS_SUBDIR;
use crate::protocol::EventMsg;
use crate::state_db;
use codex_file_search as file_search;
use codex_protocol::ThreadId;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMetaLine;
@@ -794,7 +796,7 @@ async fn collect_rollout_day_files(
    Ok(day_files)
}

fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
pub(crate) fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
    // Expected: rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl
    let core = name.strip_prefix("rollout-")?.strip_suffix(".jsonl")?;

@@ -1093,11 +1095,39 @@ async fn find_thread_path_by_id_str_in_subdir(
    )
    .map_err(|e| io::Error::other(format!("file search failed: {e}")))?;

    Ok(results
    let found = results
        .matches
        .into_iter()
        .next()
        .map(|m| root.join(m.path)))
        .map(|m| root.join(m.path));

    // Checking if DB is at parity.
    // TODO(jif): sqlite migration phase 1
    let archived_only = match subdir {
        SESSIONS_SUBDIR => Some(false),
        ARCHIVED_SESSIONS_SUBDIR => Some(true),
        _ => None,
    };
    let state_db_ctx = state_db::open_if_present(codex_home, "").await;
    if let Some(state_db_ctx) = state_db_ctx.as_deref()
        && let Ok(thread_id) = ThreadId::from_string(id_str)
    {
        let db_path = state_db::find_rollout_path_by_id(
            Some(state_db_ctx),
            thread_id,
            archived_only,
            "find_path_query",
        )
        .await;
        let canonical_path = found.as_deref();
        if db_path.as_deref() != canonical_path {
            tracing::warn!(
                "state db path mismatch for thread {thread_id:?}: canonical={canonical_path:?} db={db_path:?}"
            );
            state_db::record_discrepancy("find_thread_path_by_id_str_in_subdir", "path_mismatch");
        }
    }
    Ok(found)
}

/// Locate a recorded thread rollout file by its UUID string using the existing

codex-rs/core/src/rollout/metadata.rs (new file, 342 lines)
@@ -0,0 +1,342 @@
use crate::config::Config;
use crate::rollout;
use crate::rollout::list::parse_timestamp_uuid_from_filename;
use crate::rollout::recorder::RolloutRecorder;
use chrono::DateTime;
use chrono::NaiveDateTime;
use chrono::Timelike;
use chrono::Utc;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use codex_state::BackfillStats;
use codex_state::DB_ERROR_METRIC;
use codex_state::ExtractionOutcome;
use codex_state::ThreadMetadataBuilder;
use codex_state::apply_rollout_item;
use std::cmp::Reverse;
use std::path::Path;
use std::path::PathBuf;
use tracing::warn;

const ROLLOUT_PREFIX: &str = "rollout-";
const ROLLOUT_SUFFIX: &str = ".jsonl";

pub(crate) fn builder_from_session_meta(
    session_meta: &SessionMetaLine,
    rollout_path: &Path,
) -> Option<ThreadMetadataBuilder> {
    let created_at = parse_timestamp_to_utc(session_meta.meta.timestamp.as_str())?;
    let mut builder = ThreadMetadataBuilder::new(
        session_meta.meta.id,
        rollout_path.to_path_buf(),
        created_at,
        session_meta.meta.source.clone(),
    );
    builder.model_provider = session_meta.meta.model_provider.clone();
    builder.cwd = session_meta.meta.cwd.clone();
    builder.sandbox_policy = SandboxPolicy::ReadOnly;
    builder.approval_mode = AskForApproval::OnRequest;
    if let Some(git) = session_meta.git.as_ref() {
        builder.git_sha = git.commit_hash.clone();
        builder.git_branch = git.branch.clone();
        builder.git_origin_url = git.repository_url.clone();
    }
    Some(builder)
}

pub(crate) fn builder_from_items(
    items: &[RolloutItem],
    rollout_path: &Path,
) -> Option<ThreadMetadataBuilder> {
    if let Some(session_meta) = items.iter().find_map(|item| match item {
        RolloutItem::SessionMeta(meta_line) => Some(meta_line),
        RolloutItem::ResponseItem(_)
        | RolloutItem::Compacted(_)
        | RolloutItem::TurnContext(_)
        | RolloutItem::EventMsg(_) => None,
    }) && let Some(builder) = builder_from_session_meta(session_meta, rollout_path)
    {
        return Some(builder);
    }

    let file_name = rollout_path.file_name()?.to_str()?;
    if !file_name.starts_with(ROLLOUT_PREFIX) || !file_name.ends_with(ROLLOUT_SUFFIX) {
        return None;
    }
    let (created_ts, uuid) = parse_timestamp_uuid_from_filename(file_name)?;
    let created_at =
        DateTime::<Utc>::from_timestamp(created_ts.unix_timestamp(), 0)?.with_nanosecond(0)?;
    let id = ThreadId::from_string(&uuid.to_string()).ok()?;
    Some(ThreadMetadataBuilder::new(
        id,
        rollout_path.to_path_buf(),
        created_at,
        SessionSource::default(),
    ))
}

pub(crate) async fn extract_metadata_from_rollout(
    rollout_path: &Path,
    default_provider: &str,
    otel: Option<&OtelManager>,
) -> anyhow::Result<ExtractionOutcome> {
    let (items, _thread_id, parse_errors) =
        RolloutRecorder::load_rollout_items(rollout_path).await?;
    if items.is_empty() {
        return Err(anyhow::anyhow!(
            "empty session file: {}",
            rollout_path.display()
        ));
    }
    let builder = builder_from_items(items.as_slice(), rollout_path).ok_or_else(|| {
        anyhow::anyhow!(
            "rollout missing metadata builder: {}",
            rollout_path.display()
        )
    })?;
    let mut metadata = builder.build(default_provider);
    for item in &items {
        apply_rollout_item(&mut metadata, item, default_provider);
    }
    if let Some(updated_at) = file_modified_time_utc(rollout_path).await {
        metadata.updated_at = updated_at;
    }
    if parse_errors > 0
        && let Some(otel) = otel
    {
        otel.counter(
            DB_ERROR_METRIC,
            parse_errors as i64,
            &[("stage", "extract_metadata_from_rollout")],
        );
    }
    Ok(ExtractionOutcome {
        metadata,
        parse_errors,
    })
}

pub(crate) async fn backfill_sessions(
    runtime: &codex_state::StateRuntime,
    config: &Config,
    otel: Option<&OtelManager>,
) -> BackfillStats {
    let sessions_root = config.codex_home.join(rollout::SESSIONS_SUBDIR);
    let archived_root = config.codex_home.join(rollout::ARCHIVED_SESSIONS_SUBDIR);
    let mut rollout_paths: Vec<(PathBuf, bool)> = Vec::new();
    for (root, archived) in [(sessions_root, false), (archived_root, true)] {
        if !tokio::fs::try_exists(&root).await.unwrap_or(false) {
            continue;
        }
        match collect_rollout_paths(&root).await {
            Ok(paths) => {
                rollout_paths.extend(paths.into_iter().map(|path| (path, archived)));
            }
            Err(err) => {
                warn!(
                    "failed to collect rollout paths under {}: {err}",
                    root.display()
                );
            }
        }
    }
    rollout_paths.sort_by_key(|(path, _archived)| {
        let parsed = path
            .file_name()
            .and_then(|name| name.to_str())
            .and_then(parse_timestamp_uuid_from_filename)
            .unwrap_or((time::OffsetDateTime::UNIX_EPOCH, uuid::Uuid::nil()));
        (Reverse(parsed.0), Reverse(parsed.1))
    });
    let mut stats = BackfillStats {
        scanned: 0,
        upserted: 0,
        failed: 0,
    };
    for (path, archived) in rollout_paths {
        stats.scanned = stats.scanned.saturating_add(1);
        match extract_metadata_from_rollout(&path, config.model_provider_id.as_str(), otel).await {
            Ok(outcome) => {
                if outcome.parse_errors > 0
                    && let Some(otel) = otel
                {
                    otel.counter(
                        DB_ERROR_METRIC,
                        outcome.parse_errors as i64,
                        &[("stage", "backfill_sessions")],
                    );
                }
                let mut metadata = outcome.metadata;
                if archived && metadata.archived_at.is_none() {
                    let fallback_archived_at = metadata.updated_at;
                    metadata.archived_at = file_modified_time_utc(&path)
                        .await
                        .or(Some(fallback_archived_at));
                }
                if let Err(err) = runtime.upsert_thread(&metadata).await {
                    stats.failed = stats.failed.saturating_add(1);
                    warn!("failed to upsert rollout {}: {err}", path.display());
                } else {
                    stats.upserted = stats.upserted.saturating_add(1);
                }
            }
            Err(err) => {
                stats.failed = stats.failed.saturating_add(1);
                warn!("failed to extract rollout {}: {err}", path.display());
            }
        }
    }
    stats
}

async fn file_modified_time_utc(path: &Path) -> Option<DateTime<Utc>> {
    let modified = tokio::fs::metadata(path).await.ok()?.modified().ok()?;
    let updated_at: DateTime<Utc> = modified.into();
    updated_at.with_nanosecond(0)
}

fn parse_timestamp_to_utc(ts: &str) -> Option<DateTime<Utc>> {
    const FILENAME_TS_FORMAT: &str = "%Y-%m-%dT%H-%M-%S";
    if let Ok(naive) = NaiveDateTime::parse_from_str(ts, FILENAME_TS_FORMAT) {
        let dt = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
        return dt.with_nanosecond(0);
    }
    if let Ok(dt) = DateTime::parse_from_rfc3339(ts) {
        return dt.with_timezone(&Utc).with_nanosecond(0);
    }
    None
}

async fn collect_rollout_paths(root: &Path) -> std::io::Result<Vec<PathBuf>> {
    let mut stack = vec![root.to_path_buf()];
    let mut paths = Vec::new();
    while let Some(dir) = stack.pop() {
        let mut read_dir = match tokio::fs::read_dir(&dir).await {
            Ok(read_dir) => read_dir,
            Err(err) => {
                warn!("failed to read directory {}: {err}", dir.display());
                continue;
            }
        };
        while let Some(entry) = read_dir.next_entry().await? {
            let path = entry.path();
            let file_type = entry.file_type().await?;
            if file_type.is_dir() {
                stack.push(path);
                continue;
            }
            if !file_type.is_file() {
                continue;
            }
            let file_name = entry.file_name();
            let Some(name) = file_name.to_str() else {
                continue;
            };
            if name.starts_with(ROLLOUT_PREFIX) && name.ends_with(ROLLOUT_SUFFIX) {
                paths.push(path);
            }
        }
    }
    Ok(paths)
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::DateTime;
    use chrono::NaiveDateTime;
    use chrono::Timelike;
    use chrono::Utc;
    use codex_protocol::ThreadId;
    use codex_protocol::protocol::CompactedItem;
    use codex_protocol::protocol::RolloutItem;
    use codex_protocol::protocol::RolloutLine;
    use codex_protocol::protocol::SessionMeta;
    use codex_protocol::protocol::SessionMetaLine;
    use codex_protocol::protocol::SessionSource;
    use codex_state::ThreadMetadataBuilder;
    use pretty_assertions::assert_eq;
    use std::fs::File;
    use std::io::Write;
    use tempfile::tempdir;
    use uuid::Uuid;

    #[tokio::test]
    async fn extract_metadata_from_rollout_uses_session_meta() {
        let dir = tempdir().expect("tempdir");
        let uuid = Uuid::new_v4();
        let id = ThreadId::from_string(&uuid.to_string()).expect("thread id");
        let path = dir
            .path()
            .join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));

        let session_meta = SessionMeta {
            id,
            forked_from_id: None,
            timestamp: "2026-01-27T12:34:56Z".to_string(),
            cwd: dir.path().to_path_buf(),
            originator: "cli".to_string(),
            cli_version: "0.0.0".to_string(),
            source: SessionSource::default(),
            model_provider: Some("openai".to_string()),
            base_instructions: None,
        };
        let session_meta_line = SessionMetaLine {
            meta: session_meta,
            git: None,
        };
        let rollout_line = RolloutLine {
            timestamp: "2026-01-27T12:34:56Z".to_string(),
            item: RolloutItem::SessionMeta(session_meta_line.clone()),
        };
        let json = serde_json::to_string(&rollout_line).expect("rollout json");
        let mut file = File::create(&path).expect("create rollout");
        writeln!(file, "{json}").expect("write rollout");

        let outcome = extract_metadata_from_rollout(&path, "openai", None)
            .await
            .expect("extract");

        let builder =
            builder_from_session_meta(&session_meta_line, path.as_path()).expect("builder");
        let mut expected = builder.build("openai");
        apply_rollout_item(&mut expected, &rollout_line.item, "openai");
        expected.updated_at = file_modified_time_utc(&path).await.expect("mtime");

        assert_eq!(outcome.metadata, expected);
        assert_eq!(outcome.parse_errors, 0);
    }

    #[test]
    fn builder_from_items_falls_back_to_filename() {
        let dir = tempdir().expect("tempdir");
        let uuid = Uuid::new_v4();
        let path = dir
            .path()
            .join(format!("rollout-2026-01-27T12-34-56-{uuid}.jsonl"));
        let items = vec![RolloutItem::Compacted(CompactedItem {
            message: "noop".to_string(),
            replacement_history: None,
        })];

        let builder = builder_from_items(items.as_slice(), path.as_path()).expect("builder");
        let naive = NaiveDateTime::parse_from_str("2026-01-27T12-34-56", "%Y-%m-%dT%H-%M-%S")
            .expect("timestamp");
        let created_at = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
            .with_nanosecond(0)
            .expect("nanosecond");
        let expected = ThreadMetadataBuilder::new(
            ThreadId::from_string(&uuid.to_string()).expect("thread id"),
            path,
            created_at,
            SessionSource::default(),
        );

        assert_eq!(builder, expected);
    }
}

@@ -9,6 +9,7 @@ pub const INTERACTIVE_SESSION_SOURCES: &[SessionSource] =

pub(crate) mod error;
pub mod list;
pub(crate) mod metadata;
pub(crate) mod policy;
pub mod recorder;
pub(crate) mod truncation;

@@ -42,8 +42,7 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
        | EventMsg::AgentReasoning(_)
        | EventMsg::AgentReasoningRawContent(_)
        | EventMsg::TokenCount(_)
        | EventMsg::ContextCompactionStarted(_)
        | EventMsg::ContextCompactionEnded(_)
        | EventMsg::ContextCompacted(_)
        | EventMsg::EnteredReviewMode(_)
        | EventMsg::ExitedReviewMode(_)
        | EventMsg::ThreadRolledBack(_)

@@ -28,11 +28,14 @@ use super::list::ThreadSortKey;
|
||||
use super::list::ThreadsPage;
|
||||
use super::list::get_threads;
|
||||
use super::list::get_threads_in_root;
|
||||
use super::metadata;
|
||||
use super::policy::is_persisted_response_item;
|
||||
use crate::config::Config;
|
||||
use crate::default_client::originator;
|
||||
use crate::git_info::collect_git_info;
|
||||
use crate::path_utils;
|
||||
use crate::state_db;
|
||||
use crate::state_db::StateDbHandle;
|
||||
use codex_protocol::protocol::InitialHistory;
|
||||
use codex_protocol::protocol::ResumedHistory;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
@@ -40,6 +43,7 @@ use codex_protocol::protocol::RolloutLine;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
|
||||
/// Records all [`ResponseItem`]s for a session and flushes them to disk after
|
||||
/// every update.
|
||||
@@ -54,6 +58,7 @@ use codex_protocol::protocol::SessionSource;
|
||||
pub struct RolloutRecorder {
|
||||
tx: Sender<RolloutCmd>,
|
||||
pub(crate) rollout_path: PathBuf,
|
||||
state_db: Option<StateDbHandle>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -111,7 +116,8 @@ impl RolloutRecorder {
|
||||
model_providers: Option<&[String]>,
|
||||
default_provider: &str,
|
||||
) -> std::io::Result<ThreadsPage> {
|
||||
get_threads(
|
||||
let stage = "list_threads";
|
||||
let page = get_threads(
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
@@ -120,7 +126,34 @@ impl RolloutRecorder {
|
||||
model_providers,
|
||||
default_provider,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// TODO(jif): drop after sqlite migration phase 1
|
||||
let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
|
||||
if let Some(db_ids) = state_db::list_thread_ids_db(
|
||||
state_db_ctx.as_deref(),
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
sort_key,
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
false,
|
||||
stage,
|
||||
)
|
||||
.await
|
||||
{
|
||||
if page.items.len() != db_ids.len() {
|
||||
state_db::record_discrepancy(stage, "bad_len");
|
||||
return Ok(page);
|
||||
}
|
||||
for (id, item) in db_ids.iter().zip(page.items.iter()) {
|
||||
if !item.path.display().to_string().contains(&id.to_string()) {
|
||||
state_db::record_discrepancy(stage, "bad_id");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(page)
|
||||
}
|
||||
|
||||
/// List archived threads (rollout files) under the archived sessions directory.
|
||||
@@ -133,8 +166,9 @@ impl RolloutRecorder {
|
||||
model_providers: Option<&[String]>,
|
||||
default_provider: &str,
|
||||
) -> std::io::Result<ThreadsPage> {
|
||||
let stage = "list_archived_threads";
|
||||
let root = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
get_threads_in_root(
|
||||
let page = get_threads_in_root(
|
||||
root,
|
||||
page_size,
|
||||
cursor,
|
||||
@@ -146,7 +180,34 @@ impl RolloutRecorder {
|
||||
layout: ThreadListLayout::Flat,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
// TODO(jif): drop after sqlite migration phase 1
|
||||
let state_db_ctx = state_db::open_if_present(codex_home, default_provider).await;
|
||||
if let Some(db_ids) = state_db::list_thread_ids_db(
|
||||
state_db_ctx.as_deref(),
|
||||
codex_home,
|
||||
page_size,
|
||||
cursor,
|
||||
sort_key,
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
true,
|
||||
stage,
|
||||
)
|
||||
.await
|
||||
{
|
||||
if page.items.len() != db_ids.len() {
|
||||
state_db::record_discrepancy(stage, "bad_len");
|
||||
return Ok(page);
|
||||
}
|
||||
for (id, item) in db_ids.iter().zip(page.items.iter()) {
|
||||
if !item.path.display().to_string().contains(&id.to_string()) {
|
||||
state_db::record_discrepancy(stage, "bad_id");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(page)
|
||||
}
|
||||
|
||||
/// Find the newest recorded thread path, optionally filtering to a matching cwd.
|
||||
@@ -186,7 +247,12 @@ impl RolloutRecorder {
|
||||
/// Attempt to create a new [`RolloutRecorder`]. If the sessions directory
|
||||
/// cannot be created or the rollout file cannot be opened we return the
|
||||
/// error so the caller can decide whether to disable persistence.
|
||||
pub async fn new(config: &Config, params: RolloutRecorderParams) -> std::io::Result<Self> {
|
||||
pub async fn new(
|
||||
config: &Config,
|
||||
params: RolloutRecorderParams,
|
||||
state_db_ctx: Option<StateDbHandle>,
|
||||
state_builder: Option<ThreadMetadataBuilder>,
|
||||
) -> std::io::Result<Self> {
|
||||
let (file, rollout_path, meta) = match params {
RolloutRecorderParams::Create {
conversation_id,
@@ -246,9 +312,30 @@ impl RolloutRecorder {
// Spawn a Tokio task that owns the file handle and performs async
// writes. Using `tokio::fs::File` keeps everything on the async I/O
// driver instead of blocking the runtime.
tokio::task::spawn(rollout_writer(file, rx, meta, cwd));
tokio::task::spawn(rollout_writer(
file,
rx,
meta,
cwd,
rollout_path.clone(),
state_db_ctx.clone(),
state_builder,
config.model_provider_id.clone(),
));

Ok(Self { tx, rollout_path })
Ok(Self {
tx,
rollout_path,
state_db: state_db_ctx,
})
}

pub fn rollout_path(&self) -> &Path {
self.rollout_path.as_path()
}

pub fn state_db(&self) -> Option<StateDbHandle> {
self.state_db.clone()
}

pub(crate) async fn record_items(&self, items: &[RolloutItem]) -> std::io::Result<()> {
@@ -281,7 +368,9 @@ impl RolloutRecorder {
.map_err(|e| IoError::other(format!("failed waiting for rollout flush: {e}")))
}

pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
pub(crate) async fn load_rollout_items(
path: &Path,
) -> std::io::Result<(Vec<RolloutItem>, Option<ThreadId>, usize)> {
info!("Resuming rollout from {path:?}");
let text = tokio::fs::read_to_string(path).await?;
if text.trim().is_empty() {
@@ -290,6 +379,7 @@ impl RolloutRecorder {

let mut items: Vec<RolloutItem> = Vec::new();
let mut thread_id: Option<ThreadId> = None;
let mut parse_errors = 0usize;
for line in text.lines() {
if line.trim().is_empty() {
continue;
@@ -298,6 +388,7 @@ impl RolloutRecorder {
Ok(v) => v,
Err(e) => {
warn!("failed to parse line as JSON: {line:?}, error: {e}");
parse_errors = parse_errors.saturating_add(1);
continue;
}
};
@@ -328,15 +419,22 @@ impl RolloutRecorder {
},
Err(e) => {
warn!("failed to parse rollout line: {v:?}, error: {e}");
parse_errors = parse_errors.saturating_add(1);
}
}
}

info!(
"Resumed rollout with {} items, thread ID: {:?}",
"Resumed rollout with {} items, thread ID: {:?}, parse errors: {}",
items.len(),
thread_id
thread_id,
parse_errors,
);
Ok((items, thread_id, parse_errors))
}

pub async fn get_rollout_history(path: &Path) -> std::io::Result<InitialHistory> {
let (items, thread_id, _parse_errors) = Self::load_rollout_items(path).await?;
let conversation_id = thread_id
.ok_or_else(|| IoError::other("failed to parse thread ID from rollout file"))?;

@@ -417,13 +515,21 @@ fn create_log_file(config: &Config, conversation_id: ThreadId) -> std::io::Resul
})
}

#[allow(clippy::too_many_arguments)]
async fn rollout_writer(
file: tokio::fs::File,
mut rx: mpsc::Receiver<RolloutCmd>,
mut meta: Option<SessionMeta>,
cwd: std::path::PathBuf,
rollout_path: PathBuf,
state_db_ctx: Option<StateDbHandle>,
mut state_builder: Option<ThreadMetadataBuilder>,
default_provider: String,
) -> std::io::Result<()> {
let mut writer = JsonlWriter { file };
if let Some(builder) = state_builder.as_mut() {
builder.rollout_path = rollout_path.clone();
}

// If we have a meta, collect git info asynchronously and write meta first
if let Some(session_meta) = meta.take() {
@@ -432,22 +538,50 @@ async fn rollout_writer(
meta: session_meta,
git: git_info,
};
if state_db_ctx.is_some() {
state_builder =
metadata::builder_from_session_meta(&session_meta_line, rollout_path.as_path());
}

// Write the SessionMeta as the first item in the file, wrapped in a rollout line
writer
.write_rollout_item(RolloutItem::SessionMeta(session_meta_line))
.await?;
let rollout_item = RolloutItem::SessionMeta(session_meta_line);
writer.write_rollout_item(&rollout_item).await?;
state_db::reconcile_rollout(
state_db_ctx.as_deref(),
rollout_path.as_path(),
default_provider.as_str(),
state_builder.as_ref(),
std::slice::from_ref(&rollout_item),
)
.await;
}

// Process rollout commands
while let Some(cmd) = rx.recv().await {
match cmd {
RolloutCmd::AddItems(items) => {
let mut persisted_items = Vec::new();
for item in items {
if is_persisted_response_item(&item) {
writer.write_rollout_item(item).await?;
writer.write_rollout_item(&item).await?;
persisted_items.push(item);
}
}
if persisted_items.is_empty() {
continue;
}
if let Some(builder) = state_builder.as_mut() {
builder.rollout_path = rollout_path.clone();
}
state_db::apply_rollout_items(
state_db_ctx.as_deref(),
rollout_path.as_path(),
default_provider.as_str(),
state_builder.as_ref(),
persisted_items.as_slice(),
"rollout_writer",
)
.await;
}
RolloutCmd::Flush { ack } => {
// Ensure underlying file is flushed and then ack.
@@ -470,8 +604,15 @@ struct JsonlWriter {
file: tokio::fs::File,
}

#[derive(serde::Serialize)]
struct RolloutLineRef<'a> {
timestamp: String,
#[serde(flatten)]
item: &'a RolloutItem,
}

impl JsonlWriter {
async fn write_rollout_item(&mut self, rollout_item: RolloutItem) -> std::io::Result<()> {
async fn write_rollout_item(&mut self, rollout_item: &RolloutItem) -> std::io::Result<()> {
let timestamp_format: &[FormatItem] = format_description!(
"[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:3]Z"
);
@@ -479,7 +620,7 @@ impl JsonlWriter {
.format(timestamp_format)
.map_err(|e| IoError::other(format!("failed to format timestamp: {e}")))?;

let line = RolloutLine {
let line = RolloutLineRef {
timestamp,
item: rollout_item,
};

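// Illustrative sketch, not part of the diff above: why `RolloutLineRef`
// borrows its item. Serializing through `&Item` with `#[serde(flatten)]`
// writes the same JSONL line an owned wrapper would, without cloning the
// item first. `Item` and `LineRef` are stand-in types, not the real
// rollout structs.
use serde::Serialize;

#[derive(Serialize)]
struct Item {
    kind: String,
    payload: String,
}

#[derive(Serialize)]
struct LineRef<'a> {
    timestamp: String,
    #[serde(flatten)]
    item: &'a Item,
}

fn main() -> serde_json::Result<()> {
    let item = Item { kind: "session_meta".into(), payload: "hello".into() };
    let line = LineRef { timestamp: "2026-01-27T12:34:56.000Z".into(), item: &item };
    println!("{}", serde_json::to_string(&line)?);
    // `item` is still usable here because serialization only borrowed it.
    drop(item);
    Ok(())
}
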
@@ -2,7 +2,6 @@ use std::collections::HashSet;
use std::path::PathBuf;

use crate::instructions::SkillInstructions;
use crate::skills::SkillLoadOutcome;
use crate::skills::SkillMetadata;
use codex_otel::OtelManager;
use codex_protocol::models::ResponseItem;
@@ -16,20 +15,9 @@ pub(crate) struct SkillInjections {
}

pub(crate) async fn build_skill_injections(
inputs: &[UserInput],
skills: Option<&SkillLoadOutcome>,
mentioned_skills: &[SkillMetadata],
otel: Option<&OtelManager>,
) -> SkillInjections {
if inputs.is_empty() {
return SkillInjections::default();
}

let Some(outcome) = skills else {
return SkillInjections::default();
};

let mentioned_skills =
collect_explicit_skill_mentions(inputs, &outcome.skills, &outcome.disabled_paths);
if mentioned_skills.is_empty() {
return SkillInjections::default();
}
@@ -42,15 +30,15 @@ pub(crate) async fn build_skill_injections(
for skill in mentioned_skills {
match fs::read_to_string(&skill.path).await {
Ok(contents) => {
emit_skill_injected_metric(otel, &skill, "ok");
emit_skill_injected_metric(otel, skill, "ok");
result.items.push(ResponseItem::from(SkillInstructions {
name: skill.name,
name: skill.name.clone(),
path: skill.path.to_string_lossy().into_owned(),
contents,
}));
}
Err(err) => {
emit_skill_injected_metric(otel, &skill, "error");
emit_skill_injected_metric(otel, skill, "error");
let message = format!(
"Failed to load skill {name} at {path}: {err:#}",
name = skill.name,
@@ -76,23 +64,488 @@ fn emit_skill_injected_metric(otel: Option<&OtelManager>, skill: &SkillMetadata,
);
}

fn collect_explicit_skill_mentions(
/// Collect explicitly mentioned skills from `$name` text mentions.
///
/// Text inputs are scanned once to extract `$skill-name` tokens, then we iterate `skills`
/// in their existing order to preserve prior ordering semantics.
///
/// Complexity: `O(S + T + N_t * S)` time, `O(S)` space, where:
/// `S` = number of skills, `T` = total text length, `N_t` = number of text inputs.
pub(crate) fn collect_explicit_skill_mentions(
inputs: &[UserInput],
skills: &[SkillMetadata],
disabled_paths: &HashSet<PathBuf>,
) -> Vec<SkillMetadata> {
let mut selected: Vec<SkillMetadata> = Vec::new();
let mut seen: HashSet<String> = HashSet::new();
let mut seen_names: HashSet<String> = HashSet::new();
let mut seen_paths: HashSet<PathBuf> = HashSet::new();

for input in inputs {
if let UserInput::Skill { name, path } = input
&& seen.insert(name.clone())
&& let Some(skill) = skills.iter().find(|s| s.name == *name && s.path == *path)
&& !disabled_paths.contains(&skill.path)
{
selected.push(skill.clone());
if let UserInput::Text { text, .. } = input {
let mentioned_names = extract_skill_mentions(text);
select_skills_from_mentions(
skills,
disabled_paths,
&mentioned_names,
&mut seen_names,
&mut seen_paths,
&mut selected,
);
}
}

selected
}

struct SkillMentions<'a> {
names: HashSet<&'a str>,
paths: HashSet<&'a str>,
}

impl<'a> SkillMentions<'a> {
fn is_empty(&self) -> bool {
self.names.is_empty() && self.paths.is_empty()
}
}

/// Extract `$skill-name` mentions from a single text input.
///
/// Supports explicit resource links in the form `[$skill-name](resource path)`. When a
/// resource path is present, it is captured for exact path matching while also tracking
/// the name for fallback matching.
fn extract_skill_mentions(text: &str) -> SkillMentions<'_> {
let text_bytes = text.as_bytes();
let mut mentioned_names: HashSet<&str> = HashSet::new();
let mut mentioned_paths: HashSet<&str> = HashSet::new();

let mut index = 0;
while index < text_bytes.len() {
let byte = text_bytes[index];
if byte == b'['
&& let Some((name, path, end_index)) =
parse_linked_skill_mention(text, text_bytes, index)
{
if !is_common_env_var(name) {
mentioned_names.insert(name);
mentioned_paths.insert(path);
}
index = end_index;
continue;
}

if byte != b'$' {
index += 1;
continue;
}

let name_start = index + 1;
let Some(first_name_byte) = text_bytes.get(name_start) else {
index += 1;
continue;
};
if !is_skill_name_char(*first_name_byte) {
index += 1;
continue;
}

let mut name_end = name_start + 1;
while let Some(next_byte) = text_bytes.get(name_end)
&& is_skill_name_char(*next_byte)
{
name_end += 1;
}

let name = &text[name_start..name_end];
if !is_common_env_var(name) {
mentioned_names.insert(name);
}
index = name_end;
}

SkillMentions {
names: mentioned_names,
paths: mentioned_paths,
}
}

/// Select mentioned skills while preserving the order of `skills`.
fn select_skills_from_mentions(
skills: &[SkillMetadata],
disabled_paths: &HashSet<PathBuf>,
mentions: &SkillMentions<'_>,
seen_names: &mut HashSet<String>,
seen_paths: &mut HashSet<PathBuf>,
selected: &mut Vec<SkillMetadata>,
) {
if mentions.is_empty() {
return;
}

for skill in skills {
if disabled_paths.contains(&skill.path) || seen_paths.contains(&skill.path) {
continue;
}

let path_str = skill.path.to_string_lossy();
if mentions.paths.contains(path_str.as_ref()) {
seen_paths.insert(skill.path.clone());
seen_names.insert(skill.name.clone());
selected.push(skill.clone());
}
}

for skill in skills {
if disabled_paths.contains(&skill.path) || seen_paths.contains(&skill.path) {
continue;
}

if mentions.names.contains(skill.name.as_str()) && seen_names.insert(skill.name.clone()) {
seen_paths.insert(skill.path.clone());
selected.push(skill.clone());
}
}
}

fn parse_linked_skill_mention<'a>(
text: &'a str,
text_bytes: &[u8],
start: usize,
) -> Option<(&'a str, &'a str, usize)> {
let dollar_index = start + 1;
if text_bytes.get(dollar_index) != Some(&b'$') {
return None;
}

let name_start = dollar_index + 1;
let first_name_byte = text_bytes.get(name_start)?;
if !is_skill_name_char(*first_name_byte) {
return None;
}

let mut name_end = name_start + 1;
while let Some(next_byte) = text_bytes.get(name_end)
&& is_skill_name_char(*next_byte)
{
name_end += 1;
}

if text_bytes.get(name_end) != Some(&b']') {
return None;
}

let mut path_start = name_end + 1;
while let Some(next_byte) = text_bytes.get(path_start)
&& next_byte.is_ascii_whitespace()
{
path_start += 1;
}
if text_bytes.get(path_start) != Some(&b'(') {
return None;
}

let mut path_end = path_start + 1;
while let Some(next_byte) = text_bytes.get(path_end)
&& *next_byte != b')'
{
path_end += 1;
}
if text_bytes.get(path_end) != Some(&b')') {
return None;
}

let path = text[path_start + 1..path_end].trim();
if path.is_empty() {
return None;
}

let name = &text[name_start..name_end];
Some((name, path, path_end + 1))
}

fn is_common_env_var(name: &str) -> bool {
let upper = name.to_ascii_uppercase();
matches!(
upper.as_str(),
"PATH"
| "HOME"
| "USER"
| "SHELL"
| "PWD"
| "TMPDIR"
| "TEMP"
| "TMP"
| "LANG"
| "TERM"
| "XDG_CONFIG_HOME"
)
}

#[cfg(test)]
fn text_mentions_skill(text: &str, skill_name: &str) -> bool {
if skill_name.is_empty() {
return false;
}

let text_bytes = text.as_bytes();
let skill_bytes = skill_name.as_bytes();

for (index, byte) in text_bytes.iter().copied().enumerate() {
if byte != b'$' {
continue;
}

let name_start = index + 1;
let Some(rest) = text_bytes.get(name_start..) else {
continue;
};
if !rest.starts_with(skill_bytes) {
continue;
}

let after_index = name_start + skill_bytes.len();
let after = text_bytes.get(after_index).copied();
if after.is_none_or(|b| !is_skill_name_char(b)) {
return true;
}
}

false
}

fn is_skill_name_char(byte: u8) -> bool {
matches!(byte, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'-')
}

#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
use std::collections::HashSet;

fn make_skill(name: &str, path: &str) -> SkillMetadata {
SkillMetadata {
name: name.to_string(),
description: format!("{name} skill"),
short_description: None,
interface: None,
dependencies: None,
path: PathBuf::from(path),
scope: codex_protocol::protocol::SkillScope::User,
}
}

fn set<'a>(items: &'a [&'a str]) -> HashSet<&'a str> {
items.iter().copied().collect()
}

fn assert_mentions(text: &str, expected_names: &[&str], expected_paths: &[&str]) {
let mentions = extract_skill_mentions(text);
assert_eq!(mentions.names, set(expected_names));
assert_eq!(mentions.paths, set(expected_paths));
}

#[test]
fn text_mentions_skill_requires_exact_boundary() {
assert_eq!(
true,
text_mentions_skill("use $notion-research-doc please", "notion-research-doc")
);
assert_eq!(
true,
text_mentions_skill("($notion-research-doc)", "notion-research-doc")
);
assert_eq!(
true,
text_mentions_skill("$notion-research-doc.", "notion-research-doc")
);
assert_eq!(
false,
text_mentions_skill("$notion-research-docs", "notion-research-doc")
);
assert_eq!(
false,
text_mentions_skill("$notion-research-doc_extra", "notion-research-doc")
);
}

#[test]
fn text_mentions_skill_handles_end_boundary_and_near_misses() {
assert_eq!(true, text_mentions_skill("$alpha-skill", "alpha-skill"));
assert_eq!(false, text_mentions_skill("$alpha-skillx", "alpha-skill"));
assert_eq!(
true,
text_mentions_skill("$alpha-skillx and later $alpha-skill ", "alpha-skill")
);
}

#[test]
fn text_mentions_skill_handles_many_dollars_without_looping() {
let prefix = "$".repeat(256);
let text = format!("{prefix} not-a-mention");
assert_eq!(false, text_mentions_skill(&text, "alpha-skill"));
}

#[test]
fn extract_skill_mentions_handles_plain_and_linked_mentions() {
assert_mentions(
"use $alpha and [$beta](/tmp/beta)",
&["alpha", "beta"],
&["/tmp/beta"],
);
}

#[test]
fn extract_skill_mentions_skips_common_env_vars() {
assert_mentions("use $PATH and $alpha", &["alpha"], &[]);
assert_mentions("use [$HOME](/tmp/skill)", &[], &[]);
assert_mentions("use $XDG_CONFIG_HOME and $beta", &["beta"], &[]);
}

#[test]
fn extract_skill_mentions_requires_link_syntax() {
assert_mentions("[beta](/tmp/beta)", &[], &[]);
assert_mentions("[$beta] /tmp/beta", &["beta"], &[]);
assert_mentions("[$beta]()", &["beta"], &[]);
}

#[test]
fn extract_skill_mentions_trims_linked_paths_and_allows_spacing() {
assert_mentions("use [$beta] ( /tmp/beta )", &["beta"], &["/tmp/beta"]);
}

#[test]
fn extract_skill_mentions_stops_at_non_name_chars() {
assert_mentions(
"use $alpha.skill and $beta_extra",
&["alpha", "beta_extra"],
&[],
);
}

#[test]
fn collect_explicit_skill_mentions_text_respects_skill_order() {
let alpha = make_skill("alpha-skill", "/tmp/alpha");
let beta = make_skill("beta-skill", "/tmp/beta");
let skills = vec![beta.clone(), alpha.clone()];
let inputs = vec![UserInput::Text {
text: "first $alpha-skill then $beta-skill".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

// Text scanning should not change the previous selection ordering semantics.
assert_eq!(selected, vec![beta, alpha]);
}

#[test]
fn collect_explicit_skill_mentions_ignores_structured_inputs() {
let alpha = make_skill("alpha-skill", "/tmp/alpha");
let beta = make_skill("beta-skill", "/tmp/beta");
let skills = vec![alpha.clone(), beta];
let inputs = vec![
UserInput::Text {
text: "please run $alpha-skill".to_string(),
text_elements: Vec::new(),
},
UserInput::Skill {
name: "beta-skill".to_string(),
path: PathBuf::from("/tmp/beta"),
},
];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![alpha]);
}

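// Illustrative sketch, not part of the diff: the single-pass `$name` scan
// that `extract_skill_mentions` performs, reduced to names only. Names are
// runs of [A-Za-z0-9_-] immediately after `$`; everything else is skipped.
fn is_skill_name_char(byte: u8) -> bool {
    matches!(byte, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'-')
}

fn scan_mentions(text: &str) -> Vec<&str> {
    let bytes = text.as_bytes();
    let mut names = Vec::new();
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] != b'$' {
            i += 1;
            continue;
        }
        let start = i + 1;
        let mut end = start;
        while end < bytes.len() && is_skill_name_char(bytes[end]) {
            end += 1;
        }
        if end > start {
            names.push(&text[start..end]);
        }
        // Advance past the name, or past a bare `$` with no name after it.
        i = end.max(i + 1);
    }
    names
}

fn main() {
    // `$PATH` would additionally be filtered by the env-var denylist in the real code.
    assert_eq!(scan_mentions("use $alpha-skill then $beta"), vec!["alpha-skill", "beta"]);
}
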
#[test]
fn collect_explicit_skill_mentions_dedupes_by_path() {
let alpha = make_skill("alpha-skill", "/tmp/alpha");
let skills = vec![alpha.clone()];
let inputs = vec![UserInput::Text {
text: "use $alpha-skill and [$alpha-skill](/tmp/alpha)".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![alpha]);
}

#[test]
fn collect_explicit_skill_mentions_dedupes_by_name() {
let alpha = make_skill("demo-skill", "/tmp/alpha");
let beta = make_skill("demo-skill", "/tmp/beta");
let skills = vec![alpha.clone(), beta];
let inputs = vec![UserInput::Text {
text: "use $demo-skill and again $demo-skill".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![alpha]);
}

#[test]
fn collect_explicit_skill_mentions_prefers_linked_path_over_name() {
let alpha = make_skill("demo-skill", "/tmp/alpha");
let beta = make_skill("demo-skill", "/tmp/beta");
let skills = vec![alpha, beta.clone()];
let inputs = vec![UserInput::Text {
text: "[$demo-skill](/tmp/beta)".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![beta]);
}

#[test]
fn collect_explicit_skill_mentions_falls_back_when_linked_path_disabled() {
let alpha = make_skill("demo-skill", "/tmp/alpha");
let beta = make_skill("demo-skill", "/tmp/beta");
let skills = vec![alpha, beta.clone()];
let inputs = vec![UserInput::Text {
text: "[$demo-skill](/tmp/alpha)".to_string(),
text_elements: Vec::new(),
}];
let disabled = HashSet::from([PathBuf::from("/tmp/alpha")]);

let selected = collect_explicit_skill_mentions(&inputs, &skills, &disabled);

assert_eq!(selected, vec![beta]);
}

#[test]
fn collect_explicit_skill_mentions_prefers_resource_path() {
let alpha = make_skill("demo-skill", "/tmp/alpha");
let beta = make_skill("demo-skill", "/tmp/beta");
let skills = vec![alpha, beta.clone()];
let inputs = vec![UserInput::Text {
text: "use $demo-skill and [$demo-skill](/tmp/beta)".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![beta]);
}

#[test]
fn collect_explicit_skill_mentions_falls_back_to_name_when_path_missing() {
let alpha = make_skill("demo-skill", "/tmp/alpha");
let beta = make_skill("demo-skill", "/tmp/beta");
let skills = vec![alpha.clone(), beta];
let inputs = vec![UserInput::Text {
text: "[$demo-skill](/tmp/missing)".to_string(),
text_elements: Vec::new(),
}];

let selected = collect_explicit_skill_mentions(&inputs, &skills, &HashSet::new());

assert_eq!(selected, vec![alpha]);
}
}

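// Illustrative sketch, not part of the diff: the `[$name](path)` link form
// the tests above exercise, reduced to a whole-string parse for clarity.
// The real `parse_linked_skill_mention` scans at any offset and also
// tolerates whitespace between `]` and `(`.
fn parse_linked_mention(text: &str) -> Option<(&str, &str)> {
    let rest = text.strip_prefix("[$")?;
    let (name, rest) = rest.split_once(']')?;
    let rest = rest.trim_start();
    let path = rest.strip_prefix('(')?.strip_suffix(')')?.trim();
    if name.is_empty() || path.is_empty() {
        return None;
    }
    Some((name, path))
}

fn main() {
    // A path match wins over a bare-name match when duplicate skill names exist.
    assert_eq!(
        parse_linked_mention("[$demo-skill](/tmp/beta)"),
        Some(("demo-skill", "/tmp/beta"))
    );
}
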
@@ -1,10 +1,12 @@
use crate::config::Config;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::skills::model::SkillDependencies;
use crate::skills::model::SkillError;
use crate::skills::model::SkillInterface;
use crate::skills::model::SkillLoadOutcome;
use crate::skills::model::SkillMetadata;
use crate::skills::model::SkillToolDependency;
use crate::skills::system::system_cache_root_dir;
use codex_app_server_protocol::ConfigLayerSource;
use codex_protocol::protocol::SkillScope;
@@ -38,6 +40,8 @@ struct SkillFrontmatterMetadata {
struct SkillMetadataFile {
#[serde(default)]
interface: Option<Interface>,
#[serde(default)]
dependencies: Option<Dependencies>,
}

#[derive(Debug, Default, Deserialize)]
@@ -50,6 +54,23 @@ struct Interface {
default_prompt: Option<String>,
}

#[derive(Debug, Default, Deserialize)]
struct Dependencies {
#[serde(default)]
tools: Vec<DependencyTool>,
}

#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
#[serde(rename = "type")]
kind: Option<String>,
value: Option<String>,
description: Option<String>,
transport: Option<String>,
command: Option<String>,
url: Option<String>,
}

const SKILLS_FILENAME: &str = "SKILL.md";
const SKILLS_JSON_FILENAME: &str = "SKILL.json";
const SKILLS_DIR_NAME: &str = "skills";
@@ -57,6 +78,12 @@ const MAX_NAME_LEN: usize = 64;
const MAX_DESCRIPTION_LEN: usize = 1024;
const MAX_SHORT_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEFAULT_PROMPT_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_TYPE_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_TRANSPORT_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_VALUE_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_COMMAND_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_URL_LEN: usize = MAX_DESCRIPTION_LEN;
// Traversal depth from the skills root.
const MAX_SCAN_DEPTH: usize = 6;
const MAX_SKILLS_DIRS_PER_ROOT: usize = 2000;
@@ -345,7 +372,7 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
.as_deref()
.map(sanitize_single_line)
.filter(|value| !value.is_empty());
let interface = load_skill_interface(path);
let (interface, dependencies) = load_skill_metadata(path);

validate_len(&name, MAX_NAME_LEN, "name")?;
validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
@@ -364,41 +391,54 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
description,
short_description,
interface,
dependencies,
path: resolved_path,
scope,
})
}

fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
// Fail open: optional interface metadata should not block loading SKILL.md.
let skill_dir = skill_path.parent()?;
let interface_path = skill_dir.join(SKILLS_JSON_FILENAME);
if !interface_path.exists() {
return None;
fn load_skill_metadata(skill_path: &Path) -> (Option<SkillInterface>, Option<SkillDependencies>) {
// Fail open: optional metadata should not block loading SKILL.md.
let Some(skill_dir) = skill_path.parent() else {
return (None, None);
};
let metadata_path = skill_dir.join(SKILLS_JSON_FILENAME);
if !metadata_path.exists() {
return (None, None);
}

let contents = match fs::read_to_string(&interface_path) {
let contents = match fs::read_to_string(&metadata_path) {
Ok(contents) => contents,
Err(error) => {
tracing::warn!(
"ignoring {path}: failed to read SKILL.json: {error}",
path = interface_path.display()
"ignoring {path}: failed to read {label}: {error}",
path = metadata_path.display(),
label = SKILLS_JSON_FILENAME
);
return None;
return (None, None);
}
};

let parsed: SkillMetadataFile = match serde_json::from_str(&contents) {
Ok(parsed) => parsed,
Err(error) => {
tracing::warn!(
"ignoring {path}: invalid JSON: {error}",
path = interface_path.display()
"ignoring {path}: invalid {label}: {error}",
path = metadata_path.display(),
label = SKILLS_JSON_FILENAME
);
return None;
return (None, None);
}
};
let interface = parsed.interface?;

(
resolve_interface(parsed.interface, skill_dir),
resolve_dependencies(parsed.dependencies),
)
}

fn resolve_interface(interface: Option<Interface>, skill_dir: &Path) -> Option<SkillInterface> {
let interface = interface?;
let interface = SkillInterface {
display_name: resolve_str(
interface.display_name,
@@ -428,6 +468,58 @@ fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
if has_fields { Some(interface) } else { None }
}

fn resolve_dependencies(dependencies: Option<Dependencies>) -> Option<SkillDependencies> {
let dependencies = dependencies?;
let tools: Vec<SkillToolDependency> = dependencies
.tools
.into_iter()
.filter_map(resolve_dependency_tool)
.collect();
if tools.is_empty() {
None
} else {
Some(SkillDependencies { tools })
}
}

fn resolve_dependency_tool(tool: DependencyTool) -> Option<SkillToolDependency> {
let r#type = resolve_required_str(
tool.kind,
MAX_DEPENDENCY_TYPE_LEN,
"dependencies.tools.type",
)?;
let value = resolve_required_str(
tool.value,
MAX_DEPENDENCY_VALUE_LEN,
"dependencies.tools.value",
)?;
let description = resolve_str(
tool.description,
MAX_DEPENDENCY_DESCRIPTION_LEN,
"dependencies.tools.description",
);
let transport = resolve_str(
tool.transport,
MAX_DEPENDENCY_TRANSPORT_LEN,
"dependencies.tools.transport",
);
let command = resolve_str(
tool.command,
MAX_DEPENDENCY_COMMAND_LEN,
"dependencies.tools.command",
);
let url = resolve_str(tool.url, MAX_DEPENDENCY_URL_LEN, "dependencies.tools.url");

Some(SkillToolDependency {
r#type,
value,
description,
transport,
command,
url,
})
}

fn resolve_asset_path(
skill_dir: &Path,
field: &'static str,
@@ -511,6 +603,18 @@ fn resolve_str(value: Option<String>, max_len: usize, field: &'static str) -> Op
Some(value)
}

fn resolve_required_str(
value: Option<String>,
max_len: usize,
field: &'static str,
) -> Option<String> {
let Some(value) = value else {
tracing::warn!("ignoring {field}: value is missing");
return None;
};
resolve_str(Some(value), max_len, field)
}

fn resolve_color_str(value: Option<String>, field: &'static str) -> Option<String> {
let value = value?;
let value = value.trim();
@@ -755,14 +859,118 @@ mod tests {
path
}

fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
let path = skill_dir.join(SKILLS_JSON_FILENAME);
fn write_skill_metadata_at(skill_dir: &Path, filename: &str, contents: &str) -> PathBuf {
let path = skill_dir.join(filename);
fs::write(&path, contents).unwrap();
path
}

fn write_skill_interface_at(skill_dir: &Path, contents: &str) -> PathBuf {
write_skill_metadata_at(skill_dir, SKILLS_JSON_FILENAME, contents)
}

#[tokio::test]
async fn loads_skill_interface_metadata_happy_path() {
async fn loads_skill_dependencies_metadata_from_json() {
let codex_home = tempfile::tempdir().expect("tempdir");
let skill_path = write_skill(&codex_home, "demo", "dep-skill", "from json");
let skill_dir = skill_path.parent().expect("skill dir");

write_skill_metadata_at(
skill_dir,
SKILLS_JSON_FILENAME,
r#"
{
"dependencies": {
"tools": [
{
"type": "env_var",
"value": "GITHUB_TOKEN",
"description": "GitHub API token with repo scopes"
},
{
"type": "mcp",
"value": "github",
"description": "GitHub MCP server",
"transport": "streamable_http",
"url": "https://example.com/mcp"
},
{
"type": "cli",
"value": "gh",
"description": "GitHub CLI"
},
{
"type": "mcp",
"value": "local-gh",
"description": "Local GH MCP server",
"transport": "stdio",
"command": "gh-mcp"
}
]
}
}
"#,
);

let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);

assert!(
outcome.errors.is_empty(),
"unexpected errors: {:?}",
outcome.errors
);
assert_eq!(
outcome.skills,
vec![SkillMetadata {
name: "dep-skill".to_string(),
description: "from json".to_string(),
short_description: None,
interface: None,
dependencies: Some(SkillDependencies {
tools: vec![
SkillToolDependency {
r#type: "env_var".to_string(),
value: "GITHUB_TOKEN".to_string(),
description: Some("GitHub API token with repo scopes".to_string()),
transport: None,
command: None,
url: None,
},
SkillToolDependency {
r#type: "mcp".to_string(),
value: "github".to_string(),
description: Some("GitHub MCP server".to_string()),
transport: Some("streamable_http".to_string()),
command: None,
url: Some("https://example.com/mcp".to_string()),
},
SkillToolDependency {
r#type: "cli".to_string(),
value: "gh".to_string(),
description: Some("GitHub CLI".to_string()),
transport: None,
command: None,
url: None,
},
SkillToolDependency {
r#type: "mcp".to_string(),
value: "local-gh".to_string(),
description: Some("Local GH MCP server".to_string()),
transport: Some("stdio".to_string()),
command: Some("gh-mcp".to_string()),
url: None,
},
],
}),
path: normalized(&skill_path),
scope: SkillScope::User,
}]
);
}

#[tokio::test]
async fn loads_skill_interface_metadata_from_json() {
let codex_home = tempfile::tempdir().expect("tempdir");
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
let skill_dir = skill_path.parent().expect("skill dir");
@@ -806,6 +1014,7 @@ mod tests {
brand_color: Some("#3B82F6".to_string()),
default_prompt: Some("default prompt".to_string()),
}),
dependencies: None,
path: normalized(skill_path.as_path()),
scope: SkillScope::User,
}]
@@ -854,6 +1063,7 @@ mod tests {
brand_color: None,
default_prompt: None,
}),
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -892,6 +1102,7 @@ mod tests {
description: "from json".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -943,6 +1154,7 @@ mod tests {
brand_color: None,
default_prompt: None,
}),
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -982,6 +1194,7 @@ mod tests {
description: "from json".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -1024,6 +1237,7 @@ mod tests {
description: "from link".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&shared_skill_path),
scope: SkillScope::User,
}]
@@ -1082,6 +1296,7 @@ mod tests {
description: "still loads".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -1116,6 +1331,7 @@ mod tests {
description: "from link".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&shared_skill_path),
scope: SkillScope::Admin,
}]
@@ -1154,6 +1370,7 @@ mod tests {
description: "from link".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&linked_skill_path),
scope: SkillScope::Repo,
}]
@@ -1215,6 +1432,7 @@ mod tests {
description: "loads".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&within_depth_path),
scope: SkillScope::User,
}]
@@ -1240,6 +1458,7 @@ mod tests {
description: "does things carefully".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -1269,6 +1488,7 @@ mod tests {
description: "long description".to_string(),
short_description: Some("short summary".to_string()),
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::User,
}]
@@ -1379,6 +1599,7 @@ mod tests {
description: "from repo".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::Repo,
}]
@@ -1430,6 +1651,7 @@ mod tests {
description: "from nested".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&nested_skill_path),
scope: SkillScope::Repo,
},
@@ -1438,6 +1660,7 @@ mod tests {
description: "from root".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&root_skill_path),
scope: SkillScope::Repo,
},
@@ -1475,6 +1698,7 @@ mod tests {
description: "from cwd".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::Repo,
}]
@@ -1510,6 +1734,7 @@ mod tests {
description: "from repo".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::Repo,
}]
@@ -1549,6 +1774,7 @@ mod tests {
description: "from repo".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&repo_skill_path),
scope: SkillScope::Repo,
},
@@ -1557,6 +1783,7 @@ mod tests {
description: "from user".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&user_skill_path),
scope: SkillScope::User,
},
@@ -1619,6 +1846,7 @@ mod tests {
description: first_description.to_string(),
short_description: None,
interface: None,
dependencies: None,
path: first_path,
scope: SkillScope::Repo,
},
@@ -1627,6 +1855,7 @@ mod tests {
description: second_description.to_string(),
short_description: None,
interface: None,
dependencies: None,
path: second_path,
scope: SkillScope::Repo,
},
@@ -1696,6 +1925,7 @@ mod tests {
description: "from repo".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::Repo,
}]
@@ -1752,6 +1982,7 @@ mod tests {
description: "from system".to_string(),
short_description: None,
interface: None,
dependencies: None,
path: normalized(&skill_path),
scope: SkillScope::System,
}]

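// Illustrative sketch, not part of the diff: the optional SKILL.json shape
// that `load_skill_metadata` accepts, reduced to the dependency fields. The
// struct names mirror `Dependencies`/`DependencyTool` above; the parse is a
// plain serde_json deserialization that fails open on malformed input.
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct Dependencies {
    #[serde(default)]
    tools: Vec<DependencyTool>,
}

#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
    #[serde(rename = "type")]
    kind: Option<String>,
    value: Option<String>,
    description: Option<String>,
}

fn main() {
    let raw = r#"{ "tools": [ { "type": "cli", "value": "gh", "description": "GitHub CLI" } ] }"#;
    // A malformed file would be logged and ignored rather than failing the skill load.
    let deps: Dependencies = serde_json::from_str(raw).unwrap_or_default();
    assert_eq!(deps.tools.len(), 1);
}
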
@@ -7,6 +7,7 @@ pub mod system;

pub(crate) use injection::SkillInjections;
pub(crate) use injection::build_skill_injections;
pub(crate) use injection::collect_explicit_skill_mentions;
pub use loader::load_skills;
pub use manager::SkillsManager;
pub use model::SkillError;

@@ -9,6 +9,7 @@ pub struct SkillMetadata {
pub description: String,
pub short_description: Option<String>,
pub interface: Option<SkillInterface>,
pub dependencies: Option<SkillDependencies>,
pub path: PathBuf,
pub scope: SkillScope,
}
@@ -23,6 +24,21 @@ pub struct SkillInterface {
pub default_prompt: Option<String>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
pub tools: Vec<SkillToolDependency>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
pub r#type: String,
pub value: String,
pub description: Option<String>,
pub transport: Option<String>,
pub command: Option<String>,
pub url: Option<String>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
pub path: PathBuf,

@@ -7,6 +7,7 @@ use crate::exec_policy::ExecPolicyManager;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::models_manager::manager::ModelsManager;
use crate::skills::SkillsManager;
use crate::state_db::StateDbHandle;
use crate::tools::sandboxing::ApprovalStore;
use crate::unified_exec::UnifiedExecProcessManager;
use crate::user_notification::UserNotifier;
@@ -30,4 +31,5 @@ pub(crate) struct SessionServices {
pub(crate) tool_approvals: Mutex<ApprovalStore>,
pub(crate) skills_manager: Arc<SkillsManager>,
pub(crate) agent_control: AgentControl,
pub(crate) state_db: Option<StateDbHandle>,
}

@@ -1,6 +1,7 @@
//! Session-wide mutable state.

use codex_protocol::models::ResponseItem;
use std::collections::HashSet;

use crate::codex::SessionConfiguration;
use crate::context_manager::ContextManager;
@@ -15,6 +16,7 @@ pub(crate) struct SessionState {
pub(crate) history: ContextManager,
pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
pub(crate) server_reasoning_included: bool,
pub(crate) mcp_dependency_prompted: HashSet<String>,
/// Whether the session's initial context has been seeded into history.
///
/// TODO(owen): This is a temporary solution to avoid updating a thread's updated_at
@@ -31,6 +33,7 @@ impl SessionState {
history,
latest_rate_limits: None,
server_reasoning_included: false,
mcp_dependency_prompted: HashSet::new(),
initial_context_seeded: false,
}
}
@@ -98,6 +101,17 @@ impl SessionState {
pub(crate) fn server_reasoning_included(&self) -> bool {
self.server_reasoning_included
}

pub(crate) fn record_mcp_dependency_prompted<I>(&mut self, names: I)
where
I: IntoIterator<Item = String>,
{
self.mcp_dependency_prompted.extend(names);
}

pub(crate) fn mcp_dependency_prompted(&self) -> HashSet<String> {
self.mcp_dependency_prompted.clone()
}
}

// Sometimes new snapshots don't include credits or plan information.

303
codex-rs/core/src/state_db.rs
Normal file
|
||||
use crate::config::Config;
|
||||
use crate::features::Feature;
|
||||
use crate::rollout::list::Cursor;
|
||||
use crate::rollout::list::ThreadSortKey;
|
||||
use crate::rollout::metadata;
|
||||
use chrono::DateTime;
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::Timelike;
|
||||
use chrono::Utc;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::DB_METRIC_BACKFILL;
|
||||
use codex_state::STATE_DB_FILENAME;
|
||||
use codex_state::ThreadMetadataBuilder;
|
||||
use serde_json::Value;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Core-facing handle to the optional SQLite-backed state runtime.
|
||||
pub type StateDbHandle = Arc<codex_state::StateRuntime>;
|
||||
|
||||
/// Initialize the state runtime when the `sqlite` feature flag is enabled.
|
||||
pub async fn init_if_enabled(config: &Config, otel: Option<&OtelManager>) -> Option<StateDbHandle> {
|
||||
let state_path = config.codex_home.join(STATE_DB_FILENAME);
|
||||
if !config.features.enabled(Feature::Sqlite) {
|
||||
// We delete the file on best effort basis to maintain retro-compatibility in the future.
|
||||
let wal_path = state_path.with_extension("sqlite-wal");
|
||||
let shm_path = state_path.with_extension("sqlite-shm");
|
||||
for path in [state_path.as_path(), wal_path.as_path(), shm_path.as_path()] {
|
||||
tokio::fs::remove_file(path).await.ok();
|
||||
}
|
||||
return None;
|
||||
}
|
||||
let existed = tokio::fs::try_exists(&state_path).await.unwrap_or(false);
|
||||
let runtime = match codex_state::StateRuntime::init(
|
||||
config.codex_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
otel.cloned(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(runtime) => runtime,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to initialize state runtime at {}: {err}",
|
||||
config.codex_home.display()
|
||||
);
|
||||
if let Some(otel) = otel {
|
||||
otel.counter("codex.db.init", 1, &[("status", "init_error")]);
|
||||
}
|
||||
return None;
|
||||
}
|
||||
};
|
||||
if !existed {
|
||||
let stats = metadata::backfill_sessions(runtime.as_ref(), config, otel).await;
|
||||
info!(
|
||||
"state db backfill scanned={}, upserted={}, failed={}",
|
||||
stats.scanned, stats.upserted, stats.failed
|
||||
);
|
||||
if let Some(otel) = otel {
|
||||
otel.counter(
|
||||
DB_METRIC_BACKFILL,
|
||||
stats.upserted as i64,
|
||||
&[("status", "upserted")],
|
||||
);
|
||||
otel.counter(
|
||||
DB_METRIC_BACKFILL,
|
||||
stats.failed as i64,
|
||||
&[("status", "failed")],
|
||||
);
|
||||
}
|
||||
}
|
||||
Some(runtime)
|
||||
}
|
||||
|
||||
/// Open the state runtime when the SQLite file exists, without feature gating.
|
||||
///
|
||||
/// This is used for parity checks during the SQLite migration phase.
|
||||
pub async fn open_if_present(codex_home: &Path, default_provider: &str) -> Option<StateDbHandle> {
|
||||
let db_path = codex_home.join(STATE_DB_FILENAME);
|
||||
if !tokio::fs::try_exists(&db_path).await.unwrap_or(false) {
|
||||
return None;
|
||||
}
|
||||
let runtime = codex_state::StateRuntime::init(
|
||||
codex_home.to_path_buf(),
|
||||
default_provider.to_string(),
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
Some(runtime)
|
||||
}
|
||||
|
||||
fn cursor_to_anchor(cursor: Option<&Cursor>) -> Option<codex_state::Anchor> {
|
||||
let cursor = cursor?;
|
||||
let value = serde_json::to_value(cursor).ok()?;
|
||||
let cursor_str = value.as_str()?;
|
||||
let (ts_str, id_str) = cursor_str.split_once('|')?;
|
||||
if id_str.contains('|') {
|
||||
return None;
|
||||
}
|
||||
let id = Uuid::parse_str(id_str).ok()?;
|
||||
let ts = if let Ok(naive) = NaiveDateTime::parse_from_str(ts_str, "%Y-%m-%dT%H-%M-%S") {
|
||||
DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
|
||||
} else if let Ok(dt) = DateTime::parse_from_rfc3339(ts_str) {
|
||||
dt.with_timezone(&Utc)
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
.with_nanosecond(0)?;
|
||||
Some(codex_state::Anchor { ts, id })
|
||||
}
|
||||
|
||||
/// List thread ids from SQLite for parity checks without rollout scanning.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn list_thread_ids_db(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
codex_home: &Path,
|
||||
page_size: usize,
|
||||
cursor: Option<&Cursor>,
|
||||
sort_key: ThreadSortKey,
|
||||
allowed_sources: &[SessionSource],
|
||||
model_providers: Option<&[String]>,
|
||||
archived_only: bool,
|
||||
stage: &str,
|
||||
) -> Option<Vec<ThreadId>> {
|
||||
let ctx = context?;
|
||||
if ctx.codex_home() != codex_home {
|
||||
warn!(
|
||||
"state db codex_home mismatch: expected {}, got {}",
|
||||
ctx.codex_home().display(),
|
||||
codex_home.display()
|
||||
);
|
||||
}
|
||||
|
||||
let anchor = cursor_to_anchor(cursor);
|
||||
let allowed_sources: Vec<String> = allowed_sources
|
||||
.iter()
|
||||
.map(|value| match serde_json::to_value(value) {
|
||||
Ok(Value::String(s)) => s,
|
||||
Ok(other) => other.to_string(),
|
||||
Err(_) => String::new(),
|
||||
})
|
||||
.collect();
|
||||
let model_providers = model_providers.map(<[String]>::to_vec);
|
||||
match ctx
|
||||
.list_thread_ids(
|
||||
page_size,
|
||||
anchor.as_ref(),
|
||||
match sort_key {
|
||||
ThreadSortKey::CreatedAt => codex_state::SortKey::CreatedAt,
|
||||
ThreadSortKey::UpdatedAt => codex_state::SortKey::UpdatedAt,
|
||||
},
|
||||
allowed_sources.as_slice(),
|
||||
model_providers.as_deref(),
|
||||
archived_only,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(ids) => Some(ids),
|
||||
Err(err) => {
|
||||
warn!("state db list_thread_ids failed during {stage}: {err}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Look up the rollout path for a thread id using SQLite.
|
||||
pub async fn find_rollout_path_by_id(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
thread_id: ThreadId,
|
||||
archived_only: Option<bool>,
|
||||
stage: &str,
|
||||
) -> Option<PathBuf> {
|
||||
let ctx = context?;
|
||||
ctx.find_rollout_path_by_id(thread_id, archived_only)
|
||||
.await
|
||||
.unwrap_or_else(|err| {
|
||||
warn!("state db find_rollout_path_by_id failed during {stage}: {err}");
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Reconcile rollout items into SQLite, falling back to scanning the rollout file.
|
||||
pub async fn reconcile_rollout(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
rollout_path: &Path,
|
||||
default_provider: &str,
|
||||
builder: Option<&ThreadMetadataBuilder>,
|
||||
items: &[RolloutItem],
|
||||
) {
|
||||
let Some(ctx) = context else {
|
||||
return;
|
||||
};
|
||||
if builder.is_some() || !items.is_empty() {
|
||||
apply_rollout_items(
|
||||
Some(ctx),
|
||||
rollout_path,
|
||||
default_provider,
|
||||
builder,
|
||||
items,
|
||||
"reconcile_rollout",
|
||||
)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
let outcome =
|
||||
match metadata::extract_metadata_from_rollout(rollout_path, default_provider, None).await {
|
||||
Ok(outcome) => outcome,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"state db reconcile_rollout extraction failed {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
if let Err(err) = ctx.upsert_thread(&outcome.metadata).await {
|
||||
warn!(
|
||||
"state db reconcile_rollout upsert failed {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply rollout items incrementally to SQLite.
|
||||
pub async fn apply_rollout_items(
|
||||
context: Option<&codex_state::StateRuntime>,
|
||||
rollout_path: &Path,
|
||||
_default_provider: &str,
|
||||
builder: Option<&ThreadMetadataBuilder>,
|
||||
items: &[RolloutItem],
|
||||
stage: &str,
|
||||
) {
|
||||
let Some(ctx) = context else {
|
||||
return;
|
||||
};
|
||||
let mut builder = match builder {
|
||||
Some(builder) => builder.clone(),
|
||||
None => match metadata::builder_from_items(items, rollout_path) {
|
||||
Some(builder) => builder,
|
||||
None => {
|
||||
warn!(
|
||||
"state db apply_rollout_items missing builder during {stage}: {}",
|
||||
rollout_path.display()
|
||||
);
|
||||
record_discrepancy(stage, "missing_builder");
|
||||
return;
|
||||
}
|
||||
},
|
||||
};
|
||||
builder.rollout_path = rollout_path.to_path_buf();
|
||||
if let Err(err) = ctx.apply_rollout_items(&builder, items, None).await {
|
||||
warn!(
|
||||
"state db apply_rollout_items failed during {stage} for {}: {err}",
|
||||
rollout_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Record a state discrepancy metric with a stage and reason tag.
|
||||
pub fn record_discrepancy(stage: &str, reason: &str) {
|
||||
// We access the global metric because the call sites might not have access to the broader
|
||||
// OtelManager.
|
||||
if let Some(metric) = codex_otel::metrics::global() {
|
||||
let _ = metric.counter(
|
||||
"codex.db.discrepancy",
|
||||
1,
|
||||
&[("stage", stage), ("reason", reason)],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::rollout::list::parse_cursor;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn cursor_to_anchor_normalizes_timestamp_format() {
|
||||
let uuid = Uuid::new_v4();
|
||||
let ts_str = "2026-01-27T12-34-56";
|
||||
let token = format!("{ts_str}|{uuid}");
|
||||
let cursor = parse_cursor(token.as_str()).expect("cursor should parse");
|
||||
let anchor = cursor_to_anchor(Some(&cursor)).expect("anchor should parse");
|
||||
|
||||
let naive =
|
||||
NaiveDateTime::parse_from_str(ts_str, "%Y-%m-%dT%H-%M-%S").expect("ts should parse");
|
||||
let expected_ts = DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
|
||||
.with_nanosecond(0)
|
||||
.expect("nanosecond");
|
||||
|
||||
assert_eq!(anchor.id, uuid);
|
||||
assert_eq!(anchor.ts, expected_ts);
|
||||
}
|
||||
}
|
||||
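
// Illustrative sketch, not part of the diff: how `cursor_to_anchor`
// normalizes the timestamp half of a `ts|uuid` cursor token. The hyphenated
// time format comes from rollout filenames; RFC 3339 is accepted as a
// fallback, and both are truncated to whole seconds so they compare equal.
use chrono::{DateTime, NaiveDateTime, Timelike, Utc};

fn parse_cursor_ts(ts_str: &str) -> Option<DateTime<Utc>> {
    let dt = if let Ok(naive) = NaiveDateTime::parse_from_str(ts_str, "%Y-%m-%dT%H-%M-%S") {
        DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc)
    } else {
        DateTime::parse_from_rfc3339(ts_str).ok()?.with_timezone(&Utc)
    };
    // Drop sub-second precision before comparing anchors.
    dt.with_nanosecond(0)
}

fn main() {
    let a = parse_cursor_ts("2026-01-27T12-34-56").expect("hyphenated form");
    let b = parse_cursor_ts("2026-01-27T12:34:56Z").expect("rfc3339 form");
    assert_eq!(a, b);
}
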
@@ -86,7 +86,7 @@ async fn start_review_conversation(
|
||||
let mut sub_agent_config = config.as_ref().clone();
|
||||
// Carry over review-only feature restrictions so the delegate cannot
|
||||
// re-enable blocked tools (web search, view image).
|
||||
sub_agent_config.web_search_mode = WebSearchMode::Disabled;
|
||||
sub_agent_config.web_search_mode = Some(WebSearchMode::Disabled);
|
||||
|
||||
// Set explicit review rubric for the sub-agent
|
||||
sub_agent_config.base_instructions = Some(crate::REVIEW_PROMPT.to_string());
|
||||
|
||||
@@ -104,7 +104,10 @@ impl SessionTask for UserShellCommandTask {
|
||||
let exec_env = ExecEnv {
|
||||
command: exec_command.clone(),
|
||||
cwd: cwd.clone(),
|
||||
env: create_env(&turn_context.shell_environment_policy),
|
||||
env: create_env(
|
||||
&turn_context.shell_environment_policy,
|
||||
Some(session.conversation_id),
|
||||
),
|
||||
// TODO(zhao-oai): Now that we have ExecExpiration::Cancellation, we
|
||||
// should use that instead of an "arbitrarily large" timeout here.
|
||||
expiration: USER_SHELL_TIMEOUT_MS.into(),
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use async_trait::async_trait;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::models::ShellCommandToolCallParams;
|
||||
use codex_protocol::models::ShellToolCallParams;
|
||||
use std::sync::Arc;
|
||||
@@ -6,6 +7,7 @@ use std::sync::Arc;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::exec::ExecParams;
|
||||
use crate::exec_env::create_env;
|
||||
use crate::exec_policy::ExecApprovalRequest;
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::is_safe_command::is_known_safe_command;
|
||||
use crate::protocol::ExecCommandSource;
|
||||
@@ -28,16 +30,31 @@ pub struct ShellHandler;
|
||||
|
||||
pub struct ShellCommandHandler;
|
||||
|
||||
struct RunExecLikeArgs {
|
||||
tool_name: String,
|
||||
exec_params: ExecParams,
|
||||
prefix_rule: Option<Vec<String>>,
|
||||
session: Arc<crate::codex::Session>,
|
||||
turn: Arc<TurnContext>,
|
||||
tracker: crate::tools::context::SharedTurnDiffTracker,
|
||||
call_id: String,
|
||||
freeform: bool,
|
||||
}
|
||||
|
||||
impl ShellHandler {
|
||||
fn to_exec_params(params: ShellToolCallParams, turn_context: &TurnContext) -> ExecParams {
|
||||
fn to_exec_params(
|
||||
params: &ShellToolCallParams,
|
||||
turn_context: &TurnContext,
|
||||
thread_id: ThreadId,
|
||||
) -> ExecParams {
|
||||
ExecParams {
|
||||
command: params.command,
|
||||
command: params.command.clone(),
|
||||
cwd: turn_context.resolve_path(params.workdir.clone()),
|
||||
expiration: params.timeout_ms.into(),
|
||||
env: create_env(&turn_context.shell_environment_policy),
|
||||
env: create_env(&turn_context.shell_environment_policy, Some(thread_id)),
|
||||
sandbox_permissions: params.sandbox_permissions.unwrap_or_default(),
|
||||
windows_sandbox_level: turn_context.windows_sandbox_level,
|
||||
justification: params.justification,
|
||||
justification: params.justification.clone(),
|
||||
arg0: None,
|
||||
}
|
||||
}
|
||||
@@ -50,9 +67,10 @@ impl ShellCommandHandler {
|
||||
}
|
||||
|
||||
fn to_exec_params(
|
||||
params: ShellCommandToolCallParams,
|
||||
params: &ShellCommandToolCallParams,
|
||||
session: &crate::codex::Session,
|
||||
turn_context: &TurnContext,
|
||||
thread_id: ThreadId,
|
||||
) -> ExecParams {
|
||||
let shell = session.user_shell();
|
||||
let command = Self::base_command(shell.as_ref(), ¶ms.command, params.login);
|
||||
@@ -61,10 +79,10 @@ impl ShellCommandHandler {
|
||||
command,
|
||||
cwd: turn_context.resolve_path(params.workdir.clone()),
|
||||
expiration: params.timeout_ms.into(),
|
||||
env: create_env(&turn_context.shell_environment_policy),
|
||||
env: create_env(&turn_context.shell_environment_policy, Some(thread_id)),
|
||||
sandbox_permissions: params.sandbox_permissions.unwrap_or_default(),
|
||||
windows_sandbox_level: turn_context.windows_sandbox_level,
|
||||
justification: params.justification,
|
||||
justification: params.justification.clone(),
|
||||
arg0: None,
|
||||
}
|
||||
}
|
||||
@@ -108,29 +126,34 @@ impl ToolHandler for ShellHandler {
|
||||
match payload {
|
||||
ToolPayload::Function { arguments } => {
|
||||
let params: ShellToolCallParams = parse_arguments(&arguments)?;
|
||||
let exec_params = Self::to_exec_params(params, turn.as_ref());
|
||||
Self::run_exec_like(
|
||||
tool_name.as_str(),
|
||||
let prefix_rule = params.prefix_rule.clone();
|
||||
let exec_params =
|
||||
Self::to_exec_params(¶ms, turn.as_ref(), session.conversation_id);
|
||||
Self::run_exec_like(RunExecLikeArgs {
|
||||
tool_name: tool_name.clone(),
|
||||
exec_params,
|
||||
prefix_rule,
|
||||
session,
|
||||
turn,
|
||||
tracker,
|
||||
call_id,
|
||||
false,
|
||||
)
|
||||
freeform: false,
|
||||
})
|
||||
.await
|
||||
}
|
||||
ToolPayload::LocalShell { params } => {
|
||||
let exec_params = Self::to_exec_params(params, turn.as_ref());
|
||||
Self::run_exec_like(
|
||||
tool_name.as_str(),
|
||||
let exec_params =
|
||||
Self::to_exec_params(¶ms, turn.as_ref(), session.conversation_id);
|
||||
Self::run_exec_like(RunExecLikeArgs {
|
||||
tool_name: tool_name.clone(),
|
||||
exec_params,
|
||||
prefix_rule: None,
|
||||
session,
|
||||
turn,
|
||||
tracker,
|
||||
call_id,
|
||||
false,
|
||||
)
|
||||
freeform: false,
|
||||
})
|
||||
.await
|
||||
}
|
||||
_ => Err(FunctionCallError::RespondToModel(format!(
|
||||
@@ -181,30 +204,48 @@ impl ToolHandler for ShellCommandHandler {
         };

         let params: ShellCommandToolCallParams = parse_arguments(&arguments)?;
-        let exec_params = Self::to_exec_params(params, session.as_ref(), turn.as_ref());
-        ShellHandler::run_exec_like(
-            tool_name.as_str(),
+        let prefix_rule = params.prefix_rule.clone();
+        let exec_params = Self::to_exec_params(
+            &params,
+            session.as_ref(),
+            turn.as_ref(),
+            session.conversation_id,
+        );
+        ShellHandler::run_exec_like(RunExecLikeArgs {
             tool_name,
             exec_params,
+            prefix_rule,
             session,
             turn,
             tracker,
             call_id,
-            true,
-        )
+            freeform: true,
+        })
         .await
     }
 }

 impl ShellHandler {
-    async fn run_exec_like(
-        tool_name: &str,
-        exec_params: ExecParams,
-        session: Arc<crate::codex::Session>,
-        turn: Arc<TurnContext>,
-        tracker: crate::tools::context::SharedTurnDiffTracker,
-        call_id: String,
-        freeform: bool,
-    ) -> Result<ToolOutput, FunctionCallError> {
+    async fn run_exec_like(args: RunExecLikeArgs) -> Result<ToolOutput, FunctionCallError> {
+        let RunExecLikeArgs {
+            tool_name,
+            exec_params,
+            prefix_rule,
+            session,
+            turn,
+            tracker,
+            call_id,
+            freeform,
+        } = args;
+
+        let features = session.features();
+        let request_rule_enabled = features.enabled(crate::features::Feature::RequestRule);
+        let prefix_rule = if request_rule_enabled {
+            prefix_rule
+        } else {
+            None
+        };

         // Approval policy guard for explicit escalation in non-OnRequest modes.
         if exec_params
             .sandbox_permissions
@@ -214,9 +255,9 @@ impl ShellHandler {
                 codex_protocol::protocol::AskForApproval::OnRequest
             )
         {
+            let approval_policy = turn.approval_policy;
             return Err(FunctionCallError::RespondToModel(format!(
-                "approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}",
-                policy = turn.approval_policy
+                "approval policy is {approval_policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {approval_policy:?}"
             )));
         }

@@ -229,7 +270,7 @@ impl ShellHandler {
                 turn.as_ref(),
                 Some(&tracker),
                 &call_id,
-                tool_name,
+                tool_name.as_str(),
             )
             .await?
         {
@@ -246,17 +287,17 @@ impl ShellHandler {
         let event_ctx = ToolEventCtx::new(session.as_ref(), turn.as_ref(), &call_id, None);
         emitter.begin(event_ctx).await;

-        let features = session.features();
         let exec_approval_requirement = session
             .services
             .exec_policy
-            .create_exec_approval_requirement_for_command(
-                &features,
-                &exec_params.command,
-                turn.approval_policy,
-                &turn.sandbox_policy,
-                exec_params.sandbox_permissions,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &exec_params.command,
+                approval_policy: turn.approval_policy,
+                sandbox_policy: &turn.sandbox_policy,
+                sandbox_permissions: exec_params.sandbox_permissions,
+                prefix_rule,
+            })
             .await;

         let req = ShellRequest {
@@ -274,7 +315,7 @@ impl ShellHandler {
             session: session.as_ref(),
             turn: turn.as_ref(),
             call_id: call_id.clone(),
-            tool_name: tool_name.to_string(),
+            tool_name,
         };
         let out = orchestrator
             .run(&mut runtime, &req, &tool_ctx, &turn, turn.approval_policy)
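Several call sites in these hunks replace long positional argument lists with a single request struct (RunExecLikeArgs, ExecApprovalRequest). A minimal sketch of the pattern with hypothetical names, showing why it reads better at call sites and survives field additions:

    struct GreetArgs<'a> {
        name: &'a str,
        shout: bool,
    }

    fn greet(args: GreetArgs<'_>) -> String {
        let GreetArgs { name, shout } = args; // destructure once, then use plain bindings
        let s = format!("hello, {name}");
        if shout { s.to_uppercase() } else { s }
    }

    fn main() {
        // Field names at the call site document each argument; adding a field later
        // is a compile-guided change rather than a silent positional shuffle.
        println!("{}", greet(GreetArgs { name: "codex", shout: false }));
    }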
@@ -369,7 +410,10 @@ mod tests {

         let expected_command = session.user_shell().derive_exec_args(&command, true);
         let expected_cwd = turn_context.resolve_path(workdir.clone());
-        let expected_env = create_env(&turn_context.shell_environment_policy);
+        let expected_env = create_env(
+            &turn_context.shell_environment_policy,
+            Some(session.conversation_id),
+        );

         let params = ShellCommandToolCallParams {
             command,
@@ -377,10 +421,16 @@ mod tests {
             login,
             timeout_ms,
             sandbox_permissions: Some(sandbox_permissions),
+            prefix_rule: None,
             justification: justification.clone(),
         };

-        let exec_params = ShellCommandHandler::to_exec_params(params, &session, &turn_context);
+        let exec_params = ShellCommandHandler::to_exec_params(
+            &params,
+            &session,
+            &turn_context,
+            session.conversation_id,
+        );

         // ExecParams cannot derive Eq due to the CancellationToken field, so we manually compare the fields.
         assert_eq!(exec_params.command, expected_command);
@@ -43,6 +43,8 @@ struct ExecCommandArgs {
     sandbox_permissions: SandboxPermissions,
     #[serde(default)]
     justification: Option<String>,
+    #[serde(default)]
+    prefix_rule: Option<Vec<String>>,
 }

 #[derive(Debug, Deserialize)]
@@ -135,19 +137,28 @@ impl ToolHandler for UnifiedExecHandler {
             max_output_tokens,
             sandbox_permissions,
             justification,
+            prefix_rule,
             ..
         } = args;

+        let features = session.features();
+        let request_rule_enabled = features.enabled(crate::features::Feature::RequestRule);
+        let prefix_rule = if request_rule_enabled {
+            prefix_rule
+        } else {
+            None
+        };
+
         if sandbox_permissions.requires_escalated_permissions()
             && !matches!(
                 context.turn.approval_policy,
                 codex_protocol::protocol::AskForApproval::OnRequest
             )
         {
+            let approval_policy = context.turn.approval_policy;
             manager.release_process_id(&process_id).await;
             return Err(FunctionCallError::RespondToModel(format!(
-                "approval policy is {policy:?}; reject command — you cannot ask for escalated permissions if the approval policy is {policy:?}",
-                policy = context.turn.approval_policy
+                "approval policy is {approval_policy:?}; reject command — you cannot ask for escalated permissions if the approval policy is {approval_policy:?}"
             )));
         }

@@ -183,6 +194,7 @@ impl ToolHandler for UnifiedExecHandler {
                 tty,
                 sandbox_permissions,
                 justification,
+                prefix_rule,
             },
             &context,
         )

@@ -114,6 +114,7 @@ impl ToolRouter {
             workdir: exec.working_directory,
             timeout_ms: exec.timeout_ms,
             sandbox_permissions: Some(SandboxPermissions::UseDefault),
+            prefix_rule: None,
             justification: None,
         };
         Ok(Some(ToolCall {
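The prefix_rule plumbing above is gated on a feature flag: when RequestRule is off, the model-supplied value is dropped before it reaches approval logic. A minimal sketch of that gating shape, with hypothetical types standing in for the real Features machinery:

    #[derive(Default)]
    struct Features {
        request_rule: bool,
    }

    impl Features {
        fn request_rule_enabled(&self) -> bool {
            self.request_rule
        }
    }

    // Drop an optional, model-supplied field unless the feature is enabled, so
    // downstream code never has to re-check the flag.
    fn gate_prefix_rule(
        features: &Features,
        prefix_rule: Option<Vec<String>>,
    ) -> Option<Vec<String>> {
        if features.request_rule_enabled() {
            prefix_rule
        } else {
            None
        }
    }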
@@ -27,16 +27,17 @@ use std::collections::HashMap;
 pub(crate) struct ToolsConfig {
     pub shell_type: ConfigShellToolType,
     pub apply_patch_tool_type: Option<ApplyPatchToolType>,
-    pub web_search_mode: WebSearchMode,
+    pub web_search_mode: Option<WebSearchMode>,
     pub collab_tools: bool,
     pub collaboration_modes_tools: bool,
+    pub request_rule_enabled: bool,
     pub experimental_supported_tools: Vec<String>,
 }

 pub(crate) struct ToolsConfigParams<'a> {
     pub(crate) model_info: &'a ModelInfo,
     pub(crate) features: &'a Features,
-    pub(crate) web_search_mode: WebSearchMode,
+    pub(crate) web_search_mode: Option<WebSearchMode>,
 }

 impl ToolsConfig {
@@ -49,6 +50,7 @@ impl ToolsConfig {
         let include_apply_patch_tool = features.enabled(Feature::ApplyPatchFreeform);
         let include_collab_tools = features.enabled(Feature::Collab);
         let include_collaboration_modes_tools = features.enabled(Feature::CollaborationModes);
+        let request_rule_enabled = features.enabled(Feature::RequestRule);

         let shell_type = if !features.enabled(Feature::ShellTool) {
             ConfigShellToolType::Disabled
@@ -81,6 +83,7 @@ impl ToolsConfig {
             web_search_mode: *web_search_mode,
             collab_tools: include_collab_tools,
             collaboration_modes_tools: include_collaboration_modes_tools,
+            request_rule_enabled,
             experimental_supported_tools: model_info.experimental_supported_tools.clone(),
         }
     }
@@ -142,8 +145,50 @@ impl From<JsonSchema> for AdditionalProperties {
     }
 }

-fn create_exec_command_tool() -> ToolSpec {
-    let properties = BTreeMap::from([
+fn create_approval_parameters(include_prefix_rule: bool) -> BTreeMap<String, JsonSchema> {
+    let mut properties = BTreeMap::from([
+        (
+            "sandbox_permissions".to_string(),
+            JsonSchema::String {
+                description: Some(
+                    "Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\"."
+                        .to_string(),
+                ),
+            },
+        ),
+        (
+            "justification".to_string(),
+            JsonSchema::String {
+                description: Some(
+                    r#"Only set if sandbox_permissions is \"require_escalated\".
+Request approval from the user to run this command outside the sandbox.
+Phrased as a simple question that summarizes the purpose of the
+command as it relates to the task at hand - e.g. 'Do you want to
+fetch and pull the latest version of this git branch?'"#
+                        .to_string(),
+                ),
+            },
+        ),
+    ]);
+
+    if include_prefix_rule {
+        properties.insert(
+            "prefix_rule".to_string(),
+            JsonSchema::Array {
+                items: Box::new(JsonSchema::String { description: None }),
+                description: Some(
+                    r#"Only specify when sandbox_permissions is `require_escalated`.
+Suggest a prefix command pattern that will allow you to fulfill similar requests from the user in the future.
+Should be a short but reasonable prefix, e.g. [\"git\", \"pull\"] or [\"uv\", \"run\"] or [\"pytest\"]."#.to_string(),
+                ),
+            });
+    }
+
+    properties
+}
+
+fn create_exec_command_tool(include_prefix_rule: bool) -> ToolSpec {
+    let mut properties = BTreeMap::from([
         (
             "cmd".to_string(),
             JsonSchema::String {
@@ -199,25 +244,8 @@ fn create_exec_command_tool() -> ToolSpec {
                 ),
             },
         ),
-        (
-            "sandbox_permissions".to_string(),
-            JsonSchema::String {
-                description: Some(
-                    "Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\"."
-                        .to_string(),
-                ),
-            },
-        ),
-        (
-            "justification".to_string(),
-            JsonSchema::String {
-                description: Some(
-                    "Only set if sandbox_permissions is \"require_escalated\". 1-sentence explanation of why we want to run this command."
-                        .to_string(),
-                ),
-            },
-        ),
     ]);
+    properties.extend(create_approval_parameters(include_prefix_rule));

     ToolSpec::Function(ResponsesApiTool {
         name: "exec_command".to_string(),
@@ -280,8 +308,8 @@ fn create_write_stdin_tool() -> ToolSpec {
     })
 }

-fn create_shell_tool() -> ToolSpec {
-    let properties = BTreeMap::from([
+fn create_shell_tool(include_prefix_rule: bool) -> ToolSpec {
+    let mut properties = BTreeMap::from([
         (
             "command".to_string(),
             JsonSchema::Array {
@@ -301,19 +329,8 @@ fn create_shell_tool() -> ToolSpec {
                 description: Some("The timeout for the command in milliseconds".to_string()),
             },
         ),
-        (
-            "sandbox_permissions".to_string(),
-            JsonSchema::String {
-                description: Some("Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\".".to_string()),
-            },
-        ),
-        (
-            "justification".to_string(),
-            JsonSchema::String {
-                description: Some("Only set if sandbox_permissions is \"require_escalated\". 1-sentence explanation of why we want to run this command.".to_string()),
-            },
-        ),
     ]);
+    properties.extend(create_approval_parameters(include_prefix_rule));

     let description = if cfg!(windows) {
         r#"Runs a Powershell command (Windows) and returns its output. Arguments to `shell` will be passed to CreateProcessW(). Most commands should be prefixed with ["powershell.exe", "-Command"].
@@ -344,8 +361,8 @@ Examples of valid command strings:
     })
 }

-fn create_shell_command_tool() -> ToolSpec {
-    let properties = BTreeMap::from([
+fn create_shell_command_tool(include_prefix_rule: bool) -> ToolSpec {
+    let mut properties = BTreeMap::from([
         (
             "command".to_string(),
             JsonSchema::String {
@@ -375,19 +392,8 @@ fn create_shell_command_tool() -> ToolSpec {
                 description: Some("The timeout for the command in milliseconds".to_string()),
             },
         ),
-        (
-            "sandbox_permissions".to_string(),
-            JsonSchema::String {
-                description: Some("Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\".".to_string()),
-            },
-        ),
-        (
-            "justification".to_string(),
-            JsonSchema::String {
-                description: Some("Only set if sandbox_permissions is \"require_escalated\". 1-sentence explanation of why we want to run this command.".to_string()),
-            },
-        ),
     ]);
+    properties.extend(create_approval_parameters(include_prefix_rule));

     let description = if cfg!(windows) {
         r#"Runs a Powershell command (Windows) and returns its output.
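The three tool builders above now source their approval-related JSON schema from one helper instead of three duplicated literals. A minimal sketch of the factoring pattern, using simplified stand-in types rather than the crate's real JsonSchema:

    use std::collections::BTreeMap;

    #[derive(Clone)]
    enum Schema {
        Str { description: &'static str },
    }

    fn approval_properties(include_extra: bool) -> BTreeMap<String, Schema> {
        let mut props = BTreeMap::from([(
            "justification".to_string(),
            Schema::Str { description: "why escalation is needed" },
        )]);
        if include_extra {
            props.insert(
                "prefix_rule".to_string(),
                Schema::Str { description: "suggested allow-list prefix" },
            );
        }
        props
    }

    fn build_tool_properties(include_extra: bool) -> BTreeMap<String, Schema> {
        let mut props = BTreeMap::from([(
            "command".to_string(),
            Schema::Str { description: "command to run" },
        )]);
        // Shared fields come from one place, so the tools cannot drift apart.
        props.extend(approval_properties(include_extra));
        props
    }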
@@ -1292,13 +1298,13 @@ pub(crate) fn build_specs(

     match &config.shell_type {
         ConfigShellToolType::Default => {
-            builder.push_spec(create_shell_tool());
+            builder.push_spec(create_shell_tool(config.request_rule_enabled));
         }
         ConfigShellToolType::Local => {
             builder.push_spec(ToolSpec::LocalShell {});
         }
         ConfigShellToolType::UnifiedExec => {
-            builder.push_spec(create_exec_command_tool());
+            builder.push_spec(create_exec_command_tool(config.request_rule_enabled));
             builder.push_spec(create_write_stdin_tool());
             builder.register_handler("exec_command", unified_exec_handler.clone());
             builder.register_handler("write_stdin", unified_exec_handler);
@@ -1307,7 +1313,7 @@ pub(crate) fn build_specs(
             // Do nothing.
         }
         ConfigShellToolType::ShellCommand => {
-            builder.push_spec(create_shell_command_tool());
+            builder.push_spec(create_shell_command_tool(config.request_rule_enabled));
         }
     }

@@ -1384,17 +1390,17 @@ pub(crate) fn build_specs(
     }

     match config.web_search_mode {
-        WebSearchMode::Cached => {
+        Some(WebSearchMode::Cached) => {
             builder.push_spec(ToolSpec::WebSearch {
                 external_web_access: Some(false),
             });
         }
-        WebSearchMode::Live => {
+        Some(WebSearchMode::Live) => {
             builder.push_spec(ToolSpec::WebSearch {
                 external_web_access: Some(true),
             });
         }
-        WebSearchMode::Disabled => {}
+        Some(WebSearchMode::Disabled) | None => {}
     }

     builder.push_spec_with_parallel_support(create_view_image_tool(), true);
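web_search_mode is now Option<WebSearchMode>, and the tests near the end of this diff suggest that None is resolved from the sandbox policy at request time (cached under read-only, live under danger-full-access). A minimal sketch of that resolution rule, inferred from those tests rather than taken from the actual resolver:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum WebSearchMode { Cached, Live, Disabled }

    #[derive(Clone, Copy)]
    enum SandboxPolicy { ReadOnly, WorkspaceWrite, DangerFullAccess }

    // Hypothetical resolver: explicit config wins; otherwise derive from sandbox policy.
    fn resolve_web_search_mode(
        configured: Option<WebSearchMode>,
        sandbox: SandboxPolicy,
    ) -> WebSearchMode {
        configured.unwrap_or(match sandbox {
            SandboxPolicy::DangerFullAccess => WebSearchMode::Live,
            _ => WebSearchMode::Cached,
        })
    }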
@@ -1556,7 +1562,7 @@ mod tests {
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Live,
+            web_search_mode: Some(WebSearchMode::Live),
         });
         let (tools, _) = build_specs(&config, None, &[]).build();

@@ -1579,7 +1585,7 @@ mod tests {
         // Build expected from the same helpers used by the builder.
         let mut expected: BTreeMap<String, ToolSpec> = BTreeMap::from([]);
         for spec in [
-            create_exec_command_tool(),
+            create_exec_command_tool(false),
             create_write_stdin_tool(),
             create_list_mcp_resources_tool(),
             create_list_mcp_resource_templates_tool(),
@@ -1620,7 +1626,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();
         assert_contains_tool_names(
@@ -1638,7 +1644,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();
         assert!(
@@ -1650,7 +1656,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();
         assert_contains_tool_names(&tools, &["request_user_input"]);
@@ -1659,7 +1665,7 @@ mod tests {
     fn assert_model_tools(
         model_slug: &str,
         features: &Features,
-        web_search_mode: WebSearchMode,
+        web_search_mode: Option<WebSearchMode>,
         expected_tools: &[&str],
     ) {
         let config = test_config();
@@ -1683,7 +1689,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();

@@ -1705,7 +1711,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Live,
+            web_search_mode: Some(WebSearchMode::Live),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();

@@ -1725,7 +1731,7 @@ mod tests {
         assert_model_tools(
             "gpt-5-codex",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "shell_command",
                 "list_mcp_resources",
@@ -1747,7 +1753,7 @@ mod tests {
         assert_model_tools(
             "gpt-5.1-codex",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "shell_command",
                 "list_mcp_resources",
@@ -1770,7 +1776,7 @@ mod tests {
         assert_model_tools(
             "gpt-5-codex",
             &features,
-            WebSearchMode::Live,
+            Some(WebSearchMode::Live),
             &[
                 "exec_command",
                 "write_stdin",
@@ -1794,7 +1800,7 @@ mod tests {
         assert_model_tools(
             "gpt-5.1-codex",
             &features,
-            WebSearchMode::Live,
+            Some(WebSearchMode::Live),
             &[
                 "exec_command",
                 "write_stdin",
@@ -1817,7 +1823,7 @@ mod tests {
         assert_model_tools(
             "codex-mini-latest",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "local_shell",
                 "list_mcp_resources",
@@ -1838,7 +1844,7 @@ mod tests {
         assert_model_tools(
             "gpt-5.1-codex-mini",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "shell_command",
                 "list_mcp_resources",
@@ -1860,7 +1866,7 @@ mod tests {
         assert_model_tools(
             "gpt-5",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "shell",
                 "list_mcp_resources",
@@ -1881,7 +1887,7 @@ mod tests {
         assert_model_tools(
             "gpt-5.1",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "shell_command",
                 "list_mcp_resources",
@@ -1903,7 +1909,7 @@ mod tests {
         assert_model_tools(
             "exp-5.1",
             &features,
-            WebSearchMode::Cached,
+            Some(WebSearchMode::Cached),
             &[
                 "exec_command",
                 "write_stdin",
@@ -1927,7 +1933,7 @@ mod tests {
         assert_model_tools(
             "codex-mini-latest",
             &features,
-            WebSearchMode::Live,
+            Some(WebSearchMode::Live),
             &[
                 "exec_command",
                 "write_stdin",
@@ -1951,7 +1957,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Live,
+            web_search_mode: Some(WebSearchMode::Live),
         });
         let (tools, _) = build_specs(&tools_config, Some(HashMap::new()), &[]).build();

@@ -1973,7 +1979,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();

@@ -1992,7 +1998,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(&tools_config, None, &[]).build();

@@ -2023,7 +2029,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Live,
+            web_search_mode: Some(WebSearchMode::Live),
         });
         let (tools, _) = build_specs(
             &tools_config,
@@ -2119,7 +2125,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });

         // Intentionally construct a map with keys that would sort alphabetically.
@@ -2196,7 +2202,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });

         let (tools, _) = build_specs(
@@ -2254,7 +2260,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });

         let (tools, _) = build_specs(
@@ -2309,7 +2315,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });

         let (tools, _) = build_specs(
@@ -2366,7 +2372,7 @@ mod tests {
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });

         let (tools, _) = build_specs(
@@ -2413,7 +2419,7 @@ mod tests {

     #[test]
     fn test_shell_tool() {
-        let tool = super::create_shell_tool();
+        let tool = super::create_shell_tool(false);
         let ToolSpec::Function(ResponsesApiTool {
             description, name, ..
         }) = &tool
@@ -2443,7 +2449,7 @@ Examples of valid command strings:

     #[test]
     fn test_shell_command_tool() {
-        let tool = super::create_shell_command_tool();
+        let tool = super::create_shell_command_tool(false);
         let ToolSpec::Function(ResponsesApiTool {
             description, name, ..
         }) = &tool
@@ -2479,7 +2485,7 @@ Examples of valid command strings:
         let tools_config = ToolsConfig::new(&ToolsConfigParams {
             model_info: &model_info,
             features: &features,
-            web_search_mode: WebSearchMode::Cached,
+            web_search_mode: Some(WebSearchMode::Cached),
         });
         let (tools, _) = build_specs(
             &tools_config,
@@ -82,6 +82,7 @@ pub(crate) struct ExecCommandRequest {
     pub tty: bool,
     pub sandbox_permissions: SandboxPermissions,
     pub justification: Option<String>,
+    pub prefix_rule: Option<Vec<String>>,
 }

 #[derive(Debug)]
@@ -205,6 +206,7 @@ mod tests {
                 tty: true,
                 sandbox_permissions: SandboxPermissions::UseDefault,
                 justification: None,
+                prefix_rule: None,
             },
             &context,
         )
@@ -11,9 +11,9 @@ use tokio::time::Instant;
 use tokio_util::sync::CancellationToken;

 use crate::exec_env::create_env;
+use crate::exec_policy::ExecApprovalRequest;
 use crate::protocol::ExecCommandSource;
 use crate::sandboxing::ExecEnv;
 use crate::sandboxing::SandboxPermissions;
 use crate::tools::events::ToolEmitter;
 use crate::tools::events::ToolEventCtx;
 use crate::tools::events::ToolEventStage;

@@ -123,14 +123,7 @@ impl UnifiedExecProcessManager {
             .unwrap_or_else(|| context.turn.cwd.clone());

         let process = self
-            .open_session_with_sandbox(
-                &request.command,
-                cwd.clone(),
-                request.sandbox_permissions,
-                request.justification,
-                request.tty,
-                context,
-            )
+            .open_session_with_sandbox(&request, cwd.clone(), context)
             .await;

         let process = match process {
@@ -486,14 +479,14 @@ impl UnifiedExecProcessManager {

     pub(super) async fn open_session_with_sandbox(
         &self,
-        command: &[String],
+        request: &ExecCommandRequest,
         cwd: PathBuf,
-        sandbox_permissions: SandboxPermissions,
-        justification: Option<String>,
-        tty: bool,
         context: &UnifiedExecContext,
     ) -> Result<UnifiedExecProcess, UnifiedExecError> {
-        let env = apply_unified_exec_env(create_env(&context.turn.shell_environment_policy));
+        let env = apply_unified_exec_env(create_env(
+            &context.turn.shell_environment_policy,
+            Some(context.session.conversation_id),
+        ));
         let features = context.session.features();
         let mut orchestrator = ToolOrchestrator::new();
         let mut runtime = UnifiedExecRuntime::new(self);
@@ -501,21 +494,22 @@ impl UnifiedExecProcessManager {
             .session
             .services
             .exec_policy
-            .create_exec_approval_requirement_for_command(
-                &features,
-                command,
-                context.turn.approval_policy,
-                &context.turn.sandbox_policy,
-                sandbox_permissions,
-            )
+            .create_exec_approval_requirement_for_command(ExecApprovalRequest {
+                features: &features,
+                command: &request.command,
+                approval_policy: context.turn.approval_policy,
+                sandbox_policy: &context.turn.sandbox_policy,
+                sandbox_permissions: request.sandbox_permissions,
+                prefix_rule: request.prefix_rule.clone(),
+            })
             .await;
         let req = UnifiedExecToolRequest::new(
-            command.to_vec(),
+            request.command.clone(),
             cwd,
             env,
-            tty,
-            sandbox_permissions,
-            justification,
+            request.tty,
+            request.sandbox_permissions,
+            request.justification.clone(),
            exec_approval_requirement,
        );
        let tool_ctx = ToolCtx {
@@ -52,6 +52,19 @@ pub fn sandbox_setup_is_complete(_codex_home: &Path) -> bool {
     false
 }

+#[cfg(target_os = "windows")]
+pub fn elevated_setup_failure_details(err: &anyhow::Error) -> Option<(String, String)> {
+    let failure = codex_windows_sandbox::extract_setup_failure(err)?;
+    let code = failure.code.as_str().to_string();
+    let message = codex_windows_sandbox::sanitize_setup_metric_tag_value(&failure.message);
+    Some((code, message))
+}
+
+#[cfg(not(target_os = "windows"))]
+pub fn elevated_setup_failure_details(_err: &anyhow::Error) -> Option<(String, String)> {
+    None
+}
+
 #[cfg(target_os = "windows")]
 pub fn run_elevated_setup(
     policy: &SandboxPolicy,
|
||||
|
||||
let test = test_codex()
|
||||
.with_config(|config| {
|
||||
config.web_search_mode = WebSearchMode::Disabled;
|
||||
config.web_search_mode = Some(WebSearchMode::Disabled);
|
||||
})
|
||||
.build(&server)
|
||||
.await
|
||||
|
||||
@@ -1754,6 +1754,16 @@ async fn approving_execpolicy_amendment_persists_policy_and_skips_future_prompts
|
||||
.await?;
|
||||
wait_for_completion(&test).await;
|
||||
|
||||
let developer_messages = first_results
|
||||
.single_request()
|
||||
.message_input_texts("developer");
|
||||
assert!(
|
||||
developer_messages
|
||||
.iter()
|
||||
.any(|message| message.contains(r#"["touch", "allow-prefix.txt"]"#)),
|
||||
"expected developer message documenting saved rule, got: {developer_messages:?}"
|
||||
);
|
||||
|
||||
let policy_path = test.home.path().join("rules").join("default.rules");
|
||||
let policy_contents = fs::read_to_string(&policy_path)?;
|
||||
assert!(
|
||||
|
||||
@@ -1277,7 +1277,7 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
|
||||
.unwrap();
|
||||
|
||||
wait_for_event(&resumed.codex, |event| {
|
||||
matches!(event, EventMsg::ContextCompactionEnded(_))
|
||||
matches!(event, EventMsg::ContextCompacted(_))
|
||||
})
|
||||
.await;
|
||||
wait_for_event(&resumed.codex, |event| {
|
||||
|
||||
@@ -202,7 +202,7 @@ async fn remote_compact_runs_automatically() -> Result<()> {
|
||||
})
|
||||
.await?;
|
||||
let message = wait_for_event_match(&codex, |ev| match ev {
|
||||
EventMsg::ContextCompactionEnded(_) => Some(true),
|
||||
EventMsg::ContextCompacted(_) => Some(true),
|
||||
_ => None,
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -65,6 +65,7 @@ mod shell_command;
|
||||
mod shell_serialization;
|
||||
mod shell_snapshot;
|
||||
mod skills;
|
||||
mod sqlite_state;
|
||||
mod stream_error_allows_next_turn;
|
||||
mod stream_no_completed;
|
||||
mod text_encoding_fix;
|
||||
|
||||
@@ -38,7 +38,7 @@ async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
|
||||
.with_model(model)
|
||||
// Keep tool expectations stable when the default web_search mode changes.
|
||||
.with_config(|config| {
|
||||
config.web_search_mode = WebSearchMode::Cached;
|
||||
config.web_search_mode = Some(WebSearchMode::Cached);
|
||||
config.features.enable(Feature::CollaborationModes);
|
||||
});
|
||||
let test = builder
|
||||
|
||||
@@ -4,6 +4,8 @@ use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_execpolicy::Policy;
|
||||
use codex_protocol::models::DeveloperInstructions;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use core_test_support::responses::ev_completed;
|
||||
@@ -411,10 +413,11 @@ async fn permissions_message_includes_writable_roots() -> Result<()> {
|
||||
exclude_tmpdir_env_var: false,
|
||||
exclude_slash_tmp: false,
|
||||
};
|
||||
let sandbox_policy_for_config = sandbox_policy.clone();
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.approval_policy = Constrained::allow_any(AskForApproval::OnRequest);
|
||||
config.sandbox_policy = Constrained::allow_any(sandbox_policy);
|
||||
config.sandbox_policy = Constrained::allow_any(sandbox_policy_for_config);
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
@@ -432,39 +435,14 @@ async fn permissions_message_includes_writable_roots() -> Result<()> {
|
||||
let body = req.single_request().body_json();
|
||||
let input = body["input"].as_array().expect("input array");
|
||||
let permissions = permissions_texts(input);
|
||||
let sandbox_text = "Filesystem sandboxing defines which files can be read or written. `sandbox_mode` is `workspace-write`: The sandbox permits reading files, and editing files in `cwd` and `writable_roots`. Editing files in other directories requires approval. Network access is restricted.";
|
||||
let approval_text = " Approvals are your mechanism to get user consent to run shell commands without the sandbox. `approval_policy` is `on-request`: Commands will be run in the sandbox by default, and you can specify in your tool call if you want to escalate a command to run without sandboxing. If the completing the task requires escalated permissions, Do not let these settings or the sandbox deter you from attempting to accomplish the user's task.\n\nHere are scenarios where you'll need to request approval:\n- You need to run a command that writes to a directory that requires it (e.g. running tests that write to /var)\n- You need to run a GUI app (e.g., open/xdg-open/osascript) to open browsers or files.\n- You are running sandboxed and need to run a command that requires network access (e.g. installing packages)\n- If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with approval. ALWAYS proceed to use the `sandbox_permissions` and `justification` parameters - do not message the user before requesting approval for the command.\n- You are about to take a potentially destructive action such as an `rm` or `git reset` that the user did not explicitly ask for.\n\nWhen requesting approval to execute a command that will require escalated privileges:\n - Provide the `sandbox_permissions` parameter with the value `\"require_escalated\"`\n - Include a short, 1 sentence explanation for why you need escalated permissions in the justification parameter";
|
||||
// Normalize paths by removing trailing slashes to match AbsolutePathBuf behavior
|
||||
let normalize_path =
|
||||
|p: &std::path::Path| -> String { p.to_string_lossy().trim_end_matches('/').to_string() };
|
||||
let mut roots = vec![
|
||||
normalize_path(writable.path()),
|
||||
normalize_path(test.config.cwd.as_path()),
|
||||
];
|
||||
if cfg!(unix) && std::path::Path::new("/tmp").is_dir() {
|
||||
roots.push("/tmp".to_string());
|
||||
}
|
||||
if let Some(tmpdir) = std::env::var_os("TMPDIR") {
|
||||
let tmpdir_path = std::path::PathBuf::from(&tmpdir);
|
||||
if tmpdir_path.is_absolute() && !tmpdir.is_empty() {
|
||||
roots.push(normalize_path(&tmpdir_path));
|
||||
}
|
||||
}
|
||||
let roots_text = if roots.len() == 1 {
|
||||
format!(" The writable root is `{}`.", roots[0])
|
||||
} else {
|
||||
format!(
|
||||
" The writable roots are {}.",
|
||||
roots
|
||||
.iter()
|
||||
.map(|root| format!("`{root}`"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
};
|
||||
let expected = format!(
|
||||
"<permissions instructions>{sandbox_text}{approval_text}{roots_text}</permissions instructions>"
|
||||
);
|
||||
let expected = DeveloperInstructions::from_policy(
|
||||
&sandbox_policy,
|
||||
AskForApproval::OnRequest,
|
||||
&Policy::empty(),
|
||||
false,
|
||||
test.config.cwd.as_path(),
|
||||
)
|
||||
.into_text();
|
||||
// Normalize line endings to handle Windows vs Unix differences
|
||||
let normalize_line_endings = |s: &str| s.replace("\r\n", "\n");
|
||||
let expected_normalized = normalize_line_endings(&expected);
|
||||
|
||||
@@ -92,7 +92,7 @@ async fn prompt_tools_are_consistent_across_requests() -> anyhow::Result<()> {
|
||||
config.user_instructions = Some("be consistent and helpful".to_string());
|
||||
config.model = Some("gpt-5.1-codex-max".to_string());
|
||||
// Keep tool expectations stable when the default web_search mode changes.
|
||||
config.web_search_mode = WebSearchMode::Cached;
|
||||
config.web_search_mode = Some(WebSearchMode::Cached);
|
||||
config.features.enable(Feature::CollaborationModes);
|
||||
})
|
||||
.build(&server)
|
||||
|
||||
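The permissions test above stops hand-maintaining a literal copy of the instructions string and derives the expectation from the same production constructor. The general pattern, as a minimal sketch with made-up names:

    struct Policy { readonly: bool }

    fn permissions_text(policy: &Policy) -> String {
        if policy.readonly { "sandbox: read-only".to_string() } else { "sandbox: write".to_string() }
    }

    // Elsewhere, a request builder embeds the same text:
    fn build_request_body(policy: &Policy) -> String {
        format!("<instructions>{}</instructions>", permissions_text(policy))
    }

    #[cfg(test)]
    mod tests {
        use super::*;

        #[test]
        fn request_embeds_canonical_permissions_text() {
            let policy = Policy { readonly: true };
            // The expectation is built from the canonical helper, not a hand-copied
            // literal, so wording tweaks cannot silently desynchronize the test.
            let expected = format!("<instructions>{}</instructions>", permissions_text(&policy));
            assert_eq!(build_request_body(&policy), expected);
        }
    }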
199 codex-rs/core/tests/suite/sqlite_state.rs Normal file
@@ -0,0 +1,199 @@
+use anyhow::Result;
+use codex_core::features::Feature;
+use codex_protocol::ThreadId;
+use codex_protocol::protocol::EventMsg;
+use codex_protocol::protocol::RolloutItem;
+use codex_protocol::protocol::RolloutLine;
+use codex_protocol::protocol::SessionMeta;
+use codex_protocol::protocol::SessionMetaLine;
+use codex_protocol::protocol::SessionSource;
+use codex_protocol::protocol::UserMessageEvent;
+use codex_state::STATE_DB_FILENAME;
+use core_test_support::load_sse_fixture_with_id;
+use core_test_support::responses::mount_sse_sequence;
+use core_test_support::responses::start_mock_server;
+use core_test_support::test_codex::test_codex;
+use pretty_assertions::assert_eq;
+use std::fs;
+use tokio::time::Duration;
+use uuid::Uuid;
+
+fn sse_completed(id: &str) -> String {
+    load_sse_fixture_with_id("../fixtures/completed_template.json", id)
+}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn new_thread_is_recorded_in_state_db() -> Result<()> {
+    let server = start_mock_server().await;
+    let mut builder = test_codex().with_config(|config| {
+        config.features.enable(Feature::Sqlite);
+    });
+    let test = builder.build(&server).await?;
+
+    let thread_id = test.session_configured.session_id;
+    let rollout_path = test.codex.rollout_path().expect("rollout path");
+    let db_path = test.config.codex_home.join(STATE_DB_FILENAME);
+
+    for _ in 0..100 {
+        if tokio::fs::try_exists(&db_path).await.unwrap_or(false) {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    let db = test.codex.state_db().expect("state db enabled");
+
+    let mut metadata = None;
+    for _ in 0..100 {
+        metadata = db.get_thread(thread_id).await?;
+        if metadata.is_some() {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    let metadata = metadata.expect("thread should exist in state db");
+    assert_eq!(metadata.id, thread_id);
+    assert_eq!(metadata.rollout_path, rollout_path);
+
+    Ok(())
+}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn backfill_scans_existing_rollouts() -> Result<()> {
+    let server = start_mock_server().await;
+
+    let uuid = Uuid::now_v7();
+    let thread_id = ThreadId::from_string(&uuid.to_string())?;
+    let rollout_rel_path = format!("sessions/2026/01/27/rollout-2026-01-27T12-00-00-{uuid}.jsonl");
+    let rollout_rel_path_for_hook = rollout_rel_path.clone();
+
+    let mut builder = test_codex()
+        .with_pre_build_hook(move |codex_home| {
+            let rollout_path = codex_home.join(&rollout_rel_path_for_hook);
+            let parent = rollout_path
+                .parent()
+                .expect("rollout path should have parent");
+            fs::create_dir_all(parent).expect("should create rollout directory");
+
+            let session_meta_line = SessionMetaLine {
+                meta: SessionMeta {
+                    id: thread_id,
+                    forked_from_id: None,
+                    timestamp: "2026-01-27T12:00:00Z".to_string(),
+                    cwd: codex_home.to_path_buf(),
+                    originator: "test".to_string(),
+                    cli_version: "test".to_string(),
+                    source: SessionSource::default(),
+                    model_provider: None,
+                    base_instructions: None,
+                },
+                git: None,
+            };
+
+            let lines = [
+                RolloutLine {
+                    timestamp: "2026-01-27T12:00:00Z".to_string(),
+                    item: RolloutItem::SessionMeta(session_meta_line),
+                },
+                RolloutLine {
+                    timestamp: "2026-01-27T12:00:01Z".to_string(),
+                    item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
+                        message: "hello from backfill".to_string(),
+                        images: None,
+                        local_images: Vec::new(),
+                        text_elements: Vec::new(),
+                    })),
+                },
+            ];
+
+            let jsonl = lines
+                .iter()
+                .map(|line| serde_json::to_string(line).expect("rollout line should serialize"))
+                .collect::<Vec<_>>()
+                .join("\n");
+            fs::write(&rollout_path, format!("{jsonl}\n")).expect("should write rollout file");
+        })
+        .with_config(|config| {
+            config.features.enable(Feature::Sqlite);
+        });
+
+    let test = builder.build(&server).await?;
+
+    let db_path = test.config.codex_home.join(STATE_DB_FILENAME);
+    let rollout_path = test.config.codex_home.join(&rollout_rel_path);
+    let default_provider = test.config.model_provider_id.clone();
+
+    for _ in 0..20 {
+        if tokio::fs::try_exists(&db_path).await.unwrap_or(false) {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    let db = test.codex.state_db().expect("state db enabled");
+
+    let mut metadata = None;
+    for _ in 0..40 {
+        metadata = db.get_thread(thread_id).await?;
+        if metadata.is_some() {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    let metadata = metadata.expect("backfilled thread should exist in state db");
+    assert_eq!(metadata.id, thread_id);
+    assert_eq!(metadata.rollout_path, rollout_path);
+    assert_eq!(metadata.model_provider, default_provider);
+    assert!(metadata.has_user_event);
+
+    Ok(())
+}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn user_messages_persist_in_state_db() -> Result<()> {
+    let server = start_mock_server().await;
+    mount_sse_sequence(
+        &server,
+        vec![sse_completed("resp-1"), sse_completed("resp-2")],
+    )
+    .await;
+
+    let mut builder = test_codex().with_config(|config| {
+        config.features.enable(Feature::Sqlite);
+    });
+    let test = builder.build(&server).await?;
+
+    let db_path = test.config.codex_home.join(STATE_DB_FILENAME);
+    for _ in 0..100 {
+        if tokio::fs::try_exists(&db_path).await.unwrap_or(false) {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    test.submit_turn("hello from sqlite").await?;
+    test.submit_turn("another message").await?;
+
+    let db = test.codex.state_db().expect("state db enabled");
+    let thread_id = test.session_configured.session_id;
+
+    let mut metadata = None;
+    for _ in 0..100 {
+        metadata = db.get_thread(thread_id).await?;
+        if metadata
+            .as_ref()
+            .map(|entry| entry.has_user_event)
+            .unwrap_or(false)
+        {
+            break;
+        }
+        tokio::time::sleep(Duration::from_millis(25)).await;
+    }
+
+    let metadata = metadata.expect("thread should exist in state db");
+    assert!(metadata.has_user_event);
+
+    Ok(())
+}
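The three tests above all poll the state DB with a bounded retry loop. A small generic helper could express that wait once; a minimal sketch with hypothetical naming (the suite may well have its own utility for this):

    use std::future::Future;
    use std::time::Duration;

    // Poll an async probe until it yields Some(value) or the attempt budget runs out.
    async fn wait_for_some<T, F, Fut>(mut probe: F, attempts: usize) -> Option<T>
    where
        F: FnMut() -> Fut,
        Fut: Future<Output = Option<T>>,
    {
        for _ in 0..attempts {
            if let Some(value) = probe().await {
                return Some(value);
            }
            tokio::time::sleep(Duration::from_millis(25)).await;
        }
        None
    }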
@@ -1,6 +1,7 @@
 #![allow(clippy::unwrap_used)]

 use codex_core::features::Feature;
+use codex_core::protocol::SandboxPolicy;
 use codex_protocol::config_types::WebSearchMode;
 use core_test_support::load_sse_fixture_with_id;
 use core_test_support::responses;
@@ -25,7 +26,7 @@ fn find_web_search_tool(body: &Value) -> &Value {
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn web_search_mode_cached_sets_external_web_access_false_in_request_body() {
+async fn web_search_mode_cached_sets_external_web_access_false() {
     skip_if_no_network!();

     let server = start_mock_server().await;
@@ -35,7 +36,7 @@ async fn web_search_mode_cached_sets_external_web_access_false_in_request_body()
     let mut builder = test_codex()
         .with_model("gpt-5-codex")
         .with_config(|config| {
-            config.web_search_mode = WebSearchMode::Cached;
+            config.web_search_mode = Some(WebSearchMode::Cached);
         });
     let test = builder
         .build(&server)
@@ -56,7 +57,7 @@ async fn web_search_mode_cached_sets_external_web_access_false_in_request_body()
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn web_search_mode_takes_precedence_over_legacy_flags_in_request_body() {
+async fn web_search_mode_takes_precedence_over_legacy_flags() {
     skip_if_no_network!();

     let server = start_mock_server().await;
@@ -67,7 +68,7 @@ async fn web_search_mode_takes_precedence_over_legacy_flags_in_request_body() {
         .with_model("gpt-5-codex")
         .with_config(|config| {
             config.features.enable(Feature::WebSearchRequest);
-            config.web_search_mode = WebSearchMode::Cached;
+            config.web_search_mode = Some(WebSearchMode::Cached);
         });
     let test = builder
         .build(&server)
@@ -86,3 +87,90 @@ async fn web_search_mode_takes_precedence_over_legacy_flags_in_request_body() {
         "web_search mode should win over legacy web_search_request"
     );
 }
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn web_search_mode_defaults_to_cached_when_unset() {
+    skip_if_no_network!();
+
+    let server = start_mock_server().await;
+    let sse = sse_completed("resp-1");
+    let resp_mock = responses::mount_sse_once(&server, sse).await;
+
+    let mut builder = test_codex()
+        .with_model("gpt-5-codex")
+        .with_config(|config| {
+            config.web_search_mode = None;
+            config.features.disable(Feature::WebSearchCached);
+            config.features.disable(Feature::WebSearchRequest);
+        });
+    let test = builder
+        .build(&server)
+        .await
+        .expect("create test Codex conversation");
+
+    test.submit_turn_with_policy("hello default cached web search", SandboxPolicy::ReadOnly)
+        .await
+        .expect("submit turn");
+
+    let body = resp_mock.single_request().body_json();
+    let tool = find_web_search_tool(&body);
+    assert_eq!(
+        tool.get("external_web_access").and_then(Value::as_bool),
+        Some(false),
+        "default web_search should be cached when unset"
+    );
+}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn web_search_mode_updates_between_turns_with_sandbox_policy() {
+    skip_if_no_network!();
+
+    let server = start_mock_server().await;
+    let resp_mock = responses::mount_sse_sequence(
+        &server,
+        vec![sse_completed("resp-1"), sse_completed("resp-2")],
+    )
+    .await;
+
+    let mut builder = test_codex()
+        .with_model("gpt-5-codex")
+        .with_config(|config| {
+            config.web_search_mode = None;
+            config.features.disable(Feature::WebSearchCached);
+            config.features.disable(Feature::WebSearchRequest);
+        });
+    let test = builder
+        .build(&server)
+        .await
+        .expect("create test Codex conversation");
+
+    test.submit_turn_with_policy("hello cached", SandboxPolicy::ReadOnly)
+        .await
+        .expect("submit first turn");
+    test.submit_turn_with_policy("hello live", SandboxPolicy::DangerFullAccess)
+        .await
+        .expect("submit second turn");
+
+    let requests = resp_mock.requests();
+    assert_eq!(requests.len(), 2, "expected two response requests");
+
+    let first_body = requests[0].body_json();
+    let first_tool = find_web_search_tool(&first_body);
+    assert_eq!(
+        first_tool
+            .get("external_web_access")
+            .and_then(Value::as_bool),
+        Some(false),
+        "read-only policy should default web_search to cached"
+    );
+
+    let second_body = requests[1].body_json();
+    let second_tool = find_web_search_tool(&second_body);
+    assert_eq!(
+        second_tool
+            .get("external_web_access")
+            .and_then(Value::as_bool),
+        Some(true),
+        "danger-full-access policy should default web_search to live"
+    );
+}
@@ -244,3 +244,37 @@ pub enum Color {
     #[default]
     Auto,
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use pretty_assertions::assert_eq;
+
+    #[test]
+    fn resume_parses_prompt_after_global_flags() {
+        const PROMPT: &str = "echo resume-with-global-flags-after-subcommand";
+        let cli = Cli::parse_from([
+            "codex-exec",
+            "resume",
+            "--last",
+            "--json",
+            "--model",
+            "gpt-5.2-codex",
+            "--dangerously-bypass-approvals-and-sandbox",
+            "--skip-git-repo-check",
+            PROMPT,
+        ]);
+
+        let Some(Command::Resume(args)) = cli.command else {
+            panic!("expected resume command");
+        };
+        let effective_prompt = args.prompt.clone().or_else(|| {
+            if args.last {
+                args.session_id.clone()
+            } else {
+                None
+            }
+        });
+        assert_eq!(effective_prompt.as_deref(), Some(PROMPT));
+    }
+}
@@ -590,11 +590,8 @@ impl EventProcessor for EventProcessorWithHumanOutput {
                 ts_msg!(self, "task aborted: review ended");
             }
         },
-        EventMsg::ContextCompactionStarted(_) => {
-            ts_msg!(self, "context compaction started");
-        }
-        EventMsg::ContextCompactionEnded(_) => {
-            ts_msg!(self, "context compaction ended");
+        EventMsg::ContextCompacted(_) => {
+            ts_msg!(self, "context compacted");
         }
         EventMsg::CollabAgentSpawnBegin(CollabAgentSpawnBeginEvent {
             call_id,

@@ -849,16 +849,17 @@ impl EventProcessor for EventProcessorWithJsonOutput {

         let protocol::Event { msg, .. } = event;

-        if let protocol::EventMsg::TurnComplete(protocol::TurnCompleteEvent {
-            last_agent_message,
-        }) = msg
-        {
-            if let Some(output_file) = self.last_message_path.as_deref() {
-                handle_last_message(last_agent_message.as_deref(), output_file);
+        match msg {
+            protocol::EventMsg::TurnComplete(protocol::TurnCompleteEvent {
+                last_agent_message,
+            }) => {
+                if let Some(output_file) = self.last_message_path.as_deref() {
+                    handle_last_message(last_agent_message.as_deref(), output_file);
+                }
+                CodexStatus::InitiateShutdown
             }
-            CodexStatus::InitiateShutdown
-        } else {
-            CodexStatus::Running
+            protocol::EventMsg::ShutdownComplete => CodexStatus::Shutdown,
+            _ => CodexStatus::Running,
         }
     }
 }
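The exec CLI changes in the next file fan events from several threads into one channel by wrapping each event with its source thread. A minimal sketch of that envelope pattern using tokio primitives, with simplified stand-in types:

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct ThreadId(u32);

    #[derive(Debug)]
    struct Envelope {
        thread_id: ThreadId,
        payload: String, // stands in for the real Event type
    }

    fn spawn_listener(
        thread_id: ThreadId,
        mut events: tokio::sync::mpsc::Receiver<String>,
        tx: tokio::sync::mpsc::UnboundedSender<Envelope>,
    ) {
        tokio::spawn(async move {
            // Each listener tags its events so the consumer can tell threads apart.
            while let Some(payload) = events.recv().await {
                if tx.send(Envelope { thread_id, payload }).is_err() {
                    break; // consumer dropped; stop listening
                }
            }
        });
    }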
@@ -46,13 +46,17 @@ use codex_utils_absolute_path::AbsolutePathBuf;
 use event_processor_with_human_output::EventProcessorWithHumanOutput;
 use event_processor_with_jsonl_output::EventProcessorWithJsonOutput;
 use serde_json::Value;
+use std::collections::HashSet;
 use std::io::IsTerminal;
 use std::io::Read;
 use std::path::PathBuf;
+use std::sync::Arc;
 use supports_color::Stream;
+use tokio::sync::Mutex;
 use tracing::debug;
 use tracing::error;
 use tracing::info;
+use tracing::warn;
 use tracing_subscriber::EnvFilter;
 use tracing_subscriber::prelude::*;

@@ -72,6 +76,13 @@ enum InitialOperation {
     },
 }

+#[derive(Clone)]
+struct ThreadEventEnvelope {
+    thread_id: codex_protocol::ThreadId,
+    thread: Arc<codex_core::CodexThread>,
+    event: Event,
+}
+
 pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
     if let Err(err) = set_default_originator("codex_exec".to_string()) {
         tracing::warn!(?err, "Failed to set codex exec originator override {err:?}");
@@ -326,11 +337,11 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
         true,
         config.cli_auth_credentials_store_mode,
     );
-    let thread_manager = ThreadManager::new(
+    let thread_manager = Arc::new(ThreadManager::new(
         config.codex_home.clone(),
         auth_manager.clone(),
         SessionSource::Exec,
-    );
+    ));
     let default_model = thread_manager
         .get_models_manager()
         .get_default_model(&config.model, &config, RefreshStrategy::OnlineIfUncached)
@@ -338,7 +349,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any

     // Handle resume subcommand by resolving a rollout path and using explicit resume API.
     let NewThread {
-        thread_id: _,
+        thread_id: primary_thread_id,
         thread,
         session_configured,
     } = if let Some(ExecCommand::Resume(args)) = command.as_ref() {
@@ -420,40 +431,47 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any

     info!("Codex initialized with event: {session_configured:?}");

-    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<Event>();
+    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<ThreadEventEnvelope>();
+    let attached_threads = Arc::new(Mutex::new(HashSet::from([primary_thread_id])));
+    spawn_thread_listener(primary_thread_id, thread.clone(), tx.clone());

     {
         let thread = thread.clone();
         tokio::spawn(async move {
-            loop {
-                tokio::select! {
-                    _ = tokio::signal::ctrl_c() => {
-                        tracing::debug!("Keyboard interrupt");
-                        // Immediately notify Codex to abort any in‑flight task.
-                        thread.submit(Op::Interrupt).await.ok();
-
-                        // Exit the inner loop and return to the main input prompt. The codex
-                        // will emit a `TurnInterrupted` (Error) event which is drained later.
-                        break;
-                    }
-                    res = thread.next_event() => match res {
-                        Ok(event) => {
-                            debug!("Received event: {event:?}");
-
-                            let is_shutdown_complete = matches!(event.msg, EventMsg::ShutdownComplete);
-                            if let Err(e) = tx.send(event) {
-                                error!("Error sending event: {e:?}");
-                                break;
-                            }
-                            if is_shutdown_complete {
-                                info!("Received shutdown event, exiting event loop.");
-                                break;
-                            }
-                        }
-                        Err(e) => {
-                            error!("Error receiving event: {e:?}");
-                            break;
-                        }
-                    }
-                }
-            }
-        });
-    }
+            if tokio::signal::ctrl_c().await.is_ok() {
+                tracing::debug!("Keyboard interrupt");
+                // Immediately notify Codex to abort any in-flight task.
+                thread.submit(Op::Interrupt).await.ok();
+            }
+        });
+    }
+
+    {
+        let thread_manager = Arc::clone(&thread_manager);
+        let attached_threads = Arc::clone(&attached_threads);
+        let tx = tx.clone();
+        let mut thread_created_rx = thread_manager.subscribe_thread_created();
+        tokio::spawn(async move {
+            loop {
+                match thread_created_rx.recv().await {
+                    Ok(thread_id) => {
+                        if attached_threads.lock().await.contains(&thread_id) {
+                            continue;
+                        }
+                        match thread_manager.get_thread(thread_id).await {
+                            Ok(thread) => {
+                                attached_threads.lock().await.insert(thread_id);
+                                spawn_thread_listener(thread_id, thread, tx.clone());
+                            }
+                            Err(err) => {
+                                warn!("failed to attach listener for thread {thread_id}: {err}")
+                            }
+                        }
+                    }
+                    Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
+                        warn!("thread_created receiver lagged; skipping resync");
+                    }
+                    Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
+                }
+            }
+        });
+    }
@@ -492,7 +510,12 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
// Track whether a fatal error was reported by the server so we can
|
||||
// exit with a non-zero status for automation-friendly signaling.
|
||||
let mut error_seen = false;
|
||||
while let Some(event) = rx.recv().await {
|
||||
while let Some(envelope) = rx.recv().await {
|
||||
let ThreadEventEnvelope {
|
||||
thread_id,
|
||||
thread,
|
||||
event,
|
||||
} = envelope;
|
||||
if let EventMsg::ElicitationRequest(ev) = &event.msg {
|
||||
// Automatically cancel elicitation requests in exec mode.
|
||||
thread
|
||||
@@ -506,15 +529,20 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
if matches!(event.msg, EventMsg::Error(_)) {
|
||||
error_seen = true;
|
||||
}
|
||||
let shutdown: CodexStatus = event_processor.process_event(event);
|
||||
if thread_id != primary_thread_id && matches!(&event.msg, EventMsg::TurnComplete(_)) {
|
||||
continue;
|
||||
}
|
||||
let shutdown = event_processor.process_event(event);
|
||||
if thread_id != primary_thread_id && matches!(shutdown, CodexStatus::InitiateShutdown) {
|
||||
continue;
|
||||
}
|
||||
match shutdown {
|
||||
CodexStatus::Running => continue,
|
||||
CodexStatus::InitiateShutdown => {
|
||||
thread.submit(Op::Shutdown).await?;
|
||||
}
|
||||
CodexStatus::Shutdown => {
|
||||
break;
|
||||
}
|
||||
CodexStatus::Shutdown if thread_id == primary_thread_id => break,
|
||||
CodexStatus::Shutdown => continue,
|
||||
}
|
||||
}
|
||||
event_processor.print_final_output();
|
||||
@@ -525,6 +553,42 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn spawn_thread_listener(
|
||||
thread_id: codex_protocol::ThreadId,
|
||||
thread: Arc<codex_core::CodexThread>,
|
||||
tx: tokio::sync::mpsc::UnboundedSender<ThreadEventEnvelope>,
|
||||
) {
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
match thread.next_event().await {
|
||||
Ok(event) => {
|
||||
debug!("Received event: {event:?}");
|
||||
|
||||
let is_shutdown_complete = matches!(event.msg, EventMsg::ShutdownComplete);
|
||||
if let Err(err) = tx.send(ThreadEventEnvelope {
|
||||
thread_id,
|
||||
thread: Arc::clone(&thread),
|
||||
event,
|
||||
}) {
|
||||
error!("Error sending event: {err:?}");
|
||||
break;
|
||||
}
|
||||
if is_shutdown_complete {
|
||||
info!(
|
||||
"Received shutdown event for thread {thread_id}, exiting event loop."
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
error!("Error receiving event: {err:?}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn resolve_resume_path(
|
||||
config: &Config,
|
||||
args: &crate::cli::ResumeArgs,
|
||||
|
||||
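The fan-in above is easiest to see in isolation: each thread gets its own listener task, all listeners share one unbounded mpsc sender, and the consumer tells the sources apart by the id carried in the envelope. Below is a minimal, self-contained sketch of the same pattern; `Envelope`, `SourceId`, and the string events are hypothetical stand-ins for the real `codex_core` types, and it assumes tokio with the `macros` and `rt-multi-thread` features.

```rust
use tokio::sync::mpsc;

// Hypothetical stand-ins for ThreadId / Event in the diff above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct SourceId(u32);

#[derive(Debug)]
struct Envelope {
    source: SourceId,
    event: String,
}

// One listener per source, all sharing the same sender; this mirrors
// spawn_thread_listener(thread_id, thread, tx.clone()) in the diff.
fn spawn_listener(
    source: SourceId,
    mut events: mpsc::Receiver<String>,
    tx: mpsc::UnboundedSender<Envelope>,
) {
    tokio::spawn(async move {
        while let Some(event) = events.recv().await {
            // If the consumer is gone, stop forwarding.
            if tx.send(Envelope { source, event }).is_err() {
                break;
            }
        }
    });
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<Envelope>();

    let (a_tx, a_rx) = mpsc::channel(8);
    let (b_tx, b_rx) = mpsc::channel(8);
    spawn_listener(SourceId(1), a_rx, tx.clone());
    spawn_listener(SourceId(2), b_rx, tx.clone());
    drop(tx); // rx.recv() returns None once every listener has finished.

    a_tx.send("hello from 1".to_string()).await.unwrap();
    b_tx.send("hello from 2".to_string()).await.unwrap();
    drop(a_tx);
    drop(b_tx);

    while let Some(envelope) = rx.recv().await {
        println!("{:?} -> {}", envelope.source, envelope.event);
    }
}
```
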
@@ -38,3 +38,44 @@ fn main() -> anyhow::Result<()> {
        Ok(())
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn top_cli_parses_resume_prompt_after_config_flag() {
        const PROMPT: &str = "echo resume-with-global-flags-after-subcommand";
        let cli = TopCli::parse_from([
            "codex-exec",
            "resume",
            "--last",
            "--json",
            "--model",
            "gpt-5.2-codex",
            "--config",
            "reasoning_level=xhigh",
            "--dangerously-bypass-approvals-and-sandbox",
            "--skip-git-repo-check",
            PROMPT,
        ]);

        let Some(codex_exec::Command::Resume(args)) = cli.inner.command else {
            panic!("expected resume command");
        };
        let effective_prompt = args.prompt.clone().or_else(|| {
            if args.last {
                args.session_id.clone()
            } else {
                None
            }
        });
        assert_eq!(effective_prompt.as_deref(), Some(PROMPT));
        assert_eq!(cli.config_overrides.raw_overrides.len(), 1);
        assert_eq!(
            cli.config_overrides.raw_overrides[0],
            "reasoning_level=xhigh"
        );
    }
}
@@ -31,6 +31,30 @@ impl Policy {
        &self.rules_by_program
    }

    pub fn get_allowed_prefixes(&self) -> Vec<Vec<String>> {
        let mut prefixes = Vec::new();

        for (_program, rules) in self.rules_by_program.iter_all() {
            for rule in rules {
                let Some(prefix_rule) = rule.as_any().downcast_ref::<PrefixRule>() else {
                    continue;
                };
                if prefix_rule.decision != Decision::Allow {
                    continue;
                }

                let mut prefix = Vec::with_capacity(prefix_rule.pattern.rest.len() + 1);
                prefix.push(prefix_rule.pattern.first.as_ref().to_string());
                prefix.extend(prefix_rule.pattern.rest.iter().map(render_pattern_token));
                prefixes.push(prefix);
            }
        }

        prefixes.sort();
        prefixes.dedup();
        prefixes
    }

    pub fn add_prefix_rule(&mut self, prefix: &[String], decision: Decision) -> Result<()> {
        let (first_token, rest) = prefix
            .split_first()

@@ -116,6 +140,13 @@ impl Policy {
    }
}

fn render_pattern_token(token: &PatternToken) -> String {
    match token {
        PatternToken::Single(value) => value.clone(),
        PatternToken::Alts(alternatives) => format!("[{}]", alternatives.join("|")),
    }
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Evaluation {
@@ -96,6 +96,8 @@ pub trait Rule: Any + Debug + Send + Sync {
    fn program(&self) -> &str;

    fn matches(&self, cmd: &[String]) -> Option<RuleMatch>;

    fn as_any(&self) -> &dyn Any;
}

pub type RuleRef = Arc<dyn Rule>;

@@ -114,6 +116,10 @@ impl Rule for PrefixRule {
            justification: self.justification.clone(),
        })
    }

    fn as_any(&self) -> &dyn Any {
        self
    }
}

/// Count how many rules match each provided example and error if any example is unmatched.
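The new `as_any` method is the standard trick for recovering a concrete rule type from a `dyn Rule`; `get_allowed_prefixes` in the hunk further up relies on it. Here is a minimal sketch of the pattern with a toy trait; the names are illustrative, not the crate's real types.

```rust
use std::any::Any;

trait Rule: Any {
    fn program(&self) -> &str;
    // Each impl returns `self`, which gives callers access to downcasting.
    fn as_any(&self) -> &dyn Any;
}

#[derive(Debug)]
struct PrefixRule {
    program: String,
    prefix: Vec<String>,
}

impl Rule for PrefixRule {
    fn program(&self) -> &str {
        &self.program
    }
    fn as_any(&self) -> &dyn Any {
        self
    }
}

fn main() {
    let rules: Vec<Box<dyn Rule>> = vec![Box::new(PrefixRule {
        program: "git".to_string(),
        prefix: vec!["git".to_string(), "pull".to_string()],
    })];

    for rule in &rules {
        // Only PrefixRule values survive the downcast; other rule kinds are skipped.
        if let Some(prefix_rule) = rule.as_any().downcast_ref::<PrefixRule>() {
            println!("{} -> {:?}", rule.program(), prefix_rule.prefix);
        }
    }
}
```
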
@@ -34,7 +34,7 @@ const NETWORK_TIMEOUT_MS: u64 = 10_000;

 fn create_env_from_core_vars() -> HashMap<String, String> {
     let policy = ShellEnvironmentPolicy::default();
-    create_env(&policy)
+    create_env(&policy, None)
 }

 #[expect(clippy::print_stdout)]
@@ -361,8 +361,7 @@ async fn run_codex_tool_session_inner(
             | EventMsg::ExitedReviewMode(_)
             | EventMsg::RequestUserInput(_)
             | EventMsg::DynamicToolCallRequest(_)
-            | EventMsg::ContextCompactionStarted(_)
-            | EventMsg::ContextCompactionEnded(_)
+            | EventMsg::ContextCompacted(_)
             | EventMsg::ThreadRolledBack(_)
             | EventMsg::CollabAgentSpawnBegin(_)
             | EventMsg::CollabAgentSpawnEnd(_)
@@ -17,6 +17,6 @@ pub(crate) fn install_global(metrics: MetricsClient) {
     let _ = GLOBAL_METRICS.set(metrics);
 }

-pub(crate) fn global() -> Option<MetricsClient> {
+pub fn global() -> Option<MetricsClient> {
     GLOBAL_METRICS.get().cloned()
 }
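Widening `global()` to `pub` exposes the usual `OnceLock` singleton pattern outside the crate. A self-contained sketch with a toy `MetricsClient` (the real one lives elsewhere in the workspace):

```rust
use std::sync::OnceLock;

#[derive(Clone, Debug)]
struct MetricsClient {
    endpoint: String,
}

static GLOBAL_METRICS: OnceLock<MetricsClient> = OnceLock::new();

fn install_global(metrics: MetricsClient) {
    // First install wins; later calls are silently ignored.
    let _ = GLOBAL_METRICS.set(metrics);
}

pub fn global() -> Option<MetricsClient> {
    // Cheap: Clone on the client, not a re-initialization.
    GLOBAL_METRICS.get().cloned()
}

fn main() {
    assert!(global().is_none());
    install_global(MetricsClient {
        endpoint: "http://localhost:4317".to_string(),
    });
    println!("{:?}", global());
}
```
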
@@ -13,6 +13,7 @@ workspace = true

[dependencies]
codex-git = { workspace = true }
codex-execpolicy = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-image = { workspace = true }
icu_decimal = { workspace = true }
@@ -21,7 +21,6 @@ pub enum TurnItem {
     AgentMessage(AgentMessageItem),
     Reasoning(ReasoningItem),
     WebSearch(WebSearchItem),
-    ContextCompaction(ContextCompactionItem),
 }

 #[derive(Debug, Clone, Deserialize, Serialize, TS, JsonSchema)]

@@ -58,11 +57,6 @@ pub struct WebSearchItem {
     pub action: WebSearchAction,
 }

-#[derive(Debug, Clone, Deserialize, Serialize, TS, JsonSchema)]
-pub struct ContextCompactionItem {
-    pub id: String,
-}
-
 impl UserMessageItem {
     pub fn new(content: &[UserInput]) -> Self {
         Self {

@@ -201,7 +195,6 @@ impl TurnItem {
             TurnItem::AgentMessage(item) => item.id.clone(),
             TurnItem::Reasoning(item) => item.id.clone(),
             TurnItem::WebSearch(item) => item.id.clone(),
-            TurnItem::ContextCompaction(item) => item.id.clone(),
         }
     }

@@ -211,7 +204,6 @@ impl TurnItem {
             TurnItem::AgentMessage(item) => item.as_legacy_events(),
             TurnItem::WebSearch(item) => vec![item.as_legacy_event()],
             TurnItem::Reasoning(item) => item.as_legacy_events(show_raw_agent_reasoning),
-            TurnItem::ContextCompaction(_) => Vec::new(),
         }
     }
 }
@@ -19,6 +19,7 @@ use crate::protocol::NetworkAccess;
use crate::protocol::SandboxPolicy;
use crate::protocol::WritableRoot;
use crate::user_input::UserInput;
use codex_execpolicy::Policy;
use codex_git::GhostCommit;
use codex_utils_image::error::ImageProcessingError;
use schemars::JsonSchema;

@@ -205,6 +206,8 @@ const APPROVAL_POLICY_ON_FAILURE: &str =
    include_str!("prompts/permissions/approval_policy/on_failure.md");
const APPROVAL_POLICY_ON_REQUEST: &str =
    include_str!("prompts/permissions/approval_policy/on_request.md");
const APPROVAL_POLICY_ON_REQUEST_RULE: &str =
    include_str!("prompts/permissions/approval_policy/on_request_rule.md");

const SANDBOX_MODE_DANGER_FULL_ACCESS: &str =
    include_str!("prompts/permissions/sandbox_mode/danger_full_access.md");

@@ -217,12 +220,42 @@ impl DeveloperInstructions {
        Self { text: text.into() }
    }

    pub fn from(
        approval_policy: AskForApproval,
        exec_policy: &Policy,
        request_rule_enabled: bool,
    ) -> DeveloperInstructions {
        let text = match approval_policy {
            AskForApproval::Never => APPROVAL_POLICY_NEVER.to_string(),
            AskForApproval::UnlessTrusted => APPROVAL_POLICY_UNLESS_TRUSTED.to_string(),
            AskForApproval::OnFailure => APPROVAL_POLICY_ON_FAILURE.to_string(),
            AskForApproval::OnRequest => {
                if !request_rule_enabled {
                    APPROVAL_POLICY_ON_REQUEST.to_string()
                } else {
                    let command_prefixes = format_allow_prefixes(exec_policy);
                    match command_prefixes {
                        Some(prefixes) => {
                            format!("{APPROVAL_POLICY_ON_REQUEST_RULE}\n{prefixes}")
                        }
                        None => APPROVAL_POLICY_ON_REQUEST_RULE.to_string(),
                    }
                }
            }
        };

        DeveloperInstructions::new(text)
    }

    pub fn into_text(self) -> String {
        self.text
    }

    pub fn concat(self, other: impl Into<DeveloperInstructions>) -> Self {
        let mut text = self.text;
        if !text.ends_with('\n') {
            text.push('\n');
        }
        text.push_str(&other.into().text);
        Self { text }
    }

@@ -237,6 +270,8 @@ impl DeveloperInstructions {
    pub fn from_policy(
        sandbox_policy: &SandboxPolicy,
        approval_policy: AskForApproval,
        exec_policy: &Policy,
        request_rule_enabled: bool,
        cwd: &Path,
    ) -> Self {
        let network_access = if sandbox_policy.has_full_network_access() {

@@ -259,6 +294,8 @@ impl DeveloperInstructions {
            sandbox_mode,
            network_access,
            approval_policy,
            exec_policy,
            request_rule_enabled,
            writable_roots,
        )
    }

@@ -281,6 +318,8 @@ impl DeveloperInstructions {
        sandbox_mode: SandboxMode,
        network_access: NetworkAccess,
        approval_policy: AskForApproval,
        exec_policy: &Policy,
        request_rule_enabled: bool,
        writable_roots: Option<Vec<WritableRoot>>,
    ) -> Self {
        let start_tag = DeveloperInstructions::new("<permissions instructions>");

@@ -290,7 +329,11 @@ impl DeveloperInstructions {
             sandbox_mode,
             network_access,
         ))
-        .concat(DeveloperInstructions::from(approval_policy))
+        .concat(DeveloperInstructions::from(
+            approval_policy,
+            exec_policy,
+            request_rule_enabled,
+        ))
         .concat(DeveloperInstructions::from_writable_roots(writable_roots))
         .concat(end_tag)
     }

@@ -328,6 +371,37 @@ impl DeveloperInstructions {
    }
}

pub fn render_command_prefix_list<I, P>(prefixes: I) -> Option<String>
where
    I: IntoIterator<Item = P>,
    P: AsRef<[String]>,
{
    let lines = prefixes
        .into_iter()
        .map(|prefix| format!("- {}", render_command_prefix(prefix.as_ref())))
        .collect::<Vec<_>>();
    if lines.is_empty() {
        return None;
    }

    Some(lines.join("\n"))
}

fn render_command_prefix(prefix: &[String]) -> String {
    let tokens = prefix
        .iter()
        .map(|token| serde_json::to_string(token).unwrap_or_else(|_| format!("{token:?}")))
        .collect::<Vec<_>>()
        .join(", ");
    format!("[{tokens}]")
}

fn format_allow_prefixes(exec_policy: &Policy) -> Option<String> {
    let prefixes = exec_policy.get_allowed_prefixes();
    let lines = render_command_prefix_list(prefixes)?;
    Some(format!("Approved command prefixes:\n{lines}"))
}

impl From<DeveloperInstructions> for ResponseItem {
    fn from(di: DeveloperInstructions) -> Self {
        ResponseItem::Message {

@@ -352,19 +426,6 @@ impl From<SandboxMode> for DeveloperInstructions {
     }
 }

-impl From<AskForApproval> for DeveloperInstructions {
-    fn from(mode: AskForApproval) -> Self {
-        let text = match mode {
-            AskForApproval::Never => APPROVAL_POLICY_NEVER.trim_end(),
-            AskForApproval::UnlessTrusted => APPROVAL_POLICY_UNLESS_TRUSTED.trim_end(),
-            AskForApproval::OnFailure => APPROVAL_POLICY_ON_FAILURE.trim_end(),
-            AskForApproval::OnRequest => APPROVAL_POLICY_ON_REQUEST.trim_end(),
-        };
-
-        DeveloperInstructions::new(text)
-    }
-}
-
 fn should_serialize_reasoning_content(content: &Option<Vec<ReasoningItemContent>>) -> bool {
     match content {
         Some(content) => !content
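For a concrete sense of what `format_allow_prefixes` produces, here is a stripped-down, runnable copy of the rendering helpers from the hunk above; `serde_json::to_string` is what gives each token its JSON-style quoting (assumes `serde_json` is available, as it is in this crate).

```rust
// Stripped-down copy of render_command_prefix from the diff above.
fn render_command_prefix(prefix: &[String]) -> String {
    let tokens = prefix
        .iter()
        .map(|token| serde_json::to_string(token).unwrap_or_else(|_| format!("{token:?}")))
        .collect::<Vec<_>>()
        .join(", ");
    format!("[{tokens}]")
}

fn main() {
    let prefixes = vec![
        vec!["git".to_string(), "pull".to_string()],
        vec!["npm".to_string(), "run".to_string(), "dev".to_string()],
    ];
    let lines = prefixes
        .iter()
        .map(|prefix| format!("- {}", render_command_prefix(prefix)))
        .collect::<Vec<_>>()
        .join("\n");
    // Prints:
    // Approved command prefixes:
    // - ["git", "pull"]
    // - ["npm", "run", "dev"]
    println!("Approved command prefixes:\n{lines}");
}
```
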
@@ -633,6 +694,10 @@ pub struct ShellToolCallParams {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub sandbox_permissions: Option<SandboxPermissions>,
    /// Suggests a command prefix to persist for future sessions
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub prefix_rule: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub justification: Option<String>,
}

@@ -653,6 +718,9 @@ pub struct ShellCommandToolCallParams {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub sandbox_permissions: Option<SandboxPermissions>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub prefix_rule: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub justification: Option<String>,
}
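The new `prefix_rule` fields follow the crate's usual optional-field recipe: `#[serde(default)]` tolerates the key being absent on input, and `skip_serializing_if = "Option::is_none"` keeps it off the wire when unset. A minimal round-trip sketch with a toy struct (serde and serde_json assumed):

```rust
use serde::Deserialize;
use serde::Serialize;

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Params {
    command: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    prefix_rule: Option<Vec<String>>,
}

fn main() -> Result<(), serde_json::Error> {
    // An absent key deserializes to None thanks to #[serde(default)].
    let parsed: Params = serde_json::from_str(r#"{"command":"git pull"}"#)?;
    assert_eq!(parsed.prefix_rule, None);

    // None is omitted entirely on the way back out.
    assert_eq!(serde_json::to_string(&parsed)?, r#"{"command":"git pull"}"#);

    let with_rule = Params {
        command: "git pull".to_string(),
        prefix_rule: Some(vec!["git".to_string(), "pull".to_string()]),
    };
    assert_eq!(
        serde_json::to_string(&with_rule)?,
        r#"{"command":"git pull","prefix_rule":["git","pull"]}"#
    );
    Ok(())
}
```
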
@@ -836,6 +904,7 @@ mod tests {
    use crate::config_types::SandboxMode;
    use crate::protocol::AskForApproval;
    use anyhow::Result;
    use codex_execpolicy::Policy;
    use mcp_types::ImageContent;
    use mcp_types::TextContent;
    use pretty_assertions::assert_eq;

@@ -844,15 +913,17 @@ mod tests {

     #[test]
     fn converts_sandbox_mode_into_developer_instructions() {
-        let workspace_write: DeveloperInstructions = SandboxMode::WorkspaceWrite.into();
         assert_eq!(
+            DeveloperInstructions::from(SandboxMode::WorkspaceWrite),
-            workspace_write,
             DeveloperInstructions::new(
                 "Filesystem sandboxing defines which files can be read or written. `sandbox_mode` is `workspace-write`: The sandbox permits reading files, and editing files in `cwd` and `writable_roots`. Editing files in other directories requires approval. Network access is restricted."
             )
         );

-        let read_only: DeveloperInstructions = SandboxMode::ReadOnly.into();
         assert_eq!(
+            DeveloperInstructions::from(SandboxMode::ReadOnly),
-            read_only,
             DeveloperInstructions::new(
                 "Filesystem sandboxing defines which files can be read or written. `sandbox_mode` is `read-only`: The sandbox only permits reading files. Network access is restricted."
             )

@@ -865,6 +936,8 @@ mod tests {
            SandboxMode::WorkspaceWrite,
            NetworkAccess::Enabled,
            AskForApproval::OnRequest,
            &Policy::empty(),
            false,
            None,
        );

@@ -891,6 +964,8 @@ mod tests {
        let instructions = DeveloperInstructions::from_policy(
            &policy,
            AskForApproval::UnlessTrusted,
            &Policy::empty(),
            false,
            &PathBuf::from("/tmp"),
        );
        let text = instructions.into_text();

@@ -898,6 +973,30 @@ mod tests {
        assert!(text.contains("`approval_policy` is `unless-trusted`"));
    }

    #[test]
    fn includes_request_rule_instructions_when_enabled() {
        let mut exec_policy = Policy::empty();
        exec_policy
            .add_prefix_rule(
                &["git".to_string(), "pull".to_string()],
                codex_execpolicy::Decision::Allow,
            )
            .expect("add rule");
        let instructions = DeveloperInstructions::from_permissions_with_network(
            SandboxMode::WorkspaceWrite,
            NetworkAccess::Enabled,
            AskForApproval::OnRequest,
            &exec_policy,
            true,
            None,
        );

        let text = instructions.into_text();
        assert!(text.contains("prefix_rule"));
        assert!(text.contains("Approved command prefixes"));
        assert!(text.contains(r#"["git", "pull"]"#));
    }

    #[test]
    fn serializes_success_as_plain_string() -> Result<()> {
        let item = ResponseInputItem::FunctionCallOutput {

@@ -1126,6 +1225,7 @@ mod tests {
            workdir: Some("/tmp".to_string()),
            timeout_ms: Some(1000),
            sandbox_permissions: None,
            prefix_rule: None,
            justification: None,
        },
        params
@@ -0,0 +1,61 @@
# Escalation Requests

Commands are run outside the sandbox if they are approved by the user, or match an existing rule that allows them to run unrestricted. The command string is split into independent command segments at shell control operators, including but not limited to:

- Pipes: |
- Logical operators: &&, ||
- Command separators: ;
- Subshell boundaries: (...), $(...)

Each resulting segment is evaluated independently for sandbox restrictions and approval requirements.

Example:

git pull | tee output.txt

This is treated as two command segments:

["git", "pull"]

["tee", "output.txt"]

## How to request escalation

IMPORTANT: To request approval to execute a command that will require escalated privileges:

- Provide the `sandbox_permissions` parameter with the value `"require_escalated"`
- Include a short question asking the user if they want to allow the action in the `justification` parameter, e.g. "Do you want to download and install dependencies for this project?"
- Suggest a `prefix_rule` - this will be shown to the user with an option to persist the rule approval for future sessions.

If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with "require_escalated". ALWAYS proceed to use the `justification` and `prefix_rule` parameters - do not message the user before requesting approval for the command.

## When to request escalation

While commands are running inside the sandbox, here are some scenarios that will require escalation outside the sandbox:

- You need to run a command that writes to a directory that requires escalation (e.g. running tests that write to /var)
- You need to run a GUI app (e.g., open/xdg-open/osascript) to open browsers or files.
- If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with `require_escalated`. ALWAYS proceed to use the `sandbox_permissions` and `justification` parameters. Do not message the user before requesting approval for the command.
- You are about to take a potentially destructive action such as an `rm` or `git reset` that the user did not explicitly ask for.

Only run commands that require approval if it is absolutely necessary to solve the user's query; don't try to circumvent approvals by using other tools.

## prefix_rule guidance

When choosing a `prefix_rule`, request one that will allow you to fulfill similar requests from the user in the future without re-requesting escalation. It should be categorical and reasonably scoped to similar capabilities. You MUST NOT pass the entire command into `prefix_rule`.

<good_example reason="frequently run command">
["npm", "run", "dev"]
</good_example>
<good_example reason="generic and reusable">
["gh", "pr", "checks"]
</good_example>
<good_example reason="helpful for development cycle">
["pytest"]
</good_example>
<bad_example reason="too specific">
["cargo", "test", "-p", "codex-app-server"]
<correction_to_good_example>
["cargo", "test"]
</correction_to_good_example>
</bad_example>
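The segmentation rule in this prompt is easy to picture with a toy splitter. The sketch below is an illustration only, not the parser Codex actually uses; real shell parsing must also handle quoting, escaping, and subshell boundaries that a token-level split ignores.

```rust
// Toy illustration: split a pre-tokenized command line into segments at
// control operators, so each segment can be evaluated on its own.
fn split_segments(tokens: &[&str]) -> Vec<Vec<String>> {
    let mut segments = Vec::new();
    let mut current = Vec::new();
    for &token in tokens {
        if matches!(token, "|" | "&&" | "||" | ";") {
            if !current.is_empty() {
                segments.push(std::mem::take(&mut current));
            }
        } else {
            current.push(token.to_string());
        }
    }
    if !current.is_empty() {
        segments.push(current);
    }
    segments
}

fn main() {
    let segments = split_segments(&["git", "pull", "|", "tee", "output.txt"]);
    // Prints [["git", "pull"], ["tee", "output.txt"]], matching the example above.
    println!("{segments:?}");
}
```
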
@@ -688,12 +688,8 @@ pub enum EventMsg {
     /// indicates the turn continued but the user should still be notified.
     Warning(WarningEvent),

-    /// Conversation history compaction has started.
-    ContextCompactionStarted(ContextCompactionStartedEvent),
-
-    /// Conversation history compaction has ended (either automatically or manually).
-    #[serde(alias = "context_compacted")]
-    ContextCompactionEnded(ContextCompactionEndedEvent),
+    /// Conversation history was compacted (either automatically or manually).
+    ContextCompacted(ContextCompactedEvent),

     /// Conversation history was rolled back by dropping the last N user turns.
     ThreadRolledBack(ThreadRolledBackEvent),

@@ -1087,10 +1083,7 @@ pub struct WarningEvent {
 }

 #[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
-pub struct ContextCompactionStartedEvent;
-
-#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
-pub struct ContextCompactionEndedEvent;
+pub struct ContextCompactedEvent;

 #[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
 pub struct TurnCompleteEvent {
@@ -2100,11 +2093,14 @@ pub struct SkillMetadata {
     pub description: String,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     #[ts(optional)]
-    /// Legacy short_description from SKILL.md. Prefer SKILL.toml interface.short_description.
+    /// Legacy short_description from SKILL.md. Prefer SKILL.json interface.short_description.
     pub short_description: Option<String>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     #[ts(optional)]
     pub interface: Option<SkillInterface>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    #[ts(optional)]
+    pub dependencies: Option<SkillDependencies>,
     pub path: PathBuf,
     pub scope: SkillScope,
     pub enabled: bool,

@@ -2126,6 +2122,31 @@ pub struct SkillInterface {
    pub default_prompt: Option<String>,
}

#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS, PartialEq, Eq)]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}

#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS, PartialEq, Eq)]
pub struct SkillToolDependency {
    #[serde(rename = "type")]
    #[ts(rename = "type")]
    pub r#type: String,
    pub value: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub transport: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub command: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub url: Option<String>,
}

#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
pub struct SkillErrorInfo {
    pub path: PathBuf,
@@ -28,6 +28,28 @@ impl ThreadId {
    }
}

impl TryFrom<&str> for ThreadId {
    type Error = uuid::Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_string(value)
    }
}

impl TryFrom<String> for ThreadId {
    type Error = uuid::Error;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        Self::from_string(value.as_str())
    }
}

impl From<ThreadId> for String {
    fn from(value: ThreadId) -> Self {
        value.to_string()
    }
}

impl Default for ThreadId {
    fn default() -> Self {
        Self::new()

@@ -36,7 +58,7 @@ impl Default for ThreadId {

 impl Display for ThreadId {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.uuid)
+        Display::fmt(&self.uuid, f)
     }
 }
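The `Display` change is subtle but deliberate: `write!(f, "{}", self.uuid)` formats the uuid with a fresh, flag-free `{}` spec, so any width, fill, or alignment flags the caller passed are dropped, while `Display::fmt(&self.uuid, f)` hands the caller's `Formatter` straight through. A minimal sketch of the same delegation on a toy wrapper:

```rust
use std::fmt;
use std::fmt::Display;

struct Id(u64);

impl Display for Id {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate with the caller's Formatter so width/fill flags apply
        // to the inner value instead of being silently dropped.
        Display::fmt(&self.0, f)
    }
}

fn main() {
    // Width and zero-padding flags pass through to the inner u64.
    assert_eq!(format!("{:>6}", Id(42)), "    42");
    assert_eq!(format!("{:06}", Id(42)), "000042");
}
```
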
codex-rs/state/BUILD.bazel (new file, 7 lines)
@@ -0,0 +1,7 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "state",
    crate_name = "codex_state",
    compile_data = glob(["migrations/**"]),
)

codex-rs/state/Cargo.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
[package]
name = "codex-state"
version.workspace = true
edition.workspace = true
license.workspace = true

[dependencies]
anyhow = { workspace = true }
chrono = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sqlx = { workspace = true }
tokio = { workspace = true, features = ["fs", "io-util", "macros", "rt-multi-thread", "sync", "time"] }
tracing = { workspace = true }
uuid = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }

[lints]
workspace = true
codex-rs/state/migrations/0001_threads.sql (new file, 25 lines)
@@ -0,0 +1,25 @@
CREATE TABLE threads (
    id TEXT PRIMARY KEY,
    rollout_path TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    updated_at INTEGER NOT NULL,
    source TEXT NOT NULL,
    model_provider TEXT NOT NULL,
    cwd TEXT NOT NULL,
    title TEXT NOT NULL,
    sandbox_policy TEXT NOT NULL,
    approval_mode TEXT NOT NULL,
    tokens_used INTEGER NOT NULL DEFAULT 0,
    has_user_event INTEGER NOT NULL DEFAULT 0,
    archived INTEGER NOT NULL DEFAULT 0,
    archived_at INTEGER,
    git_sha TEXT,
    git_branch TEXT,
    git_origin_url TEXT
);

CREATE INDEX idx_threads_created_at ON threads(created_at DESC, id DESC);
CREATE INDEX idx_threads_updated_at ON threads(updated_at DESC, id DESC);
CREATE INDEX idx_threads_archived ON threads(archived);
CREATE INDEX idx_threads_source ON threads(source);
CREATE INDEX idx_threads_provider ON threads(model_provider);
codex-rs/state/src/extract.rs (new file, 182 lines)
@@ -0,0 +1,182 @@
use crate::model::ThreadMetadata;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::is_local_image_close_tag_text;
use codex_protocol::models::is_local_image_open_tag_text;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::TurnContextItem;
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
use serde::Serialize;
use serde_json::Value;

/// Apply a rollout item to the metadata structure.
pub fn apply_rollout_item(
    metadata: &mut ThreadMetadata,
    item: &RolloutItem,
    default_provider: &str,
) {
    match item {
        RolloutItem::SessionMeta(meta_line) => apply_session_meta_from_item(metadata, meta_line),
        RolloutItem::TurnContext(turn_ctx) => apply_turn_context(metadata, turn_ctx),
        RolloutItem::EventMsg(event) => apply_event_msg(metadata, event),
        RolloutItem::ResponseItem(item) => apply_response_item(metadata, item),
        RolloutItem::Compacted(_) => {}
    }
    if metadata.model_provider.is_empty() {
        metadata.model_provider = default_provider.to_string();
    }
}

fn apply_session_meta_from_item(metadata: &mut ThreadMetadata, meta_line: &SessionMetaLine) {
    metadata.id = meta_line.meta.id;
    metadata.source = enum_to_string(&meta_line.meta.source);
    if let Some(provider) = meta_line.meta.model_provider.as_deref() {
        metadata.model_provider = provider.to_string();
    }
    if !meta_line.meta.cwd.as_os_str().is_empty() {
        metadata.cwd = meta_line.meta.cwd.clone();
    }
    if let Some(git) = meta_line.git.as_ref() {
        metadata.git_sha = git.commit_hash.clone();
        metadata.git_branch = git.branch.clone();
        metadata.git_origin_url = git.repository_url.clone();
    }
}

fn apply_turn_context(metadata: &mut ThreadMetadata, turn_ctx: &TurnContextItem) {
    metadata.cwd = turn_ctx.cwd.clone();
    metadata.sandbox_policy = enum_to_string(&turn_ctx.sandbox_policy);
    metadata.approval_mode = enum_to_string(&turn_ctx.approval_policy);
}

fn apply_event_msg(metadata: &mut ThreadMetadata, event: &EventMsg) {
    match event {
        EventMsg::TokenCount(token_count) => {
            if let Some(info) = token_count.info.as_ref() {
                metadata.tokens_used = info.total_token_usage.total_tokens.max(0);
            }
        }
        EventMsg::UserMessage(user) => {
            metadata.has_user_event = true;
            if metadata.title.is_empty() {
                metadata.title = strip_user_message_prefix(user.message.as_str()).to_string();
            }
        }
        _ => {}
    }
}

fn apply_response_item(metadata: &mut ThreadMetadata, item: &ResponseItem) {
    if let Some(text) = extract_user_message_text(item) {
        metadata.has_user_event = true;
        if metadata.title.is_empty() {
            metadata.title = text;
        }
    }
}

fn extract_user_message_text(item: &ResponseItem) -> Option<String> {
    let ResponseItem::Message { role, content, .. } = item else {
        return None;
    };
    if role != "user" {
        return None;
    }
    let texts: Vec<&str> = content
        .iter()
        .filter_map(|content_item| match content_item {
            ContentItem::InputText { text } => Some(text.as_str()),
            ContentItem::InputImage { .. } | ContentItem::OutputText { .. } => None,
        })
        .filter(|text| !is_local_image_open_tag_text(text) && !is_local_image_close_tag_text(text))
        .collect();
    if texts.is_empty() {
        return None;
    }
    let joined = texts.join("\n");
    Some(
        strip_user_message_prefix(joined.as_str())
            .trim()
            .to_string(),
    )
}

fn strip_user_message_prefix(text: &str) -> &str {
    match text.find(USER_MESSAGE_BEGIN) {
        Some(idx) => text[idx + USER_MESSAGE_BEGIN.len()..].trim(),
        None => text.trim(),
    }
}

pub(crate) fn enum_to_string<T: Serialize>(value: &T) -> String {
    match serde_json::to_value(value) {
        Ok(Value::String(s)) => s,
        Ok(other) => other.to_string(),
        Err(_) => String::new(),
    }
}

#[cfg(test)]
mod tests {
    use super::extract_user_message_text;
    use crate::model::ThreadMetadata;
    use chrono::DateTime;
    use chrono::Utc;
    use codex_protocol::ThreadId;
    use codex_protocol::models::ContentItem;
    use codex_protocol::models::ResponseItem;
    use codex_protocol::protocol::USER_MESSAGE_BEGIN;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;
    use uuid::Uuid;

    #[test]
    fn extracts_user_message_text() {
        let item = ResponseItem::Message {
            id: None,
            role: "user".to_string(),
            content: vec![
                ContentItem::InputText {
                    text: format!("<prior context> {USER_MESSAGE_BEGIN}actual question"),
                },
                ContentItem::InputImage {
                    image_url: "https://example.com/image.png".to_string(),
                },
            ],
            end_turn: None,
        };
        let actual = extract_user_message_text(&item);
        assert_eq!(actual.as_deref(), Some("actual question"));
    }

    #[test]
    fn diff_fields_detects_changes() {
        let id = ThreadId::from_string(&Uuid::now_v7().to_string()).expect("thread id");
        let created_at = DateTime::<Utc>::from_timestamp(1_735_689_600, 0).expect("timestamp");
        let base = ThreadMetadata {
            id,
            rollout_path: PathBuf::from("/tmp/a.jsonl"),
            created_at,
            updated_at: created_at,
            source: "cli".to_string(),
            model_provider: "openai".to_string(),
            cwd: PathBuf::from("/tmp"),
            title: "hello".to_string(),
            sandbox_policy: "read-only".to_string(),
            approval_mode: "on-request".to_string(),
            tokens_used: 1,
            has_user_event: false,
            archived_at: None,
            git_sha: None,
            git_branch: None,
            git_origin_url: None,
        };
        let mut other = base.clone();
        other.tokens_used = 2;
        other.title = "world".to_string();
        let diffs = base.diff_fields(&other);
        assert_eq!(diffs, vec!["title", "tokens_used"]);
    }
}
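`enum_to_string` leans on the fact that serde serializes a plain unit variant as a JSON string, which is what makes it safe to store enums like `SessionSource` in TEXT columns. A tiny self-contained illustration with a toy enum (serde and serde_json assumed, matching this crate's dependencies):

```rust
use serde::Serialize;
use serde_json::Value;

#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
enum Mode {
    ReadOnly,
    WorkspaceWrite,
}

// Same shape as enum_to_string in the diff above: unit variants come back
// as Value::String; anything richer falls back to its JSON text.
fn enum_to_string<T: Serialize>(value: &T) -> String {
    match serde_json::to_value(value) {
        Ok(Value::String(s)) => s,
        Ok(other) => other.to_string(),
        Err(_) => String::new(),
    }
}

fn main() {
    assert_eq!(enum_to_string(&Mode::ReadOnly), "read-only");
    assert_eq!(enum_to_string(&Mode::WorkspaceWrite), "workspace-write");
}
```
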
codex-rs/state/src/lib.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
//! SQLite-backed state for rollout metadata.
//!
//! This crate is intentionally small and focused: it extracts rollout metadata
//! from JSONL rollouts and mirrors it into a local SQLite database. Backfill
//! orchestration and rollout scanning live in `codex-core`.

mod extract;
mod migrations;
mod model;
mod paths;
mod runtime;

/// Preferred entrypoint: owns configuration and metrics.
pub use runtime::StateRuntime;

/// Low-level storage engine: useful for focused tests.
///
/// Most consumers should prefer [`StateRuntime`].
pub use extract::apply_rollout_item;
pub use model::Anchor;
pub use model::BackfillStats;
pub use model::ExtractionOutcome;
pub use model::SortKey;
pub use model::ThreadMetadata;
pub use model::ThreadMetadataBuilder;
pub use model::ThreadsPage;
pub use runtime::STATE_DB_FILENAME;

/// Errors encountered during DB operations. Tags: [stage]
pub const DB_ERROR_METRIC: &str = "codex.db.error";
/// Metrics on backfill process during first init of the db. Tags: [status]
pub const DB_METRIC_BACKFILL: &str = "codex.db.backfill";
/// Metrics on errors during comparison between DB and rollout file. Tags: [stage]
pub const DB_METRIC_COMPARE_ERROR: &str = "codex.db.compare_error";
codex-rs/state/src/migrations.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
use sqlx::migrate::Migrator;

pub(crate) static MIGRATOR: Migrator = sqlx::migrate!("./migrations");
codex-rs/state/src/model.rs (new file, 352 lines)
@@ -0,0 +1,352 @@
use anyhow::Result;
use chrono::DateTime;
use chrono::Timelike;
use chrono::Utc;
use codex_protocol::ThreadId;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use sqlx::Row;
use sqlx::sqlite::SqliteRow;
use std::path::PathBuf;
use uuid::Uuid;

/// The sort key to use when listing threads.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SortKey {
    /// Sort by the thread's creation timestamp.
    CreatedAt,
    /// Sort by the thread's last update timestamp.
    UpdatedAt,
}

/// A pagination anchor used for keyset pagination.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Anchor {
    /// The timestamp component of the anchor.
    pub ts: DateTime<Utc>,
    /// The UUID component of the anchor.
    pub id: Uuid,
}

/// A single page of thread metadata results.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ThreadsPage {
    /// The thread metadata items in this page.
    pub items: Vec<ThreadMetadata>,
    /// The next anchor to use for pagination, if any.
    pub next_anchor: Option<Anchor>,
    /// The number of rows scanned to produce this page.
    pub num_scanned_rows: usize,
}

/// The outcome of extracting metadata from a rollout.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExtractionOutcome {
    /// The extracted thread metadata.
    pub metadata: ThreadMetadata,
    /// The number of rollout lines that failed to parse.
    pub parse_errors: usize,
}

/// Canonical thread metadata derived from rollout files.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ThreadMetadata {
    /// The thread identifier.
    pub id: ThreadId,
    /// The absolute rollout path on disk.
    pub rollout_path: PathBuf,
    /// The creation timestamp.
    pub created_at: DateTime<Utc>,
    /// The last update timestamp.
    pub updated_at: DateTime<Utc>,
    /// The session source (stringified enum).
    pub source: String,
    /// The model provider identifier.
    pub model_provider: String,
    /// The working directory for the thread.
    pub cwd: PathBuf,
    /// A best-effort thread title.
    pub title: String,
    /// The sandbox policy (stringified enum).
    pub sandbox_policy: String,
    /// The approval mode (stringified enum).
    pub approval_mode: String,
    /// The last observed token usage.
    pub tokens_used: i64,
    /// Whether the thread has observed a user message.
    pub has_user_event: bool,
    /// The archive timestamp, if the thread is archived.
    pub archived_at: Option<DateTime<Utc>>,
    /// The git commit SHA, if known.
    pub git_sha: Option<String>,
    /// The git branch name, if known.
    pub git_branch: Option<String>,
    /// The git origin URL, if known.
    pub git_origin_url: Option<String>,
}

/// Builder data required to construct [`ThreadMetadata`] without parsing filenames.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ThreadMetadataBuilder {
    /// The thread identifier.
    pub id: ThreadId,
    /// The absolute rollout path on disk.
    pub rollout_path: PathBuf,
    /// The creation timestamp.
    pub created_at: DateTime<Utc>,
    /// The last update timestamp, if known.
    pub updated_at: Option<DateTime<Utc>>,
    /// The session source.
    pub source: SessionSource,
    /// The model provider identifier, if known.
    pub model_provider: Option<String>,
    /// The working directory for the thread.
    pub cwd: PathBuf,
    /// The sandbox policy.
    pub sandbox_policy: SandboxPolicy,
    /// The approval mode.
    pub approval_mode: AskForApproval,
    /// The archive timestamp, if the thread is archived.
    pub archived_at: Option<DateTime<Utc>>,
    /// The git commit SHA, if known.
    pub git_sha: Option<String>,
    /// The git branch name, if known.
    pub git_branch: Option<String>,
    /// The git origin URL, if known.
    pub git_origin_url: Option<String>,
}

impl ThreadMetadataBuilder {
    /// Create a new builder with required fields and sensible defaults.
    pub fn new(
        id: ThreadId,
        rollout_path: PathBuf,
        created_at: DateTime<Utc>,
        source: SessionSource,
    ) -> Self {
        Self {
            id,
            rollout_path,
            created_at,
            updated_at: None,
            source,
            model_provider: None,
            cwd: PathBuf::new(),
            sandbox_policy: SandboxPolicy::ReadOnly,
            approval_mode: AskForApproval::OnRequest,
            archived_at: None,
            git_sha: None,
            git_branch: None,
            git_origin_url: None,
        }
    }

    /// Build canonical thread metadata, filling missing values from defaults.
    pub fn build(&self, default_provider: &str) -> ThreadMetadata {
        let source = crate::extract::enum_to_string(&self.source);
        let sandbox_policy = crate::extract::enum_to_string(&self.sandbox_policy);
        let approval_mode = crate::extract::enum_to_string(&self.approval_mode);
        let created_at = canonicalize_datetime(self.created_at);
        let updated_at = self
            .updated_at
            .map(canonicalize_datetime)
            .unwrap_or(created_at);
        ThreadMetadata {
            id: self.id,
            rollout_path: self.rollout_path.clone(),
            created_at,
            updated_at,
            source,
            model_provider: self
                .model_provider
                .clone()
                .unwrap_or_else(|| default_provider.to_string()),
            cwd: self.cwd.clone(),
            title: String::new(),
            sandbox_policy,
            approval_mode,
            tokens_used: 0,
            has_user_event: false,
            archived_at: self.archived_at.map(canonicalize_datetime),
            git_sha: self.git_sha.clone(),
            git_branch: self.git_branch.clone(),
            git_origin_url: self.git_origin_url.clone(),
        }
    }
}

impl ThreadMetadata {
    /// Return the list of field names that differ between `self` and `other`.
    pub fn diff_fields(&self, other: &Self) -> Vec<&'static str> {
        let mut diffs = Vec::new();
        if self.id != other.id {
            diffs.push("id");
        }
        if self.rollout_path != other.rollout_path {
            diffs.push("rollout_path");
        }
        if self.created_at != other.created_at {
            diffs.push("created_at");
        }
        if self.updated_at != other.updated_at {
            diffs.push("updated_at");
        }
        if self.source != other.source {
            diffs.push("source");
        }
        if self.model_provider != other.model_provider {
            diffs.push("model_provider");
        }
        if self.cwd != other.cwd {
            diffs.push("cwd");
        }
        if self.title != other.title {
            diffs.push("title");
        }
        if self.sandbox_policy != other.sandbox_policy {
            diffs.push("sandbox_policy");
        }
        if self.approval_mode != other.approval_mode {
            diffs.push("approval_mode");
        }
        if self.tokens_used != other.tokens_used {
            diffs.push("tokens_used");
        }
        if self.has_user_event != other.has_user_event {
            diffs.push("has_user_event");
        }
        if self.archived_at != other.archived_at {
            diffs.push("archived_at");
        }
        if self.git_sha != other.git_sha {
            diffs.push("git_sha");
        }
        if self.git_branch != other.git_branch {
            diffs.push("git_branch");
        }
        if self.git_origin_url != other.git_origin_url {
            diffs.push("git_origin_url");
        }
        diffs
    }
}

fn canonicalize_datetime(dt: DateTime<Utc>) -> DateTime<Utc> {
    dt.with_nanosecond(0).unwrap_or(dt)
}

#[derive(Debug)]
pub(crate) struct ThreadRow {
    id: String,
    rollout_path: String,
    created_at: i64,
    updated_at: i64,
    source: String,
    model_provider: String,
    cwd: String,
    title: String,
    sandbox_policy: String,
    approval_mode: String,
    tokens_used: i64,
    has_user_event: bool,
    archived_at: Option<i64>,
    git_sha: Option<String>,
    git_branch: Option<String>,
    git_origin_url: Option<String>,
}

impl ThreadRow {
    pub(crate) fn try_from_row(row: &SqliteRow) -> Result<Self> {
        Ok(Self {
            id: row.try_get("id")?,
            rollout_path: row.try_get("rollout_path")?,
            created_at: row.try_get("created_at")?,
            updated_at: row.try_get("updated_at")?,
            source: row.try_get("source")?,
            model_provider: row.try_get("model_provider")?,
            cwd: row.try_get("cwd")?,
            title: row.try_get("title")?,
            sandbox_policy: row.try_get("sandbox_policy")?,
            approval_mode: row.try_get("approval_mode")?,
            tokens_used: row.try_get("tokens_used")?,
            has_user_event: row.try_get("has_user_event")?,
            archived_at: row.try_get("archived_at")?,
            git_sha: row.try_get("git_sha")?,
            git_branch: row.try_get("git_branch")?,
            git_origin_url: row.try_get("git_origin_url")?,
        })
    }
}

impl TryFrom<ThreadRow> for ThreadMetadata {
    type Error = anyhow::Error;

    fn try_from(row: ThreadRow) -> std::result::Result<Self, Self::Error> {
        let ThreadRow {
            id,
            rollout_path,
            created_at,
            updated_at,
            source,
            model_provider,
            cwd,
            title,
            sandbox_policy,
            approval_mode,
            tokens_used,
            has_user_event,
            archived_at,
            git_sha,
            git_branch,
            git_origin_url,
        } = row;
        Ok(Self {
            id: ThreadId::try_from(id)?,
            rollout_path: PathBuf::from(rollout_path),
            created_at: epoch_seconds_to_datetime(created_at)?,
            updated_at: epoch_seconds_to_datetime(updated_at)?,
            source,
            model_provider,
            cwd: PathBuf::from(cwd),
            title,
            sandbox_policy,
            approval_mode,
            tokens_used,
            has_user_event,
            archived_at: archived_at.map(epoch_seconds_to_datetime).transpose()?,
            git_sha,
            git_branch,
            git_origin_url,
        })
    }
}

pub(crate) fn anchor_from_item(item: &ThreadMetadata, sort_key: SortKey) -> Option<Anchor> {
    let id = Uuid::parse_str(&item.id.to_string()).ok()?;
    let ts = match sort_key {
        SortKey::CreatedAt => item.created_at,
        SortKey::UpdatedAt => item.updated_at,
    };
    Some(Anchor { ts, id })
}

pub(crate) fn datetime_to_epoch_seconds(dt: DateTime<Utc>) -> i64 {
    dt.timestamp()
}

pub(crate) fn epoch_seconds_to_datetime(secs: i64) -> Result<DateTime<Utc>> {
    DateTime::<Utc>::from_timestamp(secs, 0)
        .ok_or_else(|| anyhow::anyhow!("invalid unix timestamp: {secs}"))
}

/// Statistics about a backfill operation.
#[derive(Debug, Clone)]
pub struct BackfillStats {
    /// The number of rollout files scanned.
    pub scanned: usize,
    /// The number of rows upserted successfully.
    pub upserted: usize,
    /// The number of rows that failed to upsert.
    pub failed: usize,
}
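`Anchor` and the `(created_at DESC, id DESC)` indexes in the migration exist for keyset pagination: rather than OFFSET, each page continues strictly after the last `(timestamp, id)` pair seen, so inserts between requests cannot skip or repeat rows. The filter-building helper (`push_thread_filters`) is not shown in this diff, but the predicate it needs looks roughly like the sketch below; the helper name and exact SQL here are an assumption, with only the sqlx `QueryBuilder` API taken as given.

```rust
use sqlx::QueryBuilder;
use sqlx::Sqlite;

// Hypothetical sketch of the keyset predicate a helper like
// push_thread_filters would add. `ts` is the anchor's epoch-seconds
// timestamp and `id` is its UUID rendered as a string.
fn push_keyset_predicate(builder: &mut QueryBuilder<'_, Sqlite>, ts: i64, id: &str) {
    // Descending order: fetch rows strictly "before" the anchor, breaking
    // timestamp ties by id so pages never overlap.
    builder
        .push(" WHERE (created_at < ")
        .push_bind(ts)
        .push(" OR (created_at = ")
        .push_bind(ts)
        .push(" AND id < ")
        .push_bind(id.to_string())
        .push("))")
        .push(" ORDER BY created_at DESC, id DESC");
}

fn main() {
    let mut builder = QueryBuilder::<Sqlite>::new("SELECT id FROM threads");
    push_keyset_predicate(
        &mut builder,
        1_735_689_600,
        "0194a7e2-0000-7000-8000-000000000000",
    );
    // Prints the assembled SQL with ? placeholders for the bound values.
    println!("{}", builder.sql());
}
```
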
codex-rs/state/src/paths.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
use chrono::DateTime;
use chrono::Timelike;
use chrono::Utc;
use std::path::Path;

pub(crate) async fn file_modified_time_utc(path: &Path) -> Option<DateTime<Utc>> {
    let modified = tokio::fs::metadata(path).await.ok()?.modified().ok()?;
    let updated_at: DateTime<Utc> = modified.into();
    Some(updated_at.with_nanosecond(0).unwrap_or(updated_at))
}
458
codex-rs/state/src/runtime.rs
Normal file
458
codex-rs/state/src/runtime.rs
Normal file
@@ -0,0 +1,458 @@
|
||||
use crate::DB_ERROR_METRIC;
|
||||
use crate::SortKey;
|
||||
use crate::ThreadMetadata;
|
||||
use crate::ThreadMetadataBuilder;
|
||||
use crate::ThreadsPage;
|
||||
use crate::apply_rollout_item;
|
||||
use crate::migrations::MIGRATOR;
|
||||
use crate::model::ThreadRow;
|
||||
use crate::model::anchor_from_item;
|
||||
use crate::model::datetime_to_epoch_seconds;
|
||||
use crate::paths::file_modified_time_utc;
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
use codex_otel::OtelManager;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use sqlx::QueryBuilder;
|
||||
use sqlx::Row;
|
||||
use sqlx::Sqlite;
|
||||
use sqlx::SqlitePool;
|
||||
use sqlx::sqlite::SqliteConnectOptions;
|
||||
use sqlx::sqlite::SqliteJournalMode;
|
||||
use sqlx::sqlite::SqlitePoolOptions;
|
||||
use sqlx::sqlite::SqliteSynchronous;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tracing::warn;
|
||||
|
||||
pub const STATE_DB_FILENAME: &str = "state.sqlite";
|
||||
|
||||
const METRIC_DB_INIT: &str = "codex.db.init";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct StateRuntime {
|
||||
codex_home: PathBuf,
|
||||
default_provider: String,
|
||||
pool: Arc<sqlx::SqlitePool>,
|
||||
}
|
||||
|
||||
impl StateRuntime {
|
||||
/// Initialize the state runtime using the provided Codex home and default provider.
|
||||
///
|
||||
/// This opens (and migrates) the SQLite database at `codex_home/state.sqlite`.
|
||||
pub async fn init(
|
||||
codex_home: PathBuf,
|
||||
default_provider: String,
|
||||
otel: Option<OtelManager>,
|
||||
) -> anyhow::Result<Arc<Self>> {
|
||||
tokio::fs::create_dir_all(&codex_home).await?;
|
||||
let state_path = codex_home.join(STATE_DB_FILENAME);
|
||||
let existed = tokio::fs::try_exists(&state_path).await.unwrap_or(false);
|
||||
let pool = match open_sqlite(&state_path).await {
|
||||
Ok(db) => Arc::new(db),
|
||||
Err(err) => {
|
||||
warn!("failed to open state db at {}: {err}", state_path.display());
|
||||
if let Some(otel) = otel.as_ref() {
|
||||
otel.counter(METRIC_DB_INIT, 1, &[("status", "open_error")]);
|
||||
}
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
if let Some(otel) = otel.as_ref() {
|
||||
otel.counter(METRIC_DB_INIT, 1, &[("status", "opened")]);
|
||||
}
|
||||
let runtime = Arc::new(Self {
|
||||
pool,
|
||||
codex_home,
|
||||
default_provider,
|
||||
});
|
||||
if !existed && let Some(otel) = otel.as_ref() {
|
||||
otel.counter(METRIC_DB_INIT, 1, &[("status", "created")]);
|
||||
}
|
||||
Ok(runtime)
|
||||
}
|
||||
|
||||
/// Return the configured Codex home directory for this runtime.
|
||||
pub fn codex_home(&self) -> &Path {
|
||||
self.codex_home.as_path()
|
||||
}
|
||||
|
||||
/// Load thread metadata by id using the underlying database.
|
||||
pub async fn get_thread(&self, id: ThreadId) -> anyhow::Result<Option<crate::ThreadMetadata>> {
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
rollout_path,
|
||||
created_at,
|
||||
updated_at,
|
||||
source,
|
||||
model_provider,
|
||||
cwd,
|
||||
title,
|
||||
sandbox_policy,
|
||||
approval_mode,
|
||||
tokens_used,
|
||||
has_user_event,
|
||||
archived_at,
|
||||
git_sha,
|
||||
git_branch,
|
||||
git_origin_url
|
||||
FROM threads
|
||||
WHERE id = ?
|
||||
"#,
|
||||
)
|
||||
.bind(id.to_string())
|
||||
.fetch_optional(self.pool.as_ref())
|
||||
.await?;
|
||||
row.map(|row| ThreadRow::try_from_row(&row).and_then(ThreadMetadata::try_from))
|
||||
.transpose()
|
||||
}
|
||||
|
||||
/// Find a rollout path by thread id using the underlying database.
|
||||
pub async fn find_rollout_path_by_id(
|
||||
&self,
|
||||
id: ThreadId,
|
||||
archived_only: Option<bool>,
|
||||
) -> anyhow::Result<Option<PathBuf>> {
|
||||
let mut builder =
|
||||
QueryBuilder::<Sqlite>::new("SELECT rollout_path FROM threads WHERE id = ");
|
||||
builder.push_bind(id.to_string());
|
||||
match archived_only {
|
||||
Some(true) => {
|
||||
builder.push(" AND archived = 1");
|
||||
}
|
||||
Some(false) => {
|
||||
builder.push(" AND archived = 0");
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
let row = builder.build().fetch_optional(self.pool.as_ref()).await?;
|
||||
Ok(row
|
||||
.and_then(|r| r.try_get::<String, _>("rollout_path").ok())
|
||||
.map(PathBuf::from))
|
||||
}
|
||||
|
||||
/// List threads using the underlying database.
|
||||
pub async fn list_threads(
|
||||
&self,
|
||||
page_size: usize,
|
||||
anchor: Option<&crate::Anchor>,
|
||||
sort_key: crate::SortKey,
|
||||
allowed_sources: &[String],
|
||||
model_providers: Option<&[String]>,
|
||||
archived_only: bool,
|
||||
) -> anyhow::Result<crate::ThreadsPage> {
|
||||
let limit = page_size.saturating_add(1);
|
||||
|
||||
let mut builder = QueryBuilder::<Sqlite>::new(
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
rollout_path,
|
||||
created_at,
|
||||
updated_at,
|
||||
source,
|
||||
model_provider,
|
||||
cwd,
|
||||
title,
|
||||
sandbox_policy,
|
||||
approval_mode,
|
||||
tokens_used,
|
||||
has_user_event,
|
||||
archived_at,
|
||||
git_sha,
|
||||
git_branch,
|
||||
git_origin_url
|
||||
FROM threads
|
||||
"#,
|
||||
);
|
||||
push_thread_filters(
|
||||
&mut builder,
|
||||
archived_only,
|
||||
allowed_sources,
|
||||
model_providers,
|
||||
anchor,
|
||||
sort_key,
|
||||
);
|
||||
push_thread_order_and_limit(&mut builder, sort_key, limit);
|
||||
|
||||
let rows = builder.build().fetch_all(self.pool.as_ref()).await?;
|
||||
let mut items = rows
|
||||
.into_iter()
|
||||
.map(|row| ThreadRow::try_from_row(&row).and_then(ThreadMetadata::try_from))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let num_scanned_rows = items.len();
|
||||
let next_anchor = if items.len() > page_size {
|
||||
items.pop();
|
||||
items
|
||||
.last()
|
||||
.and_then(|item| anchor_from_item(item, sort_key))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(ThreadsPage {
|
||||
items,
|
||||
next_anchor,
|
||||
num_scanned_rows,
|
||||
})
|
||||
}

    /// List thread ids using the underlying database (no rollout scanning).
    pub async fn list_thread_ids(
        &self,
        limit: usize,
        anchor: Option<&crate::Anchor>,
        sort_key: crate::SortKey,
        allowed_sources: &[String],
        model_providers: Option<&[String]>,
        archived_only: bool,
    ) -> anyhow::Result<Vec<ThreadId>> {
        let mut builder = QueryBuilder::<Sqlite>::new("SELECT id FROM threads");
        push_thread_filters(
            &mut builder,
            archived_only,
            allowed_sources,
            model_providers,
            anchor,
            sort_key,
        );
        push_thread_order_and_limit(&mut builder, sort_key, limit);

        let rows = builder.build().fetch_all(self.pool.as_ref()).await?;
        rows.into_iter()
            .map(|row| {
                let id: String = row.try_get("id")?;
                Ok(ThreadId::try_from(id)?)
            })
            .collect()
    }

    /// Insert or replace thread metadata directly.
    pub async fn upsert_thread(&self, metadata: &crate::ThreadMetadata) -> anyhow::Result<()> {
        sqlx::query(
            r#"
            INSERT INTO threads (
                id,
                rollout_path,
                created_at,
                updated_at,
                source,
                model_provider,
                cwd,
                title,
                sandbox_policy,
                approval_mode,
                tokens_used,
                has_user_event,
                archived,
                archived_at,
                git_sha,
                git_branch,
                git_origin_url
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(id) DO UPDATE SET
                rollout_path = excluded.rollout_path,
                created_at = excluded.created_at,
                updated_at = excluded.updated_at,
                source = excluded.source,
                model_provider = excluded.model_provider,
                cwd = excluded.cwd,
                title = excluded.title,
                sandbox_policy = excluded.sandbox_policy,
                approval_mode = excluded.approval_mode,
                tokens_used = excluded.tokens_used,
                has_user_event = excluded.has_user_event,
                archived = excluded.archived,
                archived_at = excluded.archived_at,
                git_sha = excluded.git_sha,
                git_branch = excluded.git_branch,
                git_origin_url = excluded.git_origin_url
            "#,
        )
        .bind(metadata.id.to_string())
        .bind(metadata.rollout_path.display().to_string())
        .bind(datetime_to_epoch_seconds(metadata.created_at))
        .bind(datetime_to_epoch_seconds(metadata.updated_at))
        .bind(metadata.source.as_str())
        .bind(metadata.model_provider.as_str())
        .bind(metadata.cwd.display().to_string())
        .bind(metadata.title.as_str())
        .bind(metadata.sandbox_policy.as_str())
        .bind(metadata.approval_mode.as_str())
        .bind(metadata.tokens_used)
        .bind(metadata.has_user_event)
        .bind(metadata.archived_at.is_some())
        .bind(metadata.archived_at.map(datetime_to_epoch_seconds))
        .bind(metadata.git_sha.as_deref())
        .bind(metadata.git_branch.as_deref())
        .bind(metadata.git_origin_url.as_deref())
        .execute(self.pool.as_ref())
        .await?;
        Ok(())
    }

    /// Apply rollout items incrementally using the underlying database.
    pub async fn apply_rollout_items(
        &self,
        builder: &ThreadMetadataBuilder,
        items: &[RolloutItem],
        otel: Option<&OtelManager>,
    ) -> anyhow::Result<()> {
        if items.is_empty() {
            return Ok(());
        }
        let mut metadata = self
            .get_thread(builder.id)
            .await?
            .unwrap_or_else(|| builder.build(&self.default_provider));
        metadata.rollout_path = builder.rollout_path.clone();
        for item in items {
            apply_rollout_item(&mut metadata, item, &self.default_provider);
        }
        if let Some(updated_at) = file_modified_time_utc(builder.rollout_path.as_path()).await {
            metadata.updated_at = updated_at;
        }
        if let Err(err) = self.upsert_thread(&metadata).await {
            if let Some(otel) = otel {
                otel.counter(DB_ERROR_METRIC, 1, &[("stage", "apply_rollout_items")]);
            }
            return Err(err);
        }
        Ok(())
    }

    /// Mark a thread as archived using the underlying database.
    pub async fn mark_archived(
        &self,
        thread_id: ThreadId,
        rollout_path: &Path,
        archived_at: DateTime<Utc>,
    ) -> anyhow::Result<()> {
        let Some(mut metadata) = self.get_thread(thread_id).await? else {
            return Ok(());
        };
        metadata.archived_at = Some(archived_at);
        metadata.rollout_path = rollout_path.to_path_buf();
        if let Some(updated_at) = file_modified_time_utc(rollout_path).await {
            metadata.updated_at = updated_at;
        }
        if metadata.id != thread_id {
            warn!(
                "thread id mismatch during archive: expected {thread_id}, got {}",
                metadata.id
            );
        }
        self.upsert_thread(&metadata).await
    }

    /// Mark a thread as unarchived using the underlying database.
    pub async fn mark_unarchived(
        &self,
        thread_id: ThreadId,
        rollout_path: &Path,
    ) -> anyhow::Result<()> {
        let Some(mut metadata) = self.get_thread(thread_id).await? else {
            return Ok(());
        };
        metadata.archived_at = None;
        metadata.rollout_path = rollout_path.to_path_buf();
        if let Some(updated_at) = file_modified_time_utc(rollout_path).await {
            metadata.updated_at = updated_at;
        }
        if metadata.id != thread_id {
            warn!(
                "thread id mismatch during unarchive: expected {thread_id}, got {}",
                metadata.id
            );
        }
        self.upsert_thread(&metadata).await
    }
}

async fn open_sqlite(path: &Path) -> anyhow::Result<SqlitePool> {
    let options = SqliteConnectOptions::new()
        .filename(path)
        .create_if_missing(true)
        .journal_mode(SqliteJournalMode::Wal)
        .synchronous(SqliteSynchronous::Normal)
        .busy_timeout(Duration::from_secs(5));
    let pool = SqlitePoolOptions::new()
        .max_connections(5)
        .connect_with(options)
        .await?;
    MIGRATOR.run(&pool).await?;
    Ok(pool)
}

fn push_thread_filters<'a>(
    builder: &mut QueryBuilder<'a, Sqlite>,
    archived_only: bool,
    allowed_sources: &'a [String],
    model_providers: Option<&'a [String]>,
    anchor: Option<&crate::Anchor>,
    sort_key: SortKey,
) {
    builder.push(" WHERE 1 = 1");
    if archived_only {
        builder.push(" AND archived = 1");
    } else {
        builder.push(" AND archived = 0");
    }
    builder.push(" AND has_user_event = 1");
    if !allowed_sources.is_empty() {
        builder.push(" AND source IN (");
        let mut separated = builder.separated(", ");
        for source in allowed_sources {
            separated.push_bind(source);
        }
        separated.push_unseparated(")");
    }
    if let Some(model_providers) = model_providers
        && !model_providers.is_empty()
    {
        builder.push(" AND model_provider IN (");
        let mut separated = builder.separated(", ");
        for provider in model_providers {
            separated.push_bind(provider);
        }
        separated.push_unseparated(")");
    }
    if let Some(anchor) = anchor {
        let anchor_ts = datetime_to_epoch_seconds(anchor.ts);
        let column = match sort_key {
            SortKey::CreatedAt => "created_at",
            SortKey::UpdatedAt => "updated_at",
        };
        builder.push(" AND (");
        builder.push(column);
        builder.push(" < ");
        builder.push_bind(anchor_ts);
        builder.push(" OR (");
        builder.push(column);
        builder.push(" = ");
        builder.push_bind(anchor_ts);
        builder.push(" AND id < ");
        builder.push_bind(anchor.id.to_string());
        builder.push("))");
    }
}
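
The anchor clause built above is a keyset (tuple) comparison rather than an OFFSET, so a page boundary stays stable even when new rows are inserted between requests. For orientation, a Rust-comment rendering of the SQL shape it produces (illustrative only; the real statement is assembled by `QueryBuilder` with bound parameters, and the column depends on `sort_key`):

    // Effective predicate for sort_key = UpdatedAt with an anchor:
    //
    //   WHERE 1 = 1
    //     AND archived = 0
    //     AND has_user_event = 1
    //     AND source IN (?)
    //     AND (updated_at < ? OR (updated_at = ? AND id < ?))
    //   ORDER BY updated_at DESC, id DESC   -- pushed by push_thread_order_and_limit below
    //   LIMIT ?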

fn push_thread_order_and_limit(
    builder: &mut QueryBuilder<'_, Sqlite>,
    sort_key: SortKey,
    limit: usize,
) {
    let order_column = match sort_key {
        SortKey::CreatedAt => "created_at",
        SortKey::UpdatedAt => "updated_at",
    };
    builder.push(" ORDER BY ");
    builder.push(order_column);
    builder.push(" DESC, id DESC");
    builder.push(" LIMIT ");
    builder.push_bind(limit as i64);
}
@@ -1632,10 +1632,27 @@ impl App {
                 }
             }
             Err(err) => {
+                let mut code_tag: Option<String> = None;
+                let mut message_tag: Option<String> = None;
+                if let Some((code, message)) =
+                    codex_core::windows_sandbox::elevated_setup_failure_details(
+                        &err,
+                    )
+                {
+                    code_tag = Some(code);
+                    message_tag = Some(message);
+                }
+                let mut tags: Vec<(&str, &str)> = Vec::new();
+                if let Some(code) = code_tag.as_deref() {
+                    tags.push(("code", code));
+                }
+                if let Some(message) = message_tag.as_deref() {
+                    tags.push(("message", message));
+                }
                 otel_manager.counter(
                     "codex.windows_sandbox.elevated_setup_failure",
                     1,
-                    &[],
+                    &tags,
                 );
                 tracing::error!(
                     error = %err,
@@ -15,6 +15,16 @@
 //! [`ChatComposer::handle_key_event_without_popup`]. After every handled key, we call
 //! [`ChatComposer::sync_popups`] so UI state follows the latest buffer/cursor.
 //!
+//! # History Navigation (↑/↓)
+//!
+//! The Up/Down history path is managed by [`ChatComposerHistory`]. It merges:
+//!
+//! - Persistent cross-session history (text-only; no element ranges or attachments).
+//! - Local in-session history (full text + text elements + local image paths).
+//!
+//! When recalling a local entry, the composer rehydrates text elements and image attachments.
+//! When recalling a persistent entry, only the text is restored.
+//!
 //! # Submission and Prompt Expansion
 //!
 //! On submit/queue paths, the composer:
@@ -89,6 +99,7 @@ use ratatui::widgets::StatefulWidgetRef;
 use ratatui::widgets::WidgetRef;
 
 use super::chat_composer_history::ChatComposerHistory;
+use super::chat_composer_history::HistoryEntry;
 use super::command_popup::CommandItem;
 use super::command_popup::CommandPopup;
 use super::command_popup::CommandPopupFlags;
@@ -232,7 +243,6 @@ impl ChatComposerConfig {
         }
     }
 }
 
 pub(crate) struct ChatComposer {
     textarea: TextArea,
     textarea_state: RefCell<TextAreaState>,
@@ -394,7 +404,6 @@ impl ChatComposer {
     pub fn set_personality_command_enabled(&mut self, enabled: bool) {
         self.personality_command_enabled = enabled;
     }
 
     /// Centralized feature gating keeps config checks out of call sites.
     fn popups_enabled(&self) -> bool {
         self.config.popups_enabled
@@ -462,11 +471,12 @@ impl ChatComposer {
         offset: usize,
         entry: Option<String>,
     ) -> bool {
-        let Some(text) = self.history.on_entry_response(log_id, offset, entry) else {
+        let Some(entry) = self.history.on_entry_response(log_id, offset, entry) else {
             return false;
         };
-        // Composer history (↑/↓) stores plain text only; no UI element ranges/attachments to restore here.
-        self.set_text_content(text, Vec::new(), Vec::new());
+        // Persistent ↑/↓ history is text-only (backwards-compatible and avoids persisting
+        // attachments), but local in-session ↑/↓ history can rehydrate elements and image paths.
+        self.set_text_content(entry.text, entry.text_elements, entry.local_image_paths);
         true
     }
 
@@ -709,9 +719,19 @@ impl ChatComposer {
             return None;
         }
         let previous = self.current_text();
+        let text_elements = self.textarea.text_elements();
+        let local_image_paths = self
+            .attached_images
+            .iter()
+            .map(|img| img.path.clone())
+            .collect();
         self.set_text_content(String::new(), Vec::new(), Vec::new());
         self.history.reset_navigation();
-        self.history.record_local_submission(&previous);
+        self.history.record_local_submission(HistoryEntry {
+            text: previous.clone(),
+            text_elements,
+            local_image_paths,
+        });
         Some(previous)
     }
 
@@ -1794,8 +1814,17 @@ impl ChatComposer {
         if text.is_empty() && self.attached_images.is_empty() {
             return None;
         }
-        if !text.is_empty() {
-            self.history.record_local_submission(&text);
+        if !text.is_empty() || !self.attached_images.is_empty() {
+            let local_image_paths = self
+                .attached_images
+                .iter()
+                .map(|img| img.path.clone())
+                .collect();
+            self.history.record_local_submission(HistoryEntry {
+                text: text.clone(),
+                text_elements: text_elements.clone(),
+                local_image_paths,
+            });
         }
         self.pending_pastes.clear();
         Some((text, text_elements))
@@ -1991,15 +2020,19 @@ impl ChatComposer {
             .history
             .should_handle_navigation(self.textarea.text(), self.textarea.cursor())
         {
-            let replace_text = match key_event.code {
+            let replace_entry = match key_event.code {
                 KeyCode::Up => self.history.navigate_up(&self.app_event_tx),
                 KeyCode::Down => self.history.navigate_down(&self.app_event_tx),
                 KeyCode::Char('p') => self.history.navigate_up(&self.app_event_tx),
                 KeyCode::Char('n') => self.history.navigate_down(&self.app_event_tx),
                 _ => unreachable!(),
             };
-            if let Some(text) = replace_text {
-                self.set_text_content(text, Vec::new(), Vec::new());
+            if let Some(entry) = replace_entry {
+                self.set_text_content(
+                    entry.text,
+                    entry.text_elements,
+                    entry.local_image_paths,
+                );
                 return (InputResult::None, true);
             }
         }
@@ -2259,14 +2292,27 @@ impl ChatComposer {
     }
 
     fn footer_props(&self) -> FooterProps {
+        let mode = self.footer_mode();
+        let is_wsl = {
+            #[cfg(target_os = "linux")]
+            {
+                mode == FooterMode::ShortcutOverlay && crate::clipboard_paste::is_probably_wsl()
+            }
+            #[cfg(not(target_os = "linux"))]
+            {
+                false
+            }
+        };
+
         FooterProps {
-            mode: self.footer_mode(),
+            mode,
             esc_backtrack_hint: self.esc_backtrack_hint,
             use_shift_enter_hint: self.use_shift_enter_hint,
             is_task_running: self.is_task_running,
             quit_shortcut_key: self.quit_shortcut_key,
             steer_enabled: self.steer_enabled,
             collaboration_modes_enabled: self.collaboration_modes_enabled,
+            is_wsl,
             context_window_percent: self.context_window_percent,
             context_window_used_tokens: self.context_window_used_tokens,
         }
@@ -3257,7 +3303,7 @@ mod tests {
 
         assert_eq!(
             composer.history.navigate_up(&composer.app_event_tx),
-            Some("draft text".to_string())
+            Some(HistoryEntry::from_text("draft text".to_string()))
         );
     }
 
@@ -4710,6 +4756,37 @@ mod tests {
         assert_eq!(vec![path], imgs);
     }
 
+    #[test]
+    fn history_navigation_restores_image_attachments() {
+        let (tx, _rx) = unbounded_channel::<AppEvent>();
+        let sender = AppEventSender::new(tx);
+        let mut composer = ChatComposer::new(
+            true,
+            sender,
+            false,
+            "Ask Codex to do anything".to_string(),
+            false,
+        );
+        composer.set_steer_enabled(true);
+        let path = PathBuf::from("/tmp/image1.png");
+        composer.attach_image(path.clone());
+
+        let (result, _needs_redraw) =
+            composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+        assert!(matches!(result, InputResult::Submitted { .. }));
+
+        composer.set_text_content(String::new(), Vec::new(), Vec::new());
+
+        let _ = composer.handle_key_event(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE));
+
+        let text = composer.current_text();
+        assert_eq!(text, "[Image #1]");
+        let text_elements = composer.text_elements();
+        assert_eq!(text_elements.len(), 1);
+        assert_eq!(text_elements[0].placeholder(&text), Some("[Image #1]"));
+        assert_eq!(composer.local_image_paths(), vec![path]);
+    }
+
     #[test]
     fn set_text_content_reattaches_images_without_placeholder_metadata() {
         let (tx, _rx) = unbounded_channel::<AppEvent>();
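Taken together, the composer changes above swap plain strings for `HistoryEntry` values on every record/recall path. A condensed sketch of the round trip, using only items visible in this diff (the setup is abbreviated and illustrative, not a runnable test):

    // Submission records the full entry, not just the text:
    history.record_local_submission(HistoryEntry {
        text: previous.clone(),
        text_elements,          // placeholder ranges captured from the textarea
        local_image_paths,      // attachment paths captured from attached_images
    });

    // Up-arrow recall hands the same entry back, and the composer restores all three parts:
    if let Some(entry) = history.navigate_up(&app_event_tx) {
        composer.set_text_content(entry.text, entry.text_elements, entry.local_image_paths);
    }

Persistent entries fetched from disk still arrive as plain text and are wrapped with `HistoryEntry::from_text`, which is why only the text survives cross-session recall.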
@@ -1,8 +1,35 @@
 use std::collections::HashMap;
+use std::path::PathBuf;
 
 use crate::app_event::AppEvent;
 use crate::app_event_sender::AppEventSender;
 use codex_core::protocol::Op;
+use codex_protocol::user_input::TextElement;
 
+#[derive(Debug, Clone, PartialEq)]
+pub(crate) struct HistoryEntry {
+    pub(crate) text: String,
+    pub(crate) text_elements: Vec<TextElement>,
+    pub(crate) local_image_paths: Vec<PathBuf>,
+}
+
+impl HistoryEntry {
+    fn empty() -> Self {
+        Self {
+            text: String::new(),
+            text_elements: Vec::new(),
+            local_image_paths: Vec::new(),
+        }
+    }
+
+    pub(crate) fn from_text(text: String) -> Self {
+        Self {
+            text,
+            text_elements: Vec::new(),
+            local_image_paths: Vec::new(),
+        }
+    }
+}
 
 /// State machine that manages shell-style history navigation (Up/Down) inside
 /// the chat composer. This struct is intentionally decoupled from the
@@ -15,10 +42,10 @@ pub(crate) struct ChatComposerHistory {
     history_entry_count: usize,
 
     /// Messages submitted by the user *during this UI session* (newest at END).
-    local_history: Vec<String>,
+    local_history: Vec<HistoryEntry>,
 
     /// Cache of persistent history entries fetched on-demand.
-    fetched_history: HashMap<usize, String>,
+    fetched_history: HashMap<usize, HistoryEntry>,
 
     /// Current cursor within the combined (persistent + local) history. `None`
     /// indicates the user is *not* currently browsing history.
@@ -54,8 +81,8 @@ impl ChatComposerHistory {
 
     /// Record a message submitted by the user in the current session so it can
     /// be recalled later.
-    pub fn record_local_submission(&mut self, text: &str) {
-        if text.is_empty() {
+    pub fn record_local_submission(&mut self, entry: HistoryEntry) {
+        if entry.text.is_empty() && entry.local_image_paths.is_empty() {
             return;
         }
 
@@ -63,11 +90,11 @@ impl ChatComposerHistory {
         self.last_history_text = None;
 
         // Avoid inserting a duplicate if identical to the previous entry.
-        if self.local_history.last().is_some_and(|prev| prev == text) {
+        if self.local_history.last().is_some_and(|prev| prev == &entry) {
             return;
         }
 
-        self.local_history.push(text.to_string());
+        self.local_history.push(entry);
     }
 
     /// Reset navigation tracking so the next Up key resumes from the latest entry.
@@ -99,7 +126,7 @@ impl ChatComposerHistory {
 
     /// Handle <Up>. Returns true when the key was consumed and the caller
     /// should request a redraw.
-    pub fn navigate_up(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
+    pub fn navigate_up(&mut self, app_event_tx: &AppEventSender) -> Option<HistoryEntry> {
         let total_entries = self.history_entry_count + self.local_history.len();
         if total_entries == 0 {
             return None;
@@ -116,7 +143,7 @@ impl ChatComposerHistory {
     }
 
     /// Handle <Down>.
-    pub fn navigate_down(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
+    pub fn navigate_down(&mut self, app_event_tx: &AppEventSender) -> Option<HistoryEntry> {
         let total_entries = self.history_entry_count + self.local_history.len();
         if total_entries == 0 {
             return None;
@@ -137,7 +164,7 @@ impl ChatComposerHistory {
             // Past newest – clear and exit browsing mode.
             self.history_cursor = None;
             self.last_history_text = None;
-            Some(String::new())
+            Some(HistoryEntry::empty())
         }
     }
 }
@@ -148,16 +175,17 @@ impl ChatComposerHistory {
         log_id: u64,
         offset: usize,
         entry: Option<String>,
-    ) -> Option<String> {
+    ) -> Option<HistoryEntry> {
         if self.history_log_id != Some(log_id) {
             return None;
         }
         let text = entry?;
-        self.fetched_history.insert(offset, text.clone());
+        let entry = HistoryEntry::from_text(text);
+        self.fetched_history.insert(offset, entry.clone());
 
         if self.history_cursor == Some(offset as isize) {
-            self.last_history_text = Some(text.clone());
-            return Some(text);
+            self.last_history_text = Some(entry.text.clone());
+            return Some(entry);
         }
         None
     }
@@ -170,19 +198,20 @@ impl ChatComposerHistory {
         &mut self,
         global_idx: usize,
         app_event_tx: &AppEventSender,
-    ) -> Option<String> {
+    ) -> Option<HistoryEntry> {
         if global_idx >= self.history_entry_count {
             // Local entry.
-            if let Some(text) = self
+            if let Some(entry) = self
                 .local_history
                 .get(global_idx - self.history_entry_count)
                 .cloned()
             {
-                self.last_history_text = Some(text.clone());
-                return Some(text.clone());
+                self.last_history_text = Some(entry.text.clone());
+                return Some(entry);
             }
-        } else if let Some(text) = self.fetched_history.get(&global_idx) {
-            self.last_history_text = Some(text.clone());
-            return Some(text.clone());
+        } else if let Some(entry) = self.fetched_history.get(&global_idx).cloned() {
+            self.last_history_text = Some(entry.text.clone());
+            return Some(entry);
         } else if let Some(log_id) = self.history_log_id {
             let op = Op::GetHistoryEntryRequest {
                 offset: global_idx,
@@ -206,22 +235,28 @@ mod tests {
         let mut history = ChatComposerHistory::new();
 
         // Empty submissions are ignored.
-        history.record_local_submission("");
+        history.record_local_submission(HistoryEntry::from_text(String::new()));
         assert_eq!(history.local_history.len(), 0);
 
         // First entry is recorded.
-        history.record_local_submission("hello");
+        history.record_local_submission(HistoryEntry::from_text("hello".to_string()));
         assert_eq!(history.local_history.len(), 1);
-        assert_eq!(history.local_history.last().unwrap(), "hello");
+        assert_eq!(
+            history.local_history.last().unwrap(),
+            &HistoryEntry::from_text("hello".to_string())
+        );
 
         // Identical consecutive entry is skipped.
-        history.record_local_submission("hello");
+        history.record_local_submission(HistoryEntry::from_text("hello".to_string()));
         assert_eq!(history.local_history.len(), 1);
 
         // Different entry is recorded.
-        history.record_local_submission("world");
+        history.record_local_submission(HistoryEntry::from_text("world".to_string()));
         assert_eq!(history.local_history.len(), 2);
-        assert_eq!(history.local_history.last().unwrap(), "world");
+        assert_eq!(
+            history.local_history.last().unwrap(),
+            &HistoryEntry::from_text("world".to_string())
+        );
     }
 
     #[test]
@@ -252,7 +287,7 @@ mod tests {
 
         // Inject the async response.
         assert_eq!(
-            Some("latest".into()),
+            Some(HistoryEntry::from_text("latest".to_string())),
             history.on_entry_response(1, 2, Some("latest".into()))
         );
 
@@ -273,7 +308,7 @@ mod tests {
         );
 
         assert_eq!(
-            Some("older".into()),
+            Some(HistoryEntry::from_text("older".to_string())),
             history.on_entry_response(1, 1, Some("older".into()))
         );
     }
@@ -285,16 +320,29 @@ mod tests {
 
         let mut history = ChatComposerHistory::new();
         history.set_metadata(1, 3);
-        history.fetched_history.insert(1, "command2".into());
-        history.fetched_history.insert(2, "command3".into());
+        history
+            .fetched_history
+            .insert(1, HistoryEntry::from_text("command2".to_string()));
+        history
+            .fetched_history
+            .insert(2, HistoryEntry::from_text("command3".to_string()));
 
-        assert_eq!(Some("command3".into()), history.navigate_up(&tx));
-        assert_eq!(Some("command2".into()), history.navigate_up(&tx));
+        assert_eq!(
+            Some(HistoryEntry::from_text("command3".to_string())),
+            history.navigate_up(&tx)
+        );
+        assert_eq!(
+            Some(HistoryEntry::from_text("command2".to_string())),
+            history.navigate_up(&tx)
+        );
 
         history.reset_navigation();
         assert!(history.history_cursor.is_none());
         assert!(history.last_history_text.is_none());
 
-        assert_eq!(Some("command3".into()), history.navigate_up(&tx));
+        assert_eq!(
+            Some(HistoryEntry::from_text("command3".to_string())),
+            history.navigate_up(&tx)
+        );
     }
 }
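The combined index arithmetic is the part of `ChatComposerHistory` worth internalizing: persistent entries occupy global indices `0..history_entry_count`, and local entries are appended after them. A small worked example in comments, assuming 3 persistent entries and 2 local ones (all numbers illustrative):

    // total_entries = history_entry_count + local_history.len() = 3 + 2 = 5
    // global_idx 0..3 -> persistent (served from fetched_history, or requested
    //                    asynchronously via Op::GetHistoryEntryRequest { offset, .. })
    // global_idx 3..5 -> local (local_history[global_idx - history_entry_count])
    //
    // Starting with no cursor, Up selects global_idx 4 (newest local entry),
    // then 3, then 2 (newest persistent), and so on. Down walks back toward 4,
    // and one more Down past the newest entry yields HistoryEntry::empty(),
    // clearing the composer and exiting browsing mode.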
@@ -32,8 +32,6 @@
 //! In short: `single_line_footer_layout` chooses *what* best fits, and the two
 //! render helpers choose whether to draw the chosen line or the default
 //! `FooterProps` mapping.
-#[cfg(target_os = "linux")]
-use crate::clipboard_paste::is_probably_wsl;
 use crate::key_hint;
 use crate::key_hint::KeyBinding;
 use crate::render::line_utils::prefix_lines;
@@ -63,6 +61,7 @@ pub(crate) struct FooterProps {
     pub(crate) is_task_running: bool,
     pub(crate) steer_enabled: bool,
     pub(crate) collaboration_modes_enabled: bool,
+    pub(crate) is_wsl: bool,
     /// Which key the user must press again to quit.
     ///
     /// This is rendered when `mode` is `FooterMode::QuitShortcutReminder`.
@@ -554,15 +553,10 @@ fn footer_from_props_lines(
             vec![left_side_line(collaboration_mode_indicator, state)]
         }
         FooterMode::ShortcutOverlay => {
-            #[cfg(target_os = "linux")]
-            let is_wsl = is_probably_wsl();
-            #[cfg(not(target_os = "linux"))]
-            let is_wsl = false;
-
             let state = ShortcutsState {
                 use_shift_enter_hint: props.use_shift_enter_hint,
                 esc_backtrack_hint: props.esc_backtrack_hint,
-                is_wsl,
+                is_wsl: props.is_wsl,
                 collaboration_modes_enabled: props.collaboration_modes_enabled,
             };
             shortcut_overlay_lines(state)
@@ -961,6 +955,7 @@ const SHORTCUTS: &[ShortcutDescriptor] = &[
 mod tests {
     use super::*;
     use insta::assert_snapshot;
+    use pretty_assertions::assert_eq;
     use ratatui::Terminal;
     use ratatui::backend::TestBackend;
 
@@ -1077,6 +1072,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1092,6 +1088,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1107,6 +1104,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: true,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1122,6 +1120,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1137,6 +1136,7 @@ mod tests {
             is_task_running: true,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1152,6 +1152,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1167,6 +1168,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1182,6 +1184,7 @@ mod tests {
             is_task_running: true,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: Some(72),
             context_window_used_tokens: None,
@@ -1197,6 +1200,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: Some(123_456),
@@ -1212,6 +1216,7 @@ mod tests {
             is_task_running: true,
             steer_enabled: false,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1227,6 +1232,7 @@ mod tests {
             is_task_running: true,
             steer_enabled: true,
             collaboration_modes_enabled: false,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1240,6 +1246,7 @@ mod tests {
             is_task_running: false,
             steer_enabled: false,
             collaboration_modes_enabled: true,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1266,6 +1273,7 @@ mod tests {
             is_task_running: true,
             steer_enabled: false,
             collaboration_modes_enabled: true,
+            is_wsl: false,
             quit_shortcut_key: key_hint::ctrl(KeyCode::Char('c')),
             context_window_percent: None,
             context_window_used_tokens: None,
@@ -1278,4 +1286,41 @@ mod tests {
             Some(CollaborationModeIndicator::Plan),
         );
     }
+
+    #[test]
+    fn paste_image_shortcut_prefers_ctrl_alt_v_under_wsl() {
+        let descriptor = SHORTCUTS
+            .iter()
+            .find(|descriptor| descriptor.id == ShortcutId::PasteImage)
+            .expect("paste image shortcut");
+
+        let is_wsl = {
+            #[cfg(target_os = "linux")]
+            {
+                crate::clipboard_paste::is_probably_wsl()
+            }
+            #[cfg(not(target_os = "linux"))]
+            {
+                false
+            }
+        };
+
+        let expected_key = if is_wsl {
+            key_hint::ctrl_alt(KeyCode::Char('v'))
+        } else {
+            key_hint::ctrl(KeyCode::Char('v'))
+        };
+
+        let actual_key = descriptor
+            .binding_for(ShortcutsState {
+                use_shift_enter_hint: false,
+                esc_backtrack_hint: false,
+                is_wsl,
+                collaboration_modes_enabled: false,
+            })
+            .expect("shortcut binding")
+            .key;
+
+        assert_eq!(actual_key, expected_key);
+    }
 }
@@ -1154,6 +1154,7 @@ mod tests {
             description: "test skill".to_string(),
             short_description: None,
             interface: None,
             dependencies: None,
             path: PathBuf::from("test-skill"),
             scope: SkillScope::User,
         }]),
@@ -1,12 +1,12 @@
 use ratatui::layout::Rect;
 
+use super::DESIRED_SPACERS_WHEN_NOTES_HIDDEN;
 use super::RequestUserInputOverlay;
 
 pub(super) struct LayoutSections {
     pub(super) progress_area: Rect,
     pub(super) header_area: Rect,
     pub(super) question_area: Rect,
-    pub(super) answer_title_area: Rect,
     // Wrapped question text lines to render in the question area.
     pub(super) question_lines: Vec<String>,
     pub(super) options_area: Rect,
@@ -20,7 +20,8 @@ impl RequestUserInputOverlay {
     /// Compute layout sections, collapsing notes and hints as space shrinks.
     pub(super) fn layout_sections(&self, area: Rect) -> LayoutSections {
         let has_options = self.has_options();
-        let footer_pref = if self.unanswered_count() > 0 { 2 } else { 1 };
+        let notes_visible = !has_options || self.notes_ui_visible();
+        let footer_pref = self.footer_required_height(area.width);
         let notes_pref_height = self.notes_input_height(area.width);
         let mut question_lines = self.wrapped_question_lines(area.width);
         let question_height = question_lines.len() as u16;
@@ -28,18 +29,22 @@ impl RequestUserInputOverlay {
         let (
             question_height,
             progress_height,
-            answer_title_height,
+            spacer_after_question,
+            options_height,
+            spacer_after_options,
             notes_title_height,
             notes_height,
-            options_height,
             footer_lines,
         ) = if has_options {
             self.layout_with_options(
-                area.height,
-                area.width,
-                question_height,
-                notes_pref_height,
-                footer_pref,
+                OptionsLayoutArgs {
+                    available_height: area.height,
+                    width: area.width,
+                    question_height,
+                    notes_pref_height,
+                    footer_pref,
+                    notes_visible,
+                },
                 &mut question_lines,
             )
         } else {
@@ -52,31 +57,24 @@ impl RequestUserInputOverlay {
             )
         };
 
-        let (
-            progress_area,
-            header_area,
-            question_area,
-            answer_title_area,
-            options_area,
-            notes_title_area,
-            notes_area,
-        ) = self.build_layout_areas(
-            area,
-            LayoutHeights {
-                progress_height,
-                question_height,
-                answer_title_height,
-                options_height,
-                notes_title_height,
-                notes_height,
-            },
-        );
+        let (progress_area, header_area, question_area, options_area, notes_title_area, notes_area) =
+            self.build_layout_areas(
+                area,
+                LayoutHeights {
+                    progress_height,
+                    question_height,
+                    spacer_after_question,
+                    options_height,
+                    spacer_after_options,
+                    notes_title_height,
+                    notes_height,
+                },
+            );
 
         LayoutSections {
             progress_area,
             header_area,
             question_area,
-            answer_title_area,
             question_lines,
             options_area,
             notes_title_area,
@@ -90,21 +88,28 @@ impl RequestUserInputOverlay {
     /// Handles both tight layout (when space is constrained) and normal layout
     /// (when there's sufficient space for all elements).
     ///
-    /// Returns: (question_height, progress_height, answer_title_height, notes_title_height, notes_height, options_height, footer_lines)
+    /// Returns: (question_height, progress_height, spacer_after_question, options_height, spacer_after_options, notes_title_height, notes_height, footer_lines)
     fn layout_with_options(
         &self,
-        available_height: u16,
-        width: u16,
-        question_height: u16,
-        notes_pref_height: u16,
-        footer_pref: u16,
+        args: OptionsLayoutArgs,
         question_lines: &mut Vec<String>,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
-        let options_required_height = self.options_required_height(width);
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
+        let OptionsLayoutArgs {
+            available_height,
+            width,
+            question_height,
+            notes_pref_height,
+            footer_pref,
+            notes_visible,
+        } = args;
+        let options_heights = OptionsHeights {
+            preferred: self.options_preferred_height(width),
+            full: self.options_required_height(width),
+        };
         let min_options_height = 1u16;
         let required = 1u16
             .saturating_add(question_height)
-            .saturating_add(options_required_height);
+            .saturating_add(options_heights.preferred);
 
         if required > available_height {
             self.layout_with_options_tight(
@@ -115,11 +120,14 @@ impl RequestUserInputOverlay {
             )
         } else {
             self.layout_with_options_normal(
-                available_height,
-                question_height,
-                options_required_height,
-                notes_pref_height,
-                footer_pref,
+                OptionsNormalArgs {
+                    available_height,
+                    question_height,
+                    notes_pref_height,
+                    footer_pref,
+                    notes_visible,
+                },
+                options_heights,
             )
         }
     }
@@ -132,7 +140,7 @@ impl RequestUserInputOverlay {
         question_height: u16,
         min_options_height: u16,
         question_lines: &mut Vec<String>,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
         let max_question_height =
             available_height.saturating_sub(1u16.saturating_add(min_options_height));
         let adjusted_question_height = question_height.min(max_question_height);
@@ -140,42 +148,46 @@ impl RequestUserInputOverlay {
         let options_height =
             available_height.saturating_sub(1u16.saturating_add(adjusted_question_height));
 
-        (adjusted_question_height, 0, 0, 0, 0, options_height, 0)
+        (adjusted_question_height, 0, 0, options_height, 0, 0, 0, 0)
     }
 
-    /// Normal layout for options case: allocate space for all elements with
-    /// preference order: notes, footer, labels, then progress.
+    /// Normal layout for options case: allocate footer + progress first, and
+    /// only allocate notes (and its label) when explicitly visible.
     fn layout_with_options_normal(
         &self,
-        available_height: u16,
-        question_height: u16,
-        options_required_height: u16,
-        notes_pref_height: u16,
-        footer_pref: u16,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
-        let options_height = options_required_height;
+        args: OptionsNormalArgs,
+        options: OptionsHeights,
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
+        let OptionsNormalArgs {
+            available_height,
+            question_height,
+            notes_pref_height,
+            footer_pref,
+            notes_visible,
+        } = args;
+        let min_options_height = 1u16;
+        let mut options_height = options.preferred.max(min_options_height);
         let used = 1u16
             .saturating_add(question_height)
            .saturating_add(options_height);
        let mut remaining = available_height.saturating_sub(used);
 
-        // Prefer notes next, then footer, then labels, with progress last.
-        let mut notes_height = notes_pref_height.min(remaining);
-        remaining = remaining.saturating_sub(notes_height);
-
-        let footer_lines = footer_pref.min(remaining);
-        remaining = remaining.saturating_sub(footer_lines);
-
-        let mut answer_title_height = 0;
-        if remaining > 0 {
-            answer_title_height = 1;
-            remaining = remaining.saturating_sub(1);
-        }
-
-        let mut notes_title_height = 0;
-        if remaining > 0 {
-            notes_title_height = 1;
-            remaining = remaining.saturating_sub(1);
+        // When notes are hidden, prefer to reserve room for progress, footer,
+        // and spacers by shrinking the options window if needed.
+        let desired_spacers = if notes_visible {
+            0
+        } else {
+            DESIRED_SPACERS_WHEN_NOTES_HIDDEN
+        };
+        let required_extra = footer_pref
+            .saturating_add(1) // progress line
+            .saturating_add(desired_spacers);
+        if remaining < required_extra {
+            let deficit = required_extra.saturating_sub(remaining);
+            let reducible = options_height.saturating_sub(min_options_height);
+            let reduce_by = deficit.min(reducible);
+            options_height = options_height.saturating_sub(reduce_by);
+            remaining = remaining.saturating_add(reduce_by);
         }
 
         let mut progress_height = 0;
@@ -184,16 +196,58 @@ impl RequestUserInputOverlay {
             remaining = remaining.saturating_sub(1);
         }
 
-        // Expand the notes composer with any leftover rows.
+        if !notes_visible {
+            let mut spacer_after_options = 0;
+            if remaining > footer_pref {
+                spacer_after_options = 1;
+                remaining = remaining.saturating_sub(1);
+            }
+            let footer_lines = footer_pref.min(remaining);
+            remaining = remaining.saturating_sub(footer_lines);
+            let mut spacer_after_question = 0;
+            if remaining > 0 {
+                spacer_after_question = 1;
+                remaining = remaining.saturating_sub(1);
+            }
+            let grow_by = remaining.min(options.full.saturating_sub(options_height));
+            options_height = options_height.saturating_add(grow_by);
+            return (
+                question_height,
+                progress_height,
+                spacer_after_question,
+                options_height,
+                spacer_after_options,
+                0,
+                0,
+                footer_lines,
+            );
+        }
+
+        let footer_lines = footer_pref.min(remaining);
+        remaining = remaining.saturating_sub(footer_lines);
+
+        // Prefer notes next, then labels, with any leftover rows expanding notes.
+        let spacer_after_question = 0;
+        let spacer_after_options = 0;
+        let mut notes_height = notes_pref_height.min(remaining);
+        remaining = remaining.saturating_sub(notes_height);
+
+        let mut notes_title_height = 0;
+        if remaining > 0 {
+            notes_title_height = 1;
+            remaining = remaining.saturating_sub(1);
+        }
+
         notes_height = notes_height.saturating_add(remaining);
 
         (
             question_height,
             progress_height,
-            answer_title_height,
+            spacer_after_question,
+            options_height,
+            spacer_after_options,
             notes_title_height,
             notes_height,
-            options_height,
             footer_lines,
         )
     }
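A worked allocation example may help, since the logic above is all saturating u16 arithmetic. Suppose `available_height = 12`, `question_height = 3`, `options.preferred = 6`, `options.full = 9`, `footer_pref = 2`, notes hidden, and `DESIRED_SPACERS_WHEN_NOTES_HIDDEN = 2` (all numbers illustrative, not taken from this diff):

    // used = 1 (header) + 3 (question) + 6 (options) = 10; remaining = 12 - 10 = 2
    // required_extra = footer_pref (2) + 1 (progress) + spacers (2) = 5
    // deficit = 5 - 2 = 3; reducible = 6 - 1 = 5; reduce_by = 3
    //   -> options_height = 3, remaining = 5
    // progress takes 1 (remaining = 4); spacer_after_options takes 1 because
    // remaining (4) > footer_pref (2); footer takes 2; spacer_after_question
    // takes the last row; grow_by = min(0, 9 - 3) = 0, so options stay at 3.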
@@ -203,7 +257,7 @@ impl RequestUserInputOverlay {
     /// Handles both tight layout (when space is constrained) and normal layout
     /// (when there's sufficient space for all elements).
     ///
-    /// Returns: (question_height, progress_height, answer_title_height, notes_title_height, notes_height, options_height, footer_lines)
+    /// Returns: (question_height, progress_height, spacer_after_question, options_height, spacer_after_options, notes_title_height, notes_height, footer_lines)
     fn layout_without_options(
         &self,
         available_height: u16,
@@ -211,7 +265,7 @@ impl RequestUserInputOverlay {
         notes_pref_height: u16,
         footer_pref: u16,
         question_lines: &mut Vec<String>,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
         let required = 1u16.saturating_add(question_height);
         if required > available_height {
             self.layout_without_options_tight(available_height, question_height, question_lines)
@@ -231,12 +285,12 @@ impl RequestUserInputOverlay {
         available_height: u16,
         question_height: u16,
         question_lines: &mut Vec<String>,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
         let max_question_height = available_height.saturating_sub(1);
         let adjusted_question_height = question_height.min(max_question_height);
         question_lines.truncate(adjusted_question_height as usize);
 
-        (adjusted_question_height, 0, 0, 0, 0, 0, 0)
+        (adjusted_question_height, 0, 0, 0, 0, 0, 0, 0)
     }
 
     /// Normal layout for no-options case: allocate space for notes, footer, and progress.
@@ -246,7 +300,7 @@ impl RequestUserInputOverlay {
         question_height: u16,
         notes_pref_height: u16,
         footer_pref: u16,
-    ) -> (u16, u16, u16, u16, u16, u16, u16) {
+    ) -> (u16, u16, u16, u16, u16, u16, u16, u16) {
         let required = 1u16.saturating_add(question_height);
         let mut remaining = available_height.saturating_sub(required);
         let mut notes_height = notes_pref_height.min(remaining);
@@ -268,8 +322,9 @@ impl RequestUserInputOverlay {
             progress_height,
             0,
             0,
-            notes_height,
             0,
+            0,
+            notes_height,
             footer_lines,
         )
     }
@@ -283,7 +338,6 @@ impl RequestUserInputOverlay {
         Rect, // progress_area
         Rect, // header_area
         Rect, // question_area
-        Rect, // answer_title_area
         Rect, // options_area
         Rect, // notes_title_area
         Rect, // notes_area
@@ -311,14 +365,8 @@ impl RequestUserInputOverlay {
             height: heights.question_height,
         };
         cursor_y = cursor_y.saturating_add(heights.question_height);
+        cursor_y = cursor_y.saturating_add(heights.spacer_after_question);
 
-        let answer_title_area = Rect {
-            x: area.x,
-            y: cursor_y,
-            width: area.width,
-            height: heights.answer_title_height,
-        };
-        cursor_y = cursor_y.saturating_add(heights.answer_title_height);
         let options_area = Rect {
             x: area.x,
             y: cursor_y,
@@ -326,6 +374,7 @@ impl RequestUserInputOverlay {
             height: heights.options_height,
         };
         cursor_y = cursor_y.saturating_add(heights.options_height);
+        cursor_y = cursor_y.saturating_add(heights.spacer_after_options);
 
         let notes_title_area = Rect {
             x: area.x,
@@ -345,7 +394,6 @@ impl RequestUserInputOverlay {
             progress_area,
             header_area,
             question_area,
-            answer_title_area,
             options_area,
             notes_title_area,
             notes_area,
@@ -357,8 +405,34 @@ impl RequestUserInputOverlay {
 struct LayoutHeights {
     progress_height: u16,
     question_height: u16,
-    answer_title_height: u16,
+    spacer_after_question: u16,
     options_height: u16,
+    spacer_after_options: u16,
     notes_title_height: u16,
     notes_height: u16,
 }
+
+#[derive(Clone, Copy, Debug)]
+struct OptionsLayoutArgs {
+    available_height: u16,
+    width: u16,
+    question_height: u16,
+    notes_pref_height: u16,
+    footer_pref: u16,
+    notes_visible: bool,
+}
+
+#[derive(Clone, Copy, Debug)]
+struct OptionsNormalArgs {
+    available_height: u16,
+    question_height: u16,
+    notes_pref_height: u16,
+    footer_pref: u16,
+    notes_visible: bool,
+}
+
+#[derive(Clone, Copy, Debug)]
+struct OptionsHeights {
+    preferred: u16,
+    full: u16,
+}
File diff suppressed because it is too large
@@ -1,41 +1,58 @@
 use crossterm::event::KeyCode;
 use ratatui::buffer::Buffer;
 use ratatui::layout::Rect;
 use ratatui::style::Stylize;
 use ratatui::text::Line;
+use ratatui::text::Span;
 use ratatui::widgets::Paragraph;
 use ratatui::widgets::Widget;
+use unicode_width::UnicodeWidthChar;
+use unicode_width::UnicodeWidthStr;
 
 use crate::bottom_pane::selection_popup_common::menu_surface_inset;
 use crate::bottom_pane::selection_popup_common::menu_surface_padding_height;
 use crate::bottom_pane::selection_popup_common::render_menu_surface;
 use crate::bottom_pane::selection_popup_common::render_rows;
 use crate::key_hint;
 use crate::render::renderable::Renderable;
 
+use super::DESIRED_SPACERS_WHEN_NOTES_HIDDEN;
 use super::RequestUserInputOverlay;
+use super::TIP_SEPARATOR;
 
 impl Renderable for RequestUserInputOverlay {
     fn desired_height(&self, width: u16) -> u16 {
         let outer = Rect::new(0, 0, width, u16::MAX);
         let inner = menu_surface_inset(outer);
         let inner_width = inner.width.max(1);
+        let has_options = self.has_options();
         let question_height = self.wrapped_question_lines(inner_width).len();
-        let options_height = self.options_required_height(inner_width) as usize;
-        let notes_height = self.notes_input_height(inner_width) as usize;
-        let footer_height = if self.unanswered_count() > 0 { 2 } else { 1 };
+        let options_height = if has_options {
+            self.options_preferred_height(inner_width) as usize
+        } else {
+            0
+        };
+        let notes_visible = !has_options || self.notes_ui_visible();
+        let notes_height = if notes_visible {
+            self.notes_input_height(inner_width) as usize
+        } else {
+            0
+        };
+        let spacer_rows = if has_options && !notes_visible {
+            DESIRED_SPACERS_WHEN_NOTES_HIDDEN as usize
+        } else {
+            0
+        };
+        let footer_height = self.footer_required_height(inner_width) as usize;
 
         // Tight minimum height: progress + header + question + (optional) titles/options
         // + notes composer + footer + menu padding.
         let mut height = question_height
             .saturating_add(options_height)
+            .saturating_add(spacer_rows)
             .saturating_add(notes_height)
             .saturating_add(footer_height)
             .saturating_add(2); // progress + header
-        if self.has_options() {
-            height = height
-                .saturating_add(1) // answer title
-                .saturating_add(1); // notes title
+        if has_options && notes_visible {
+            height = height.saturating_add(1); // notes title
         }
         height = height.saturating_add(menu_surface_padding_height() as usize);
         height.max(8) as u16
@@ -63,12 +80,19 @@ impl RequestUserInputOverlay {
             return;
         }
         let sections = self.layout_sections(content_area);
+        let notes_visible = self.notes_ui_visible();
+        let unanswered = self.unanswered_count();
 
         // Progress header keeps the user oriented across multiple questions.
         let progress_line = if self.question_count() > 0 {
             let idx = self.current_index() + 1;
             let total = self.question_count();
-            Line::from(format!("Question {idx}/{total}").dim())
+            let base = format!("Question {idx}/{total}");
+            if unanswered > 0 {
+                Line::from(format!("{base} ({unanswered} unanswered)").dim())
+            } else {
+                Line::from(base.dim())
+            }
         } else {
             Line::from("No questions".dim())
         };
@@ -76,8 +100,13 @@ impl RequestUserInputOverlay {
 
         // Question title and wrapped prompt text.
         let question_header = self.current_question().map(|q| q.header.clone());
+        let answered = self.current_question_answered();
         let header_line = if let Some(header) = question_header {
-            Line::from(header.bold())
+            if answered {
+                Line::from(header.bold())
+            } else {
+                Line::from(header.cyan().bold())
+            }
         } else {
             Line::from("No questions".dim())
         };
@@ -101,54 +130,45 @@ impl RequestUserInputOverlay {
             );
         }
 
-        if sections.answer_title_area.height > 0 {
-            let answer_label = "Answer";
-            let answer_title = if self.focus_is_options() || self.focus_is_notes_without_options() {
-                answer_label.cyan().bold()
-            } else {
-                answer_label.dim()
-            };
-            Paragraph::new(Line::from(answer_title)).render(sections.answer_title_area, buf);
-        }
-
         // Build rows with selection markers for the shared selection renderer.
         let option_rows = self.option_rows();
 
         if self.has_options() {
-            let mut option_state = self
+            let mut options_ui_state = self
                 .current_answer()
-                .map(|answer| answer.option_state)
+                .map(|answer| answer.options_ui_state)
                 .unwrap_or_default();
             if sections.options_area.height > 0 {
                 // Ensure the selected option is visible in the scroll window.
-                option_state
+                options_ui_state
                     .ensure_visible(option_rows.len(), sections.options_area.height as usize);
                 render_rows(
                     sections.options_area,
                     buf,
                     &option_rows,
-                    &option_state,
+                    &options_ui_state,
                     option_rows.len().max(1),
                     "No options",
                 );
             }
         }
 
-        if sections.notes_title_area.height > 0 {
+        if notes_visible && sections.notes_title_area.height > 0 {
             let notes_label = if self.has_options()
                 && self
                     .current_answer()
-                    .is_some_and(|answer| answer.selected.is_some())
+                    .is_some_and(|answer| answer.committed_option_idx.is_some())
             {
                 if let Some(label) = self.current_option_label() {
-                    format!("Notes for {label} (optional)")
+                    format!("Notes for {label}")
                 } else {
-                    "Notes (optional)".to_string()
+                    "Notes".to_string()
                 }
             } else {
-                "Notes (optional)".to_string()
+                "Notes".to_string()
             };
-            let notes_title = if self.focus_is_notes() {
+            let notes_active = self.focus_is_notes();
+            let notes_title = if notes_active {
                 notes_label.as_str().cyan().bold()
             } else {
                 notes_label.as_str().dim()
@@ -156,7 +176,7 @@ impl RequestUserInputOverlay {
             Paragraph::new(Line::from(notes_title)).render(sections.notes_title_area, buf);
         }
 
-        if sections.notes_area.height > 0 {
+        if notes_visible && sections.notes_area.height > 0 {
             self.render_notes_input(sections.notes_area, buf);
         }
 
@@ -164,69 +184,55 @@ impl RequestUserInputOverlay {
             .notes_area
             .y
             .saturating_add(sections.notes_area.height);
-        if sections.footer_lines == 2 {
-            // Status line for unanswered count when any question is empty.
-            let warning = format!(
-                "Unanswered: {} | Will submit as skipped",
-                self.unanswered_count()
-            );
-            Paragraph::new(Line::from(warning.dim())).render(
-                Rect {
-                    x: content_area.x,
-                    y: footer_y,
-                    width: content_area.width,
-                    height: 1,
-                },
-                buf,
-            );
+        let footer_area = Rect {
+            x: content_area.x,
+            y: footer_y,
+            width: content_area.width,
+            height: sections.footer_lines,
+        };
+        if footer_area.height == 0 {
+            return;
         }
-        let hint_y = footer_y.saturating_add(sections.footer_lines.saturating_sub(1));
-        // Footer hints (selection index + navigation keys).
-        let mut hint_spans = Vec::new();
-        if self.has_options() {
-            let options_len = self.options_len();
-            let option_index = self.selected_option_index().map_or(0, |idx| idx + 1);
-            hint_spans.extend(vec![
-                format!("Option {option_index} of {options_len}").into(),
-                " | ".into(),
-            ]);
-        }
-        hint_spans.extend(vec![
-            key_hint::plain(KeyCode::Up).into(),
-            "/".into(),
-            key_hint::plain(KeyCode::Down).into(),
-            " scroll | ".into(),
-            key_hint::plain(KeyCode::Enter).into(),
-            " next question | ".into(),
-        ]);
-        if self.question_count() > 1 {
-            hint_spans.extend(vec![
-                key_hint::plain(KeyCode::PageUp).into(),
-                " prev | ".into(),
-                key_hint::plain(KeyCode::PageDown).into(),
-                " next | ".into(),
-            ]);
-        }
-        hint_spans.extend(vec![
-            key_hint::plain(KeyCode::Esc).into(),
-            " interrupt".into(),
-        ]);
-        Paragraph::new(Line::from(hint_spans).dim()).render(
-            Rect {
-                x: content_area.x,
-                y: hint_y,
-                width: content_area.width,
-                height: 1,
-            },
-            buf,
-        );
+        let tip_lines = self.footer_tip_lines(footer_area.width);
+        for (row_idx, tips) in tip_lines
+            .into_iter()
+            .take(footer_area.height as usize)
+            .enumerate()
+        {
+            let mut spans = Vec::new();
+            for (tip_idx, tip) in tips.into_iter().enumerate() {
+                if tip_idx > 0 {
+                    spans.push(TIP_SEPARATOR.into());
+                }
+                if tip.highlight {
+                    spans.push(tip.text.cyan().bold().not_dim());
+                } else {
+                    spans.push(tip.text.into());
+                }
+            }
+            let line = Line::from(spans).dim();
+            let line = truncate_line_word_boundary_with_ellipsis(line, footer_area.width as usize);
+            let row_area = Rect {
+                x: footer_area.x,
+                y: footer_area.y.saturating_add(row_idx as u16),
+                width: footer_area.width,
+                height: 1,
+            };
+            Paragraph::new(line).render(row_area, buf);
+        }
     }
 
     /// Return the cursor position when editing notes, if visible.
     pub(super) fn cursor_pos_impl(&self, area: Rect) -> Option<(u16, u16)> {
+        let has_options = self.has_options();
+        let notes_visible = self.notes_ui_visible();
+
         if !self.focus_is_notes() {
             return None;
         }
+        if has_options && !notes_visible {
+            return None;
+        }
         let content_area = menu_surface_inset(area);
         if content_area.width == 0 || content_area.height == 0 {
             return None;
@@ -246,16 +252,118 @@ impl RequestUserInputOverlay {
|
||||
}
|
||||
self.composer.render(area, buf);
|
||||
}
|
||||
|
||||
fn focus_is_options(&self) -> bool {
|
||||
matches!(self.focus, super::Focus::Options)
|
||||
}
|
||||
|
||||
fn focus_is_notes(&self) -> bool {
|
||||
matches!(self.focus, super::Focus::Notes)
|
||||
}
|
||||
|
||||
fn focus_is_notes_without_options(&self) -> bool {
|
||||
!self.has_options() && self.focus_is_notes()
|
||||
}
|
||||
}
|
||||
|
||||
fn line_width(line: &Line<'_>) -> usize {
    line.iter()
        .map(|span| UnicodeWidthStr::width(span.content.as_ref()))
        .sum()
}

/// Truncate a styled line to `max_width`, preferring a word boundary, and append an ellipsis.
///
/// This walks spans character-by-character, tracking the last width-safe position and the last
/// whitespace boundary within the available width (excluding the ellipsis width). If the line
/// overflows, it truncates at the last word boundary when possible (falling back to the last
/// fitting character), trims trailing whitespace, then appends an ellipsis styled to match the
/// last visible span (or the line style if nothing was kept).
fn truncate_line_word_boundary_with_ellipsis(
    line: Line<'static>,
    max_width: usize,
) -> Line<'static> {
    if max_width == 0 {
        return Line::from(Vec::<Span<'static>>::new());
    }

    if line_width(&line) <= max_width {
        return line;
    }

    let ellipsis = "…";
    let ellipsis_width = UnicodeWidthStr::width(ellipsis);
    if ellipsis_width >= max_width {
        return Line::from(ellipsis);
    }
    let limit = max_width.saturating_sub(ellipsis_width);

    #[derive(Clone, Copy)]
    struct BreakPoint {
        span_idx: usize,
        byte_end: usize,
    }

    // Track display width as we scan, along with the best "cut here" positions.
    let mut used = 0usize;
    let mut last_fit: Option<BreakPoint> = None;
    let mut last_word_break: Option<BreakPoint> = None;
    let mut overflowed = false;

    'outer: for (span_idx, span) in line.spans.iter().enumerate() {
        let text = span.content.as_ref();
        for (byte_idx, ch) in text.char_indices() {
            let ch_width = UnicodeWidthChar::width(ch).unwrap_or(0);
            if used.saturating_add(ch_width) > limit {
                overflowed = true;
                break 'outer;
            }
            used = used.saturating_add(ch_width);
            let bp = BreakPoint {
                span_idx,
                byte_end: byte_idx + ch.len_utf8(),
            };
            last_fit = Some(bp);
            if ch.is_whitespace() {
                last_word_break = Some(bp);
            }
        }
    }

    // If we never overflowed, the original line already fits.
    if !overflowed {
        return line;
    }

    // Prefer breaking on whitespace; otherwise fall back to the last fitting character.
    let chosen_break = last_word_break.or(last_fit);
    let Some(chosen_break) = chosen_break else {
        return Line::from(ellipsis);
    };

    let line_style = line.style;
    let mut spans_out: Vec<Span<'static>> = Vec::new();
    for (idx, span) in line.spans.into_iter().enumerate() {
        if idx < chosen_break.span_idx {
            spans_out.push(span);
            continue;
        }
        if idx == chosen_break.span_idx {
            let text = span.content.into_owned();
            let truncated = text[..chosen_break.byte_end].to_string();
            if !truncated.is_empty() {
                spans_out.push(Span::styled(truncated, span.style));
            }
        }
        break;
    }

    while let Some(last) = spans_out.last_mut() {
        let trimmed = last
            .content
            .trim_end_matches(char::is_whitespace)
            .to_string();
        if trimmed.is_empty() {
            spans_out.pop();
        } else {
            last.content = trimmed.into();
            break;
        }
    }

    let ellipsis_style = spans_out
        .last()
        .map(|span| span.style)
        .unwrap_or(line_style);
    spans_out.push(Span::styled(ellipsis, ellipsis_style));

    Line::from(spans_out).style(line_style)
}

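To make the break-point selection concrete, here is a small usage sketch (not part of the diff). The `demo` function and its literal strings are illustrative; it assumes ratatui's `Line`/`Span` types and the helper above in scope:

use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;

fn demo() {
    // "Enter: submit answer | Esc: interrupt" is 37 columns wide.
    let line = Line::from(vec![
        Span::from("Enter: submit answer | "),
        "Esc: interrupt".cyan().bold(),
    ]);
    // max_width 18 reserves 1 column for "…", leaving a 17-column budget;
    // the last whitespace inside that budget is the space after "submit".
    let truncated = truncate_line_word_boundary_with_ellipsis(line, 18);
    let text: String = truncated.spans.iter().map(|s| s.content.as_ref()).collect();
    assert_eq!(text, "Enter: submit…"); // trailing space trimmed, ellipsis appended
}
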
@@ -0,0 +1,16 @@
---
source: tui/src/bottom_pane/request_user_input/mod.rs
expression: "render_snapshot(&overlay, area)"
---

Question 1/2 (1 unanswered)
Pick one
Choose an option.

( ) Option 1 First choice.
(x) Option 2 Second choice.
( ) Option 3 Third choice.

Option 2 of 3 | ↑/↓ scroll | Tab: add notes
Enter: submit answer | Ctrl+n next
Esc: interrupt
@@ -2,12 +2,12 @@
 source: tui/src/bottom_pane/request_user_input/mod.rs
 expression: "render_snapshot(&overlay, area)"
 ---
 
-Question 1/1
+Question 1/1 (1 unanswered)
 Goal
 Share details.
 
 › Type your answer (optional)
 
-Unanswered: 1 | Will submit as skipped
-↑/↓ scroll | enter next question | esc interrupt
+
+Enter: submit answer | Esc: interrupt
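Snapshots like these are typically asserted with insta; a hedged sketch of the driving test follows. Only `render_snapshot` and `RequestUserInputOverlay` are named in the diff; the fixture helper and exact signatures are assumptions:

#[cfg(test)]
mod tests {
    use super::*;
    use ratatui::layout::Rect;

    #[test]
    fn request_user_input_snapshot() {
        // sample_overlay() is a hypothetical fixture building an overlay with
        // one optional free-form question, matching the "Question 1/1" case.
        let overlay: RequestUserInputOverlay = sample_overlay();
        let area = Rect::new(0, 0, 60, 12);
        // insta compares against the checked-in .snap file and fails on drift.
        insta::assert_snapshot!(render_snapshot(&overlay, area));
    }
}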
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user