mirror of
https://github.com/openai/codex.git
synced 2026-04-08 23:04:47 +00:00
Compare commits
106 Commits
codex-appl
...
codex/sess
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
48659e216a | ||
|
|
fb3dcfde1d | ||
|
|
6c36e7d688 | ||
|
|
f480b98984 | ||
|
|
e9702411ab | ||
|
|
d45513ce5a | ||
|
|
8614f92fc4 | ||
|
|
59af4a730c | ||
|
|
470b3592e6 | ||
|
|
8a13f82204 | ||
|
|
252d79f5eb | ||
|
|
365154d5da | ||
|
|
b525b5a3a7 | ||
|
|
f1a2b920f9 | ||
|
|
0b9e42f6f7 | ||
|
|
5d1671ca70 | ||
|
|
2b9bf5d3d4 | ||
|
|
cd591dc457 | ||
|
|
feb4f0051a | ||
|
|
82506527f1 | ||
|
|
3b32de4fab | ||
|
|
4cc6818996 | ||
|
|
413c1e1fdf | ||
|
|
89f1a44afa | ||
|
|
99f167e6bf | ||
|
|
68e16baabe | ||
|
|
2a8c3a2a52 | ||
|
|
e2bb45bb24 | ||
|
|
51f75e2f56 | ||
|
|
741e2fdeb8 | ||
|
|
90320fc51a | ||
|
|
24c598e8a9 | ||
|
|
4bb507d2c4 | ||
|
|
232db0613a | ||
|
|
9d13d29acd | ||
|
|
806e5f7c69 | ||
|
|
5fe9ef06ce | ||
|
|
ee12772e80 | ||
|
|
b34a3a6e92 | ||
|
|
0de7662dab | ||
|
|
1f2411629f | ||
|
|
d2df7c54b2 | ||
|
|
a504d8f0fa | ||
|
|
9f737c28dd | ||
|
|
756c45ec61 | ||
|
|
e88c2cf4d7 | ||
|
|
54faa76960 | ||
|
|
e62d645e67 | ||
|
|
03edd4fbee | ||
|
|
36cd163504 | ||
|
|
73dab2046f | ||
|
|
03c07956cf | ||
|
|
756ba8baae | ||
|
|
1525bbdb9a | ||
|
|
46b7e4fb2c | ||
|
|
9bb813353e | ||
|
|
bd30bad96f | ||
|
|
4eabc3dcb1 | ||
|
|
0225479f0d | ||
|
|
2b4cc221df | ||
|
|
ded559680d | ||
|
|
4ce97cef02 | ||
|
|
54dbbb839e | ||
|
|
f44eb29181 | ||
|
|
4294031a93 | ||
|
|
fb41a79f37 | ||
|
|
ab58141e22 | ||
|
|
82b061afb2 | ||
|
|
624c69e840 | ||
|
|
e65ee38579 | ||
|
|
d9b899309d | ||
|
|
b5edeb98a0 | ||
|
|
152b676597 | ||
|
|
4fd5c35c4f | ||
|
|
cca36c5681 | ||
|
|
9e19004bc2 | ||
|
|
39097ab65d | ||
|
|
3a22e10172 | ||
|
|
c9e706f8b6 | ||
|
|
8a19dbb177 | ||
|
|
6edb865cc6 | ||
|
|
13d828d236 | ||
|
|
e4f1b3a65e | ||
|
|
91ca49e53c | ||
|
|
8d19646861 | ||
|
|
f263607c60 | ||
|
|
eaf12beacf | ||
|
|
4b8bab6ad3 | ||
|
|
a70aee1a1e | ||
|
|
567d2603b8 | ||
|
|
3d8cdac797 | ||
|
|
1d4b5f130c | ||
|
|
dc07108af8 | ||
|
|
faab4d39e1 | ||
|
|
0ab8eda375 | ||
|
|
a71fc47cf8 | ||
|
|
1cc87019b4 | ||
|
|
0f7394883e | ||
|
|
a3b3e7a6cc | ||
|
|
cc8fd0ff65 | ||
|
|
af8a9d2d2b | ||
|
|
b15c918836 | ||
|
|
14f95db57b | ||
|
|
b4787bf4c0 | ||
|
|
6fff9955f1 | ||
|
|
8cd7f20b48 |
2
.bazelrc
2
.bazelrc
@@ -44,6 +44,7 @@ common --remote_timeout=3600
|
||||
common --noexperimental_throttle_remote_action_building
|
||||
common --experimental_remote_execution_keepalive
|
||||
common --grpc_keepalive_time=30s
|
||||
common --experimental_remote_downloader=grpcs://remote.buildbuddy.io
|
||||
|
||||
# This limits both in-flight executions and concurrent downloads. Even with high number
|
||||
# of jobs execution will still be limited by CPU cores, so this just pays a bit of
|
||||
@@ -124,7 +125,6 @@ build:argument-comment-lint --@rules_rust//rust/toolchain/channel=nightly
|
||||
common:ci-windows --config=ci-bazel
|
||||
common:ci-windows --build_metadata=TAG_os=windows
|
||||
common:ci-windows --repo_contents_cache=D:/a/.cache/bazel-repo-contents-cache
|
||||
common:ci-windows --repository_cache=D:/a/.cache/bazel-repo-cache
|
||||
|
||||
# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
|
||||
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.
|
||||
|
||||
31
.github/actions/setup-bazel-ci/action.yml
vendored
31
.github/actions/setup-bazel-ci/action.yml
vendored
@@ -9,9 +9,9 @@ inputs:
|
||||
required: false
|
||||
default: "false"
|
||||
outputs:
|
||||
cache-hit:
|
||||
description: Whether the Bazel repository cache key was restored exactly.
|
||||
value: ${{ steps.cache_bazel_repository_restore.outputs.cache-hit }}
|
||||
repository-cache-path:
|
||||
description: Filesystem path used for the Bazel repository cache.
|
||||
value: ${{ steps.configure_bazel_repository_cache.outputs.repository-cache-path }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
@@ -41,17 +41,16 @@ runs:
|
||||
- name: Set up Bazel
|
||||
uses: bazelbuild/setup-bazelisk@v3
|
||||
|
||||
# Restore bazel repository cache so we don't have to redownload all the external dependencies
|
||||
# on every CI run.
|
||||
- name: Restore bazel repository cache
|
||||
id: cache_bazel_repository_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cache/bazel-repo-cache
|
||||
key: bazel-cache-${{ inputs.target }}-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }}
|
||||
restore-keys: |
|
||||
bazel-cache-${{ inputs.target }}
|
||||
- name: Configure Bazel repository cache
|
||||
id: configure_bazel_repository_cache
|
||||
shell: pwsh
|
||||
run: |
|
||||
# Keep the repository cache under HOME on all runners. Windows `D:\a`
|
||||
# cache paths match `.bazelrc`, but `actions/cache/restore` currently
|
||||
# returns HTTP 400 for that path in the Windows clippy job.
|
||||
$repositoryCachePath = Join-Path $HOME '.cache/bazel-repo-cache'
|
||||
"repository-cache-path=$repositoryCachePath" | Out-File -FilePath $env:GITHUB_OUTPUT -Encoding utf8 -Append
|
||||
"BAZEL_REPOSITORY_CACHE=$repositoryCachePath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
|
||||
|
||||
- name: Configure Bazel output root (Windows)
|
||||
if: runner.os == 'Windows'
|
||||
@@ -65,10 +64,6 @@ runs:
|
||||
$repoContentsCache = Join-Path $env:RUNNER_TEMP "bazel-repo-contents-cache-$env:GITHUB_RUN_ID-$env:GITHUB_JOB"
|
||||
"BAZEL_OUTPUT_USER_ROOT=$bazelOutputUserRoot" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
|
||||
"BAZEL_REPO_CONTENTS_CACHE=$repoContentsCache" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
|
||||
if (-not $hasDDrive) {
|
||||
$repositoryCache = Join-Path $env:USERPROFILE '.cache\bazel-repo-cache'
|
||||
"BAZEL_REPOSITORY_CACHE=$repositoryCache" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
|
||||
}
|
||||
|
||||
- name: Expose MSVC SDK environment (Windows)
|
||||
if: runner.os == 'Windows'
|
||||
|
||||
30
.github/scripts/run-bazel-ci.sh
vendored
30
.github/scripts/run-bazel-ci.sh
vendored
@@ -5,6 +5,7 @@ set -euo pipefail
|
||||
print_failed_bazel_test_logs=0
|
||||
use_node_test_env=0
|
||||
remote_download_toplevel=0
|
||||
windows_msvc_host_platform=0
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
@@ -20,6 +21,10 @@ while [[ $# -gt 0 ]]; do
|
||||
remote_download_toplevel=1
|
||||
shift
|
||||
;;
|
||||
--windows-msvc-host-platform)
|
||||
windows_msvc_host_platform=1
|
||||
shift
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
break
|
||||
@@ -32,7 +37,7 @@ while [[ $# -gt 0 ]]; do
|
||||
done
|
||||
|
||||
if [[ $# -eq 0 ]]; then
|
||||
echo "Usage: $0 [--print-failed-test-logs] [--use-node-test-env] [--remote-download-toplevel] -- <bazel args> -- <targets>" >&2
|
||||
echo "Usage: $0 [--print-failed-test-logs] [--use-node-test-env] [--remote-download-toplevel] [--windows-msvc-host-platform] -- <bazel args> -- <targets>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -121,14 +126,35 @@ if [[ ${#bazel_args[@]} -eq 0 || ${#bazel_targets[@]} -eq 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ $use_node_test_env -eq 1 && "${RUNNER_OS:-}" != "Windows" ]]; then
|
||||
if [[ $use_node_test_env -eq 1 ]]; then
|
||||
# Bazel test sandboxes on macOS may resolve an older Homebrew `node`
|
||||
# before the `actions/setup-node` runtime on PATH.
|
||||
node_bin="$(which node)"
|
||||
if [[ "${RUNNER_OS:-}" == "Windows" ]]; then
|
||||
node_bin="$(cygpath -w "${node_bin}")"
|
||||
fi
|
||||
bazel_args+=("--test_env=CODEX_JS_REPL_NODE_PATH=${node_bin}")
|
||||
fi
|
||||
|
||||
post_config_bazel_args=()
|
||||
if [[ "${RUNNER_OS:-}" == "Windows" && $windows_msvc_host_platform -eq 1 ]]; then
|
||||
has_host_platform_override=0
|
||||
for arg in "${bazel_args[@]}"; do
|
||||
if [[ "$arg" == --host_platform=* ]]; then
|
||||
has_host_platform_override=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $has_host_platform_override -eq 0 ]]; then
|
||||
# Keep Windows Bazel targets on `windows-gnullvm` for cfg coverage, but opt
|
||||
# specific jobs into an MSVC exec platform when they need helper binaries
|
||||
# like Rust test wrappers and V8 generators to resolve a compatible host
|
||||
# toolchain.
|
||||
post_config_bazel_args+=("--host_platform=//:local_windows_msvc")
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $remote_download_toplevel -eq 1 ]]; then
|
||||
# Override the CI config's remote_download_minimal setting when callers need
|
||||
# the built artifact to exist on disk after the command completes.
|
||||
|
||||
6
.github/workflows/README.md
vendored
6
.github/workflows/README.md
vendored
@@ -5,15 +5,15 @@ The workflows in this directory are split so that pull requests get fast, review
|
||||
## Pull Requests
|
||||
|
||||
- `bazel.yml` is the main pre-merge verification path for Rust code.
|
||||
It runs Bazel `test` and Bazel `clippy` on the supported Bazel targets.
|
||||
It runs Bazel `test` and Bazel `clippy` on the supported Bazel targets,
|
||||
including the generated Rust test binaries needed to lint inline `#[cfg(test)]`
|
||||
code.
|
||||
- `rust-ci.yml` keeps the Cargo-native PR checks intentionally small:
|
||||
- `cargo fmt --check`
|
||||
- `cargo shear`
|
||||
- `argument-comment-lint` on Linux, macOS, and Windows
|
||||
- `tools/argument-comment-lint` package tests when the lint or its workflow wiring changes
|
||||
|
||||
The PR workflow still keeps the Linux lint lane on the default-targets-only invocation for now, but the released linter runs on Linux, macOS, and Windows before merge.
|
||||
|
||||
## Post-Merge On `main`
|
||||
|
||||
- `bazel.yml` also runs on pushes to `main`.
|
||||
|
||||
78
.github/workflows/bazel.yml
vendored
78
.github/workflows/bazel.yml
vendored
@@ -58,6 +58,20 @@ jobs:
|
||||
target: ${{ matrix.target }}
|
||||
install-test-prereqs: "true"
|
||||
|
||||
# Restore the Bazel repository cache explicitly so external dependencies
|
||||
# do not need to be re-downloaded on every CI run. Keep restore failures
|
||||
# non-fatal so transient cache-service errors degrade to a cold build
|
||||
# instead of failing the job.
|
||||
- name: Restore bazel repository cache
|
||||
id: cache_bazel_repository_restore
|
||||
continue-on-error: true
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
|
||||
key: bazel-cache-${{ matrix.target }}-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }}
|
||||
restore-keys: |
|
||||
bazel-cache-${{ matrix.target }}
|
||||
|
||||
- name: Check MODULE.bazel.lock is up to date
|
||||
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
|
||||
shell: bash
|
||||
@@ -82,9 +96,16 @@ jobs:
|
||||
-//third_party/v8:all
|
||||
)
|
||||
|
||||
bazel_wrapper_args=(
|
||||
--print-failed-test-logs
|
||||
--use-node-test-env
|
||||
)
|
||||
if [[ "${RUNNER_OS}" == "Windows" ]]; then
|
||||
bazel_wrapper_args+=(--windows-msvc-host-platform)
|
||||
fi
|
||||
|
||||
./.github/scripts/run-bazel-ci.sh \
|
||||
--print-failed-test-logs \
|
||||
--use-node-test-env \
|
||||
"${bazel_wrapper_args[@]}" \
|
||||
-- \
|
||||
test \
|
||||
--test_tag_filters=-argument-comment-lint \
|
||||
@@ -105,12 +126,11 @@ jobs:
|
||||
# Save bazel repository cache explicitly; make non-fatal so cache uploading
|
||||
# never fails the overall job. Only save when key wasn't hit.
|
||||
- name: Save bazel repository cache
|
||||
if: always() && !cancelled() && steps.setup_bazel.outputs.cache-hit != 'true'
|
||||
if: always() && !cancelled() && steps.cache_bazel_repository_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
|
||||
with:
|
||||
path: |
|
||||
~/.cache/bazel-repo-cache
|
||||
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
|
||||
key: bazel-cache-${{ matrix.target }}-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }}
|
||||
|
||||
clippy:
|
||||
@@ -141,28 +161,55 @@ jobs:
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
|
||||
# Restore the Bazel repository cache explicitly so external dependencies
|
||||
# do not need to be re-downloaded on every CI run. Keep restore failures
|
||||
# non-fatal so transient cache-service errors degrade to a cold build
|
||||
# instead of failing the job.
|
||||
- name: Restore bazel repository cache
|
||||
id: cache_bazel_repository_restore
|
||||
continue-on-error: true
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
|
||||
key: bazel-cache-${{ matrix.target }}-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }}
|
||||
restore-keys: |
|
||||
bazel-cache-${{ matrix.target }}
|
||||
|
||||
- name: Set up Bazel execution logs
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p "${RUNNER_TEMP}/bazel-execution-logs"
|
||||
echo "CODEX_BAZEL_EXECUTION_LOG_COMPACT_DIR=${RUNNER_TEMP}/bazel-execution-logs" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: bazel build --config=clippy //codex-rs/...
|
||||
- name: bazel build --config=clippy lint targets
|
||||
env:
|
||||
BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
|
||||
shell: bash
|
||||
run: |
|
||||
# Keep the initial Bazel clippy scope on codex-rs and out of the
|
||||
# V8 proof-of-concept target for now.
|
||||
bazel_clippy_args=(
|
||||
--config=clippy
|
||||
--build_metadata=COMMIT_SHA=${GITHUB_SHA}
|
||||
--build_metadata=TAG_job=clippy
|
||||
)
|
||||
if [[ "${RUNNER_OS}" == "Windows" ]]; then
|
||||
# Some explicit targets pulled in through //codex-rs/... are
|
||||
# intentionally incompatible with `//:local_windows`, but the lint
|
||||
# aspect still traverses their compatible Rust deps.
|
||||
bazel_clippy_args+=(--skip_incompatible_explicit_targets)
|
||||
fi
|
||||
|
||||
bazel_target_lines="$(./scripts/list-bazel-clippy-targets.sh)"
|
||||
bazel_targets=()
|
||||
while IFS= read -r target; do
|
||||
bazel_targets+=("${target}")
|
||||
done <<< "${bazel_target_lines}"
|
||||
|
||||
./.github/scripts/run-bazel-ci.sh \
|
||||
-- \
|
||||
build \
|
||||
--config=clippy \
|
||||
--build_metadata=COMMIT_SHA=${GITHUB_SHA} \
|
||||
--build_metadata=TAG_job=clippy \
|
||||
"${bazel_clippy_args[@]}" \
|
||||
-- \
|
||||
//codex-rs/... \
|
||||
-//codex-rs/v8-poc:all
|
||||
"${bazel_targets[@]}"
|
||||
|
||||
- name: Upload Bazel execution logs
|
||||
if: always() && !cancelled()
|
||||
@@ -176,10 +223,9 @@ jobs:
|
||||
# Save bazel repository cache explicitly; make non-fatal so cache uploading
|
||||
# never fails the overall job. Only save when key wasn't hit.
|
||||
- name: Save bazel repository cache
|
||||
if: always() && !cancelled() && steps.setup_bazel.outputs.cache-hit != 'true'
|
||||
if: always() && !cancelled()
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
|
||||
with:
|
||||
path: |
|
||||
~/.cache/bazel-repo-cache
|
||||
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
|
||||
key: bazel-cache-${{ matrix.target }}-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }}
|
||||
|
||||
1
.github/workflows/rust-ci-full.yml
vendored
1
.github/workflows/rust-ci-full.yml
vendored
@@ -3,6 +3,7 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- "**full-ci**"
|
||||
workflow_dispatch:
|
||||
|
||||
# CI builds in debug (dev) for faster signal.
|
||||
|
||||
7
.github/workflows/rust-release.yml
vendored
7
.github/workflows/rust-release.yml
vendored
@@ -584,14 +584,11 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
with:
|
||||
node-version: 22
|
||||
# Node 24 bundles npm >= 11.5.1, which trusted publishing requires.
|
||||
node-version: 24
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
scope: "@openai"
|
||||
|
||||
# Trusted publishing requires npm CLI version 11.5.1 or later.
|
||||
- name: Update npm
|
||||
run: npm install -g npm@latest
|
||||
|
||||
- name: Download npm tarballs from release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
@@ -21,6 +21,7 @@ In the codex-rs folder where the rust code lives:
|
||||
- Newly added traits should include doc comments that explain their role and how implementations are expected to use them.
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- Prefer private modules and explicitly exported public crate API.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
|
||||
- If you change Rust dependencies (`Cargo.toml` or `Cargo.lock`), run `just bazel-lock-update` from the
|
||||
repo root to refresh `MODULE.bazel.lock`, and include that lockfile update in the same change.
|
||||
|
||||
34
MODULE.bazel
34
MODULE.bazel
@@ -72,6 +72,7 @@ single_version_override(
|
||||
patches = [
|
||||
"//patches:rules_rs_windows_gnullvm_exec.patch",
|
||||
"//patches:rules_rs_delete_git_worktree_pointer.patch",
|
||||
"//patches:rules_rs_windows_exec_linker.patch",
|
||||
],
|
||||
version = "0.0.43",
|
||||
)
|
||||
@@ -85,7 +86,9 @@ rules_rust.patch(
|
||||
"//patches:rules_rust_windows_gnullvm_build_script.patch",
|
||||
"//patches:rules_rust_windows_exec_msvc_build_script_env.patch",
|
||||
"//patches:rules_rust_windows_bootstrap_process_wrapper_linker.patch",
|
||||
"//patches:rules_rust_windows_build_script_runner_paths.patch",
|
||||
"//patches:rules_rust_windows_msvc_direct_link_args.patch",
|
||||
"//patches:rules_rust_windows_process_wrapper_skip_temp_outputs.patch",
|
||||
"//patches:rules_rust_windows_exec_bin_target.patch",
|
||||
"//patches:rules_rust_windows_exec_std.patch",
|
||||
"//patches:rules_rust_windows_exec_rustc_dev_rlib.patch",
|
||||
@@ -189,8 +192,18 @@ bazel_dep(name = "zstd", version = "1.5.7")
|
||||
|
||||
crate.annotation(
|
||||
crate = "zstd-sys",
|
||||
gen_build_script = "off",
|
||||
deps = ["@zstd"],
|
||||
gen_build_script = "on",
|
||||
patch_args = ["-p1"],
|
||||
patches = [
|
||||
"//patches:zstd-sys_windows_msvc_include_dirs.patch",
|
||||
],
|
||||
)
|
||||
crate.annotation(
|
||||
crate = "ring",
|
||||
patch_args = ["-p1"],
|
||||
patches = [
|
||||
"//patches:ring_windows_msvc_include_dirs.patch",
|
||||
],
|
||||
)
|
||||
crate.annotation(
|
||||
build_script_env = {
|
||||
@@ -215,6 +228,13 @@ inject_repo(crate, "zstd")
|
||||
use_repo(crate, "argument_comment_lint_crates")
|
||||
|
||||
bazel_dep(name = "bzip2", version = "1.0.8.bcr.3")
|
||||
single_version_override(
|
||||
module_name = "bzip2",
|
||||
patch_strip = 1,
|
||||
patches = [
|
||||
"//patches:bzip2_windows_stack_args.patch",
|
||||
],
|
||||
)
|
||||
|
||||
crate.annotation(
|
||||
crate = "bzip2-sys",
|
||||
@@ -228,13 +248,19 @@ bazel_dep(name = "zlib", version = "1.3.1.bcr.8")
|
||||
|
||||
crate.annotation(
|
||||
crate = "libz-sys",
|
||||
gen_build_script = "off",
|
||||
deps = ["@zlib"],
|
||||
gen_build_script = "on",
|
||||
)
|
||||
|
||||
inject_repo(crate, "zlib")
|
||||
|
||||
bazel_dep(name = "xz", version = "5.4.5.bcr.8")
|
||||
single_version_override(
|
||||
module_name = "xz",
|
||||
patch_strip = 1,
|
||||
patches = [
|
||||
"//patches:xz_windows_stack_args.patch",
|
||||
],
|
||||
)
|
||||
|
||||
crate.annotation(
|
||||
crate = "lzma-sys",
|
||||
|
||||
4
MODULE.bazel.lock
generated
4
MODULE.bazel.lock
generated
@@ -925,7 +925,7 @@
|
||||
"hyper-rustls_0.27.7": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"cfg-if\",\"req\":\"^1\"},{\"name\":\"http\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1\"},{\"default_features\":false,\"name\":\"hyper\",\"req\":\"^1\"},{\"default_features\":false,\"features\":[\"client-legacy\",\"tokio\"],\"name\":\"hyper-util\",\"req\":\"^0.1\"},{\"default_features\":false,\"features\":[\"server-auto\"],\"kind\":\"dev\",\"name\":\"hyper-util\",\"req\":\"^0.1\"},{\"name\":\"log\",\"optional\":true,\"req\":\"^0.4.4\"},{\"name\":\"pki-types\",\"package\":\"rustls-pki-types\",\"req\":\"^1\"},{\"default_features\":false,\"name\":\"rustls\",\"req\":\"^0.23\"},{\"default_features\":false,\"features\":[\"tls12\"],\"kind\":\"dev\",\"name\":\"rustls\",\"req\":\"^0.23\"},{\"name\":\"rustls-native-certs\",\"optional\":true,\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rustls-pemfile\",\"req\":\"^2\"},{\"name\":\"rustls-platform-verifier\",\"optional\":true,\"req\":\"^0.6\"},{\"name\":\"tokio\",\"req\":\"^1.0\"},{\"features\":[\"io-std\",\"macros\",\"net\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"tokio-rustls\",\"req\":\"^0.26\"},{\"name\":\"tower-service\",\"req\":\"^0.3\"},{\"name\":\"webpki-roots\",\"optional\":true,\"req\":\"^1\"}],\"features\":{\"aws-lc-rs\":[\"rustls/aws_lc_rs\"],\"default\":[\"native-tokio\",\"http1\",\"tls12\",\"logging\",\"aws-lc-rs\"],\"fips\":[\"aws-lc-rs\",\"rustls/fips\"],\"http1\":[\"hyper-util/http1\"],\"http2\":[\"hyper-util/http2\"],\"logging\":[\"log\",\"tokio-rustls/logging\",\"rustls/logging\"],\"native-tokio\":[\"rustls-native-certs\"],\"ring\":[\"rustls/ring\"],\"tls12\":[\"tokio-rustls/tls12\",\"rustls/tls12\"],\"webpki-tokio\":[\"webpki-roots\"]}}",
|
||||
"hyper-timeout_0.5.2": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1\"},{\"name\":\"hyper\",\"req\":\"^1.1\"},{\"features\":[\"http1\"],\"kind\":\"dev\",\"name\":\"hyper\",\"req\":\"^1.1\"},{\"kind\":\"dev\",\"name\":\"hyper-tls\",\"req\":\"^0.6\"},{\"features\":[\"client-legacy\",\"http1\"],\"name\":\"hyper-util\",\"req\":\"^0.1.10\"},{\"features\":[\"client-legacy\",\"http1\",\"server\",\"server-graceful\"],\"kind\":\"dev\",\"name\":\"hyper-util\",\"req\":\"^0.1.10\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2\"},{\"name\":\"tokio\",\"req\":\"^1.35\"},{\"features\":[\"io-std\",\"io-util\",\"macros\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.35\"},{\"name\":\"tower-service\",\"req\":\"^0.3\"}],\"features\":{}}",
|
||||
"hyper-tls_0.6.0": "{\"dependencies\":[{\"name\":\"bytes\",\"req\":\"^1\"},{\"name\":\"http-body-util\",\"req\":\"^0.1.0\"},{\"name\":\"hyper\",\"req\":\"^1\"},{\"features\":[\"client-legacy\",\"tokio\"],\"name\":\"hyper-util\",\"req\":\"^0.1.0\"},{\"features\":[\"http1\"],\"kind\":\"dev\",\"name\":\"hyper-util\",\"req\":\"^0.1.0\"},{\"name\":\"native-tls\",\"req\":\"^0.2.1\"},{\"name\":\"tokio\",\"req\":\"^1\"},{\"features\":[\"io-std\",\"macros\",\"io-util\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.0.0\"},{\"name\":\"tokio-native-tls\",\"req\":\"^0.3\"},{\"name\":\"tower-service\",\"req\":\"^0.3\"}],\"features\":{\"alpn\":[\"native-tls/alpn\"],\"vendored\":[\"native-tls/vendored\"]}}",
|
||||
"hyper-util_0.1.19": "{\"dependencies\":[{\"name\":\"base64\",\"optional\":true,\"req\":\"^0.22\"},{\"name\":\"bytes\",\"req\":\"^1.7.1\"},{\"kind\":\"dev\",\"name\":\"bytes\",\"req\":\"^1\"},{\"name\":\"futures-channel\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"futures-core\",\"req\":\"^0.3\"},{\"default_features\":false,\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3.16\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3.16\"},{\"name\":\"http\",\"req\":\"^1.0\"},{\"name\":\"http-body\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1.0\"},{\"name\":\"hyper\",\"req\":\"^1.8.0\"},{\"features\":[\"full\"],\"kind\":\"dev\",\"name\":\"hyper\",\"req\":\"^1.4.0\"},{\"name\":\"ipnet\",\"optional\":true,\"req\":\"^2.9\"},{\"name\":\"libc\",\"optional\":true,\"req\":\"^0.2\"},{\"name\":\"percent-encoding\",\"optional\":true,\"req\":\"^2.3\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.4\"},{\"kind\":\"dev\",\"name\":\"pnet_datalink\",\"req\":\"^0.35.0\",\"target\":\"cfg(any(target_os = \\\"linux\\\", target_os = \\\"macos\\\"))\"},{\"kind\":\"dev\",\"name\":\"pretty_env_logger\",\"req\":\"^0.5\"},{\"features\":[\"all\"],\"name\":\"socket2\",\"optional\":true,\"req\":\">=0.5.9, <0.7\"},{\"name\":\"system-configuration\",\"optional\":true,\"req\":\">=0.5, <0.7\",\"target\":\"cfg(target_os = 
\\\"macos\\\")\"},{\"default_features\":false,\"name\":\"tokio\",\"optional\":true,\"req\":\"^1\"},{\"features\":[\"macros\",\"test-util\",\"signal\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"name\":\"tower-layer\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"tower-service\",\"optional\":true,\"req\":\"^0.3\"},{\"kind\":\"dev\",\"name\":\"tower-test\",\"req\":\"^0.4\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"windows-registry\",\"optional\":true,\"req\":\">=0.3, <0.7\",\"target\":\"cfg(windows)\"}],\"features\":{\"__internal_happy_eyeballs_tests\":[],\"client\":[\"hyper/client\",\"tokio/net\",\"dep:tracing\",\"dep:futures-channel\",\"dep:tower-service\"],\"client-legacy\":[\"client\",\"dep:socket2\",\"tokio/sync\",\"dep:libc\",\"dep:futures-util\"],\"client-pool\":[\"client\",\"dep:futures-util\",\"dep:tower-layer\"],\"client-proxy\":[\"client\",\"dep:base64\",\"dep:ipnet\",\"dep:percent-encoding\"],\"client-proxy-system\":[\"dep:system-configuration\",\"dep:windows-registry\"],\"default\":[],\"full\":[\"client\",\"client-legacy\",\"client-pool\",\"client-proxy\",\"client-proxy-system\",\"server\",\"server-auto\",\"server-graceful\",\"service\",\"http1\",\"http2\",\"tokio\",\"tracing\"],\"http1\":[\"hyper/http1\"],\"http2\":[\"hyper/http2\"],\"server\":[\"hyper/server\"],\"server-auto\":[\"server\",\"http1\",\"http2\"],\"server-graceful\":[\"server\",\"tokio/sync\"],\"service\":[\"dep:tower-service\"],\"tokio\":[\"dep:tokio\",\"tokio/rt\",\"tokio/time\"],\"tracing\":[\"dep:tracing\"]}}",
|
||||
"hyper-util_0.1.20": "{\"dependencies\":[{\"name\":\"base64\",\"optional\":true,\"req\":\"^0.22\"},{\"name\":\"bytes\",\"req\":\"^1.7.1\"},{\"kind\":\"dev\",\"name\":\"bytes\",\"req\":\"^1\"},{\"name\":\"futures-channel\",\"optional\":true,\"req\":\"^0.3\"},{\"default_features\":false,\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3.16\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3.16\"},{\"name\":\"http\",\"req\":\"^1.0\"},{\"name\":\"http-body\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1.0\"},{\"name\":\"hyper\",\"req\":\"^1.8.0\"},{\"features\":[\"full\"],\"kind\":\"dev\",\"name\":\"hyper\",\"req\":\"^1.4.0\"},{\"name\":\"ipnet\",\"optional\":true,\"req\":\"^2.9\"},{\"name\":\"libc\",\"optional\":true,\"req\":\"^0.2\"},{\"name\":\"percent-encoding\",\"optional\":true,\"req\":\"^2.3\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.4\"},{\"kind\":\"dev\",\"name\":\"pnet_datalink\",\"req\":\"^0.35.0\",\"target\":\"cfg(any(target_os = \\\"linux\\\", target_os = \\\"macos\\\"))\"},{\"kind\":\"dev\",\"name\":\"pretty_env_logger\",\"req\":\"^0.5\"},{\"features\":[\"all\"],\"name\":\"socket2\",\"optional\":true,\"req\":\">=0.5.9, <0.7\"},{\"name\":\"system-configuration\",\"optional\":true,\"req\":\"^0.7\",\"target\":\"cfg(target_os = \\\"macos\\\")\"},{\"default_features\":false,\"name\":\"tokio\",\"optional\":true,\"req\":\"^1\"},{\"features\":[\"macros\",\"test-util\",\"signal\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"name\":\"tower-layer\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"tower-service\",\"optional\":true,\"req\":\"^0.3\"},{\"kind\":\"dev\",\"name\":\"tower-test\",\"req\":\"^0.4\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"windows-registry\",\"optional\":true,\"req\":\">=0.3, 
<0.7\",\"target\":\"cfg(windows)\"}],\"features\":{\"__internal_happy_eyeballs_tests\":[],\"client\":[\"hyper/client\",\"tokio/net\",\"dep:tracing\",\"dep:futures-channel\",\"dep:tower-service\"],\"client-legacy\":[\"client\",\"dep:socket2\",\"tokio/sync\",\"dep:libc\",\"dep:futures-util\"],\"client-pool\":[\"client\",\"dep:futures-util\",\"dep:tower-layer\"],\"client-proxy\":[\"client\",\"dep:base64\",\"dep:ipnet\",\"dep:percent-encoding\"],\"client-proxy-system\":[\"dep:system-configuration\",\"dep:windows-registry\"],\"default\":[],\"full\":[\"client\",\"client-legacy\",\"client-pool\",\"client-proxy\",\"client-proxy-system\",\"server\",\"server-auto\",\"server-graceful\",\"service\",\"http1\",\"http2\",\"tokio\",\"tracing\"],\"http1\":[\"hyper/http1\"],\"http2\":[\"hyper/http2\"],\"server\":[\"hyper/server\"],\"server-auto\":[\"server\",\"http1\",\"http2\"],\"server-graceful\":[\"server\",\"tokio/sync\"],\"service\":[\"dep:tower-service\"],\"tokio\":[\"dep:tokio\",\"tokio/rt\",\"tokio/time\"],\"tracing\":[\"dep:tracing\"]}}",
|
||||
"hyper_1.8.1": "{\"dependencies\":[{\"name\":\"atomic-waker\",\"optional\":true,\"req\":\"^1.1.2\"},{\"name\":\"bytes\",\"req\":\"^1.2\"},{\"kind\":\"dev\",\"name\":\"form_urlencoded\",\"req\":\"^1\"},{\"name\":\"futures-channel\",\"optional\":true,\"req\":\"^0.3\"},{\"features\":[\"sink\"],\"kind\":\"dev\",\"name\":\"futures-channel\",\"req\":\"^0.3\"},{\"name\":\"futures-core\",\"optional\":true,\"req\":\"^0.3.31\"},{\"default_features\":false,\"features\":[\"alloc\"],\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3\"},{\"default_features\":false,\"features\":[\"alloc\",\"sink\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3\"},{\"name\":\"h2\",\"optional\":true,\"req\":\"^0.4.2\"},{\"name\":\"http\",\"req\":\"^1\"},{\"name\":\"http-body\",\"req\":\"^1\"},{\"name\":\"http-body-util\",\"optional\":true,\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1\"},{\"name\":\"httparse\",\"optional\":true,\"req\":\"^1.9\"},{\"name\":\"httpdate\",\"optional\":true,\"req\":\"^1.0\"},{\"name\":\"itoa\",\"optional\":true,\"req\":\"^1\"},{\"name\":\"pin-project-lite\",\"optional\":true,\"req\":\"^0.2.4\"},{\"kind\":\"dev\",\"name\":\"pin-project-lite\",\"req\":\"^0.2.4\"},{\"name\":\"pin-utils\",\"optional\":true,\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"pretty_env_logger\",\"req\":\"^0.5\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0\"},{\"features\":[\"const_generics\",\"const_new\"],\"name\":\"smallvec\",\"optional\":true,\"req\":\"^1.12\"},{\"kind\":\"dev\",\"name\":\"spmc\",\"req\":\"^0.3\"},{\"features\":[\"sync\"],\"name\":\"tokio\",\"req\":\"^1\"},{\"features\":[\"fs\",\"macros\",\"net\",\"io-std\",\"io-util\",\"rt\",\"rt-multi-thread\",\"sync\",\"time\",\"test-util\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"kind\":\"dev\",\"name\":\"tokio-util\",\"req
\":\"^0.7.10\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"want\",\"optional\":true,\"req\":\"^0.3\"}],\"features\":{\"capi\":[],\"client\":[\"dep:want\",\"dep:pin-project-lite\",\"dep:smallvec\"],\"default\":[],\"ffi\":[\"dep:http-body-util\",\"dep:futures-util\"],\"full\":[\"client\",\"http1\",\"http2\",\"server\"],\"http1\":[\"dep:atomic-waker\",\"dep:futures-channel\",\"dep:futures-core\",\"dep:httparse\",\"dep:itoa\",\"dep:pin-utils\"],\"http2\":[\"dep:futures-channel\",\"dep:futures-core\",\"dep:h2\"],\"nightly\":[],\"server\":[\"dep:httpdate\",\"dep:pin-project-lite\",\"dep:smallvec\"],\"tracing\":[\"dep:tracing\"]}}",
|
||||
"i18n-config_0.4.8": "{\"dependencies\":[{\"name\":\"basic-toml\",\"req\":\"^0.1\"},{\"name\":\"log\",\"req\":\"^0.4\"},{\"features\":[\"derive\"],\"name\":\"serde\",\"req\":\"^1.0\"},{\"name\":\"serde_derive\",\"req\":\"^1.0\"},{\"name\":\"thiserror\",\"req\":\"^1.0\"},{\"features\":[\"serde\"],\"name\":\"unic-langid\",\"req\":\"^0.9\"}],\"features\":{}}",
|
||||
"i18n-embed-fl_0.9.4": "{\"dependencies\":[{\"name\":\"dashmap\",\"optional\":true,\"req\":\"^6.0\"},{\"kind\":\"dev\",\"name\":\"doc-comment\",\"req\":\"^0.3\"},{\"kind\":\"dev\",\"name\":\"env_logger\",\"req\":\"^0.11\"},{\"name\":\"find-crate\",\"req\":\"^0.6\"},{\"name\":\"fluent\",\"req\":\"^0.16\"},{\"name\":\"fluent-syntax\",\"req\":\"^0.11\"},{\"name\":\"i18n-config\",\"req\":\"^0.4.7\"},{\"features\":[\"fluent-system\",\"filesystem-assets\"],\"name\":\"i18n-embed\",\"req\":\"^0.15.4\"},{\"kind\":\"dev\",\"name\":\"pretty_assertions\",\"req\":\"^1.4\"},{\"name\":\"proc-macro-error2\",\"req\":\"^2.0.1\"},{\"name\":\"proc-macro2\",\"req\":\"^1.0\"},{\"name\":\"quote\",\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"rust-embed\",\"req\":\"^8.0\"},{\"name\":\"strsim\",\"req\":\"^0.11\"},{\"features\":[\"derive\",\"proc-macro\",\"parsing\",\"printing\",\"extra-traits\",\"full\"],\"name\":\"syn\",\"req\":\"^2.0\"},{\"name\":\"unic-langid\",\"req\":\"^0.9\"}],\"features\":{\"dashmap\":[\"dep:dashmap\"]}}",
|
||||
@@ -1369,7 +1369,7 @@
|
||||
"syntect_5.3.0": "{\"dependencies\":[{\"name\":\"bincode\",\"optional\":true,\"req\":\"^1.0\"},{\"features\":[\"html_reports\"],\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.3\"},{\"name\":\"fancy-regex\",\"optional\":true,\"req\":\"^0.16.2\"},{\"name\":\"flate2\",\"optional\":true,\"req\":\"^1.0\"},{\"name\":\"fnv\",\"optional\":true,\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"getopts\",\"req\":\"^0.2\"},{\"name\":\"once_cell\",\"req\":\"^1.8\"},{\"default_features\":false,\"name\":\"onig\",\"optional\":true,\"req\":\"^6.5.1\"},{\"name\":\"plist\",\"optional\":true,\"req\":\"^1.3\"},{\"kind\":\"dev\",\"name\":\"pretty_assertions\",\"req\":\"^0.6\"},{\"kind\":\"dev\",\"name\":\"public-api\",\"req\":\"^0.50.1\"},{\"kind\":\"dev\",\"name\":\"rayon\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"regex\",\"req\":\"^1.0\"},{\"name\":\"regex-syntax\",\"optional\":true,\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rustdoc-json\",\"req\":\"^0.9.7\"},{\"kind\":\"dev\",\"name\":\"rustup-toolchain\",\"req\":\"^0.1.5\"},{\"name\":\"serde\",\"req\":\"^1.0\"},{\"name\":\"serde_derive\",\"req\":\"^1.0\"},{\"name\":\"serde_json\",\"optional\":true,\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0\"},{\"name\":\"thiserror\",\"req\":\"^2.0.12\"},{\"name\":\"walkdir\",\"req\":\"^2.0\"},{\"name\":\"yaml-rust\",\"optional\":true,\"req\":\"^0.4.5\"}],\"features\":{\"default\":[\"default-onig\"],\"default-fancy\":[\"parsing\",\"default-syntaxes\",\"default-themes\",\"html\",\"plist-load\",\"yaml-load\",\"dump-load\",\"dump-create\",\"regex-fancy\"],\"default-onig\":[\"parsing\",\"default-syntaxes\",\"default-themes\",\"html\",\"plist-load\",\"yaml-load\",\"dump-load\",\"dump-create\",\"regex-onig\"],\"default-syntaxes\":[\"parsing\",\"dump-load\"],\"default-themes\":[\"dump-load\"],\"dump-create\":[\"flate2\",\"bincode\"],\"dump-load\":[\"flate2\",\"bincode\"],\"html\":[\"parsing\"],\"metadata\":[\"parsing\",\"plist-load\",\"dep:serde_json\"],\"parsi
ng\":[\"regex-syntax\",\"fnv\",\"dump-create\",\"dump-load\"],\"plist-load\":[\"plist\",\"dep:serde_json\"],\"regex-fancy\":[\"fancy-regex\"],\"regex-onig\":[\"onig\"],\"yaml-load\":[\"yaml-rust\",\"parsing\"]}}",
|
||||
"sys-locale_0.3.2": "{\"dependencies\":[{\"name\":\"js-sys\",\"optional\":true,\"req\":\"^0.3\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", not(unix)))\"},{\"name\":\"libc\",\"req\":\"^0.2\",\"target\":\"cfg(target_os = \\\"android\\\")\"},{\"name\":\"wasm-bindgen\",\"optional\":true,\"req\":\"^0.2\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", not(unix)))\"},{\"kind\":\"dev\",\"name\":\"wasm-bindgen-test\",\"req\":\"^0.3\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", not(unix)))\"},{\"features\":[\"Window\",\"WorkerGlobalScope\",\"Navigator\",\"WorkerNavigator\"],\"name\":\"web-sys\",\"optional\":true,\"req\":\"^0.3\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", not(unix)))\"}],\"features\":{\"js\":[\"js-sys\",\"wasm-bindgen\",\"web-sys\"]}}",
|
||||
"system-configuration-sys_0.6.0": "{\"dependencies\":[{\"name\":\"core-foundation-sys\",\"req\":\"^0.8\"},{\"name\":\"libc\",\"req\":\"^0.2.149\"}],\"features\":{}}",
|
||||
"system-configuration_0.6.1": "{\"dependencies\":[{\"name\":\"bitflags\",\"req\":\"^2\"},{\"name\":\"core-foundation\",\"req\":\"^0.9\"},{\"name\":\"system-configuration-sys\",\"req\":\"^0.6\"}],\"features\":{}}",
|
||||
"system-configuration_0.7.0": "{\"dependencies\":[{\"name\":\"bitflags\",\"req\":\"^2\"},{\"name\":\"core-foundation\",\"req\":\"^0.9\"},{\"name\":\"system-configuration-sys\",\"req\":\"^0.6\"}],\"features\":{}}",
|
||||
"tagptr_0.2.0": "{\"dependencies\":[],\"features\":{}}",
|
||||
"tar_0.4.44": "{\"dependencies\":[{\"name\":\"filetime\",\"req\":\"^0.2.8\"},{\"name\":\"libc\",\"req\":\"^0.2\",\"target\":\"cfg(unix)\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3\"},{\"name\":\"xattr\",\"optional\":true,\"req\":\"^1.1.3\",\"target\":\"cfg(unix)\"}],\"features\":{\"default\":[\"xattr\"]}}",
|
||||
"tempfile_3.24.0": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"doc-comment\",\"req\":\"^0.3\"},{\"name\":\"fastrand\",\"req\":\"^2.1.1\"},{\"default_features\":false,\"name\":\"getrandom\",\"optional\":true,\"req\":\"^0.3.0\",\"target\":\"cfg(any(unix, windows, target_os = \\\"wasi\\\"))\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"once_cell\",\"req\":\"^1.19.0\"},{\"features\":[\"fs\"],\"name\":\"rustix\",\"req\":\"^1.1.3\",\"target\":\"cfg(any(unix, target_os = \\\"wasi\\\"))\"},{\"features\":[\"Win32_Storage_FileSystem\",\"Win32_Foundation\"],\"name\":\"windows-sys\",\"req\":\">=0.52, <0.62\",\"target\":\"cfg(windows)\"}],\"features\":{\"default\":[\"getrandom\"],\"nightly\":[]}}",
|
||||
|
||||
@@ -46,7 +46,7 @@ Each archive contains a single entry with the platform baked into the name (e.g.
|
||||
|
||||
### Using Codex with your ChatGPT plan
|
||||
|
||||
Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).
|
||||
Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Business, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).
|
||||
|
||||
You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).
|
||||
|
||||
|
||||
164
codex-rs/Cargo.lock
generated
164
codex-rs/Cargo.lock
generated
@@ -408,9 +408,11 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"codex-app-server-protocol",
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-features",
|
||||
"codex-login",
|
||||
"codex-models-manager",
|
||||
"codex-protocol",
|
||||
"codex-utils-cargo-bin",
|
||||
"core_test_support",
|
||||
@@ -447,7 +449,7 @@ dependencies = [
|
||||
"objc2-foundation",
|
||||
"parking_lot",
|
||||
"percent-encoding",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.52.0",
|
||||
"wl-clipboard-rs",
|
||||
"x11rb",
|
||||
]
|
||||
@@ -1367,7 +1369,9 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assert_matches",
|
||||
"async-trait",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"codex-client",
|
||||
"codex-protocol",
|
||||
"codex-utils-rustls-provider",
|
||||
@@ -1416,6 +1420,7 @@ dependencies = [
|
||||
"codex-git-utils",
|
||||
"codex-login",
|
||||
"codex-mcp",
|
||||
"codex-models-manager",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-rmcp-client",
|
||||
@@ -1429,9 +1434,11 @@ dependencies = [
|
||||
"codex-utils-cli",
|
||||
"codex-utils-json-to-toml",
|
||||
"codex-utils-pty",
|
||||
"codex-utils-rustls-provider",
|
||||
"constant_time_eq",
|
||||
"core_test_support",
|
||||
"futures",
|
||||
"gethostname",
|
||||
"hmac",
|
||||
"jsonwebtoken",
|
||||
"opentelemetry",
|
||||
@@ -1454,6 +1461,7 @@ dependencies = [
|
||||
"tracing",
|
||||
"tracing-opentelemetry",
|
||||
"tracing-subscriber",
|
||||
"url",
|
||||
"uuid",
|
||||
"wiremock",
|
||||
]
|
||||
@@ -1488,6 +1496,7 @@ dependencies = [
|
||||
"codex-experimental-api-macros",
|
||||
"codex-git-utils",
|
||||
"codex-protocol",
|
||||
"codex-shell-command",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
"inventory",
|
||||
@@ -1497,7 +1506,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"shlex",
|
||||
"similar",
|
||||
"strum_macros 0.28.0",
|
||||
"tempfile",
|
||||
@@ -1535,11 +1543,14 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"assert_matches",
|
||||
"codex-exec-server",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
"pretty_assertions",
|
||||
"similar",
|
||||
"tempfile",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tree-sitter",
|
||||
"tree-sitter-bash",
|
||||
]
|
||||
@@ -1550,9 +1561,11 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-apply-patch",
|
||||
"codex-exec-server",
|
||||
"codex-linux-sandbox",
|
||||
"codex-sandboxing",
|
||||
"codex-shell-escalation",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-home-dir",
|
||||
"dotenvy",
|
||||
"tempfile",
|
||||
@@ -1599,6 +1612,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"codex-config",
|
||||
"codex-connectors",
|
||||
"codex-core",
|
||||
"codex-git-utils",
|
||||
@@ -1701,6 +1715,7 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"codex-backend-client",
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-login",
|
||||
"codex-otel",
|
||||
@@ -1787,6 +1802,10 @@ dependencies = [
|
||||
"v8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-collaboration-mode-templates"
|
||||
version = "0.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "codex-config"
|
||||
version = "0.0.0"
|
||||
@@ -1794,8 +1813,13 @@ dependencies = [
|
||||
"anyhow",
|
||||
"codex-app-server-protocol",
|
||||
"codex-execpolicy",
|
||||
"codex-features",
|
||||
"codex-git-utils",
|
||||
"codex-model-provider-info",
|
||||
"codex-network-proxy",
|
||||
"codex-protocol",
|
||||
"codex-utils-absolute-path",
|
||||
"dunce",
|
||||
"futures",
|
||||
"multimap",
|
||||
"pretty_assertions",
|
||||
@@ -1837,7 +1861,6 @@ dependencies = [
|
||||
"async-trait",
|
||||
"base64 0.22.1",
|
||||
"bm25",
|
||||
"chardetng",
|
||||
"chrono",
|
||||
"clap",
|
||||
"codex-analytics",
|
||||
@@ -1853,15 +1876,19 @@ dependencies = [
|
||||
"codex-exec-server",
|
||||
"codex-execpolicy",
|
||||
"codex-features",
|
||||
"codex-feedback",
|
||||
"codex-git-utils",
|
||||
"codex-hooks",
|
||||
"codex-instructions",
|
||||
"codex-login",
|
||||
"codex-mcp",
|
||||
"codex-model-provider-info",
|
||||
"codex-models-manager",
|
||||
"codex-network-proxy",
|
||||
"codex-otel",
|
||||
"codex-plugin",
|
||||
"codex-protocol",
|
||||
"codex-response-debug-context",
|
||||
"codex-rmcp-client",
|
||||
"codex-rollout",
|
||||
"codex-sandboxing",
|
||||
@@ -1891,7 +1918,6 @@ dependencies = [
|
||||
"ctor 0.6.3",
|
||||
"dirs",
|
||||
"dunce",
|
||||
"encoding_rs",
|
||||
"env-flags",
|
||||
"eventsource-stream",
|
||||
"futures",
|
||||
@@ -1900,7 +1926,6 @@ dependencies = [
|
||||
"image",
|
||||
"indexmap 2.13.0",
|
||||
"insta",
|
||||
"landlock",
|
||||
"libc",
|
||||
"maplit",
|
||||
"notify",
|
||||
@@ -1914,8 +1939,6 @@ dependencies = [
|
||||
"regex-lite",
|
||||
"reqwest",
|
||||
"rmcp",
|
||||
"schemars 0.8.22",
|
||||
"seccompiler",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
@@ -2002,6 +2025,7 @@ dependencies = [
|
||||
"codex-feedback",
|
||||
"codex-git-utils",
|
||||
"codex-login",
|
||||
"codex-model-provider-info",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-utils-absolute-path",
|
||||
@@ -2104,7 +2128,6 @@ dependencies = [
|
||||
name = "codex-features"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-login",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"pretty_assertions",
|
||||
@@ -2119,6 +2142,7 @@ name = "codex-feedback"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-login",
|
||||
"codex-protocol",
|
||||
"pretty_assertions",
|
||||
"sentry",
|
||||
@@ -2224,6 +2248,7 @@ name = "codex-lmstudio"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-core",
|
||||
"codex-model-provider-info",
|
||||
"reqwest",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
@@ -2240,10 +2265,13 @@ dependencies = [
|
||||
"async-trait",
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"codex-api",
|
||||
"codex-app-server-protocol",
|
||||
"codex-client",
|
||||
"codex-config",
|
||||
"codex-keyring-store",
|
||||
"codex-model-provider-info",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-terminal-detection",
|
||||
"codex-utils-template",
|
||||
@@ -2255,7 +2283,6 @@ dependencies = [
|
||||
"rand 0.9.2",
|
||||
"regex-lite",
|
||||
"reqwest",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
@@ -2311,6 +2338,7 @@ dependencies = [
|
||||
"codex-exec-server",
|
||||
"codex-features",
|
||||
"codex-login",
|
||||
"codex-models-manager",
|
||||
"codex-protocol",
|
||||
"codex-shell-command",
|
||||
"codex-utils-cli",
|
||||
@@ -2331,6 +2359,54 @@ dependencies = [
|
||||
"wiremock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-model-provider-info"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-api",
|
||||
"codex-app-server-protocol",
|
||||
"codex-protocol",
|
||||
"codex-utils-absolute-path",
|
||||
"http 1.4.0",
|
||||
"maplit",
|
||||
"pretty_assertions",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
"tempfile",
|
||||
"toml 0.9.11+spec-1.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-models-manager"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"codex-api",
|
||||
"codex-app-server-protocol",
|
||||
"codex-collaboration-mode-templates",
|
||||
"codex-config",
|
||||
"codex-feedback",
|
||||
"codex-login",
|
||||
"codex-model-provider-info",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-response-debug-context",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-output-truncation",
|
||||
"codex-utils-template",
|
||||
"core_test_support",
|
||||
"http 1.4.0",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"wiremock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-network-proxy"
|
||||
version = "0.0.0"
|
||||
@@ -2370,6 +2446,7 @@ dependencies = [
|
||||
"async-stream",
|
||||
"bytes",
|
||||
"codex-core",
|
||||
"codex-model-provider-info",
|
||||
"futures",
|
||||
"pretty_assertions",
|
||||
"reqwest",
|
||||
@@ -2434,18 +2511,27 @@ name = "codex-protocol"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chardetng",
|
||||
"chrono",
|
||||
"codex-async-utils",
|
||||
"codex-execpolicy",
|
||||
"codex-git-utils",
|
||||
"codex-network-proxy",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-image",
|
||||
"codex-utils-string",
|
||||
"codex-utils-template",
|
||||
"encoding_rs",
|
||||
"http 1.4.0",
|
||||
"icu_decimal",
|
||||
"icu_locale_core",
|
||||
"icu_provider",
|
||||
"landlock",
|
||||
"pretty_assertions",
|
||||
"quick-xml",
|
||||
"reqwest",
|
||||
"schemars 0.8.22",
|
||||
"seccompiler",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
@@ -2453,11 +2539,24 @@ dependencies = [
|
||||
"strum_macros 0.28.0",
|
||||
"sys-locale",
|
||||
"tempfile",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"ts-rs",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-response-debug-context"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"codex-api",
|
||||
"http 1.4.0",
|
||||
"pretty_assertions",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-responses-api-proxy"
|
||||
version = "0.0.0"
|
||||
@@ -2467,6 +2566,7 @@ dependencies = [
|
||||
"codex-process-hardening",
|
||||
"ctor 0.6.3",
|
||||
"libc",
|
||||
"pretty_assertions",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -2481,6 +2581,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"axum",
|
||||
"codex-client",
|
||||
"codex-config",
|
||||
"codex-keyring-store",
|
||||
"codex-protocol",
|
||||
"codex-utils-cargo-bin",
|
||||
@@ -2492,7 +2593,6 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"reqwest",
|
||||
"rmcp",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
@@ -2699,12 +2799,15 @@ dependencies = [
|
||||
"codex-cloud-requirements",
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-exec-server",
|
||||
"codex-features",
|
||||
"codex-feedback",
|
||||
"codex-file-search",
|
||||
"codex-git-utils",
|
||||
"codex-login",
|
||||
"codex-mcp",
|
||||
"codex-model-provider-info",
|
||||
"codex-models-manager",
|
||||
"codex-otel",
|
||||
"codex-protocol",
|
||||
"codex-rollout",
|
||||
@@ -2766,6 +2869,7 @@ dependencies = [
|
||||
"unicode-segmentation",
|
||||
"unicode-width 0.2.1",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"uuid",
|
||||
"vt100",
|
||||
"webbrowser",
|
||||
@@ -2779,7 +2883,6 @@ name = "codex-utils-absolute-path"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"dirs",
|
||||
"path-absolutize",
|
||||
"pretty_assertions",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
@@ -2868,6 +2971,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-core",
|
||||
"codex-lmstudio",
|
||||
"codex-model-provider-info",
|
||||
"codex-ollama",
|
||||
]
|
||||
|
||||
@@ -2939,6 +3043,7 @@ name = "codex-utils-sandbox-summary"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-core",
|
||||
"codex-model-provider-info",
|
||||
"codex-protocol",
|
||||
"codex-utils-absolute-path",
|
||||
"pretty_assertions",
|
||||
@@ -3210,6 +3315,8 @@ dependencies = [
|
||||
"codex-exec-server",
|
||||
"codex-features",
|
||||
"codex-login",
|
||||
"codex-model-provider-info",
|
||||
"codex-models-manager",
|
||||
"codex-protocol",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
@@ -3864,7 +3971,7 @@ dependencies = [
|
||||
"libc",
|
||||
"option-ext",
|
||||
"redox_users 0.5.2",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4109,7 +4216,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5015,14 +5122,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hyper-util"
|
||||
version = "0.1.19"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
|
||||
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"http 1.4.0",
|
||||
"http-body",
|
||||
@@ -5031,7 +5137,7 @@ dependencies = [
|
||||
"libc",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"socket2 0.6.2",
|
||||
"socket2 0.5.10",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
@@ -5537,7 +5643,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6315,7 +6421,7 @@ version = "0.50.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6491,7 +6597,7 @@ version = "5.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51e219e79014df21a225b1860a479e2dcd7cbd9130f4defd4bd0e191ea31d67d"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"base64 0.21.7",
|
||||
"chrono",
|
||||
"getrandom 0.2.17",
|
||||
"http 1.4.0",
|
||||
@@ -6959,7 +7065,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d8fae84b431384b68627d0f9b3b1245fcf9f46f6c0e3dc902e9dce64edd1967"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7505,7 +7611,7 @@ dependencies = [
|
||||
"quinn-udp",
|
||||
"rustc-hash 2.1.1",
|
||||
"rustls",
|
||||
"socket2 0.6.2",
|
||||
"socket2 0.5.10",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tracing",
|
||||
@@ -7542,9 +7648,9 @@ dependencies = [
|
||||
"cfg_aliases 0.2.1",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"socket2 0.6.2",
|
||||
"socket2 0.5.10",
|
||||
"tracing",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8380,7 +8486,7 @@ dependencies = [
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.11.0",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -9779,9 +9885,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "system-configuration"
|
||||
version = "0.6.1"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
|
||||
checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"core-foundation 0.9.4",
|
||||
@@ -9814,7 +9920,7 @@ dependencies = [
|
||||
"getrandom 0.3.4",
|
||||
"once_cell",
|
||||
"rustix 1.1.3",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -11260,7 +11366,7 @@ version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -20,6 +20,7 @@ members = [
|
||||
"cloud-tasks-client",
|
||||
"cloud-tasks-mock-client",
|
||||
"cli",
|
||||
"collaboration-mode-templates",
|
||||
"connectors",
|
||||
"config",
|
||||
"shell-command",
|
||||
@@ -41,6 +42,8 @@ members = [
|
||||
"login",
|
||||
"codex-mcp",
|
||||
"mcp-server",
|
||||
"model-provider-info",
|
||||
"models-manager",
|
||||
"network-proxy",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
@@ -48,6 +51,7 @@ members = [
|
||||
"rollout",
|
||||
"rmcp-client",
|
||||
"responses-api-proxy",
|
||||
"response-debug-context",
|
||||
"sandboxing",
|
||||
"stdio-to-uds",
|
||||
"otel",
|
||||
@@ -112,6 +116,7 @@ codex-backend-client = { path = "backend-client" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-cli = { path = "cli" }
|
||||
codex-client = { path = "codex-client" }
|
||||
codex-collaboration-mode-templates = { path = "collaboration-mode-templates" }
|
||||
codex-cloud-requirements = { path = "cloud-requirements" }
|
||||
codex-cloud-tasks-client = { path = "cloud-tasks-client" }
|
||||
codex-cloud-tasks-mock-client = { path = "cloud-tasks-mock-client" }
|
||||
@@ -136,6 +141,8 @@ codex-lmstudio = { path = "lmstudio" }
|
||||
codex-login = { path = "login" }
|
||||
codex-mcp = { path = "codex-mcp" }
|
||||
codex-mcp-server = { path = "mcp-server" }
|
||||
codex-model-provider-info = { path = "model-provider-info" }
|
||||
codex-models-manager = { path = "models-manager" }
|
||||
codex-network-proxy = { path = "network-proxy" }
|
||||
codex-ollama = { path = "ollama" }
|
||||
codex-otel = { path = "otel" }
|
||||
@@ -143,6 +150,7 @@ codex-plugin = { path = "plugin" }
|
||||
codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-response-debug-context = { path = "response-debug-context" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-rollout = { path = "rollout" }
|
||||
codex-sandboxing = { path = "sandboxing" }
|
||||
|
||||
@@ -13,6 +13,7 @@ use crate::events::TrackEventRequest;
|
||||
use crate::events::codex_app_metadata;
|
||||
use crate::events::codex_plugin_metadata;
|
||||
use crate::events::codex_plugin_used_metadata;
|
||||
use crate::events::subagent_thread_started_event_request;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::AppInvocation;
|
||||
use crate::facts::AppMentionedInput;
|
||||
@@ -24,6 +25,7 @@ use crate::facts::PluginStateChangedInput;
|
||||
use crate::facts::PluginUsedInput;
|
||||
use crate::facts::SkillInvocation;
|
||||
use crate::facts::SkillInvokedInput;
|
||||
use crate::facts::SubAgentThreadStartedInput;
|
||||
use crate::facts::TrackEventsContext;
|
||||
use crate::reducer::AnalyticsReducer;
|
||||
use crate::reducer::normalize_path_for_skill_id;
|
||||
@@ -47,6 +49,7 @@ use codex_plugin::AppConnectorId;
|
||||
use codex_plugin::PluginCapabilitySummary;
|
||||
use codex_plugin::PluginId;
|
||||
use codex_plugin::PluginTelemetryMetadata;
|
||||
use codex_protocol::protocol::SubAgentSource;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::collections::HashSet;
|
||||
@@ -446,6 +449,155 @@ async fn initialize_caches_client_and_thread_lifecycle_publishes_once_initialize
|
||||
assert_eq!(payload[0]["event_params"]["parent_thread_id"], json!(null));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn subagent_thread_started_review_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::ThreadInitialized(subagent_thread_started_event_request(
|
||||
SubAgentThreadStartedInput {
|
||||
thread_id: "thread-review".to_string(),
|
||||
product_client_id: "codex-tui".to_string(),
|
||||
client_name: "codex-tui".to_string(),
|
||||
client_version: "1.0.0".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
ephemeral: false,
|
||||
subagent_source: SubAgentSource::Review,
|
||||
created_at: 123,
|
||||
},
|
||||
));
|
||||
|
||||
let payload = serde_json::to_value(&event).expect("serialize review subagent event");
|
||||
assert_eq!(payload["event_params"]["thread_source"], "subagent");
|
||||
assert_eq!(
|
||||
payload["event_params"]["app_server_client"]["product_client_id"],
|
||||
"codex-tui"
|
||||
);
|
||||
assert_eq!(
|
||||
payload["event_params"]["app_server_client"]["client_name"],
|
||||
"codex-tui"
|
||||
);
|
||||
assert_eq!(
|
||||
payload["event_params"]["app_server_client"]["client_version"],
|
||||
"1.0.0"
|
||||
);
|
||||
assert_eq!(
|
||||
payload["event_params"]["app_server_client"]["rpc_transport"],
|
||||
"in_process"
|
||||
);
|
||||
assert_eq!(payload["event_params"]["created_at"], 123);
|
||||
assert_eq!(payload["event_params"]["initialization_mode"], "new");
|
||||
assert_eq!(payload["event_params"]["subagent_source"], "review");
|
||||
assert_eq!(payload["event_params"]["parent_thread_id"], json!(null));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn subagent_thread_started_thread_spawn_serializes_parent_thread_id() {
|
||||
let parent_thread_id =
|
||||
codex_protocol::ThreadId::from_string("11111111-1111-1111-1111-111111111111")
|
||||
.expect("valid thread id");
|
||||
let event = TrackEventRequest::ThreadInitialized(subagent_thread_started_event_request(
|
||||
SubAgentThreadStartedInput {
|
||||
thread_id: "thread-spawn".to_string(),
|
||||
product_client_id: "codex-tui".to_string(),
|
||||
client_name: "codex-tui".to_string(),
|
||||
client_version: "1.0.0".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
ephemeral: true,
|
||||
subagent_source: SubAgentSource::ThreadSpawn {
|
||||
parent_thread_id,
|
||||
depth: 1,
|
||||
agent_path: None,
|
||||
agent_nickname: None,
|
||||
agent_role: None,
|
||||
},
|
||||
created_at: 124,
|
||||
},
|
||||
));
|
||||
|
||||
let payload = serde_json::to_value(&event).expect("serialize thread spawn subagent event");
|
||||
assert_eq!(payload["event_params"]["thread_source"], "subagent");
|
||||
assert_eq!(payload["event_params"]["subagent_source"], "thread_spawn");
|
||||
assert_eq!(
|
||||
payload["event_params"]["parent_thread_id"],
|
||||
"11111111-1111-1111-1111-111111111111"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn subagent_thread_started_memory_consolidation_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::ThreadInitialized(subagent_thread_started_event_request(
|
||||
SubAgentThreadStartedInput {
|
||||
thread_id: "thread-memory".to_string(),
|
||||
product_client_id: "codex-tui".to_string(),
|
||||
client_name: "codex-tui".to_string(),
|
||||
client_version: "1.0.0".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
ephemeral: false,
|
||||
subagent_source: SubAgentSource::MemoryConsolidation,
|
||||
created_at: 125,
|
||||
},
|
||||
));
|
||||
|
||||
let payload =
|
||||
serde_json::to_value(&event).expect("serialize memory consolidation subagent event");
|
||||
assert_eq!(
|
||||
payload["event_params"]["subagent_source"],
|
||||
"memory_consolidation"
|
||||
);
|
||||
assert_eq!(payload["event_params"]["parent_thread_id"], json!(null));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn subagent_thread_started_other_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::ThreadInitialized(subagent_thread_started_event_request(
|
||||
SubAgentThreadStartedInput {
|
||||
thread_id: "thread-guardian".to_string(),
|
||||
product_client_id: "codex-tui".to_string(),
|
||||
client_name: "codex-tui".to_string(),
|
||||
client_version: "1.0.0".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
ephemeral: false,
|
||||
subagent_source: SubAgentSource::Other("guardian".to_string()),
|
||||
created_at: 126,
|
||||
},
|
||||
));
|
||||
|
||||
let payload = serde_json::to_value(&event).expect("serialize other subagent event");
|
||||
assert_eq!(payload["event_params"]["subagent_source"], "guardian");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn subagent_thread_started_publishes_without_initialize() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut events = Vec::new();
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Custom(CustomAnalyticsFact::SubAgentThreadStarted(
|
||||
SubAgentThreadStartedInput {
|
||||
thread_id: "thread-review".to_string(),
|
||||
product_client_id: "codex-tui".to_string(),
|
||||
client_name: "codex-tui".to_string(),
|
||||
client_version: "1.0.0".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
ephemeral: false,
|
||||
subagent_source: SubAgentSource::Review,
|
||||
created_at: 127,
|
||||
},
|
||||
)),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
let payload = serde_json::to_value(&events).expect("serialize events");
|
||||
assert_eq!(payload.as_array().expect("events array").len(), 1);
|
||||
assert_eq!(payload[0]["event_type"], "codex_thread_initialized");
|
||||
assert_eq!(
|
||||
payload[0]["event_params"]["app_server_client"]["product_client_id"],
|
||||
"codex-tui"
|
||||
);
|
||||
assert_eq!(payload[0]["event_params"]["thread_source"], "subagent");
|
||||
assert_eq!(payload[0]["event_params"]["subagent_source"], "review");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plugin_used_event_serializes_expected_shape() {
|
||||
let tracking = TrackEventsContext {
|
||||
|
||||
@@ -11,6 +11,7 @@ use crate::facts::PluginState;
|
||||
use crate::facts::PluginStateChangedInput;
|
||||
use crate::facts::SkillInvocation;
|
||||
use crate::facts::SkillInvokedInput;
|
||||
use crate::facts::SubAgentThreadStartedInput;
|
||||
use crate::facts::TrackEventsContext;
|
||||
use crate::reducer::AnalyticsReducer;
|
||||
use codex_app_server_protocol::ClientResponse;
|
||||
@@ -144,6 +145,12 @@ impl AnalyticsEventsClient {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn track_subagent_thread_started(&self, input: SubAgentThreadStartedInput) {
|
||||
self.record_fact(AnalyticsFact::Custom(
|
||||
CustomAnalyticsFact::SubAgentThreadStarted(input),
|
||||
));
|
||||
}
|
||||
|
||||
pub fn track_app_mentioned(&self, tracking: TrackEventsContext, mentions: Vec<AppInvocation>) {
|
||||
if mentions.is_empty() {
|
||||
return;
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use crate::facts::AppInvocation;
|
||||
use crate::facts::InvocationType;
|
||||
use crate::facts::PluginState;
|
||||
use crate::facts::SubAgentThreadStartedInput;
|
||||
use crate::facts::TrackEventsContext;
|
||||
use codex_login::default_client::originator;
|
||||
use codex_plugin::PluginTelemetryMetadata;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::SubAgentSource;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize)]
|
||||
@@ -228,3 +230,49 @@ pub(crate) fn current_runtime_metadata() -> CodexRuntimeMetadata {
|
||||
runtime_arch: std::env::consts::ARCH.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn subagent_thread_started_event_request(
|
||||
input: SubAgentThreadStartedInput,
|
||||
) -> ThreadInitializedEvent {
|
||||
let event_params = ThreadInitializedEventParams {
|
||||
thread_id: input.thread_id,
|
||||
app_server_client: CodexAppServerClientMetadata {
|
||||
product_client_id: input.product_client_id,
|
||||
client_name: Some(input.client_name),
|
||||
client_version: Some(input.client_version),
|
||||
rpc_transport: AppServerRpcTransport::InProcess,
|
||||
experimental_api_enabled: None,
|
||||
},
|
||||
runtime: current_runtime_metadata(),
|
||||
model: input.model,
|
||||
ephemeral: input.ephemeral,
|
||||
thread_source: Some("subagent"),
|
||||
initialization_mode: ThreadInitializationMode::New,
|
||||
subagent_source: Some(subagent_source_name(&input.subagent_source)),
|
||||
parent_thread_id: subagent_parent_thread_id(&input.subagent_source),
|
||||
created_at: input.created_at,
|
||||
};
|
||||
ThreadInitializedEvent {
|
||||
event_type: "codex_thread_initialized",
|
||||
event_params,
|
||||
}
|
||||
}
|
||||
|
||||
fn subagent_source_name(subagent_source: &SubAgentSource) -> String {
|
||||
match subagent_source {
|
||||
SubAgentSource::Review => "review".to_string(),
|
||||
SubAgentSource::Compact => "compact".to_string(),
|
||||
SubAgentSource::ThreadSpawn { .. } => "thread_spawn".to_string(),
|
||||
SubAgentSource::MemoryConsolidation => "memory_consolidation".to_string(),
|
||||
SubAgentSource::Other(other) => other.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn subagent_parent_thread_id(subagent_source: &SubAgentSource) -> Option<String> {
|
||||
match subagent_source {
|
||||
SubAgentSource::ThreadSpawn {
|
||||
parent_thread_id, ..
|
||||
} => Some(parent_thread_id.to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_plugin::PluginTelemetryMetadata;
|
||||
use codex_protocol::protocol::SkillScope;
|
||||
use codex_protocol::protocol::SubAgentSource;
|
||||
use serde::Serialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
@@ -50,6 +51,18 @@ pub struct AppInvocation {
|
||||
pub invocation_type: Option<InvocationType>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubAgentThreadStartedInput {
|
||||
pub thread_id: String,
|
||||
pub product_client_id: String,
|
||||
pub client_name: String,
|
||||
pub client_version: String,
|
||||
pub model: String,
|
||||
pub ephemeral: bool,
|
||||
pub subagent_source: SubAgentSource,
|
||||
pub created_at: u64,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) enum AnalyticsFact {
|
||||
Initialize {
|
||||
@@ -75,6 +88,7 @@ pub(crate) enum AnalyticsFact {
|
||||
}
|
||||
|
||||
pub(crate) enum CustomAnalyticsFact {
|
||||
SubAgentThreadStarted(SubAgentThreadStartedInput),
|
||||
SkillInvoked(SkillInvokedInput),
|
||||
AppMentioned(AppMentionedInput),
|
||||
AppUsed(AppUsedInput),
|
||||
|
||||
@@ -8,6 +8,7 @@ pub use events::AppServerRpcTransport;
|
||||
pub use facts::AppInvocation;
|
||||
pub use facts::InvocationType;
|
||||
pub use facts::SkillInvocation;
|
||||
pub use facts::SubAgentThreadStartedInput;
|
||||
pub use facts::TrackEventsContext;
|
||||
pub use facts::build_track_events_context;
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ use crate::events::codex_app_metadata;
|
||||
use crate::events::codex_plugin_metadata;
|
||||
use crate::events::codex_plugin_used_metadata;
|
||||
use crate::events::plugin_state_event_type;
|
||||
use crate::events::subagent_thread_started_event_request;
|
||||
use crate::events::thread_source_name;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::AppMentionedInput;
|
||||
@@ -24,6 +25,7 @@ use crate::facts::PluginState;
|
||||
use crate::facts::PluginStateChangedInput;
|
||||
use crate::facts::PluginUsedInput;
|
||||
use crate::facts::SkillInvokedInput;
|
||||
use crate::facts::SubAgentThreadStartedInput;
|
||||
use codex_app_server_protocol::ClientResponse;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_git_utils::collect_git_info;
|
||||
@@ -76,6 +78,9 @@ impl AnalyticsReducer {
|
||||
}
|
||||
AnalyticsFact::Notification(_notification) => {}
|
||||
AnalyticsFact::Custom(input) => match input {
|
||||
CustomAnalyticsFact::SubAgentThreadStarted(input) => {
|
||||
self.ingest_subagent_thread_started(input, out);
|
||||
}
|
||||
CustomAnalyticsFact::SkillInvoked(input) => {
|
||||
self.ingest_skill_invoked(input, out).await;
|
||||
}
|
||||
@@ -120,6 +125,16 @@ impl AnalyticsReducer {
|
||||
);
|
||||
}
|
||||
|
||||
fn ingest_subagent_thread_started(
|
||||
&mut self,
|
||||
input: SubAgentThreadStartedInput,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
) {
|
||||
out.push(TrackEventRequest::ThreadInitialized(
|
||||
subagent_thread_started_event_request(input),
|
||||
));
|
||||
}
|
||||
|
||||
async fn ingest_skill_invoked(
|
||||
&mut self,
|
||||
input: SkillInvokedInput,
|
||||
|
||||
@@ -1060,6 +1060,9 @@ mod tests {
|
||||
items: Vec::new(),
|
||||
status: codex_app_server_protocol::TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: Some(0),
|
||||
duration_ms: Some(1),
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -1834,6 +1837,9 @@ mod tests {
|
||||
items: Vec::new(),
|
||||
status: codex_app_server_protocol::TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: Some(0),
|
||||
duration_ms: None,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,12 +17,12 @@ clap = { workspace = true, features = ["derive"] }
|
||||
codex-experimental-api-macros = { workspace = true }
|
||||
codex-git-utils = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-shell-command = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
schemars = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
serde_with = { workspace = true }
|
||||
shlex = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
rmcp = { workspace = true, default-features = false, features = [
|
||||
|
||||
@@ -800,7 +800,7 @@
|
||||
"description": "Stop filesystem watch notifications for a prior `fs/watch`.",
|
||||
"properties": {
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -819,10 +819,15 @@
|
||||
}
|
||||
],
|
||||
"description": "Absolute file or directory path to watch."
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
"path",
|
||||
"watchId"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -1042,6 +1047,17 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"detail": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/McpServerStatusDetail"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Controls how much MCP inventory data to fetch for each server. Defaults to `Full` when omitted."
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a server-defined value.",
|
||||
"format": "uint32",
|
||||
@@ -1208,6 +1224,25 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"McpResourceReadParams": {
|
||||
"properties": {
|
||||
"server": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"server",
|
||||
"threadId",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerOauthLoginParams": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -1235,6 +1270,13 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerStatusDetail": {
|
||||
"enum": [
|
||||
"full",
|
||||
"toolsAndAuthOnly"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"MergeStrategy": {
|
||||
"enum": [
|
||||
"replace",
|
||||
@@ -2815,6 +2857,48 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeStartTransport": {
|
||||
"description": "EXPERIMENTAL - transport used by thread realtime.",
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"websocket"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"description": "SDP offer generated by a WebRTC RTCPeerConnection after configuring audio and the realtime events data channel.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"webrtc"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"type"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"ThreadResumeParams": {
|
||||
"description": "There are three ways to resume a thread: 1. By thread_id: load the thread from disk by thread_id and resume it. 2. By history: instantiate the thread from memory and resume it. 3. By path: load the thread from disk by path and resume it.\n\nThe precedence is: history > path > thread_id. If using history or path, the thread_id param will be ignored.\n\nPrefer using thread_id whenever possible.",
|
||||
"properties": {
|
||||
@@ -3102,10 +3186,27 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"sessionStartSource": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ThreadStartSource"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadStartSource": {
|
||||
"enum": [
|
||||
"startup",
|
||||
"clear"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ThreadUnarchiveParams": {
|
||||
"properties": {
|
||||
"threadId": {
|
||||
@@ -4420,6 +4521,30 @@
|
||||
"title": "McpServerStatus/listRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"mcpServer/resource/read"
|
||||
],
|
||||
"title": "McpServer/resource/readRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/McpResourceReadParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "McpServer/resource/readRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
|
||||
@@ -1013,7 +1013,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -1736,6 +1736,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -3303,6 +3304,22 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeSdpNotification": {
|
||||
"description": "EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.",
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"threadId"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeStartedNotification": {
|
||||
"description": "EXPERIMENTAL - emitted when thread realtime startup is accepted.",
|
||||
"properties": {
|
||||
@@ -3532,6 +3549,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -3553,6 +3586,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
@@ -4902,6 +4943,26 @@
|
||||
"title": "Thread/realtime/outputAudio/deltaNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"thread/realtime/sdp"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotificationMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/ThreadRealtimeSdpNotification"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
|
||||
@@ -1201,6 +1201,30 @@
|
||||
"title": "McpServerStatus/listRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/v2/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"mcpServer/resource/read"
|
||||
],
|
||||
"title": "McpServer/resource/readRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/v2/McpResourceReadParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "McpServer/resource/readRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
@@ -4311,6 +4335,26 @@
|
||||
"title": "Thread/realtime/outputAudio/deltaNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"thread/realtime/sdp"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotificationMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/v2/ThreadRealtimeSdpNotification"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
@@ -7487,7 +7531,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -7757,7 +7801,7 @@
|
||||
"description": "Stop filesystem watch notifications for a prior `fs/watch`.",
|
||||
"properties": {
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -7784,25 +7828,6 @@
|
||||
}
|
||||
],
|
||||
"description": "Absolute file or directory path to watch."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"title": "FsWatchParams",
|
||||
"type": "object"
|
||||
},
|
||||
"FsWatchResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Created watch handle returned by `fs/watch`.",
|
||||
"properties": {
|
||||
"path": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
}
|
||||
],
|
||||
"description": "Canonicalized path associated with the watch."
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.",
|
||||
@@ -7813,6 +7838,25 @@
|
||||
"path",
|
||||
"watchId"
|
||||
],
|
||||
"title": "FsWatchParams",
|
||||
"type": "object"
|
||||
},
|
||||
"FsWatchResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Successful response for `fs/watch`.",
|
||||
"properties": {
|
||||
"path": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
}
|
||||
],
|
||||
"description": "Canonicalized path associated with the watch."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"title": "FsWatchResponse",
|
||||
"type": "object"
|
||||
},
|
||||
@@ -8604,6 +8648,17 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"detail": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/McpServerStatusDetail"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Controls how much MCP inventory data to fetch for each server. Defaults to `Full` when omitted."
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a server-defined value.",
|
||||
"format": "uint32",
|
||||
@@ -8929,6 +8984,43 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"McpResourceReadParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"server": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"server",
|
||||
"threadId",
|
||||
"uri"
|
||||
],
|
||||
"title": "McpResourceReadParams",
|
||||
"type": "object"
|
||||
},
|
||||
"McpResourceReadResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"contents": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/ResourceContent"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"contents"
|
||||
],
|
||||
"title": "McpResourceReadResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerOauthLoginCompletedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -9044,6 +9136,13 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerStatusDetail": {
|
||||
"enum": [
|
||||
"full",
|
||||
"toolsAndAuthOnly"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"McpServerStatusUpdatedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -9105,6 +9204,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -9473,6 +9573,12 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerFullAccessDenylistOnly": {
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerouslyAllowAllUnixSockets": {
|
||||
"type": [
|
||||
"boolean",
|
||||
@@ -10643,6 +10749,57 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ResourceContent": {
|
||||
"anyOf": [
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"text": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"text",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"blob": {
|
||||
"type": "string"
|
||||
},
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"blob",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"description": "Contents returned when reading a resource from an MCP server."
|
||||
},
|
||||
"ResourceTemplate": {
|
||||
"description": "A template description for resources available on the server.",
|
||||
"properties": {
|
||||
@@ -13518,6 +13675,66 @@
|
||||
"title": "ThreadRealtimeOutputAudioDeltaNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeSdpNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.",
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"threadId"
|
||||
],
|
||||
"title": "ThreadRealtimeSdpNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeStartTransport": {
|
||||
"description": "EXPERIMENTAL - transport used by thread realtime.",
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"websocket"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"description": "SDP offer generated by a WebRTC RTCPeerConnection after configuring audio and the realtime events data channel.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"webrtc"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"type"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"ThreadRealtimeStartedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "EXPERIMENTAL - emitted when thread realtime startup is accepted.",
|
||||
@@ -13950,6 +14167,16 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"sessionStartSource": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/ThreadStartSource"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"title": "ThreadStartParams",
|
||||
@@ -14017,6 +14244,13 @@
|
||||
"title": "ThreadStartResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadStartSource": {
|
||||
"enum": [
|
||||
"startup",
|
||||
"clear"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ThreadStartedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -14329,6 +14563,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -14350,6 +14600,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/v2/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -1776,6 +1776,30 @@
|
||||
"title": "McpServerStatus/listRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"mcpServer/resource/read"
|
||||
],
|
||||
"title": "McpServer/resource/readRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/McpResourceReadParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "McpServer/resource/readRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
@@ -4155,7 +4179,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -4425,7 +4449,7 @@
|
||||
"description": "Stop filesystem watch notifications for a prior `fs/watch`.",
|
||||
"properties": {
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -4452,25 +4476,6 @@
|
||||
}
|
||||
],
|
||||
"description": "Absolute file or directory path to watch."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"title": "FsWatchParams",
|
||||
"type": "object"
|
||||
},
|
||||
"FsWatchResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Created watch handle returned by `fs/watch`.",
|
||||
"properties": {
|
||||
"path": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
}
|
||||
],
|
||||
"description": "Canonicalized path associated with the watch."
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.",
|
||||
@@ -4481,6 +4486,25 @@
|
||||
"path",
|
||||
"watchId"
|
||||
],
|
||||
"title": "FsWatchParams",
|
||||
"type": "object"
|
||||
},
|
||||
"FsWatchResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Successful response for `fs/watch`.",
|
||||
"properties": {
|
||||
"path": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
}
|
||||
],
|
||||
"description": "Canonicalized path associated with the watch."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"title": "FsWatchResponse",
|
||||
"type": "object"
|
||||
},
|
||||
@@ -5427,6 +5451,17 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"detail": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/McpServerStatusDetail"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Controls how much MCP inventory data to fetch for each server. Defaults to `Full` when omitted."
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a server-defined value.",
|
||||
"format": "uint32",
|
||||
@@ -5752,6 +5787,43 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"McpResourceReadParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"server": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"server",
|
||||
"threadId",
|
||||
"uri"
|
||||
],
|
||||
"title": "McpResourceReadParams",
|
||||
"type": "object"
|
||||
},
|
||||
"McpResourceReadResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"contents": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/ResourceContent"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"contents"
|
||||
],
|
||||
"title": "McpResourceReadResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerOauthLoginCompletedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -5867,6 +5939,13 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"McpServerStatusDetail": {
|
||||
"enum": [
|
||||
"full",
|
||||
"toolsAndAuthOnly"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"McpServerStatusUpdatedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -5928,6 +6007,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -6296,6 +6376,12 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerFullAccessDenylistOnly": {
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerouslyAllowAllUnixSockets": {
|
||||
"type": [
|
||||
"boolean",
|
||||
@@ -7466,6 +7552,57 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ResourceContent": {
|
||||
"anyOf": [
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"text": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"text",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"blob": {
|
||||
"type": "string"
|
||||
},
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"blob",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"description": "Contents returned when reading a resource from an MCP server."
|
||||
},
|
||||
"ResourceTemplate": {
|
||||
"description": "A template description for resources available on the server.",
|
||||
"properties": {
|
||||
@@ -9299,6 +9436,26 @@
|
||||
"title": "Thread/realtime/outputAudio/deltaNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"thread/realtime/sdp"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotificationMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/ThreadRealtimeSdpNotification"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Thread/realtime/sdpNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
@@ -11373,6 +11530,66 @@
|
||||
"title": "ThreadRealtimeOutputAudioDeltaNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeSdpNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.",
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"threadId"
|
||||
],
|
||||
"title": "ThreadRealtimeSdpNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadRealtimeStartTransport": {
|
||||
"description": "EXPERIMENTAL - transport used by thread realtime.",
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"websocket"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"title": "WebsocketThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"description": "SDP offer generated by a WebRTC RTCPeerConnection after configuring audio and the realtime events data channel.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"webrtc"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransportType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"type"
|
||||
],
|
||||
"title": "WebrtcThreadRealtimeStartTransport",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"ThreadRealtimeStartedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "EXPERIMENTAL - emitted when thread realtime startup is accepted.",
|
||||
@@ -11805,6 +12022,16 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"sessionStartSource": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ThreadStartSource"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"title": "ThreadStartParams",
|
||||
@@ -11872,6 +12099,13 @@
|
||||
"title": "ThreadStartResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"ThreadStartSource": {
|
||||
"enum": [
|
||||
"startup",
|
||||
"clear"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ThreadStartedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -12184,6 +12418,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -12205,6 +12455,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ApprovalsReviewer": {
|
||||
"description": "Configures who approval requests are routed to for review. Examples include sandbox escapes, blocked network access, MCP approval prompts, and ARC escalations. Defaults to `user`. `guardian_subagent` uses a carefully prompted subagent to gather relevant context and apply a risk-based decision framework before approving or denying the request.",
|
||||
"enum": [
|
||||
"user",
|
||||
"guardian_subagent"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"AskForApproval": {
|
||||
"oneOf": [
|
||||
{
|
||||
@@ -143,6 +151,12 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerFullAccessDenylistOnly": {
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dangerouslyAllowAllUnixSockets": {
|
||||
"type": [
|
||||
"boolean",
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"description": "Stop filesystem watch notifications for a prior `fs/watch`.",
|
||||
"properties": {
|
||||
"watchId": {
|
||||
"description": "Watch identifier returned by `fs/watch`.",
|
||||
"description": "Watch identifier previously provided to `fs/watch`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -15,10 +15,15 @@
|
||||
}
|
||||
],
|
||||
"description": "Absolute file or directory path to watch."
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
"path",
|
||||
"watchId"
|
||||
],
|
||||
"title": "FsWatchParams",
|
||||
"type": "object"
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"description": "Created watch handle returned by `fs/watch`.",
|
||||
"description": "Successful response for `fs/watch`.",
|
||||
"properties": {
|
||||
"path": {
|
||||
"allOf": [
|
||||
@@ -15,15 +15,10 @@
|
||||
}
|
||||
],
|
||||
"description": "Canonicalized path associated with the watch."
|
||||
},
|
||||
"watchId": {
|
||||
"description": "Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path",
|
||||
"watchId"
|
||||
"path"
|
||||
],
|
||||
"title": "FsWatchResponse",
|
||||
"type": "object"
|
||||
|
||||
@@ -294,6 +294,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
|
||||
@@ -294,6 +294,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
|
||||
@@ -1,5 +1,14 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"McpServerStatusDetail": {
|
||||
"enum": [
|
||||
"full",
|
||||
"toolsAndAuthOnly"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"cursor": {
|
||||
"description": "Opaque pagination cursor returned by a previous call.",
|
||||
@@ -8,6 +17,17 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"detail": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/McpServerStatusDetail"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Controls how much MCP inventory data to fetch for each server. Defaults to `Full` when omitted."
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a server-defined value.",
|
||||
"format": "uint32",
|
||||
|
||||
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"server": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"server",
|
||||
"threadId",
|
||||
"uri"
|
||||
],
|
||||
"title": "McpResourceReadParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ResourceContent": {
|
||||
"anyOf": [
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"text": {
|
||||
"type": "string"
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"text",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"blob": {
|
||||
"type": "string"
|
||||
},
|
||||
"mimeType": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"uri": {
|
||||
"description": "The URI of this resource.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"blob",
|
||||
"uri"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"description": "Contents returned when reading a resource from an MCP server."
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"contents": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/ResourceContent"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"contents"
|
||||
],
|
||||
"title": "McpResourceReadResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -430,6 +430,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1267,6 +1268,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1288,6 +1305,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -518,6 +518,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1856,6 +1857,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1877,6 +1894,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.",
|
||||
"properties": {
|
||||
"sdp": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sdp",
|
||||
"threadId"
|
||||
],
|
||||
"title": "ThreadRealtimeSdpNotification",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -518,6 +518,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1856,6 +1857,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1877,6 +1894,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -101,6 +101,13 @@
|
||||
"flex"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ThreadStartSource": {
|
||||
"enum": [
|
||||
"startup",
|
||||
"clear"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
@@ -210,6 +217,16 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"sessionStartSource": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ThreadStartSource"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"title": "ThreadStartParams",
|
||||
|
||||
@@ -518,6 +518,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1856,6 +1857,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1877,6 +1894,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -456,6 +456,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1614,6 +1615,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1635,6 +1652,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -430,6 +430,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1267,6 +1268,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1288,6 +1305,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -430,6 +430,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1267,6 +1268,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1288,6 +1305,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
@@ -430,6 +430,7 @@
|
||||
},
|
||||
"McpToolCallResult": {
|
||||
"properties": {
|
||||
"_meta": true,
|
||||
"content": {
|
||||
"items": true,
|
||||
"type": "array"
|
||||
@@ -1267,6 +1268,22 @@
|
||||
},
|
||||
"Turn": {
|
||||
"properties": {
|
||||
"completedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn completed.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"durationMs": {
|
||||
"description": "Duration between turn start and completion in milliseconds, if known.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"error": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1288,6 +1305,14 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"startedAt": {
|
||||
"description": "Unix timestamp (in seconds) when the turn started.",
|
||||
"format": "int64",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/TurnStatus"
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,17 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { JsonValue } from "./serde_json/JsonValue";
|
||||
|
||||
/**
|
||||
* Contents returned when reading a resource from an MCP server.
|
||||
*/
|
||||
export type ResourceContent = {
|
||||
/**
|
||||
* The URI of this resource.
|
||||
*/
|
||||
uri: string, mimeType?: string, text: string, _meta?: JsonValue, } | {
|
||||
/**
|
||||
* The URI of this resource.
|
||||
*/
|
||||
uri: string, mimeType?: string, blob: string, _meta?: JsonValue, };
|
||||
@@ -41,6 +41,7 @@ import type { ThreadRealtimeClosedNotification } from "./v2/ThreadRealtimeClosed
|
||||
import type { ThreadRealtimeErrorNotification } from "./v2/ThreadRealtimeErrorNotification";
|
||||
import type { ThreadRealtimeItemAddedNotification } from "./v2/ThreadRealtimeItemAddedNotification";
|
||||
import type { ThreadRealtimeOutputAudioDeltaNotification } from "./v2/ThreadRealtimeOutputAudioDeltaNotification";
|
||||
import type { ThreadRealtimeSdpNotification } from "./v2/ThreadRealtimeSdpNotification";
|
||||
import type { ThreadRealtimeStartedNotification } from "./v2/ThreadRealtimeStartedNotification";
|
||||
import type { ThreadRealtimeTranscriptUpdatedNotification } from "./v2/ThreadRealtimeTranscriptUpdatedNotification";
|
||||
import type { ThreadStartedNotification } from "./v2/ThreadStartedNotification";
|
||||
@@ -57,4 +58,4 @@ import type { WindowsWorldWritableWarningNotification } from "./v2/WindowsWorldW
|
||||
/**
|
||||
* Notification sent from the server to the client.
|
||||
*/
|
||||
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/status/changed", "params": ThreadStatusChangedNotification } | { "method": "thread/archived", "params": ThreadArchivedNotification } | { "method": "thread/unarchived", "params": ThreadUnarchivedNotification } | { "method": "thread/closed", "params": ThreadClosedNotification } | { "method": "skills/changed", "params": SkillsChangedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "hook/started", "params": HookStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "hook/completed", "params": HookCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/autoApprovalReview/started", "params": ItemGuardianApprovalReviewStartedNotification } | { "method": "item/autoApprovalReview/completed", "params": ItemGuardianApprovalReviewCompletedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "command/exec/outputDelta", "params": CommandExecOutputDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": 
TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "serverRequest/resolved", "params": ServerRequestResolvedNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "mcpServer/startupStatus/updated", "params": McpServerStatusUpdatedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": AppListUpdatedNotification } | { "method": "fs/changed", "params": FsChangedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "model/rerouted", "params": ModelReroutedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "fuzzyFileSearch/sessionUpdated", "params": FuzzyFileSearchSessionUpdatedNotification } | { "method": "fuzzyFileSearch/sessionCompleted", "params": FuzzyFileSearchSessionCompletedNotification } | { "method": "thread/realtime/started", "params": ThreadRealtimeStartedNotification } | { "method": "thread/realtime/itemAdded", "params": ThreadRealtimeItemAddedNotification } | { "method": "thread/realtime/transcriptUpdated", "params": ThreadRealtimeTranscriptUpdatedNotification } | { "method": "thread/realtime/outputAudio/delta", "params": ThreadRealtimeOutputAudioDeltaNotification } | { "method": 
"thread/realtime/error", "params": ThreadRealtimeErrorNotification } | { "method": "thread/realtime/closed", "params": ThreadRealtimeClosedNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "windowsSandbox/setupCompleted", "params": WindowsSandboxSetupCompletedNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification };
|
||||
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/status/changed", "params": ThreadStatusChangedNotification } | { "method": "thread/archived", "params": ThreadArchivedNotification } | { "method": "thread/unarchived", "params": ThreadUnarchivedNotification } | { "method": "thread/closed", "params": ThreadClosedNotification } | { "method": "skills/changed", "params": SkillsChangedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "hook/started", "params": HookStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "hook/completed", "params": HookCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/autoApprovalReview/started", "params": ItemGuardianApprovalReviewStartedNotification } | { "method": "item/autoApprovalReview/completed", "params": ItemGuardianApprovalReviewCompletedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "command/exec/outputDelta", "params": CommandExecOutputDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": 
TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "serverRequest/resolved", "params": ServerRequestResolvedNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "mcpServer/startupStatus/updated", "params": McpServerStatusUpdatedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": AppListUpdatedNotification } | { "method": "fs/changed", "params": FsChangedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "model/rerouted", "params": ModelReroutedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "fuzzyFileSearch/sessionUpdated", "params": FuzzyFileSearchSessionUpdatedNotification } | { "method": "fuzzyFileSearch/sessionCompleted", "params": FuzzyFileSearchSessionCompletedNotification } | { "method": "thread/realtime/started", "params": ThreadRealtimeStartedNotification } | { "method": "thread/realtime/itemAdded", "params": ThreadRealtimeItemAddedNotification } | { "method": "thread/realtime/transcriptUpdated", "params": ThreadRealtimeTranscriptUpdatedNotification } | { "method": "thread/realtime/outputAudio/delta", "params": ThreadRealtimeOutputAudioDeltaNotification } | { "method": 
"thread/realtime/sdp", "params": ThreadRealtimeSdpNotification } | { "method": "thread/realtime/error", "params": ThreadRealtimeErrorNotification } | { "method": "thread/realtime/closed", "params": ThreadRealtimeClosedNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "windowsSandbox/setupCompleted", "params": WindowsSandboxSetupCompletedNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification };
|
||||
|
||||
@@ -55,6 +55,7 @@ export type { ReasoningItemReasoningSummary } from "./ReasoningItemReasoningSumm
|
||||
export type { ReasoningSummary } from "./ReasoningSummary";
|
||||
export type { RequestId } from "./RequestId";
|
||||
export type { Resource } from "./Resource";
|
||||
export type { ResourceContent } from "./ResourceContent";
|
||||
export type { ResourceTemplate } from "./ResourceTemplate";
|
||||
export type { ResponseItem } from "./ResponseItem";
|
||||
export type { ReviewDecision } from "./ReviewDecision";
|
||||
|
||||
@@ -8,7 +8,7 @@ import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
*/
|
||||
export type FsChangedNotification = {
|
||||
/**
|
||||
* Watch identifier returned by `fs/watch`.
|
||||
* Watch identifier previously provided to `fs/watch`.
|
||||
*/
|
||||
watchId: string,
|
||||
/**
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
*/
|
||||
export type FsUnwatchParams = {
|
||||
/**
|
||||
* Watch identifier returned by `fs/watch`.
|
||||
* Watch identifier previously provided to `fs/watch`.
|
||||
*/
|
||||
watchId: string, };
|
||||
|
||||
@@ -7,6 +7,10 @@ import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
* Start filesystem watch notifications for an absolute path.
|
||||
*/
|
||||
export type FsWatchParams = {
|
||||
/**
|
||||
* Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.
|
||||
*/
|
||||
watchId: string,
|
||||
/**
|
||||
* Absolute file or directory path to watch.
|
||||
*/
|
||||
|
||||
@@ -4,13 +4,9 @@
|
||||
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
|
||||
/**
|
||||
* Created watch handle returned by `fs/watch`.
|
||||
* Successful response for `fs/watch`.
|
||||
*/
|
||||
export type FsWatchResponse = {
|
||||
/**
|
||||
* Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.
|
||||
*/
|
||||
watchId: string,
|
||||
/**
|
||||
* Canonicalized path associated with the watch.
|
||||
*/
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { McpServerStatusDetail } from "./McpServerStatusDetail";
|
||||
|
||||
export type ListMcpServerStatusParams = {
|
||||
/**
|
||||
@@ -10,4 +11,9 @@ cursor?: string | null,
|
||||
/**
|
||||
* Optional page size; defaults to a server-defined value.
|
||||
*/
|
||||
limit?: number | null, };
|
||||
limit?: number | null,
|
||||
/**
|
||||
* Controls how much MCP inventory data to fetch for each server.
|
||||
* Defaults to `Full` when omitted.
|
||||
*/
|
||||
detail?: McpServerStatusDetail | null, };
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type McpResourceReadParams = { threadId: string, server: string, uri: string, };
|
||||
@@ -0,0 +1,6 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ResourceContent } from "../ResourceContent";
|
||||
|
||||
export type McpResourceReadResponse = { contents: Array<ResourceContent>, };
|
||||
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type McpServerStatusDetail = "full" | "toolsAndAuthOnly";
|
||||
@@ -3,4 +3,4 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { JsonValue } from "../serde_json/JsonValue";
|
||||
|
||||
export type McpToolCallResult = { content: Array<JsonValue>, structuredContent: JsonValue | null, };
|
||||
export type McpToolCallResult = { content: Array<JsonValue>, structuredContent: JsonValue | null, _meta: JsonValue | null, };
|
||||
|
||||
@@ -29,4 +29,4 @@ unixSockets: { [key in string]?: NetworkUnixSocketPermission } | null,
|
||||
/**
|
||||
* Legacy compatibility view derived from `unix_sockets`.
|
||||
*/
|
||||
allowUnixSockets: Array<string> | null, allowLocalBinding: boolean | null, };
|
||||
allowUnixSockets: Array<string> | null, allowLocalBinding: boolean | null, dangerFullAccessDenylistOnly: boolean | null, };
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.
|
||||
*/
|
||||
export type ThreadRealtimeSdpNotification = { threadId: string, sdp: string, };
|
||||
@@ -0,0 +1,13 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* EXPERIMENTAL - transport used by thread realtime.
|
||||
*/
|
||||
export type ThreadRealtimeStartTransport = { "type": "websocket" } | { "type": "webrtc",
|
||||
/**
|
||||
* SDP offer generated by a WebRTC RTCPeerConnection after configuring audio and the
|
||||
* realtime events data channel.
|
||||
*/
|
||||
sdp: string, };
|
||||
@@ -7,12 +7,13 @@ import type { JsonValue } from "../serde_json/JsonValue";
|
||||
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
|
||||
import type { AskForApproval } from "./AskForApproval";
|
||||
import type { SandboxMode } from "./SandboxMode";
|
||||
import type { ThreadStartSource } from "./ThreadStartSource";
|
||||
|
||||
export type ThreadStartParams = {model?: string | null, modelProvider?: string | null, serviceTier?: ServiceTier | null | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, /**
|
||||
* Override where approval requests are routed for review on this thread
|
||||
* and subsequent turns.
|
||||
*/
|
||||
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, serviceName?: string | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, ephemeral?: boolean | null, /**
|
||||
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, serviceName?: string | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, ephemeral?: boolean | null, sessionStartSource?: ThreadStartSource | null, /**
|
||||
* If true, opt into emitting raw Responses API items on the event stream.
|
||||
* This is for internal use only (e.g. Codex Cloud).
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type ThreadStartSource = "startup" | "clear";
|
||||
@@ -15,4 +15,16 @@ items: Array<ThreadItem>, status: TurnStatus,
|
||||
/**
|
||||
* Only populated when the Turn's status is failed.
|
||||
*/
|
||||
error: TurnError | null, };
|
||||
error: TurnError | null,
|
||||
/**
|
||||
* Unix timestamp (in seconds) when the turn started.
|
||||
*/
|
||||
startedAt: number | null,
|
||||
/**
|
||||
* Unix timestamp (in seconds) when the turn completed.
|
||||
*/
|
||||
completedAt: number | null,
|
||||
/**
|
||||
* Duration between turn start and completion in milliseconds, if known.
|
||||
*/
|
||||
durationMs: number | null, };
|
||||
|
||||
@@ -172,6 +172,8 @@ export type { McpElicitationTitledSingleSelectEnumSchema } from "./McpElicitatio
|
||||
export type { McpElicitationUntitledEnumItems } from "./McpElicitationUntitledEnumItems";
|
||||
export type { McpElicitationUntitledMultiSelectEnumSchema } from "./McpElicitationUntitledMultiSelectEnumSchema";
|
||||
export type { McpElicitationUntitledSingleSelectEnumSchema } from "./McpElicitationUntitledSingleSelectEnumSchema";
|
||||
export type { McpResourceReadParams } from "./McpResourceReadParams";
|
||||
export type { McpResourceReadResponse } from "./McpResourceReadResponse";
|
||||
export type { McpServerElicitationAction } from "./McpServerElicitationAction";
|
||||
export type { McpServerElicitationRequestParams } from "./McpServerElicitationRequestParams";
|
||||
export type { McpServerElicitationRequestResponse } from "./McpServerElicitationRequestResponse";
|
||||
@@ -181,6 +183,7 @@ export type { McpServerOauthLoginResponse } from "./McpServerOauthLoginResponse"
|
||||
export type { McpServerRefreshResponse } from "./McpServerRefreshResponse";
|
||||
export type { McpServerStartupState } from "./McpServerStartupState";
|
||||
export type { McpServerStatus } from "./McpServerStatus";
|
||||
export type { McpServerStatusDetail } from "./McpServerStatusDetail";
|
||||
export type { McpServerStatusUpdatedNotification } from "./McpServerStatusUpdatedNotification";
|
||||
export type { McpToolCallError } from "./McpToolCallError";
|
||||
export type { McpToolCallProgressNotification } from "./McpToolCallProgressNotification";
|
||||
@@ -291,6 +294,8 @@ export type { ThreadRealtimeClosedNotification } from "./ThreadRealtimeClosedNot
|
||||
export type { ThreadRealtimeErrorNotification } from "./ThreadRealtimeErrorNotification";
|
||||
export type { ThreadRealtimeItemAddedNotification } from "./ThreadRealtimeItemAddedNotification";
|
||||
export type { ThreadRealtimeOutputAudioDeltaNotification } from "./ThreadRealtimeOutputAudioDeltaNotification";
|
||||
export type { ThreadRealtimeSdpNotification } from "./ThreadRealtimeSdpNotification";
|
||||
export type { ThreadRealtimeStartTransport } from "./ThreadRealtimeStartTransport";
|
||||
export type { ThreadRealtimeStartedNotification } from "./ThreadRealtimeStartedNotification";
|
||||
export type { ThreadRealtimeTranscriptUpdatedNotification } from "./ThreadRealtimeTranscriptUpdatedNotification";
|
||||
export type { ThreadResumeParams } from "./ThreadResumeParams";
|
||||
@@ -305,6 +310,7 @@ export type { ThreadSortKey } from "./ThreadSortKey";
|
||||
export type { ThreadSourceKind } from "./ThreadSourceKind";
|
||||
export type { ThreadStartParams } from "./ThreadStartParams";
|
||||
export type { ThreadStartResponse } from "./ThreadStartResponse";
|
||||
export type { ThreadStartSource } from "./ThreadStartSource";
|
||||
export type { ThreadStartedNotification } from "./ThreadStartedNotification";
|
||||
export type { ThreadStatus } from "./ThreadStatus";
|
||||
export type { ThreadStatusChangedNotification } from "./ThreadStatusChangedNotification";
|
||||
|
||||
@@ -15,6 +15,7 @@ pub use export::generate_ts_with_options;
|
||||
pub use export::generate_types;
|
||||
pub use jsonrpc_lite::*;
|
||||
pub use protocol::common::*;
|
||||
pub use protocol::item_builders::*;
|
||||
pub use protocol::thread_history::*;
|
||||
pub use protocol::v1::ApplyPatchApprovalParams;
|
||||
pub use protocol::v1::ApplyPatchApprovalResponse;
|
||||
|
||||
@@ -459,6 +459,11 @@ client_request_definitions! {
|
||||
response: v2::ListMcpServerStatusResponse,
|
||||
},
|
||||
|
||||
McpResourceRead => "mcpServer/resource/read" {
|
||||
params: v2::McpResourceReadParams,
|
||||
response: v2::McpResourceReadResponse,
|
||||
},
|
||||
|
||||
WindowsSandboxSetupStart => "windowsSandbox/setupStart" {
|
||||
params: v2::WindowsSandboxSetupStartParams,
|
||||
response: v2::WindowsSandboxSetupStartResponse,
|
||||
@@ -615,6 +620,41 @@ macro_rules! server_request_definitions {
|
||||
}
|
||||
}
|
||||
|
||||
/// Typed response from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerResponse {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
response: $response,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
impl ServerResponse {
|
||||
pub fn id(&self) -> &RequestId {
|
||||
match self {
|
||||
$(Self::$variant { request_id, .. } => request_id,)*
|
||||
}
|
||||
}
|
||||
|
||||
pub fn method(&self) -> String {
|
||||
serde_json::to_value(self)
|
||||
.ok()
|
||||
.and_then(|value| {
|
||||
value
|
||||
.get("method")
|
||||
.and_then(serde_json::Value::as_str)
|
||||
.map(str::to_owned)
|
||||
})
|
||||
.unwrap_or_else(|| "<unknown>".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
pub enum ServerRequestPayload {
|
||||
@@ -965,6 +1005,8 @@ server_notification_definitions! {
|
||||
ThreadRealtimeTranscriptUpdated => "thread/realtime/transcriptUpdated" (v2::ThreadRealtimeTranscriptUpdatedNotification),
|
||||
#[experimental("thread/realtime/outputAudio/delta")]
|
||||
ThreadRealtimeOutputAudioDelta => "thread/realtime/outputAudio/delta" (v2::ThreadRealtimeOutputAudioDeltaNotification),
|
||||
#[experimental("thread/realtime/sdp")]
|
||||
ThreadRealtimeSdp => "thread/realtime/sdp" (v2::ThreadRealtimeSdpNotification),
|
||||
#[experimental("thread/realtime/error")]
|
||||
ThreadRealtimeError => "thread/realtime/error" (v2::ThreadRealtimeErrorNotification),
|
||||
#[experimental("thread/realtime/closed")]
|
||||
@@ -1225,6 +1267,30 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_response() -> Result<()> {
|
||||
let response = ServerResponse::CommandExecutionRequestApproval {
|
||||
request_id: RequestId::Integer(8),
|
||||
response: v2::CommandExecutionRequestApprovalResponse {
|
||||
decision: v2::CommandExecutionApprovalDecision::AcceptForSession,
|
||||
},
|
||||
};
|
||||
|
||||
assert_eq!(response.id(), &RequestId::Integer(8));
|
||||
assert_eq!(response.method(), "item/commandExecution/requestApproval");
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "item/commandExecution/requestApproval",
|
||||
"id": 8,
|
||||
"response": {
|
||||
"decision": "acceptForSession"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&response)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_mcp_server_elicitation_request() -> Result<()> {
|
||||
let requested_schema: v2::McpElicitationSchema = serde_json::from_value(json!({
|
||||
@@ -1630,6 +1696,7 @@ mod tests {
|
||||
let request = ClientRequest::FsWatch {
|
||||
request_id: RequestId::Integer(10),
|
||||
params: v2::FsWatchParams {
|
||||
watch_id: "watch-git".to_string(),
|
||||
path: absolute_path("tmp/repo/.git"),
|
||||
},
|
||||
};
|
||||
@@ -1638,6 +1705,7 @@ mod tests {
|
||||
"method": "fs/watch",
|
||||
"id": 10,
|
||||
"params": {
|
||||
"watchId": "watch-git",
|
||||
"path": absolute_path_string("tmp/repo/.git")
|
||||
}
|
||||
}),
|
||||
@@ -1695,6 +1763,7 @@ mod tests {
|
||||
thread_id: "thr_123".to_string(),
|
||||
prompt: "You are on a call".to_string(),
|
||||
session_id: Some("sess_456".to_string()),
|
||||
transport: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
@@ -1704,7 +1773,8 @@ mod tests {
|
||||
"params": {
|
||||
"threadId": "thr_123",
|
||||
"prompt": "You are on a call",
|
||||
"sessionId": "sess_456"
|
||||
"sessionId": "sess_456",
|
||||
"transport": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
@@ -1784,6 +1854,7 @@ mod tests {
|
||||
thread_id: "thr_123".to_string(),
|
||||
prompt: "You are on a call".to_string(),
|
||||
session_id: None,
|
||||
transport: None,
|
||||
},
|
||||
};
|
||||
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
|
||||
|
||||
299
codex-rs/app-server-protocol/src/protocol/item_builders.rs
Normal file
299
codex-rs/app-server-protocol/src/protocol/item_builders.rs
Normal file
@@ -0,0 +1,299 @@
|
||||
//! Shared builders for synthetic [`ThreadItem`] values emitted by the app-server layer.
|
||||
//!
|
||||
//! These items do not come from first-class core `ItemStarted` / `ItemCompleted` events.
|
||||
//! Instead, the app-server synthesizes them so clients can render a coherent lifecycle for
|
||||
//! approvals and other pre-execution flows before the underlying tool has started or when the
|
||||
//! tool never starts at all.
|
||||
//!
|
||||
//! Keeping these builders in one place is useful for two reasons:
|
||||
//! - Live notifications and rebuilt `thread/read` history both need to construct the same
|
||||
//! synthetic items, so sharing the logic avoids drift between those paths.
|
||||
//! - The projection is presentation-specific. Core protocol events stay generic, while the
|
||||
//! app-server protocol decides how to surface those events as `ThreadItem`s for clients.
|
||||
use crate::protocol::common::ServerNotification;
|
||||
use crate::protocol::v2::CommandAction;
|
||||
use crate::protocol::v2::CommandExecutionSource;
|
||||
use crate::protocol::v2::CommandExecutionStatus;
|
||||
use crate::protocol::v2::FileUpdateChange;
|
||||
use crate::protocol::v2::GuardianApprovalReview;
|
||||
use crate::protocol::v2::GuardianApprovalReviewStatus;
|
||||
use crate::protocol::v2::ItemGuardianApprovalReviewCompletedNotification;
|
||||
use crate::protocol::v2::ItemGuardianApprovalReviewStartedNotification;
|
||||
use crate::protocol::v2::PatchApplyStatus;
|
||||
use crate::protocol::v2::PatchChangeKind;
|
||||
use crate::protocol::v2::ThreadItem;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_protocol::protocol::ExecApprovalRequestEvent;
|
||||
use codex_protocol::protocol::ExecCommandBeginEvent;
|
||||
use codex_protocol::protocol::ExecCommandEndEvent;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::GuardianAssessmentAction;
|
||||
use codex_protocol::protocol::GuardianAssessmentEvent;
|
||||
use codex_protocol::protocol::PatchApplyBeginEvent;
|
||||
use codex_protocol::protocol::PatchApplyEndEvent;
|
||||
use codex_shell_command::parse_command::parse_command;
|
||||
use codex_shell_command::parse_command::shlex_join;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn build_file_change_approval_request_item(
|
||||
payload: &ApplyPatchApprovalRequestEvent,
|
||||
) -> ThreadItem {
|
||||
ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status: PatchApplyStatus::InProgress,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_file_change_begin_item(payload: &PatchApplyBeginEvent) -> ThreadItem {
|
||||
ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status: PatchApplyStatus::InProgress,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_file_change_end_item(payload: &PatchApplyEndEvent) -> ThreadItem {
|
||||
ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status: (&payload.status).into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_command_execution_approval_request_item(
|
||||
payload: &ExecApprovalRequestEvent,
|
||||
) -> ThreadItem {
|
||||
ThreadItem::CommandExecution {
|
||||
id: payload.call_id.clone(),
|
||||
command: shlex_join(&payload.command),
|
||||
cwd: payload.cwd.clone(),
|
||||
process_id: None,
|
||||
source: CommandExecutionSource::Agent,
|
||||
status: CommandExecutionStatus::InProgress,
|
||||
command_actions: payload
|
||||
.parsed_cmd
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(CommandAction::from)
|
||||
.collect(),
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_command_execution_begin_item(payload: &ExecCommandBeginEvent) -> ThreadItem {
|
||||
ThreadItem::CommandExecution {
|
||||
id: payload.call_id.clone(),
|
||||
command: shlex_join(&payload.command),
|
||||
cwd: payload.cwd.clone(),
|
||||
process_id: payload.process_id.clone(),
|
||||
source: payload.source.into(),
|
||||
status: CommandExecutionStatus::InProgress,
|
||||
command_actions: payload
|
||||
.parsed_cmd
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(CommandAction::from)
|
||||
.collect(),
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_command_execution_end_item(payload: &ExecCommandEndEvent) -> ThreadItem {
|
||||
let aggregated_output = if payload.aggregated_output.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(payload.aggregated_output.clone())
|
||||
};
|
||||
let duration_ms = i64::try_from(payload.duration.as_millis()).unwrap_or(i64::MAX);
|
||||
|
||||
ThreadItem::CommandExecution {
|
||||
id: payload.call_id.clone(),
|
||||
command: shlex_join(&payload.command),
|
||||
cwd: payload.cwd.clone(),
|
||||
process_id: payload.process_id.clone(),
|
||||
source: payload.source.into(),
|
||||
status: (&payload.status).into(),
|
||||
command_actions: payload
|
||||
.parsed_cmd
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(CommandAction::from)
|
||||
.collect(),
|
||||
aggregated_output,
|
||||
exit_code: Some(payload.exit_code),
|
||||
duration_ms: Some(duration_ms),
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a guardian-derived [`ThreadItem`].
|
||||
///
|
||||
/// Currently this only synthesizes [`ThreadItem::CommandExecution`] for
|
||||
/// [`GuardianAssessmentAction::Command`] and [`GuardianAssessmentAction::Execve`].
|
||||
pub fn build_item_from_guardian_event(
|
||||
assessment: &GuardianAssessmentEvent,
|
||||
status: CommandExecutionStatus,
|
||||
) -> Option<ThreadItem> {
|
||||
match &assessment.action {
|
||||
GuardianAssessmentAction::Command { command, cwd, .. } => {
|
||||
let command = command.clone();
|
||||
let command_actions = vec![CommandAction::Unknown {
|
||||
command: command.clone(),
|
||||
}];
|
||||
Some(ThreadItem::CommandExecution {
|
||||
id: assessment.id.clone(),
|
||||
command,
|
||||
cwd: cwd.clone(),
|
||||
process_id: None,
|
||||
source: CommandExecutionSource::Agent,
|
||||
status,
|
||||
command_actions,
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
})
|
||||
}
|
||||
GuardianAssessmentAction::Execve {
|
||||
program, argv, cwd, ..
|
||||
} => {
|
||||
let argv = if argv.is_empty() {
|
||||
vec![program.clone()]
|
||||
} else {
|
||||
std::iter::once(program.clone())
|
||||
.chain(argv.iter().skip(1).cloned())
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
let command = shlex_join(&argv);
|
||||
let parsed_cmd = parse_command(&argv);
|
||||
let command_actions = if parsed_cmd.is_empty() {
|
||||
vec![CommandAction::Unknown {
|
||||
command: command.clone(),
|
||||
}]
|
||||
} else {
|
||||
parsed_cmd.into_iter().map(CommandAction::from).collect()
|
||||
};
|
||||
Some(ThreadItem::CommandExecution {
|
||||
id: assessment.id.clone(),
|
||||
command,
|
||||
cwd: cwd.clone(),
|
||||
process_id: None,
|
||||
source: CommandExecutionSource::Agent,
|
||||
status,
|
||||
command_actions,
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
})
|
||||
}
|
||||
GuardianAssessmentAction::ApplyPatch { .. }
|
||||
| GuardianAssessmentAction::NetworkAccess { .. }
|
||||
| GuardianAssessmentAction::McpToolCall { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn guardian_auto_approval_review_notification(
|
||||
conversation_id: &ThreadId,
|
||||
event_turn_id: &str,
|
||||
assessment: &GuardianAssessmentEvent,
|
||||
) -> ServerNotification {
|
||||
// TODO(ccunningham): Attach guardian review state to the reviewed tool
|
||||
// item's lifecycle instead of sending standalone review notifications so
|
||||
// the app-server API can persist and replay review state via `thread/read`.
|
||||
let turn_id = if assessment.turn_id.is_empty() {
|
||||
event_turn_id.to_string()
|
||||
} else {
|
||||
assessment.turn_id.clone()
|
||||
};
|
||||
let review = GuardianApprovalReview {
|
||||
status: match assessment.status {
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::InProgress => {
|
||||
GuardianApprovalReviewStatus::InProgress
|
||||
}
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::Approved => {
|
||||
GuardianApprovalReviewStatus::Approved
|
||||
}
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::Denied => {
|
||||
GuardianApprovalReviewStatus::Denied
|
||||
}
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::Aborted => {
|
||||
GuardianApprovalReviewStatus::Aborted
|
||||
}
|
||||
},
|
||||
risk_score: assessment.risk_score,
|
||||
risk_level: assessment.risk_level.map(Into::into),
|
||||
rationale: assessment.rationale.clone(),
|
||||
};
|
||||
let action = assessment.action.clone().into();
|
||||
match assessment.status {
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::InProgress => {
|
||||
ServerNotification::ItemGuardianApprovalReviewStarted(
|
||||
ItemGuardianApprovalReviewStartedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id,
|
||||
target_item_id: assessment.id.clone(),
|
||||
review,
|
||||
action,
|
||||
},
|
||||
)
|
||||
}
|
||||
codex_protocol::protocol::GuardianAssessmentStatus::Approved
|
||||
| codex_protocol::protocol::GuardianAssessmentStatus::Denied
|
||||
| codex_protocol::protocol::GuardianAssessmentStatus::Aborted => {
|
||||
ServerNotification::ItemGuardianApprovalReviewCompleted(
|
||||
ItemGuardianApprovalReviewCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id,
|
||||
target_item_id: assessment.id.clone(),
|
||||
review,
|
||||
action,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn convert_patch_changes(changes: &HashMap<PathBuf, FileChange>) -> Vec<FileUpdateChange> {
|
||||
let mut converted: Vec<FileUpdateChange> = changes
|
||||
.iter()
|
||||
.map(|(path, change)| FileUpdateChange {
|
||||
path: path.to_string_lossy().into_owned(),
|
||||
kind: map_patch_change_kind(change),
|
||||
diff: format_file_change_diff(change),
|
||||
})
|
||||
.collect();
|
||||
converted.sort_by(|a, b| a.path.cmp(&b.path));
|
||||
converted
|
||||
}
|
||||
|
||||
fn map_patch_change_kind(change: &FileChange) -> PatchChangeKind {
|
||||
match change {
|
||||
FileChange::Add { .. } => PatchChangeKind::Add,
|
||||
FileChange::Delete { .. } => PatchChangeKind::Delete,
|
||||
FileChange::Update { move_path, .. } => PatchChangeKind::Update {
|
||||
move_path: move_path.clone(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn format_file_change_diff(change: &FileChange) -> String {
|
||||
match change {
|
||||
FileChange::Add { content } => content.clone(),
|
||||
FileChange::Delete { content } => content.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff,
|
||||
move_path,
|
||||
} => {
|
||||
if let Some(path) = move_path {
|
||||
format!("{unified_diff}\n\nMoved to: {}", path.display())
|
||||
} else {
|
||||
unified_diff.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
|
||||
|
||||
pub mod common;
|
||||
pub mod item_builders;
|
||||
mod mappers;
|
||||
mod serde_helpers;
|
||||
pub mod thread_history;
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
use crate::protocol::item_builders::build_command_execution_begin_item;
|
||||
use crate::protocol::item_builders::build_command_execution_end_item;
|
||||
use crate::protocol::item_builders::build_file_change_approval_request_item;
|
||||
use crate::protocol::item_builders::build_file_change_begin_item;
|
||||
use crate::protocol::item_builders::build_file_change_end_item;
|
||||
use crate::protocol::item_builders::build_item_from_guardian_event;
|
||||
use crate::protocol::v2::CollabAgentState;
|
||||
use crate::protocol::v2::CollabAgentTool;
|
||||
use crate::protocol::v2::CollabAgentToolCallStatus;
|
||||
use crate::protocol::v2::CommandAction;
|
||||
use crate::protocol::v2::CommandExecutionStatus;
|
||||
use crate::protocol::v2::DynamicToolCallOutputContentItem;
|
||||
use crate::protocol::v2::DynamicToolCallStatus;
|
||||
use crate::protocol::v2::FileUpdateChange;
|
||||
use crate::protocol::v2::McpToolCallError;
|
||||
use crate::protocol::v2::McpToolCallResult;
|
||||
use crate::protocol::v2::McpToolCallStatus;
|
||||
use crate::protocol::v2::PatchApplyStatus;
|
||||
use crate::protocol::v2::PatchChangeKind;
|
||||
use crate::protocol::v2::ThreadItem;
|
||||
use crate::protocol::v2::Turn;
|
||||
use crate::protocol::v2::TurnError as V2TurnError;
|
||||
@@ -31,6 +33,8 @@ use codex_protocol::protocol::ErrorEvent;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::ExecCommandBeginEvent;
|
||||
use codex_protocol::protocol::ExecCommandEndEvent;
|
||||
use codex_protocol::protocol::GuardianAssessmentEvent;
|
||||
use codex_protocol::protocol::GuardianAssessmentStatus;
|
||||
use codex_protocol::protocol::ImageGenerationBeginEvent;
|
||||
use codex_protocol::protocol::ImageGenerationEndEvent;
|
||||
use codex_protocol::protocol::ItemCompletedEvent;
|
||||
@@ -53,6 +57,14 @@ use std::collections::HashMap;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[cfg(test)]
|
||||
use crate::protocol::v2::CommandAction;
|
||||
#[cfg(test)]
|
||||
use crate::protocol::v2::FileUpdateChange;
|
||||
#[cfg(test)]
|
||||
use crate::protocol::v2::PatchApplyStatus;
|
||||
#[cfg(test)]
|
||||
use crate::protocol::v2::PatchChangeKind;
|
||||
#[cfg(test)]
|
||||
use codex_protocol::protocol::ExecCommandStatus as CoreExecCommandStatus;
|
||||
#[cfg(test)]
|
||||
@@ -149,6 +161,7 @@ impl ThreadHistoryBuilder {
|
||||
EventMsg::WebSearchEnd(payload) => self.handle_web_search_end(payload),
|
||||
EventMsg::ExecCommandBegin(payload) => self.handle_exec_command_begin(payload),
|
||||
EventMsg::ExecCommandEnd(payload) => self.handle_exec_command_end(payload),
|
||||
EventMsg::GuardianAssessment(payload) => self.handle_guardian_assessment(payload),
|
||||
EventMsg::ApplyPatchApprovalRequest(payload) => {
|
||||
self.handle_apply_patch_approval_request(payload)
|
||||
}
|
||||
@@ -375,57 +388,12 @@ impl ThreadHistoryBuilder {
|
||||
}
|
||||
|
||||
fn handle_exec_command_begin(&mut self, payload: &ExecCommandBeginEvent) {
|
||||
let command = shlex::try_join(payload.command.iter().map(String::as_str))
|
||||
.unwrap_or_else(|_| payload.command.join(" "));
|
||||
let command_actions = payload
|
||||
.parsed_cmd
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(CommandAction::from)
|
||||
.collect();
|
||||
let item = ThreadItem::CommandExecution {
|
||||
id: payload.call_id.clone(),
|
||||
command,
|
||||
cwd: payload.cwd.clone(),
|
||||
process_id: payload.process_id.clone(),
|
||||
source: payload.source.into(),
|
||||
status: CommandExecutionStatus::InProgress,
|
||||
command_actions,
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
};
|
||||
let item = build_command_execution_begin_item(payload);
|
||||
self.upsert_item_in_turn_id(&payload.turn_id, item);
|
||||
}
|
||||
|
||||
fn handle_exec_command_end(&mut self, payload: &ExecCommandEndEvent) {
|
||||
let status: CommandExecutionStatus = (&payload.status).into();
|
||||
let duration_ms = i64::try_from(payload.duration.as_millis()).unwrap_or(i64::MAX);
|
||||
let aggregated_output = if payload.aggregated_output.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(payload.aggregated_output.clone())
|
||||
};
|
||||
let command = shlex::try_join(payload.command.iter().map(String::as_str))
|
||||
.unwrap_or_else(|_| payload.command.join(" "));
|
||||
let command_actions = payload
|
||||
.parsed_cmd
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(CommandAction::from)
|
||||
.collect();
|
||||
let item = ThreadItem::CommandExecution {
|
||||
id: payload.call_id.clone(),
|
||||
command,
|
||||
cwd: payload.cwd.clone(),
|
||||
process_id: payload.process_id.clone(),
|
||||
source: payload.source.into(),
|
||||
status,
|
||||
command_actions,
|
||||
aggregated_output,
|
||||
exit_code: Some(payload.exit_code),
|
||||
duration_ms: Some(duration_ms),
|
||||
};
|
||||
let item = build_command_execution_end_item(payload);
|
||||
// Command completions can arrive out of order. Unified exec may return
|
||||
// while a PTY is still running, then emit ExecCommandEnd later from a
|
||||
// background exit watcher when that process finally exits. By then, a
|
||||
@@ -434,12 +402,26 @@ impl ThreadHistoryBuilder {
|
||||
self.upsert_item_in_turn_id(&payload.turn_id, item);
|
||||
}
|
||||
|
||||
fn handle_apply_patch_approval_request(&mut self, payload: &ApplyPatchApprovalRequestEvent) {
|
||||
let item = ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status: PatchApplyStatus::InProgress,
|
||||
fn handle_guardian_assessment(&mut self, payload: &GuardianAssessmentEvent) {
|
||||
let status = match payload.status {
|
||||
GuardianAssessmentStatus::InProgress => CommandExecutionStatus::InProgress,
|
||||
GuardianAssessmentStatus::Denied | GuardianAssessmentStatus::Aborted => {
|
||||
CommandExecutionStatus::Declined
|
||||
}
|
||||
GuardianAssessmentStatus::Approved => return,
|
||||
};
|
||||
let Some(item) = build_item_from_guardian_event(payload, status) else {
|
||||
return;
|
||||
};
|
||||
if payload.turn_id.is_empty() {
|
||||
self.upsert_item_in_current_turn(item);
|
||||
} else {
|
||||
self.upsert_item_in_turn_id(&payload.turn_id, item);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_apply_patch_approval_request(&mut self, payload: &ApplyPatchApprovalRequestEvent) {
|
||||
let item = build_file_change_approval_request_item(payload);
|
||||
if payload.turn_id.is_empty() {
|
||||
self.upsert_item_in_current_turn(item);
|
||||
} else {
|
||||
@@ -448,11 +430,7 @@ impl ThreadHistoryBuilder {
|
||||
}
|
||||
|
||||
fn handle_patch_apply_begin(&mut self, payload: &PatchApplyBeginEvent) {
|
||||
let item = ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status: PatchApplyStatus::InProgress,
|
||||
};
|
||||
let item = build_file_change_begin_item(payload);
|
||||
if payload.turn_id.is_empty() {
|
||||
self.upsert_item_in_current_turn(item);
|
||||
} else {
|
||||
@@ -461,12 +439,7 @@ impl ThreadHistoryBuilder {
|
||||
}
|
||||
|
||||
fn handle_patch_apply_end(&mut self, payload: &PatchApplyEndEvent) {
|
||||
let status: PatchApplyStatus = (&payload.status).into();
|
||||
let item = ThreadItem::FileChange {
|
||||
id: payload.call_id.clone(),
|
||||
changes: convert_patch_changes(&payload.changes),
|
||||
status,
|
||||
};
|
||||
let item = build_file_change_end_item(payload);
|
||||
if payload.turn_id.is_empty() {
|
||||
self.upsert_item_in_current_turn(item);
|
||||
} else {
|
||||
@@ -547,6 +520,7 @@ impl ThreadHistoryBuilder {
|
||||
Some(McpToolCallResult {
|
||||
content: value.content.clone(),
|
||||
structured_content: value.structured_content.clone(),
|
||||
meta: value.meta.clone(),
|
||||
}),
|
||||
None,
|
||||
),
|
||||
@@ -891,22 +865,29 @@ impl ThreadHistoryBuilder {
|
||||
}
|
||||
|
||||
fn handle_turn_aborted(&mut self, payload: &TurnAbortedEvent) {
|
||||
let apply_abort = |turn: &mut PendingTurn| {
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
turn.completed_at = payload.completed_at;
|
||||
turn.duration_ms = payload.duration_ms;
|
||||
};
|
||||
if let Some(turn_id) = payload.turn_id.as_deref() {
|
||||
// Prefer an exact ID match so we interrupt the turn explicitly targeted by the event.
|
||||
if let Some(turn) = self.current_turn.as_mut().filter(|turn| turn.id == turn_id) {
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
apply_abort(turn);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(turn) = self.turns.iter_mut().find(|turn| turn.id == turn_id) {
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
turn.completed_at = payload.completed_at;
|
||||
turn.duration_ms = payload.duration_ms;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// If the event has no ID (or refers to an unknown turn), fall back to the active turn.
|
||||
if let Some(turn) = self.current_turn.as_mut() {
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
apply_abort(turn);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -915,15 +896,18 @@ impl ThreadHistoryBuilder {
|
||||
self.current_turn = Some(
|
||||
self.new_turn(Some(payload.turn_id.clone()))
|
||||
.with_status(TurnStatus::InProgress)
|
||||
.with_started_at(payload.started_at)
|
||||
.opened_explicitly(),
|
||||
);
|
||||
}
|
||||
|
||||
fn handle_turn_complete(&mut self, payload: &TurnCompleteEvent) {
|
||||
let mark_completed = |status: &mut TurnStatus| {
|
||||
if matches!(*status, TurnStatus::Completed | TurnStatus::InProgress) {
|
||||
*status = TurnStatus::Completed;
|
||||
let mark_completed = |turn: &mut PendingTurn| {
|
||||
if matches!(turn.status, TurnStatus::Completed | TurnStatus::InProgress) {
|
||||
turn.status = TurnStatus::Completed;
|
||||
}
|
||||
turn.completed_at = payload.completed_at;
|
||||
turn.duration_ms = payload.duration_ms;
|
||||
};
|
||||
|
||||
// Prefer an exact ID match from the active turn and then close it.
|
||||
@@ -932,7 +916,7 @@ impl ThreadHistoryBuilder {
|
||||
.as_mut()
|
||||
.filter(|turn| turn.id == payload.turn_id)
|
||||
{
|
||||
mark_completed(&mut current_turn.status);
|
||||
mark_completed(current_turn);
|
||||
self.finish_current_turn();
|
||||
return;
|
||||
}
|
||||
@@ -942,13 +926,17 @@ impl ThreadHistoryBuilder {
|
||||
.iter_mut()
|
||||
.find(|turn| turn.id == payload.turn_id)
|
||||
{
|
||||
mark_completed(&mut turn.status);
|
||||
if matches!(turn.status, TurnStatus::Completed | TurnStatus::InProgress) {
|
||||
turn.status = TurnStatus::Completed;
|
||||
}
|
||||
turn.completed_at = payload.completed_at;
|
||||
turn.duration_ms = payload.duration_ms;
|
||||
return;
|
||||
}
|
||||
|
||||
// If the completion event cannot be matched, apply it to the active turn.
|
||||
if let Some(current_turn) = self.current_turn.as_mut() {
|
||||
mark_completed(&mut current_turn.status);
|
||||
mark_completed(current_turn);
|
||||
self.finish_current_turn();
|
||||
}
|
||||
}
|
||||
@@ -981,7 +969,7 @@ impl ThreadHistoryBuilder {
|
||||
if turn.items.is_empty() && !turn.opened_explicitly && !turn.saw_compaction {
|
||||
return;
|
||||
}
|
||||
self.turns.push(turn.into());
|
||||
self.turns.push(Turn::from(turn));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -991,6 +979,9 @@ impl ThreadHistoryBuilder {
|
||||
items: Vec::new(),
|
||||
error: None,
|
||||
status: TurnStatus::Completed,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
opened_explicitly: false,
|
||||
saw_compaction: false,
|
||||
rollout_start_index: self.current_rollout_index,
|
||||
@@ -1076,21 +1067,6 @@ fn render_review_output_text(output: &ReviewOutputEvent) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn convert_patch_changes(
|
||||
changes: &HashMap<std::path::PathBuf, codex_protocol::protocol::FileChange>,
|
||||
) -> Vec<FileUpdateChange> {
|
||||
let mut converted: Vec<FileUpdateChange> = changes
|
||||
.iter()
|
||||
.map(|(path, change)| FileUpdateChange {
|
||||
path: path.to_string_lossy().into_owned(),
|
||||
kind: map_patch_change_kind(change),
|
||||
diff: format_file_change_diff(change),
|
||||
})
|
||||
.collect();
|
||||
converted.sort_by(|a, b| a.path.cmp(&b.path));
|
||||
converted
|
||||
}
|
||||
|
||||
fn convert_dynamic_tool_content_items(
|
||||
items: &[codex_protocol::dynamic_tools::DynamicToolCallOutputContentItem],
|
||||
) -> Vec<DynamicToolCallOutputContentItem> {
|
||||
@@ -1108,33 +1084,6 @@ fn convert_dynamic_tool_content_items(
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn map_patch_change_kind(change: &codex_protocol::protocol::FileChange) -> PatchChangeKind {
|
||||
match change {
|
||||
codex_protocol::protocol::FileChange::Add { .. } => PatchChangeKind::Add,
|
||||
codex_protocol::protocol::FileChange::Delete { .. } => PatchChangeKind::Delete,
|
||||
codex_protocol::protocol::FileChange::Update { move_path, .. } => PatchChangeKind::Update {
|
||||
move_path: move_path.clone(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn format_file_change_diff(change: &codex_protocol::protocol::FileChange) -> String {
|
||||
match change {
|
||||
codex_protocol::protocol::FileChange::Add { content } => content.clone(),
|
||||
codex_protocol::protocol::FileChange::Delete { content } => content.clone(),
|
||||
codex_protocol::protocol::FileChange::Update {
|
||||
unified_diff,
|
||||
move_path,
|
||||
} => {
|
||||
if let Some(path) = move_path {
|
||||
format!("{unified_diff}\n\nMoved to: {}", path.display())
|
||||
} else {
|
||||
unified_diff.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn upsert_turn_item(items: &mut Vec<ThreadItem>, item: ThreadItem) {
|
||||
if let Some(existing_item) = items
|
||||
.iter_mut()
|
||||
@@ -1151,6 +1100,9 @@ struct PendingTurn {
|
||||
items: Vec<ThreadItem>,
|
||||
error: Option<TurnError>,
|
||||
status: TurnStatus,
|
||||
started_at: Option<i64>,
|
||||
completed_at: Option<i64>,
|
||||
duration_ms: Option<i64>,
|
||||
/// True when this turn originated from an explicit `turn_started`/`turn_complete`
|
||||
/// boundary, so we preserve it even if it has no renderable items.
|
||||
opened_explicitly: bool,
|
||||
@@ -1171,6 +1123,11 @@ impl PendingTurn {
|
||||
self.status = status;
|
||||
self
|
||||
}
|
||||
|
||||
fn with_started_at(mut self, started_at: Option<i64>) -> Self {
|
||||
self.started_at = started_at;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PendingTurn> for Turn {
|
||||
@@ -1180,6 +1137,9 @@ impl From<PendingTurn> for Turn {
|
||||
items: value.items,
|
||||
error: value.error,
|
||||
status: value.status,
|
||||
started_at: value.started_at,
|
||||
completed_at: value.completed_at,
|
||||
duration_ms: value.duration_ms,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1191,6 +1151,9 @@ impl From<&PendingTurn> for Turn {
|
||||
items: value.items.clone(),
|
||||
error: value.error.clone(),
|
||||
status: value.status.clone(),
|
||||
started_at: value.started_at,
|
||||
completed_at: value.completed_at,
|
||||
duration_ms: value.duration_ms,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1205,6 +1168,7 @@ mod tests {
|
||||
use codex_protocol::items::TurnItem as CoreTurnItem;
|
||||
use codex_protocol::items::UserMessageItem as CoreUserMessageItem;
|
||||
use codex_protocol::items::build_hook_prompt_message;
|
||||
use codex_protocol::mcp::CallToolResult;
|
||||
use codex_protocol::models::MessagePhase as CoreMessagePhase;
|
||||
use codex_protocol::models::WebSearchAction as CoreWebSearchAction;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
@@ -1342,6 +1306,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: turn_id.to_string(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -1362,6 +1327,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: turn_id.to_string(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -1414,6 +1381,7 @@ mod tests {
|
||||
let items = vec![
|
||||
RolloutItem::EventMsg(EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-image".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
})),
|
||||
@@ -1433,6 +1401,8 @@ mod tests {
|
||||
RolloutItem::EventMsg(EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-image".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
})),
|
||||
];
|
||||
|
||||
@@ -1444,6 +1414,9 @@ mod tests {
|
||||
id: "turn-image".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
items: vec![
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
@@ -1533,6 +1506,8 @@ mod tests {
|
||||
EventMsg::TurnAborted(TurnAbortedEvent {
|
||||
turn_id: Some("turn-1".into()),
|
||||
reason: TurnAbortReason::Replaced,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Let's try again".into(),
|
||||
@@ -1730,6 +1705,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -1748,6 +1724,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -1784,6 +1762,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -1884,11 +1863,73 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reconstructs_mcp_tool_result_meta_from_persisted_completion_events() {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
EventMsg::McpToolCallEnd(McpToolCallEndEvent {
|
||||
call_id: "mcp-1".into(),
|
||||
invocation: McpInvocation {
|
||||
server: "docs".into(),
|
||||
tool: "lookup".into(),
|
||||
arguments: Some(serde_json::json!({"id":"123"})),
|
||||
},
|
||||
duration: Duration::from_millis(8),
|
||||
result: Ok(CallToolResult {
|
||||
content: vec![serde_json::json!({
|
||||
"type": "text",
|
||||
"text": "result"
|
||||
})],
|
||||
structured_content: Some(serde_json::json!({"id":"123"})),
|
||||
is_error: Some(false),
|
||||
meta: Some(serde_json::json!({
|
||||
"ui/resourceUri": "ui://widget/lookup.html"
|
||||
})),
|
||||
}),
|
||||
}),
|
||||
];
|
||||
|
||||
let items = events
|
||||
.into_iter()
|
||||
.map(RolloutItem::EventMsg)
|
||||
.collect::<Vec<_>>();
|
||||
let turns = build_turns_from_rollout_items(&items);
|
||||
assert_eq!(turns.len(), 1);
|
||||
assert_eq!(
|
||||
turns[0].items[0],
|
||||
ThreadItem::McpToolCall {
|
||||
id: "mcp-1".into(),
|
||||
server: "docs".into(),
|
||||
tool: "lookup".into(),
|
||||
status: McpToolCallStatus::Completed,
|
||||
arguments: serde_json::json!({"id":"123"}),
|
||||
result: Some(McpToolCallResult {
|
||||
content: vec![serde_json::json!({
|
||||
"type": "text",
|
||||
"text": "result"
|
||||
})],
|
||||
structured_content: Some(serde_json::json!({"id":"123"})),
|
||||
meta: Some(serde_json::json!({
|
||||
"ui/resourceUri": "ui://widget/lookup.html"
|
||||
})),
|
||||
}),
|
||||
error: None,
|
||||
duration_ms: Some(8),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reconstructs_dynamic_tool_items_from_request_and_response_events() {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -1948,6 +1989,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2030,11 +2072,144 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reconstructs_declined_guardian_command_item() {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "review this command".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::GuardianAssessment(GuardianAssessmentEvent {
|
||||
id: "guardian-exec".into(),
|
||||
turn_id: "turn-1".into(),
|
||||
status: GuardianAssessmentStatus::InProgress,
|
||||
risk_score: None,
|
||||
risk_level: None,
|
||||
rationale: None,
|
||||
action: serde_json::from_value(serde_json::json!({
|
||||
"type": "command",
|
||||
"source": "shell",
|
||||
"command": "rm -rf /tmp/guardian",
|
||||
"cwd": "/tmp",
|
||||
}))
|
||||
.expect("guardian action"),
|
||||
}),
|
||||
EventMsg::GuardianAssessment(GuardianAssessmentEvent {
|
||||
id: "guardian-exec".into(),
|
||||
turn_id: "turn-1".into(),
|
||||
status: GuardianAssessmentStatus::Denied,
|
||||
risk_score: Some(97),
|
||||
risk_level: Some(codex_protocol::protocol::GuardianRiskLevel::High),
|
||||
rationale: Some("Would delete user data.".into()),
|
||||
action: serde_json::from_value(serde_json::json!({
|
||||
"type": "command",
|
||||
"source": "shell",
|
||||
"command": "rm -rf /tmp/guardian",
|
||||
"cwd": "/tmp",
|
||||
}))
|
||||
.expect("guardian action"),
|
||||
}),
|
||||
];
|
||||
|
||||
let items = events
|
||||
.into_iter()
|
||||
.map(RolloutItem::EventMsg)
|
||||
.collect::<Vec<_>>();
|
||||
let turns = build_turns_from_rollout_items(&items);
|
||||
assert_eq!(turns.len(), 1);
|
||||
assert_eq!(turns[0].items.len(), 2);
|
||||
assert_eq!(
|
||||
turns[0].items[1],
|
||||
ThreadItem::CommandExecution {
|
||||
id: "guardian-exec".into(),
|
||||
command: "rm -rf /tmp/guardian".into(),
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
process_id: None,
|
||||
source: CommandExecutionSource::Agent,
|
||||
status: CommandExecutionStatus::Declined,
|
||||
command_actions: vec![CommandAction::Unknown {
|
||||
command: "rm -rf /tmp/guardian".into(),
|
||||
}],
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reconstructs_in_progress_guardian_execve_item() {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-1".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "run a subcommand".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::GuardianAssessment(GuardianAssessmentEvent {
|
||||
id: "guardian-execve".into(),
|
||||
turn_id: "turn-1".into(),
|
||||
status: GuardianAssessmentStatus::InProgress,
|
||||
risk_score: None,
|
||||
risk_level: None,
|
||||
rationale: None,
|
||||
action: serde_json::from_value(serde_json::json!({
|
||||
"type": "execve",
|
||||
"source": "shell",
|
||||
"program": "/bin/rm",
|
||||
"argv": ["/usr/bin/rm", "-f", "/tmp/file.sqlite"],
|
||||
"cwd": "/tmp",
|
||||
}))
|
||||
.expect("guardian action"),
|
||||
}),
|
||||
];
|
||||
|
||||
let items = events
|
||||
.into_iter()
|
||||
.map(RolloutItem::EventMsg)
|
||||
.collect::<Vec<_>>();
|
||||
let turns = build_turns_from_rollout_items(&items);
|
||||
assert_eq!(turns.len(), 1);
|
||||
assert_eq!(turns[0].items.len(), 2);
|
||||
assert_eq!(
|
||||
turns[0].items[1],
|
||||
ThreadItem::CommandExecution {
|
||||
id: "guardian-execve".into(),
|
||||
command: "/bin/rm -f /tmp/file.sqlite".into(),
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
process_id: None,
|
||||
source: CommandExecutionSource::Agent,
|
||||
status: CommandExecutionStatus::InProgress,
|
||||
command_actions: vec![CommandAction::Unknown {
|
||||
command: "/bin/rm -f /tmp/file.sqlite".into(),
|
||||
}],
|
||||
aggregated_output: None,
|
||||
exit_code: None,
|
||||
duration_ms: None,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assigns_late_exec_completion_to_original_turn() {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2047,9 +2222,12 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2081,6 +2259,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -2118,6 +2298,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2130,9 +2311,12 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2164,6 +2348,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -2196,6 +2382,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: turn_id.to_string(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2259,6 +2446,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: turn_id.to_string(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2321,6 +2509,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2333,9 +2522,12 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2348,6 +2540,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "still in b".into(),
|
||||
@@ -2357,6 +2551,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -2376,6 +2572,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2388,9 +2585,12 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-b".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2403,6 +2603,8 @@ mod tests {
|
||||
EventMsg::TurnAborted(TurnAbortedEvent {
|
||||
turn_id: Some("turn-a".into()),
|
||||
reason: TurnAbortReason::Replaced,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "still in b".into(),
|
||||
@@ -2428,6 +2630,7 @@ mod tests {
|
||||
let items = vec![
|
||||
RolloutItem::EventMsg(EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-compact".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
})),
|
||||
@@ -2438,6 +2641,8 @@ mod tests {
|
||||
RolloutItem::EventMsg(EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-compact".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
})),
|
||||
];
|
||||
|
||||
@@ -2448,6 +2653,9 @@ mod tests {
|
||||
id: "turn-compact".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
items: Vec::new(),
|
||||
}]
|
||||
);
|
||||
@@ -2665,6 +2873,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2677,6 +2886,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
EventMsg::Error(ErrorEvent {
|
||||
message: "request-level failure".into(),
|
||||
@@ -2696,6 +2907,9 @@ mod tests {
|
||||
id: "turn-a".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
items: vec![ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![UserInput::Text {
|
||||
@@ -2712,6 +2926,7 @@ mod tests {
|
||||
let events = vec![
|
||||
EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
}),
|
||||
@@ -2730,6 +2945,8 @@ mod tests {
|
||||
EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}),
|
||||
];
|
||||
|
||||
@@ -2765,6 +2982,7 @@ mod tests {
|
||||
let items = vec![
|
||||
RolloutItem::EventMsg(EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
})),
|
||||
@@ -2778,6 +2996,8 @@ mod tests {
|
||||
RolloutItem::EventMsg(EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
})),
|
||||
];
|
||||
|
||||
@@ -2808,6 +3028,7 @@ mod tests {
|
||||
let items = vec![
|
||||
RolloutItem::EventMsg(EventMsg::TurnStarted(TurnStartedEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
})),
|
||||
@@ -2823,6 +3044,8 @@ mod tests {
|
||||
RolloutItem::EventMsg(EventMsg::TurnComplete(TurnCompleteEvent {
|
||||
turn_id: "turn-a".into(),
|
||||
last_agent_message: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
})),
|
||||
];
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ use codex_protocol::config_types::WebSearchToolConfig;
|
||||
use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
|
||||
use codex_protocol::items::TurnItem as CoreTurnItem;
|
||||
use codex_protocol::mcp::Resource as McpResource;
|
||||
pub use codex_protocol::mcp::ResourceContent as McpResourceContent;
|
||||
use codex_protocol::mcp::ResourceTemplate as McpResourceTemplate;
|
||||
use codex_protocol::mcp::Tool as McpTool;
|
||||
use codex_protocol::memory_citation::MemoryCitation as CoreMemoryCitation;
|
||||
@@ -408,6 +409,14 @@ v2_enum_from_core!(
|
||||
}
|
||||
);
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase", export_to = "v2/")]
|
||||
pub enum ThreadStartSource {
|
||||
Startup,
|
||||
Clear,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -850,6 +859,8 @@ pub struct ConfigReadResponse {
|
||||
pub struct ConfigRequirements {
|
||||
#[experimental(nested)]
|
||||
pub allowed_approval_policies: Option<Vec<AskForApproval>>,
|
||||
#[experimental("configRequirements/read.allowedApprovalsReviewers")]
|
||||
pub allowed_approvals_reviewers: Option<Vec<ApprovalsReviewer>>,
|
||||
pub allowed_sandbox_modes: Option<Vec<SandboxMode>>,
|
||||
pub allowed_web_search_modes: Option<Vec<WebSearchMode>>,
|
||||
pub feature_requirements: Option<BTreeMap<String, bool>>,
|
||||
@@ -882,6 +893,7 @@ pub struct NetworkRequirements {
|
||||
/// Legacy compatibility view derived from `unix_sockets`.
|
||||
pub allow_unix_sockets: Option<Vec<String>>,
|
||||
pub allow_local_binding: Option<bool>,
|
||||
pub danger_full_access_denylist_only: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
@@ -1947,6 +1959,18 @@ pub struct ListMcpServerStatusParams {
|
||||
/// Optional page size; defaults to a server-defined value.
|
||||
#[ts(optional = nullable)]
|
||||
pub limit: Option<u32>,
|
||||
/// Controls how much MCP inventory data to fetch for each server.
|
||||
/// Defaults to `Full` when omitted.
|
||||
#[ts(optional = nullable)]
|
||||
pub detail: Option<McpServerStatusDetail>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase", export_to = "v2/")]
|
||||
pub enum McpServerStatusDetail {
|
||||
Full,
|
||||
ToolsAndAuthOnly,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -1970,6 +1994,22 @@ pub struct ListMcpServerStatusResponse {
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpResourceReadParams {
|
||||
pub thread_id: String,
|
||||
pub server: String,
|
||||
pub uri: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct McpResourceReadResponse {
|
||||
pub contents: Vec<McpResourceContent>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
@@ -2317,17 +2357,17 @@ pub struct FsCopyResponse {}
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FsWatchParams {
|
||||
/// Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.
|
||||
pub watch_id: String,
|
||||
/// Absolute file or directory path to watch.
|
||||
pub path: AbsolutePathBuf,
|
||||
}
|
||||
|
||||
/// Created watch handle returned by `fs/watch`.
|
||||
/// Successful response for `fs/watch`.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FsWatchResponse {
|
||||
/// Connection-scoped watch identifier used for `fs/unwatch` and `fs/changed`.
|
||||
pub watch_id: String,
|
||||
/// Canonicalized path associated with the watch.
|
||||
pub path: AbsolutePathBuf,
|
||||
}
|
||||
@@ -2337,7 +2377,7 @@ pub struct FsWatchResponse {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FsUnwatchParams {
|
||||
/// Watch identifier returned by `fs/watch`.
|
||||
/// Watch identifier previously provided to `fs/watch`.
|
||||
pub watch_id: String,
|
||||
}
|
||||
|
||||
@@ -2352,7 +2392,7 @@ pub struct FsUnwatchResponse {}
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct FsChangedNotification {
|
||||
/// Watch identifier returned by `fs/watch`.
|
||||
/// Watch identifier previously provided to `fs/watch`.
|
||||
pub watch_id: String,
|
||||
/// File or directory paths associated with this event.
|
||||
pub changed_paths: Vec<AbsolutePathBuf>,
|
||||
@@ -2579,6 +2619,8 @@ pub struct ThreadStartParams {
|
||||
pub personality: Option<Personality>,
|
||||
#[ts(optional = nullable)]
|
||||
pub ephemeral: Option<bool>,
|
||||
#[ts(optional = nullable)]
|
||||
pub session_start_source: Option<ThreadStartSource>,
|
||||
#[experimental("thread/start.dynamicTools")]
|
||||
#[ts(optional = nullable)]
|
||||
pub dynamic_tools: Option<Vec<DynamicToolSpec>>,
|
||||
@@ -3691,6 +3733,15 @@ pub struct Turn {
|
||||
pub status: TurnStatus,
|
||||
/// Only populated when the Turn's status is failed.
|
||||
pub error: Option<TurnError>,
|
||||
/// Unix timestamp (in seconds) when the turn started.
|
||||
#[ts(type = "number | null")]
|
||||
pub started_at: Option<i64>,
|
||||
/// Unix timestamp (in seconds) when the turn completed.
|
||||
#[ts(type = "number | null")]
|
||||
pub completed_at: Option<i64>,
|
||||
/// Duration between turn start and completion in milliseconds, if known.
|
||||
#[ts(type = "number | null")]
|
||||
pub duration_ms: Option<i64>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
@@ -3813,6 +3864,21 @@ pub struct ThreadRealtimeStartParams {
|
||||
pub prompt: String,
|
||||
#[ts(optional = nullable)]
|
||||
pub session_id: Option<String>,
|
||||
#[ts(optional = nullable)]
|
||||
pub transport: Option<ThreadRealtimeStartTransport>,
|
||||
}
|
||||
|
||||
/// EXPERIMENTAL - transport used by thread realtime.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/", tag = "type")]
|
||||
pub enum ThreadRealtimeStartTransport {
|
||||
Websocket,
|
||||
Webrtc {
|
||||
/// SDP offer generated by a WebRTC RTCPeerConnection after configuring audio and the
|
||||
/// realtime events data channel.
|
||||
sdp: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// EXPERIMENTAL - response for starting thread realtime.
|
||||
@@ -3904,6 +3970,15 @@ pub struct ThreadRealtimeOutputAudioDeltaNotification {
|
||||
pub audio: ThreadRealtimeAudioChunk,
|
||||
}
|
||||
|
||||
/// EXPERIMENTAL - emitted with the remote SDP for a WebRTC realtime session.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct ThreadRealtimeSdpNotification {
|
||||
pub thread_id: String,
|
||||
pub sdp: String,
|
||||
}
|
||||
|
||||
/// EXPERIMENTAL - emitted when thread realtime encounters an error.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
@@ -4971,6 +5046,9 @@ pub struct McpToolCallResult {
|
||||
// representations). Using `JsonValue` keeps the payload wire-shaped and easy to export.
|
||||
pub content: Vec<JsonValue>,
|
||||
pub structured_content: Option<JsonValue>,
|
||||
#[serde(rename = "_meta")]
|
||||
#[ts(rename = "_meta")]
|
||||
pub meta: Option<JsonValue>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -7315,6 +7393,7 @@ mod tests {
|
||||
request_permissions: false,
|
||||
mcp_elicitations: false,
|
||||
}]),
|
||||
allowed_approvals_reviewers: None,
|
||||
allowed_sandbox_modes: None,
|
||||
allowed_web_search_modes: None,
|
||||
feature_requirements: None,
|
||||
@@ -7779,6 +7858,7 @@ mod tests {
|
||||
dangerously_allow_all_unix_sockets: None,
|
||||
domains: None,
|
||||
managed_allowed_domains_only: None,
|
||||
danger_full_access_denylist_only: None,
|
||||
allowed_domains: Some(vec!["api.openai.com".to_string()]),
|
||||
denied_domains: Some(vec!["blocked.example.com".to_string()]),
|
||||
unix_sockets: None,
|
||||
@@ -7805,6 +7885,7 @@ mod tests {
|
||||
),
|
||||
])),
|
||||
managed_allowed_domains_only: Some(true),
|
||||
danger_full_access_denylist_only: Some(true),
|
||||
allowed_domains: Some(vec!["api.openai.com".to_string()]),
|
||||
denied_domains: Some(vec!["blocked.example.com".to_string()]),
|
||||
unix_sockets: Some(BTreeMap::from([
|
||||
@@ -7835,6 +7916,7 @@ mod tests {
|
||||
"blocked.example.com": "deny"
|
||||
},
|
||||
"managedAllowedDomainsOnly": true,
|
||||
"dangerFullAccessDenylistOnly": true,
|
||||
"allowedDomains": ["api.openai.com"],
|
||||
"deniedDomains": ["blocked.example.com"],
|
||||
"unixSockets": {
|
||||
|
||||
@@ -3,5 +3,6 @@ load("//:defs.bzl", "codex_rust_crate")
|
||||
codex_rust_crate(
|
||||
name = "app-server",
|
||||
crate_name = "codex_app_server",
|
||||
integration_test_timeout = "long",
|
||||
test_tags = ["no-sandbox"],
|
||||
)
|
||||
|
||||
@@ -46,6 +46,7 @@ codex-file-search = { workspace = true }
|
||||
codex-chatgpt = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-mcp = { workspace = true }
|
||||
codex-models-manager = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-feedback = { workspace = true }
|
||||
@@ -56,10 +57,12 @@ codex-state = { workspace = true }
|
||||
codex-tools = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
codex-utils-json-to-toml = { workspace = true }
|
||||
codex-utils-rustls-provider = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
constant_time_eq = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
gethostname = { workspace = true }
|
||||
hmac = { workspace = true }
|
||||
jsonwebtoken = { workspace = true }
|
||||
owo-colors = { workspace = true, features = ["supports-colors"] }
|
||||
@@ -80,6 +83,7 @@ tokio-util = { workspace = true }
|
||||
tokio-tungstenite = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt", "json"] }
|
||||
url = { workspace = true }
|
||||
uuid = { workspace = true, features = ["serde", "v7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -25,6 +25,7 @@ Supported transports:
|
||||
|
||||
- stdio (`--listen stdio://`, default): newline-delimited JSON (JSONL)
|
||||
- websocket (`--listen ws://IP:PORT`): one JSON-RPC message per websocket text frame (**experimental / unsupported**)
|
||||
- off (`--listen off`): do not expose a local transport
|
||||
|
||||
When running with `--listen ws://IP:PORT`, the same listener also serves basic HTTP health probes:
|
||||
|
||||
@@ -132,7 +133,7 @@ Example with notification opt-out:
|
||||
|
||||
## API Overview
|
||||
|
||||
- `thread/start` — create a new thread; emits `thread/started` (including the current `thread.status`) and auto-subscribes you to turn/item events for that thread. When the request includes a `cwd` and the resolved sandbox is `workspace-write` or full access, app-server also marks that project as trusted in the user `config.toml`.
|
||||
- `thread/start` — create a new thread; emits `thread/started` (including the current `thread.status`) and auto-subscribes you to turn/item events for that thread. When the request includes a `cwd` and the resolved sandbox is `workspace-write` or full access, app-server also marks that project as trusted in the user `config.toml`. Pass `sessionStartSource: "clear"` when starting a replacement thread after clearing the current session so `SessionStart` hooks receive `source: "clear"` instead of the default `"startup"`.
|
||||
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
|
||||
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; if the source thread is currently mid-turn, the fork records the same interruption marker as `turn/interrupt` instead of inheriting an unmarked partial turn suffix. The returned `thread.forkedFromId` points at the source thread when known. Accepts `ephemeral: true` for an in-memory temporary fork, emits `thread/started` (including the current `thread.status`), and auto-subscribes you to turn/item events for the new thread.
|
||||
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders`, `sourceKinds`, `archived`, `cwd`, and `searchTerm` filters. Each returned `thread` includes `status` (`ThreadStatus`), defaulting to `notLoaded` when the thread is not currently loaded.
|
||||
@@ -151,7 +152,7 @@ Example with notification opt-out:
|
||||
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications. For `collaborationMode`, `settings.developer_instructions: null` means "use built-in instructions for the selected mode".
|
||||
- `turn/steer` — add user input to an already in-flight regular turn without starting a new turn; returns the active `turnId` that accepted the input. Review and manual compaction turns reject `turn/steer`.
|
||||
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
|
||||
- `thread/realtime/start` — start a thread-scoped realtime session (experimental); returns `{}` and streams `thread/realtime/*` notifications.
|
||||
- `thread/realtime/start` — start a thread-scoped realtime session (experimental); returns `{}` and streams `thread/realtime/*` notifications. Omit `transport` for the websocket transport, or pass `{ "type": "webrtc", "sdp": "..." }` to create a WebRTC session from a browser-generated SDP offer; the remote answer SDP is emitted as `thread/realtime/sdp`.
|
||||
- `thread/realtime/appendAudio` — append an input audio chunk to the active realtime session (experimental); returns `{}`.
|
||||
- `thread/realtime/appendText` — append text input to the active realtime session (experimental); returns `{}`.
|
||||
- `thread/realtime/stop` — stop the active realtime session for the thread (experimental); returns `{}`.
|
||||
@@ -168,7 +169,7 @@ Example with notification opt-out:
|
||||
- `fs/readDirectory` — list direct child entries for an absolute directory path; each entry contains `fileName`, `isDirectory`, and `isFile`, and `fileName` is just the child name, not a path.
|
||||
- `fs/remove` — remove an absolute file or directory tree; `recursive` and `force` default to `true`.
|
||||
- `fs/copy` — copy between absolute paths; directory copies require `recursive: true`.
|
||||
- `fs/watch` — subscribe this connection to filesystem change notifications for an absolute file or directory path; returns a `watchId` and canonicalized `path`.
|
||||
- `fs/watch` — subscribe this connection to filesystem change notifications for an absolute file or directory path and caller-provided `watchId`; returns the canonicalized `path`.
|
||||
- `fs/unwatch` — stop sending notifications for a prior `fs/watch`; returns `{}`.
|
||||
- `fs/changed` — notification emitted when watched paths change, including the `watchId` and `changedPaths`.
|
||||
- `model/list` — list available models (set `includeHidden: true` to include entries with `hidden: true`), with reasoning effort options, optional legacy `upgrade` model ids, optional `upgradeInfo` metadata (`model`, `upgradeCopy`, `modelLink`, `migrationMarkdown`), and optional `availabilityNux` metadata.
|
||||
@@ -186,7 +187,8 @@ Example with notification opt-out:
|
||||
- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
|
||||
- `tool/requestUserInput` — prompt the user with 1–3 short questions for a tool call and return their answers (experimental).
|
||||
- `config/mcpServer/reload` — reload MCP server config from disk and queue a refresh for loaded threads (applied on each thread's next active turn); returns `{}`. Use this after editing `config.toml` without restarting the server.
|
||||
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
|
||||
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools and auth status, plus resources/resource templates for `full` detail; supports cursor+limit pagination. If `detail` is omitted, the server defaults to `full`.
|
||||
- `mcpServer/resource/read` — read a resource from a thread's configured MCP server by `threadId`, `server`, and `uri`, returning text/blob resource `contents`.
|
||||
- `windowsSandbox/setupStart` — start Windows sandbox setup for the selected mode (`elevated` or `unelevated`); accepts an optional absolute `cwd` to target setup for a specific workspace, returns `{ started: true }` immediately, and later emits `windowsSandbox/setupCompleted`.
|
||||
- `feedback/upload` — submit a feedback report (classification + optional reason/logs, conversation_id, and optional `extraLogFiles` attachments array); returns the tracking thread id.
|
||||
- `config/read` — fetch the effective config on disk after resolving config layering.
|
||||
@@ -194,7 +196,7 @@ Example with notification opt-out:
|
||||
- `externalAgentConfig/import` — apply selected external-agent migration items by passing explicit `migrationItems` with `cwd` (`null` for home).
|
||||
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
|
||||
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk, with optional `reloadUserConfig: true` to hot-reload loaded threads.
|
||||
- `configRequirements/read` — fetch loaded requirements constraints from `requirements.toml` and/or MDM (or `null` if none are configured), including allow-lists (`allowedApprovalPolicies`, `allowedSandboxModes`, `allowedWebSearchModes`), pinned feature values (`featureRequirements`), `enforceResidency`, and `network` constraints such as canonical domain/socket permissions plus `managedAllowedDomainsOnly`.
|
||||
- `configRequirements/read` — fetch loaded requirements constraints from `requirements.toml` and/or MDM (or `null` if none are configured), including allow-lists (`allowedApprovalPolicies`, `allowedSandboxModes`, `allowedWebSearchModes`), pinned feature values (`featureRequirements`), `enforceResidency`, and `network` constraints such as canonical domain/socket permissions plus `managedAllowedDomainsOnly` and `dangerFullAccessDenylistOnly`.
|
||||
|
||||
### Example: Start or resume a thread
|
||||
|
||||
@@ -210,6 +212,7 @@ Start a fresh thread when you need a new Codex conversation.
|
||||
"sandbox": "workspaceWrite",
|
||||
"personality": "friendly",
|
||||
"serviceName": "my_app_server_client", // optional metrics tag (`service_name`)
|
||||
"sessionStartSource": "startup", // optional: "startup" (default) or "clear"
|
||||
// Experimental: requires opt-in
|
||||
"dynamicTools": [
|
||||
{
|
||||
@@ -273,7 +276,7 @@ Experimental API: `thread/start`, `thread/resume`, and `thread/fork` accept `per
|
||||
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
|
||||
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
|
||||
- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).
|
||||
- `cwd` — restrict results to threads whose session cwd exactly matches this path.
|
||||
- `cwd` — restrict results to threads whose session cwd exactly matches this path. Relative paths are resolved against the app-server process cwd before matching.
|
||||
- `searchTerm` — restrict results to threads whose extracted title contains this substring (case-sensitive).
|
||||
- Responses include `agentNickname` and `agentRole` for AgentControl-spawned thread sub-agents when available.
|
||||
|
||||
@@ -561,6 +564,51 @@ Invoke a plugin by including a UI mention token such as `@sample` in the text in
|
||||
} } }
|
||||
```
|
||||
|
||||
### Example: Start realtime with WebRTC
|
||||
|
||||
Use `thread/realtime/start` with `transport.type: "webrtc"` when a browser or webview owns the `RTCPeerConnection` and app-server should create the server-side realtime session. The transport `sdp` must be the offer SDP produced by `RTCPeerConnection.createOffer()`, not a hand-written or minimal SDP string.
|
||||
|
||||
The offer should include the media sections the client wants to negotiate. For the standard realtime UI flow, create the audio track/transceiver and the `oai-events` data channel before calling `createOffer()`:
|
||||
|
||||
```javascript
|
||||
const pc = new RTCPeerConnection();
|
||||
|
||||
audioElement.autoplay = true;
|
||||
pc.ontrack = (event) => {
|
||||
audioElement.srcObject = event.streams[0];
|
||||
};
|
||||
|
||||
const mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
||||
pc.addTrack(mediaStream.getAudioTracks()[0], mediaStream);
|
||||
pc.createDataChannel("oai-events");
|
||||
|
||||
const offer = await pc.createOffer();
|
||||
await pc.setLocalDescription(offer);
|
||||
```
|
||||
|
||||
Then send `offer.sdp` to app-server. Core uses `experimental_realtime_ws_backend_prompt` for the backend instructions and the thread conversation id for the realtime session id. The start response is `{}`; the remote answer SDP arrives later as `thread/realtime/sdp` and should be passed to `setRemoteDescription()`:
|
||||
|
||||
```json
|
||||
{ "method": "thread/realtime/start", "id": 40, "params": {
|
||||
"threadId": "thr_123",
|
||||
"prompt": "You are on a call.",
|
||||
"sessionId": null,
|
||||
"transport": { "type": "webrtc", "sdp": "v=0\r\no=..." }
|
||||
} }
|
||||
{ "id": 40, "result": {} }
|
||||
{ "method": "thread/realtime/sdp", "params": {
|
||||
"threadId": "thr_123",
|
||||
"sdp": "v=0\r\no=..."
|
||||
} }
|
||||
```
|
||||
|
||||
```javascript
|
||||
await pc.setRemoteDescription({
|
||||
type: "answer",
|
||||
sdp: notification.params.sdp,
|
||||
});
|
||||
```
|
||||
|
||||
### Example: Interrupt an active turn
|
||||
|
||||
You can cancel a running Turn with `turn/interrupt`.
|
||||
@@ -814,10 +862,10 @@ All filesystem paths in this section must be absolute.
|
||||
|
||||
```json
|
||||
{ "method": "fs/watch", "id": 44, "params": {
|
||||
"watchId": "0195ec6b-1d6f-7c2e-8c7a-56f2c4a8b9d1",
|
||||
"path": "/Users/me/project/.git/HEAD"
|
||||
} }
|
||||
{ "id": 44, "result": {
|
||||
"watchId": "0195ec6b-1d6f-7c2e-8c7a-56f2c4a8b9d1",
|
||||
"path": "/Users/me/project/.git/HEAD"
|
||||
} }
|
||||
{ "method": "fs/changed", "params": {
|
||||
|
||||
@@ -86,6 +86,7 @@ fn transport_name(transport: AppServerTransport) -> &'static str {
|
||||
match transport {
|
||||
AppServerTransport::Stdio => "stdio",
|
||||
AppServerTransport::WebSocket { .. } => "websocket",
|
||||
AppServerTransport::Off => "off",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -29,6 +29,7 @@ use codex_app_server_protocol::AppsListParams;
|
||||
use codex_app_server_protocol::AppsListResponse;
|
||||
use codex_app_server_protocol::AskForApproval;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::AuthMode as CoreAuthMode;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountResponse;
|
||||
use codex_app_server_protocol::CancelLoginAccountStatus;
|
||||
@@ -75,11 +76,14 @@ use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::LogoutAccountResponse;
|
||||
use codex_app_server_protocol::MarketplaceInterface;
|
||||
use codex_app_server_protocol::McpResourceReadParams;
|
||||
use codex_app_server_protocol::McpResourceReadResponse;
|
||||
use codex_app_server_protocol::McpServerOauthLoginCompletedNotification;
|
||||
use codex_app_server_protocol::McpServerOauthLoginParams;
|
||||
use codex_app_server_protocol::McpServerOauthLoginResponse;
|
||||
use codex_app_server_protocol::McpServerRefreshResponse;
|
||||
use codex_app_server_protocol::McpServerStatus;
|
||||
use codex_app_server_protocol::McpServerStatusDetail;
|
||||
use codex_app_server_protocol::MockExperimentalMethodParams;
|
||||
use codex_app_server_protocol::MockExperimentalMethodResponse;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
@@ -142,6 +146,7 @@ use codex_app_server_protocol::ThreadRealtimeAppendTextParams;
|
||||
use codex_app_server_protocol::ThreadRealtimeAppendTextResponse;
|
||||
use codex_app_server_protocol::ThreadRealtimeStartParams;
|
||||
use codex_app_server_protocol::ThreadRealtimeStartResponse;
|
||||
use codex_app_server_protocol::ThreadRealtimeStartTransport;
|
||||
use codex_app_server_protocol::ThreadRealtimeStopParams;
|
||||
use codex_app_server_protocol::ThreadRealtimeStopResponse;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
@@ -202,8 +207,6 @@ use codex_core::config_loader::CloudRequirementsLoadErrorCode;
|
||||
use codex_core::config_loader::CloudRequirementsLoader;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::config_loader::load_config_layers_state;
|
||||
use codex_core::error::CodexErr;
|
||||
use codex_core::error::Result as CodexResult;
|
||||
use codex_core::exec::ExecCapturePolicy;
|
||||
use codex_core::exec::ExecExpiration;
|
||||
use codex_core::exec::ExecParams;
|
||||
@@ -212,7 +215,6 @@ use codex_core::find_archived_thread_path_by_id_str;
|
||||
use codex_core::find_thread_name_by_id;
|
||||
use codex_core::find_thread_names_by_ids;
|
||||
use codex_core::find_thread_path_by_id_str;
|
||||
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
|
||||
use codex_core::parse_cursor;
|
||||
use codex_core::plugins::MarketplaceError;
|
||||
use codex_core::plugins::MarketplacePluginSource;
|
||||
@@ -237,7 +239,6 @@ use codex_feedback::CodexFeedback;
|
||||
use codex_git_utils::git_diff_to_remote;
|
||||
use codex_git_utils::resolve_root_git_project_for_trust;
|
||||
use codex_login::AuthManager;
|
||||
use codex_login::AuthMode as CoreAuthMode;
|
||||
use codex_login::CLIENT_ID;
|
||||
use codex_login::CodexAuth;
|
||||
use codex_login::ServerOptions as LoginServerOptions;
|
||||
@@ -248,10 +249,13 @@ use codex_login::default_client::set_default_client_residency_requirement;
|
||||
use codex_login::login_with_api_key;
|
||||
use codex_login::request_device_code;
|
||||
use codex_login::run_login_server;
|
||||
use codex_mcp::mcp::auth::discover_supported_scopes;
|
||||
use codex_mcp::mcp::auth::resolve_oauth_scopes;
|
||||
use codex_mcp::mcp::collect_mcp_snapshot;
|
||||
use codex_mcp::mcp::group_tools_by_server;
|
||||
use codex_mcp::McpSnapshotDetail;
|
||||
use codex_mcp::collect_mcp_snapshot_with_detail;
|
||||
use codex_mcp::discover_supported_scopes;
|
||||
use codex_mcp::effective_mcp_servers;
|
||||
use codex_mcp::qualified_mcp_tool_name_prefix;
|
||||
use codex_mcp::resolve_oauth_scopes;
|
||||
use codex_models_manager::collaboration_mode_presets::CollaborationModesConfig;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::config_types::CollaborationMode;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
@@ -259,11 +263,14 @@ use codex_protocol::config_types::Personality;
|
||||
use codex_protocol::config_types::TrustLevel;
|
||||
use codex_protocol::config_types::WindowsSandboxLevel;
|
||||
use codex_protocol::dynamic_tools::DynamicToolSpec as CoreDynamicToolSpec;
|
||||
use codex_protocol::error::CodexErr;
|
||||
use codex_protocol::error::Result as CodexResult;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AgentStatus;
|
||||
use codex_protocol::protocol::ConversationAudioParams;
|
||||
use codex_protocol::protocol::ConversationStartParams;
|
||||
use codex_protocol::protocol::ConversationStartTransport;
|
||||
use codex_protocol::protocol::ConversationTextParams;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::GitInfo as CoreGitInfo;
|
||||
@@ -473,22 +480,13 @@ pub(crate) struct CodexMessageProcessorArgs {
|
||||
}
|
||||
|
||||
impl CodexMessageProcessor {
|
||||
pub(crate) fn clear_plugin_related_caches(&self) {
|
||||
self.thread_manager.plugins_manager().clear_cache();
|
||||
self.thread_manager.skills_manager().clear_cache();
|
||||
pub(crate) fn handle_config_mutation(&self) {
|
||||
self.clear_plugin_related_caches();
|
||||
}
|
||||
|
||||
pub(crate) async fn maybe_start_plugin_startup_tasks_for_latest_config(&self) {
|
||||
match self.load_latest_config(/*fallback_cwd*/ None).await {
|
||||
Ok(config) => self
|
||||
.thread_manager
|
||||
.plugins_manager()
|
||||
.maybe_start_plugin_startup_tasks_for_config(
|
||||
&config,
|
||||
self.thread_manager.auth_manager(),
|
||||
),
|
||||
Err(err) => warn!("failed to load latest config for plugin startup tasks: {err:?}"),
|
||||
}
|
||||
fn clear_plugin_related_caches(&self) {
|
||||
self.thread_manager.plugins_manager().clear_cache();
|
||||
self.thread_manager.skills_manager().clear_cache();
|
||||
}
|
||||
|
||||
fn current_account_updated_notification(&self) -> AccountUpdatedNotification {
|
||||
@@ -690,6 +688,7 @@ impl CodexMessageProcessor {
|
||||
connection_id: ConnectionId,
|
||||
request: ClientRequest,
|
||||
app_server_client_name: Option<String>,
|
||||
app_server_client_version: Option<String>,
|
||||
request_context: RequestContext,
|
||||
) {
|
||||
let to_connection_request_id = |request_id| ConnectionRequestId {
|
||||
@@ -706,6 +705,8 @@ impl CodexMessageProcessor {
|
||||
self.thread_start(
|
||||
to_connection_request_id(request_id),
|
||||
params,
|
||||
app_server_client_name.clone(),
|
||||
app_server_client_version.clone(),
|
||||
request_context,
|
||||
)
|
||||
.await;
|
||||
@@ -810,6 +811,7 @@ impl CodexMessageProcessor {
|
||||
to_connection_request_id(request_id),
|
||||
params,
|
||||
app_server_client_name.clone(),
|
||||
app_server_client_version.clone(),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -884,6 +886,10 @@ impl CodexMessageProcessor {
|
||||
self.list_mcp_server_status(to_connection_request_id(request_id), params)
|
||||
.await;
|
||||
}
|
||||
ClientRequest::McpResourceRead { request_id, params } => {
|
||||
self.read_mcp_resource(to_connection_request_id(request_id), params)
|
||||
.await;
|
||||
}
|
||||
ClientRequest::WindowsSandboxSetupStart { request_id, params } => {
|
||||
self.windows_sandbox_setup_start(to_connection_request_id(request_id), params)
|
||||
.await;
|
||||
@@ -2056,6 +2062,8 @@ impl CodexMessageProcessor {
|
||||
&self,
|
||||
request_id: ConnectionRequestId,
|
||||
params: ThreadStartParams,
|
||||
app_server_client_name: Option<String>,
|
||||
app_server_client_version: Option<String>,
|
||||
request_context: RequestContext,
|
||||
) {
|
||||
let ThreadStartParams {
|
||||
@@ -2075,6 +2083,7 @@ impl CodexMessageProcessor {
|
||||
experimental_raw_events,
|
||||
personality,
|
||||
ephemeral,
|
||||
session_start_source,
|
||||
persist_extended_history,
|
||||
} = params;
|
||||
let mut typesafe_overrides = self.build_thread_config_overrides(
|
||||
@@ -2111,9 +2120,12 @@ impl CodexMessageProcessor {
|
||||
runtime_feature_enablement,
|
||||
cloud_requirements,
|
||||
request_id,
|
||||
app_server_client_name,
|
||||
app_server_client_version,
|
||||
config,
|
||||
typesafe_overrides,
|
||||
dynamic_tools,
|
||||
session_start_source,
|
||||
persist_extended_history,
|
||||
service_name,
|
||||
experimental_raw_events,
|
||||
@@ -2184,9 +2196,12 @@ impl CodexMessageProcessor {
|
||||
runtime_feature_enablement: BTreeMap<String, bool>,
|
||||
cloud_requirements: CloudRequirementsLoader,
|
||||
request_id: ConnectionRequestId,
|
||||
app_server_client_name: Option<String>,
|
||||
app_server_client_version: Option<String>,
|
||||
config_overrides: Option<HashMap<String, serde_json::Value>>,
|
||||
typesafe_overrides: ConfigOverrides,
|
||||
dynamic_tools: Option<Vec<ApiDynamicToolSpec>>,
|
||||
session_start_source: Option<codex_app_server_protocol::ThreadStartSource>,
|
||||
persist_extended_history: bool,
|
||||
service_name: Option<String>,
|
||||
experimental_raw_events: bool,
|
||||
@@ -2214,14 +2229,28 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
};
|
||||
|
||||
// The user may have requested WorkspaceWrite or DangerFullAccess via
|
||||
// the command line, though in the process of deriving the Config, it
|
||||
// could be downgraded to ReadOnly (perhaps there is no sandbox
|
||||
// available on Windows or the enterprise config disallows it). The cwd
|
||||
// should still be considered "trusted" in this case.
|
||||
let requested_sandbox_trusts_project = matches!(
|
||||
typesafe_overrides.sandbox_mode,
|
||||
Some(
|
||||
codex_protocol::config_types::SandboxMode::WorkspaceWrite
|
||||
| codex_protocol::config_types::SandboxMode::DangerFullAccess
|
||||
)
|
||||
);
|
||||
|
||||
if requested_cwd.is_some()
|
||||
&& !config.active_project.is_trusted()
|
||||
&& matches!(
|
||||
config.permissions.sandbox_policy.get(),
|
||||
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite { .. }
|
||||
| codex_protocol::protocol::SandboxPolicy::DangerFullAccess
|
||||
| codex_protocol::protocol::SandboxPolicy::ExternalSandbox { .. }
|
||||
)
|
||||
&& (requested_sandbox_trusts_project
|
||||
|| matches!(
|
||||
config.permissions.sandbox_policy.get(),
|
||||
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite { .. }
|
||||
| codex_protocol::protocol::SandboxPolicy::DangerFullAccess
|
||||
| codex_protocol::protocol::SandboxPolicy::ExternalSandbox { .. }
|
||||
))
|
||||
{
|
||||
let trust_target = resolve_root_git_project_for_trust(config.cwd.as_path())
|
||||
.unwrap_or_else(|| config.cwd.to_path_buf());
|
||||
@@ -2296,6 +2325,12 @@ impl CodexMessageProcessor {
|
||||
.thread_manager
|
||||
.start_thread_with_tools_and_service_name(
|
||||
config,
|
||||
match session_start_source
|
||||
.unwrap_or(codex_app_server_protocol::ThreadStartSource::Startup)
|
||||
{
|
||||
codex_app_server_protocol::ThreadStartSource::Startup => InitialHistory::New,
|
||||
codex_app_server_protocol::ThreadStartSource::Clear => InitialHistory::Cleared,
|
||||
},
|
||||
core_dynamic_tools,
|
||||
persist_extended_history,
|
||||
service_name,
|
||||
@@ -2316,6 +2351,19 @@ impl CodexMessageProcessor {
|
||||
session_configured,
|
||||
..
|
||||
} = new_conv;
|
||||
if let Err(error) = Self::set_app_server_client_info(
|
||||
thread.as_ref(),
|
||||
app_server_client_name,
|
||||
app_server_client_version,
|
||||
)
|
||||
.await
|
||||
{
|
||||
listener_task_context
|
||||
.outgoing
|
||||
.send_error(request_id, error)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
let config_snapshot = thread
|
||||
.config_snapshot()
|
||||
.instrument(tracing::info_span!(
|
||||
@@ -3349,6 +3397,13 @@ impl CodexMessageProcessor {
|
||||
cwd,
|
||||
search_term,
|
||||
} = params;
|
||||
let cwd = match normalize_thread_list_cwd_filter(cwd) {
|
||||
Ok(cwd) => cwd,
|
||||
Err(error) => {
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let requested_page_size = limit
|
||||
.map(|value| value as usize)
|
||||
@@ -3367,7 +3422,7 @@ impl CodexMessageProcessor {
|
||||
model_providers,
|
||||
source_kinds,
|
||||
archived: archived.unwrap_or(false),
|
||||
cwd: cwd.map(PathBuf::from),
|
||||
cwd,
|
||||
search_term,
|
||||
},
|
||||
)
|
||||
@@ -4193,7 +4248,7 @@ impl CodexMessageProcessor {
|
||||
thread.preview = preview_from_rollout_items(items);
|
||||
Ok(thread)
|
||||
}
|
||||
InitialHistory::New => Err(format!(
|
||||
InitialHistory::New | InitialHistory::Cleared => Err(format!(
|
||||
"failed to build resume response for thread {thread_id}: initial history missing"
|
||||
)),
|
||||
};
|
||||
@@ -5124,22 +5179,72 @@ impl CodexMessageProcessor {
|
||||
request_id: ConnectionRequestId,
|
||||
params: ListMcpServerStatusParams,
|
||||
config: Config,
|
||||
mcp_config: codex_mcp::mcp::McpConfig,
|
||||
mcp_config: codex_mcp::McpConfig,
|
||||
auth: Option<CodexAuth>,
|
||||
) {
|
||||
let snapshot = collect_mcp_snapshot(
|
||||
let detail = match params.detail.unwrap_or(McpServerStatusDetail::Full) {
|
||||
McpServerStatusDetail::Full => McpSnapshotDetail::Full,
|
||||
McpServerStatusDetail::ToolsAndAuthOnly => McpSnapshotDetail::ToolsAndAuthOnly,
|
||||
};
|
||||
|
||||
let snapshot = collect_mcp_snapshot_with_detail(
|
||||
&mcp_config,
|
||||
auth.as_ref(),
|
||||
request_id.request_id.to_string(),
|
||||
detail,
|
||||
)
|
||||
.await;
|
||||
|
||||
let tools_by_server = group_tools_by_server(&snapshot.tools);
|
||||
// Rebuild the tool list per original server name instead of using
|
||||
// `group_tools_by_server()`: qualified tool names are sanitized for the
|
||||
// Responses API, so a config key like `some-server` is encoded as the
|
||||
// `mcp__some_server__` prefix. Matching with the original server name's
|
||||
// sanitized prefix preserves `/mcp` output for hyphenated names.
|
||||
let effective_servers = effective_mcp_servers(&mcp_config, auth.as_ref());
|
||||
let mut sanitized_prefix_counts = HashMap::<String, usize>::new();
|
||||
for name in effective_servers.keys() {
|
||||
let prefix = qualified_mcp_tool_name_prefix(name);
|
||||
*sanitized_prefix_counts.entry(prefix).or_default() += 1;
|
||||
}
|
||||
let tools_by_server = effective_servers
|
||||
.keys()
|
||||
.map(|name| {
|
||||
let prefix = qualified_mcp_tool_name_prefix(name);
|
||||
// If multiple server names normalize to the same prefix, the
|
||||
// qualified tool namespace is ambiguous (for example
|
||||
// `some-server` and `some_server` both become
|
||||
// `mcp__some_server__`). In that case, avoid attributing the
|
||||
// same tools to multiple servers.
|
||||
let tools = if sanitized_prefix_counts
|
||||
.get(&prefix)
|
||||
.copied()
|
||||
.unwrap_or_default()
|
||||
== 1
|
||||
{
|
||||
snapshot
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(qualified_name, tool)| {
|
||||
qualified_name
|
||||
.strip_prefix(&prefix)
|
||||
.map(|tool_name| (tool_name.to_string(), tool.clone()))
|
||||
})
|
||||
.collect::<HashMap<_, _>>()
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
(name.clone(), tools)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut server_names: Vec<String> = config
|
||||
.mcp_servers
|
||||
.keys()
|
||||
.cloned()
|
||||
// Include built-in/plugin MCP servers that are present in the
|
||||
// effective runtime config even when they are not user-declared in
|
||||
// `config.mcp_servers`.
|
||||
.chain(effective_servers.keys().cloned())
|
||||
.chain(snapshot.auth_statuses.keys().cloned())
|
||||
.chain(snapshot.resources.keys().cloned())
|
||||
.chain(snapshot.resource_templates.keys().cloned())
|
||||
@@ -5209,6 +5314,58 @@ impl CodexMessageProcessor {
|
||||
outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
|
||||
async fn read_mcp_resource(
|
||||
&self,
|
||||
request_id: ConnectionRequestId,
|
||||
params: McpResourceReadParams,
|
||||
) {
|
||||
let outgoing = Arc::clone(&self.outgoing);
|
||||
let (_, thread) = match self.load_thread(¶ms.thread_id).await {
|
||||
Ok(thread) => thread,
|
||||
Err(error) => {
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
tokio::spawn(async move {
|
||||
let result = thread.read_mcp_resource(¶ms.server, ¶ms.uri).await;
|
||||
match result {
|
||||
Ok(result) => match serde_json::from_value::<McpResourceReadResponse>(result) {
|
||||
Ok(response) => {
|
||||
outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
Err(error) => {
|
||||
outgoing
|
||||
.send_error(
|
||||
request_id,
|
||||
JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!(
|
||||
"failed to deserialize MCP resource read response: {error}"
|
||||
),
|
||||
data: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
},
|
||||
Err(error) => {
|
||||
outgoing
|
||||
.send_error(
|
||||
request_id,
|
||||
JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("{error:#}"),
|
||||
data: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn send_invalid_request_error(&self, request_id: ConnectionRequestId, message: String) {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
@@ -5544,7 +5701,11 @@ impl CodexMessageProcessor {
|
||||
.set_enabled(Feature::Apps, thread.enabled(Feature::Apps));
|
||||
}
|
||||
|
||||
if !config.features.apps_enabled(Some(&self.auth_manager)).await {
|
||||
let auth = self.auth_manager.auth().await;
|
||||
if !config
|
||||
.features
|
||||
.apps_enabled_for_auth(auth.as_ref().is_some_and(CodexAuth::is_chatgpt_auth))
|
||||
{
|
||||
self.outgoing
|
||||
.send_response(
|
||||
request_id,
|
||||
@@ -5875,6 +6036,7 @@ impl CodexMessageProcessor {
|
||||
force_remote_sync,
|
||||
} = params;
|
||||
let roots = cwds.unwrap_or_default();
|
||||
plugins_manager.maybe_start_non_curated_plugin_cache_refresh_for_roots(&roots);
|
||||
|
||||
let mut config = match self.load_latest_config(/*fallback_cwd*/ None).await {
|
||||
Ok(config) => config,
|
||||
@@ -6211,9 +6373,11 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
|
||||
let plugin_apps = load_plugin_apps(result.installed_path.as_path());
|
||||
let auth = self.auth_manager.auth().await;
|
||||
let apps_needing_auth = if plugin_apps.is_empty()
|
||||
|| !config.features.apps_enabled(Some(&self.auth_manager)).await
|
||||
{
|
||||
|| !config.features.apps_enabled_for_auth(
|
||||
auth.as_ref().is_some_and(CodexAuth::is_chatgpt_auth),
|
||||
) {
|
||||
Vec::new()
|
||||
} else {
|
||||
let (all_connectors_result, accessible_connectors_result) = tokio::join!(
|
||||
@@ -6408,6 +6572,7 @@ impl CodexMessageProcessor {
|
||||
request_id: ConnectionRequestId,
|
||||
params: TurnStartParams,
|
||||
app_server_client_name: Option<String>,
|
||||
app_server_client_version: Option<String>,
|
||||
) {
|
||||
if let Err(error) = Self::validate_v2_input_limit(¶ms.input) {
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
@@ -6420,8 +6585,12 @@ impl CodexMessageProcessor {
|
||||
return;
|
||||
}
|
||||
};
|
||||
if let Err(error) =
|
||||
Self::set_app_server_client_name(thread.as_ref(), app_server_client_name).await
|
||||
if let Err(error) = Self::set_app_server_client_info(
|
||||
thread.as_ref(),
|
||||
app_server_client_name,
|
||||
app_server_client_version,
|
||||
)
|
||||
.await
|
||||
{
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
@@ -6499,6 +6668,9 @@ impl CodexMessageProcessor {
|
||||
items: vec![],
|
||||
error: None,
|
||||
status: TurnStatus::InProgress,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
};
|
||||
|
||||
let response = TurnStartResponse { turn };
|
||||
@@ -6515,16 +6687,17 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
async fn set_app_server_client_name(
|
||||
async fn set_app_server_client_info(
|
||||
thread: &CodexThread,
|
||||
app_server_client_name: Option<String>,
|
||||
app_server_client_version: Option<String>,
|
||||
) -> Result<(), JSONRPCErrorError> {
|
||||
thread
|
||||
.set_app_server_client_name(app_server_client_name)
|
||||
.set_app_server_client_info(app_server_client_name, app_server_client_version)
|
||||
.await
|
||||
.map_err(|err| JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("failed to set app server client name: {err}"),
|
||||
message: format!("failed to set app server client info: {err}"),
|
||||
data: None,
|
||||
})
|
||||
}
|
||||
@@ -6689,6 +6862,14 @@ impl CodexMessageProcessor {
|
||||
Op::RealtimeConversationStart(ConversationStartParams {
|
||||
prompt: params.prompt,
|
||||
session_id: params.session_id,
|
||||
transport: params.transport.map(|transport| match transport {
|
||||
ThreadRealtimeStartTransport::Websocket => {
|
||||
ConversationStartTransport::Websocket
|
||||
}
|
||||
ThreadRealtimeStartTransport::Webrtc { sdp } => {
|
||||
ConversationStartTransport::Webrtc { sdp }
|
||||
}
|
||||
}),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
@@ -6834,6 +7015,9 @@ impl CodexMessageProcessor {
|
||||
items,
|
||||
error: None,
|
||||
status: TurnStatus::InProgress,
|
||||
started_at: None,
|
||||
completed_at: None,
|
||||
duration_ms: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7500,42 +7684,101 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
let snapshot = self.feedback.snapshot(conversation_id);
|
||||
let thread_id = snapshot.thread_id.clone();
|
||||
let sqlite_feedback_logs = if include_logs {
|
||||
let (feedback_thread_ids, sqlite_feedback_logs, state_db_ctx) = if include_logs {
|
||||
if let Some(log_db) = self.log_db.as_ref() {
|
||||
log_db.flush().await;
|
||||
}
|
||||
let state_db_ctx = get_state_db(&self.config).await;
|
||||
match (state_db_ctx.as_ref(), conversation_id) {
|
||||
(Some(state_db_ctx), Some(conversation_id)) => {
|
||||
let thread_id_text = conversation_id.to_string();
|
||||
match state_db_ctx.query_feedback_logs(&thread_id_text).await {
|
||||
Ok(logs) if logs.is_empty() => None,
|
||||
Ok(logs) => Some(logs),
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to query feedback logs from sqlite for thread_id={thread_id_text}: {err}"
|
||||
);
|
||||
None
|
||||
let feedback_thread_ids = match conversation_id {
|
||||
Some(conversation_id) => match self
|
||||
.thread_manager
|
||||
.list_agent_subtree_thread_ids(conversation_id)
|
||||
.await
|
||||
{
|
||||
Ok(thread_ids) => thread_ids,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to list feedback subtree for thread_id={conversation_id}: {err}"
|
||||
);
|
||||
let mut thread_ids = vec![conversation_id];
|
||||
if let Some(state_db_ctx) = state_db_ctx.as_ref() {
|
||||
for status in [
|
||||
codex_state::DirectionalThreadSpawnEdgeStatus::Open,
|
||||
codex_state::DirectionalThreadSpawnEdgeStatus::Closed,
|
||||
] {
|
||||
match state_db_ctx
|
||||
.list_thread_spawn_descendants_with_status(
|
||||
conversation_id,
|
||||
status,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(descendant_ids) => thread_ids.extend(descendant_ids),
|
||||
Err(err) => warn!(
|
||||
"failed to list persisted feedback subtree for thread_id={conversation_id}: {err}"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
thread_ids
|
||||
}
|
||||
},
|
||||
None => Vec::new(),
|
||||
};
|
||||
let sqlite_feedback_logs = if let Some(state_db_ctx) = state_db_ctx.as_ref()
|
||||
&& !feedback_thread_ids.is_empty()
|
||||
{
|
||||
let thread_id_texts = feedback_thread_ids
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>();
|
||||
let thread_id_refs = thread_id_texts
|
||||
.iter()
|
||||
.map(String::as_str)
|
||||
.collect::<Vec<_>>();
|
||||
match state_db_ctx
|
||||
.query_feedback_logs_for_threads(&thread_id_refs)
|
||||
.await
|
||||
{
|
||||
Ok(logs) if logs.is_empty() => None,
|
||||
Ok(logs) => Some(logs),
|
||||
Err(err) => {
|
||||
let thread_ids = thread_id_texts.join(", ");
|
||||
warn!(
|
||||
"failed to query feedback logs from sqlite for thread_ids=[{thread_ids}]: {err}"
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(feedback_thread_ids, sqlite_feedback_logs, state_db_ctx)
|
||||
} else {
|
||||
None
|
||||
(Vec::new(), None, None)
|
||||
};
|
||||
|
||||
let validated_rollout_path = if include_logs {
|
||||
match conversation_id {
|
||||
Some(conv_id) => self.resolve_rollout_path(conv_id).await,
|
||||
None => None,
|
||||
let mut attachment_paths = Vec::new();
|
||||
let mut seen_attachment_paths = HashSet::new();
|
||||
if include_logs {
|
||||
for feedback_thread_id in &feedback_thread_ids {
|
||||
let Some(rollout_path) = self
|
||||
.resolve_rollout_path(*feedback_thread_id, state_db_ctx.as_ref())
|
||||
.await
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
if seen_attachment_paths.insert(rollout_path.clone()) {
|
||||
attachment_paths.push(rollout_path);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut attachment_paths = validated_rollout_path.into_iter().collect::<Vec<_>>();
|
||||
}
|
||||
if let Some(extra_log_files) = extra_log_files {
|
||||
attachment_paths.extend(extra_log_files);
|
||||
for extra_log_file in extra_log_files {
|
||||
if seen_attachment_paths.insert(extra_log_file.clone()) {
|
||||
attachment_paths.push(extra_log_file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let session_source = self.thread_manager.session_source();
|
||||
@@ -7654,11 +7897,76 @@ impl CodexMessageProcessor {
|
||||
});
|
||||
}
|
||||
|
||||
async fn resolve_rollout_path(&self, conversation_id: ThreadId) -> Option<PathBuf> {
|
||||
match self.thread_manager.get_thread(conversation_id).await {
|
||||
Ok(conv) => conv.rollout_path(),
|
||||
Err(_) => None,
|
||||
async fn resolve_rollout_path(
|
||||
&self,
|
||||
conversation_id: ThreadId,
|
||||
state_db_ctx: Option<&StateDbHandle>,
|
||||
) -> Option<PathBuf> {
|
||||
if let Ok(conversation) = self.thread_manager.get_thread(conversation_id).await
|
||||
&& let Some(rollout_path) = conversation.rollout_path()
|
||||
{
|
||||
return Some(rollout_path);
|
||||
}
|
||||
|
||||
let state_db_ctx = state_db_ctx?;
|
||||
state_db_ctx
|
||||
.find_rollout_path_by_id(conversation_id, /*archived_only*/ None)
|
||||
.await
|
||||
.unwrap_or_else(|err| {
|
||||
warn!("failed to resolve rollout path for thread_id={conversation_id}: {err}");
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_thread_list_cwd_filter(
|
||||
cwd: Option<String>,
|
||||
) -> Result<Option<PathBuf>, JSONRPCErrorError> {
|
||||
let Some(cwd) = cwd else {
|
||||
return Ok(None);
|
||||
};
|
||||
AbsolutePathBuf::relative_to_current_dir(cwd.as_str())
|
||||
.map(AbsolutePathBuf::into_path_buf)
|
||||
.map(Some)
|
||||
.map_err(|err| JSONRPCErrorError {
|
||||
code: INVALID_PARAMS_ERROR_CODE,
|
||||
message: format!("invalid thread/list cwd filter `{cwd}`: {err}"),
|
||||
data: None,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod thread_list_cwd_filter_tests {
|
||||
use super::normalize_thread_list_cwd_filter;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn normalize_thread_list_cwd_filter_preserves_absolute_paths() {
|
||||
let cwd = if cfg!(windows) {
|
||||
String::from(r"C:\srv\repo-b")
|
||||
} else {
|
||||
String::from("/srv/repo-b")
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
normalize_thread_list_cwd_filter(Some(cwd.clone())).expect("cwd filter should parse"),
|
||||
Some(PathBuf::from(cwd))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_thread_list_cwd_filter_resolves_relative_paths_against_server_cwd()
|
||||
-> std::io::Result<()> {
|
||||
let expected = AbsolutePathBuf::relative_to_current_dir("repo-b")?.to_path_buf();
|
||||
|
||||
assert_eq!(
|
||||
normalize_thread_list_cwd_filter(Some(String::from("repo-b")))
|
||||
.expect("cwd filter should parse"),
|
||||
Some(expected)
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8557,7 +8865,7 @@ pub(crate) async fn read_rollout_items_from_rollout(
|
||||
path: &Path,
|
||||
) -> std::io::Result<Vec<RolloutItem>> {
|
||||
let items = match RolloutRecorder::get_rollout_history(path).await? {
|
||||
InitialHistory::New => Vec::new(),
|
||||
InitialHistory::New | InitialHistory::Cleared => Vec::new(),
|
||||
InitialHistory::Forked(items) => items,
|
||||
InitialHistory::Resumed(resumed) => resumed.history,
|
||||
};
|
||||
@@ -9432,6 +9740,7 @@ mod tests {
|
||||
state.track_current_turn_event(&EventMsg::TurnStarted(
|
||||
codex_protocol::protocol::TurnStartedEvent {
|
||||
turn_id: "turn-1".to_string(),
|
||||
started_at: None,
|
||||
model_context_window: None,
|
||||
collaboration_mode_kind: Default::default(),
|
||||
},
|
||||
|
||||
@@ -5,10 +5,10 @@ use codex_app_server_protocol::McpServerOauthLoginCompletedNotification;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_config::types::McpServerConfig;
|
||||
use codex_core::config::Config;
|
||||
use codex_mcp::mcp::auth::McpOAuthLoginSupport;
|
||||
use codex_mcp::mcp::auth::oauth_login_support;
|
||||
use codex_mcp::mcp::auth::resolve_oauth_scopes;
|
||||
use codex_mcp::mcp::auth::should_retry_without_scopes;
|
||||
use codex_mcp::McpOAuthLoginSupport;
|
||||
use codex_mcp::oauth_login_support;
|
||||
use codex_mcp::resolve_oauth_scopes;
|
||||
use codex_mcp::should_retry_without_scopes;
|
||||
use codex_rmcp_client::perform_oauth_login_silent;
|
||||
use tracing::warn;
|
||||
|
||||
|
||||
@@ -18,12 +18,12 @@ use codex_app_server_protocol::CommandExecWriteParams;
|
||||
use codex_app_server_protocol::CommandExecWriteResponse;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::bytes_to_string_smart;
|
||||
use codex_core::config::StartedNetworkProxy;
|
||||
use codex_core::exec::DEFAULT_EXEC_COMMAND_TIMEOUT_MS;
|
||||
use codex_core::exec::ExecExpiration;
|
||||
use codex_core::exec::IO_DRAIN_TIMEOUT_MS;
|
||||
use codex_core::sandboxing::ExecRequest;
|
||||
use codex_protocol::exec_output::bytes_to_string_smart;
|
||||
use codex_sandboxing::SandboxType;
|
||||
use codex_utils_pty::DEFAULT_OUTPUT_BYTES_CAP;
|
||||
use codex_utils_pty::ProcessHandle;
|
||||
|
||||
@@ -366,6 +366,12 @@ fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigR
|
||||
.map(codex_app_server_protocol::AskForApproval::from)
|
||||
.collect()
|
||||
}),
|
||||
allowed_approvals_reviewers: requirements.allowed_approvals_reviewers.map(|reviewers| {
|
||||
reviewers
|
||||
.into_iter()
|
||||
.map(codex_app_server_protocol::ApprovalsReviewer::from)
|
||||
.collect()
|
||||
}),
|
||||
allowed_sandbox_modes: requirements.allowed_sandbox_modes.map(|modes| {
|
||||
modes
|
||||
.into_iter()
|
||||
@@ -443,6 +449,7 @@ fn map_network_requirements_to_api(
|
||||
.collect()
|
||||
}),
|
||||
managed_allowed_domains_only: network.managed_allowed_domains_only,
|
||||
danger_full_access_denylist_only: network.danger_full_access_denylist_only,
|
||||
allowed_domains,
|
||||
denied_domains,
|
||||
unix_sockets: network.unix_sockets.map(|unix_sockets| {
|
||||
@@ -519,6 +526,7 @@ mod tests {
|
||||
use codex_features::Feature;
|
||||
use codex_login::AuthManager;
|
||||
use codex_login::CodexAuth;
|
||||
use codex_protocol::config_types::ApprovalsReviewer as CoreApprovalsReviewer;
|
||||
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
@@ -545,6 +553,10 @@ mod tests {
|
||||
CoreAskForApproval::Never,
|
||||
CoreAskForApproval::OnRequest,
|
||||
]),
|
||||
allowed_approvals_reviewers: Some(vec![
|
||||
CoreApprovalsReviewer::User,
|
||||
CoreApprovalsReviewer::GuardianSubagent,
|
||||
]),
|
||||
allowed_sandbox_modes: Some(vec![
|
||||
CoreSandboxModeRequirement::ReadOnly,
|
||||
CoreSandboxModeRequirement::ExternalSandbox,
|
||||
@@ -583,6 +595,7 @@ mod tests {
|
||||
]),
|
||||
}),
|
||||
managed_allowed_domains_only: Some(false),
|
||||
danger_full_access_denylist_only: Some(true),
|
||||
unix_sockets: Some(CoreNetworkUnixSocketPermissionsToml {
|
||||
entries: std::collections::BTreeMap::from([(
|
||||
"/tmp/proxy.sock".to_string(),
|
||||
@@ -602,6 +615,13 @@ mod tests {
|
||||
codex_app_server_protocol::AskForApproval::OnRequest,
|
||||
])
|
||||
);
|
||||
assert_eq!(
|
||||
mapped.allowed_approvals_reviewers,
|
||||
Some(vec![
|
||||
codex_app_server_protocol::ApprovalsReviewer::User,
|
||||
codex_app_server_protocol::ApprovalsReviewer::GuardianSubagent,
|
||||
])
|
||||
);
|
||||
assert_eq!(
|
||||
mapped.allowed_sandbox_modes,
|
||||
Some(vec![SandboxMode::ReadOnly]),
|
||||
@@ -635,6 +655,7 @@ mod tests {
|
||||
("example.com".to_string(), NetworkDomainPermission::Deny),
|
||||
])),
|
||||
managed_allowed_domains_only: Some(false),
|
||||
danger_full_access_denylist_only: Some(true),
|
||||
allowed_domains: Some(vec!["api.openai.com".to_string()]),
|
||||
denied_domains: Some(vec!["example.com".to_string()]),
|
||||
unix_sockets: Some(std::collections::BTreeMap::from([(
|
||||
@@ -651,6 +672,7 @@ mod tests {
|
||||
fn map_requirements_toml_to_api_omits_unix_socket_none_entries_from_legacy_network_fields() {
|
||||
let requirements = ConfigRequirementsToml {
|
||||
allowed_approval_policies: None,
|
||||
allowed_approvals_reviewers: None,
|
||||
allowed_sandbox_modes: None,
|
||||
allowed_web_search_modes: None,
|
||||
guardian_developer_instructions: None,
|
||||
@@ -668,6 +690,7 @@ mod tests {
|
||||
dangerously_allow_all_unix_sockets: None,
|
||||
domains: None,
|
||||
managed_allowed_domains_only: None,
|
||||
danger_full_access_denylist_only: None,
|
||||
unix_sockets: Some(CoreNetworkUnixSocketPermissionsToml {
|
||||
entries: std::collections::BTreeMap::from([(
|
||||
"/tmp/ignored.sock".to_string(),
|
||||
@@ -691,6 +714,7 @@ mod tests {
|
||||
dangerously_allow_all_unix_sockets: None,
|
||||
domains: None,
|
||||
managed_allowed_domains_only: None,
|
||||
danger_full_access_denylist_only: None,
|
||||
allowed_domains: None,
|
||||
denied_domains: None,
|
||||
unix_sockets: Some(std::collections::BTreeMap::from([(
|
||||
@@ -707,6 +731,7 @@ mod tests {
|
||||
fn map_requirements_toml_to_api_normalizes_allowed_web_search_modes() {
|
||||
let requirements = ConfigRequirementsToml {
|
||||
allowed_approval_policies: None,
|
||||
allowed_approvals_reviewers: None,
|
||||
allowed_sandbox_modes: None,
|
||||
allowed_web_search_modes: Some(Vec::new()),
|
||||
guardian_developer_instructions: None,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::fs_api::invalid_request;
|
||||
use crate::outgoing_message::ConnectionId;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::FsChangedNotification;
|
||||
@@ -16,6 +17,7 @@ use codex_core::file_watcher::WatchRegistration;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::hash::Hash;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -26,7 +28,6 @@ use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::time::Instant;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
const FS_CHANGED_NOTIFICATION_DEBOUNCE: Duration = Duration::from_millis(200);
|
||||
|
||||
@@ -120,7 +121,11 @@ impl FsWatchManager {
|
||||
connection_id: ConnectionId,
|
||||
params: FsWatchParams,
|
||||
) -> Result<FsWatchResponse, JSONRPCErrorError> {
|
||||
let watch_id = Uuid::now_v7().to_string();
|
||||
let watch_id = params.watch_id;
|
||||
let watch_key = WatchKey {
|
||||
connection_id,
|
||||
watch_id: watch_id.clone(),
|
||||
};
|
||||
let outgoing = self.outgoing.clone();
|
||||
let (subscriber, rx) = self.file_watcher.add_subscriber();
|
||||
let watch_root = params.path.to_path_buf().clone();
|
||||
@@ -130,17 +135,20 @@ impl FsWatchManager {
|
||||
}]);
|
||||
let (terminate_tx, terminate_rx) = oneshot::channel();
|
||||
|
||||
self.state.lock().await.entries.insert(
|
||||
WatchKey {
|
||||
connection_id,
|
||||
watch_id: watch_id.clone(),
|
||||
},
|
||||
WatchEntry {
|
||||
terminate_tx,
|
||||
_subscriber: subscriber,
|
||||
_registration: registration,
|
||||
},
|
||||
);
|
||||
match self.state.lock().await.entries.entry(watch_key) {
|
||||
Entry::Occupied(_) => {
|
||||
return Err(invalid_request(format!(
|
||||
"watchId already exists: {watch_id}"
|
||||
)));
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(WatchEntry {
|
||||
terminate_tx,
|
||||
_subscriber: subscriber,
|
||||
_registration: registration,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let task_watch_id = watch_id.clone();
|
||||
tokio::spawn(async move {
|
||||
@@ -158,19 +166,7 @@ impl FsWatchManager {
|
||||
let mut changed_paths = event
|
||||
.paths
|
||||
.into_iter()
|
||||
.filter_map(|path| {
|
||||
match AbsolutePathBuf::resolve_path_against_base(&path, &watch_root) {
|
||||
Ok(path) => Some(path),
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to normalize watch event path ({}) for {}: {err}",
|
||||
path.display(),
|
||||
watch_root.display()
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(|path| AbsolutePathBuf::resolve_path_against_base(&path, &watch_root))
|
||||
.collect::<Vec<_>>();
|
||||
changed_paths.sort_by(|left, right| left.as_path().cmp(right.as_path()));
|
||||
if !changed_paths.is_empty() {
|
||||
@@ -187,10 +183,7 @@ impl FsWatchManager {
|
||||
}
|
||||
});
|
||||
|
||||
Ok(FsWatchResponse {
|
||||
watch_id,
|
||||
path: params.path,
|
||||
})
|
||||
Ok(FsWatchResponse { path: params.path })
|
||||
}
|
||||
|
||||
pub(crate) async fn unwatch(
|
||||
@@ -228,7 +221,6 @@ mod tests {
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use uuid::Version;
|
||||
|
||||
fn absolute_path(path: PathBuf) -> AbsolutePathBuf {
|
||||
assert!(
|
||||
@@ -249,28 +241,33 @@ mod tests {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn watch_returns_a_v7_id_and_tracks_the_owner_scoped_entry() {
|
||||
async fn watch_uses_client_id_and_tracks_the_owner_scoped_entry() {
|
||||
let temp_dir = TempDir::new().expect("temp dir");
|
||||
let head_path = temp_dir.path().join("HEAD");
|
||||
std::fs::write(&head_path, "ref: refs/heads/main\n").expect("write HEAD");
|
||||
|
||||
let manager = manager_with_noop_watcher();
|
||||
let path = absolute_path(head_path);
|
||||
let watch_id = "watch-head".to_string();
|
||||
let response = manager
|
||||
.watch(ConnectionId(1), FsWatchParams { path: path.clone() })
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
watch_id: watch_id.clone(),
|
||||
path: path.clone(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect("watch should succeed");
|
||||
|
||||
assert_eq!(response.path, path);
|
||||
let watch_id = Uuid::parse_str(&response.watch_id).expect("watch id should be a UUID");
|
||||
assert_eq!(watch_id.get_version(), Some(Version::SortRand));
|
||||
|
||||
let state = manager.state.lock().await;
|
||||
assert_eq!(
|
||||
state.entries.keys().cloned().collect::<HashSet<_>>(),
|
||||
HashSet::from([WatchKey {
|
||||
connection_id: ConnectionId(1),
|
||||
watch_id: response.watch_id,
|
||||
watch_id,
|
||||
}])
|
||||
);
|
||||
}
|
||||
@@ -282,10 +279,11 @@ mod tests {
|
||||
std::fs::write(&head_path, "ref: refs/heads/main\n").expect("write HEAD");
|
||||
|
||||
let manager = manager_with_noop_watcher();
|
||||
let response = manager
|
||||
manager
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
watch_id: "watch-head".to_string(),
|
||||
path: absolute_path(head_path),
|
||||
},
|
||||
)
|
||||
@@ -293,14 +291,14 @@ mod tests {
|
||||
.expect("watch should succeed");
|
||||
let watch_key = WatchKey {
|
||||
connection_id: ConnectionId(1),
|
||||
watch_id: response.watch_id.clone(),
|
||||
watch_id: "watch-head".to_string(),
|
||||
};
|
||||
|
||||
manager
|
||||
.unwatch(
|
||||
ConnectionId(2),
|
||||
FsUnwatchParams {
|
||||
watch_id: response.watch_id.clone(),
|
||||
watch_id: "watch-head".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
@@ -311,7 +309,7 @@ mod tests {
|
||||
.unwatch(
|
||||
ConnectionId(1),
|
||||
FsUnwatchParams {
|
||||
watch_id: response.watch_id,
|
||||
watch_id: "watch-head".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
@@ -319,6 +317,41 @@ mod tests {
|
||||
assert!(!manager.state.lock().await.entries.contains_key(&watch_key));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn watch_rejects_duplicate_id_for_the_same_connection() {
|
||||
let temp_dir = TempDir::new().expect("temp dir");
|
||||
let head_path = temp_dir.path().join("HEAD");
|
||||
let fetch_head_path = temp_dir.path().join("FETCH_HEAD");
|
||||
std::fs::write(&head_path, "ref: refs/heads/main\n").expect("write HEAD");
|
||||
std::fs::write(&fetch_head_path, "old-fetch\n").expect("write FETCH_HEAD");
|
||||
|
||||
let manager = manager_with_noop_watcher();
|
||||
manager
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
watch_id: "watch-head".to_string(),
|
||||
path: absolute_path(head_path),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect("first watch should succeed");
|
||||
|
||||
let error = manager
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
watch_id: "watch-head".to_string(),
|
||||
path: absolute_path(fetch_head_path),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect_err("duplicate watch should fail");
|
||||
|
||||
assert_eq!(error.message, "watchId already exists: watch-head");
|
||||
assert_eq!(manager.state.lock().await.entries.len(), 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn connection_closed_removes_only_that_connections_watches() {
|
||||
let temp_dir = TempDir::new().expect("temp dir");
|
||||
@@ -330,28 +363,31 @@ mod tests {
|
||||
std::fs::write(&packed_refs_path, "refs\n").expect("write packed-refs");
|
||||
|
||||
let manager = manager_with_noop_watcher();
|
||||
let response_1 = manager
|
||||
let response = manager
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
path: absolute_path(head_path),
|
||||
watch_id: "watch-head".to_string(),
|
||||
path: absolute_path(head_path.clone()),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect("first watch should succeed");
|
||||
let response_2 = manager
|
||||
manager
|
||||
.watch(
|
||||
ConnectionId(1),
|
||||
FsWatchParams {
|
||||
watch_id: "watch-fetch-head".to_string(),
|
||||
path: absolute_path(fetch_head_path),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect("second watch should succeed");
|
||||
let response_3 = manager
|
||||
manager
|
||||
.watch(
|
||||
ConnectionId(2),
|
||||
FsWatchParams {
|
||||
watch_id: "watch-packed-refs".to_string(),
|
||||
path: absolute_path(packed_refs_path),
|
||||
},
|
||||
)
|
||||
@@ -371,9 +407,9 @@ mod tests {
|
||||
.collect::<HashSet<_>>(),
|
||||
HashSet::from([WatchKey {
|
||||
connection_id: ConnectionId(2),
|
||||
watch_id: response_3.watch_id,
|
||||
watch_id: "watch-packed-refs".to_string(),
|
||||
}])
|
||||
);
|
||||
assert_ne!(response_1.watch_id, response_2.watch_id);
|
||||
assert_eq!(response.path, absolute_path(head_path));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,6 +80,7 @@ use codex_core::config_loader::CloudRequirementsLoader;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_exec_server::EnvironmentManager;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_login::AuthManager;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
@@ -379,6 +380,8 @@ fn start_uninitialized(args: InProcessStartArgs) -> InProcessClientHandle {
|
||||
});
|
||||
|
||||
let processor_outgoing = Arc::clone(&outgoing_message_sender);
|
||||
let auth_manager =
|
||||
AuthManager::shared_from_config(args.config.as_ref(), args.enable_codex_api_key_env);
|
||||
let (processor_tx, mut processor_rx) = mpsc::channel::<ProcessorCommand>(channel_capacity);
|
||||
let mut processor_handle = tokio::spawn(async move {
|
||||
let mut processor = MessageProcessor::new(MessageProcessorArgs {
|
||||
@@ -393,8 +396,9 @@ fn start_uninitialized(args: InProcessStartArgs) -> InProcessClientHandle {
|
||||
log_db: None,
|
||||
config_warnings: args.config_warnings,
|
||||
session_source: args.session_source,
|
||||
enable_codex_api_key_env: args.enable_codex_api_key_env,
|
||||
auth_manager,
|
||||
rpc_transport: AppServerRpcTransport::InProcess,
|
||||
remote_control_handle: None,
|
||||
});
|
||||
let mut thread_created_rx = processor.thread_created_receiver();
|
||||
let mut session = ConnectionSessionState::default();
|
||||
@@ -823,6 +827,9 @@ mod tests {
|
||||
items: Vec::new(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
started_at: None,
|
||||
completed_at: Some(0),
|
||||
duration_ms: None,
|
||||
},
|
||||
})
|
||||
));
|
||||
|
||||
@@ -7,6 +7,8 @@ use codex_core::config::ConfigBuilder;
|
||||
use codex_core::config_loader::CloudRequirementsLoader;
|
||||
use codex_core::config_loader::ConfigLayerStackOrdering;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_features::Feature;
|
||||
use codex_login::AuthManager;
|
||||
use codex_utils_cli::CliConfigOverrides;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
@@ -28,6 +30,7 @@ use crate::transport::OutboundConnectionState;
|
||||
use crate::transport::TransportEvent;
|
||||
use crate::transport::auth::policy_from_settings;
|
||||
use crate::transport::route_outgoing_envelope;
|
||||
use crate::transport::start_remote_control;
|
||||
use crate::transport::start_stdio_connection;
|
||||
use crate::transport::start_websocket_acceptor;
|
||||
use codex_analytics::AppServerRpcTransport;
|
||||
@@ -42,10 +45,10 @@ use codex_core::config_loader::ConfigLoadError;
|
||||
use codex_core::config_loader::TextRange as CoreTextRange;
|
||||
use codex_exec_server::EnvironmentManager;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_login::AuthManager;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_state::log_db;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use toml::Value as TomlValue;
|
||||
@@ -396,11 +399,8 @@ pub async fn run_main_with_transport(
|
||||
}
|
||||
}
|
||||
|
||||
let auth_manager = AuthManager::shared(
|
||||
config.codex_home.clone(),
|
||||
/*enable_codex_api_key_env*/ false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
let auth_manager =
|
||||
AuthManager::shared_from_config(&config, /*enable_codex_api_key_env*/ false);
|
||||
cloud_requirements_loader(
|
||||
auth_manager,
|
||||
config.chatgpt_base_url,
|
||||
@@ -457,7 +457,9 @@ pub async fn run_main_with_transport(
|
||||
range: None,
|
||||
});
|
||||
}
|
||||
if let Some(warning) = codex_core::config::system_bwrap_warning() {
|
||||
if let Some(warning) =
|
||||
codex_core::config::system_bwrap_warning(config.permissions.sandbox_policy.get())
|
||||
{
|
||||
config_warnings.push(ConfigWarningNotification {
|
||||
summary: warning,
|
||||
details: None,
|
||||
@@ -500,13 +502,13 @@ pub async fn run_main_with_transport(
|
||||
|
||||
let feedback_layer = feedback.logger_layer();
|
||||
let feedback_metadata_layer = feedback.metadata_layer();
|
||||
let log_db = codex_state::StateRuntime::init(
|
||||
let state_db = codex_state::StateRuntime::init(
|
||||
config.sqlite_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
)
|
||||
.await
|
||||
.ok()
|
||||
.map(log_db::start);
|
||||
.ok();
|
||||
let log_db = state_db.clone().map(log_db::start);
|
||||
let log_db_layer = log_db
|
||||
.clone()
|
||||
.map(|layer| layer.with_filter(Targets::new().with_default(Level::TRACE)));
|
||||
@@ -533,11 +535,18 @@ pub async fn run_main_with_transport(
|
||||
let single_client_mode = matches!(&transport, AppServerTransport::Stdio);
|
||||
let shutdown_when_no_connections = single_client_mode;
|
||||
let graceful_signal_restart_enabled = !single_client_mode;
|
||||
let mut app_server_client_name_rx = None;
|
||||
|
||||
match transport {
|
||||
AppServerTransport::Stdio => {
|
||||
start_stdio_connection(transport_event_tx.clone(), &mut transport_accept_handles)
|
||||
.await?;
|
||||
let (stdio_client_name_tx, stdio_client_name_rx) = oneshot::channel::<String>();
|
||||
app_server_client_name_rx = Some(stdio_client_name_rx);
|
||||
start_stdio_connection(
|
||||
transport_event_tx.clone(),
|
||||
&mut transport_accept_handles,
|
||||
stdio_client_name_tx,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
AppServerTransport::WebSocket { bind_address } => {
|
||||
let accept_handle = start_websocket_acceptor(
|
||||
@@ -549,8 +558,32 @@ pub async fn run_main_with_transport(
|
||||
.await?;
|
||||
transport_accept_handles.push(accept_handle);
|
||||
}
|
||||
AppServerTransport::Off => {}
|
||||
}
|
||||
|
||||
let auth_manager =
|
||||
AuthManager::shared_from_config(&config, /*enable_codex_api_key_env*/ false);
|
||||
|
||||
let remote_control_enabled = config.features.enabled(Feature::RemoteControl);
|
||||
if transport_accept_handles.is_empty() && !remote_control_enabled {
|
||||
return Err(std::io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
"no transport configured; use --listen or enable remote control",
|
||||
));
|
||||
}
|
||||
|
||||
let (remote_control_accept_handle, remote_control_handle) = start_remote_control(
|
||||
config.chatgpt_base_url.clone(),
|
||||
state_db.clone(),
|
||||
auth_manager.clone(),
|
||||
transport_event_tx.clone(),
|
||||
transport_shutdown_token.clone(),
|
||||
app_server_client_name_rx,
|
||||
remote_control_enabled,
|
||||
)
|
||||
.await?;
|
||||
transport_accept_handles.push(remote_control_accept_handle);
|
||||
|
||||
let outbound_handle = tokio::spawn(async move {
|
||||
let mut outbound_connections = HashMap::<ConnectionId, OutboundConnectionState>::new();
|
||||
loop {
|
||||
@@ -609,6 +642,8 @@ pub async fn run_main_with_transport(
|
||||
let processor_handle = tokio::spawn({
|
||||
let outgoing_message_sender = Arc::new(OutgoingMessageSender::new(outgoing_tx));
|
||||
let outbound_control_tx = outbound_control_tx;
|
||||
let auth_manager =
|
||||
AuthManager::shared_from_config(&config, /*enable_codex_api_key_env*/ false);
|
||||
let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
|
||||
let loader_overrides = loader_overrides_for_config_api;
|
||||
let mut processor = MessageProcessor::new(MessageProcessorArgs {
|
||||
@@ -623,8 +658,9 @@ pub async fn run_main_with_transport(
|
||||
log_db,
|
||||
config_warnings,
|
||||
session_source,
|
||||
enable_codex_api_key_env: false,
|
||||
auth_manager,
|
||||
rpc_transport: analytics_rpc_transport(transport),
|
||||
remote_control_handle: Some(remote_control_handle),
|
||||
});
|
||||
let mut thread_created_rx = processor.thread_created_receiver();
|
||||
let mut running_turn_count_rx = processor.subscribe_running_assistant_turn_count();
|
||||
@@ -851,7 +887,9 @@ pub async fn run_main_with_transport(
|
||||
fn analytics_rpc_transport(transport: AppServerTransport) -> AppServerRpcTransport {
|
||||
match transport {
|
||||
AppServerTransport::Stdio => AppServerRpcTransport::Stdio,
|
||||
AppServerTransport::WebSocket { .. } => AppServerRpcTransport::Websocket,
|
||||
AppServerTransport::WebSocket { .. } | AppServerTransport::Off => {
|
||||
AppServerRpcTransport::Websocket
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH"
|
||||
#[derive(Debug, Parser)]
|
||||
struct AppServerArgs {
|
||||
/// Transport endpoint URL. Supported values: `stdio://` (default),
|
||||
/// `ws://IP:PORT`.
|
||||
/// `ws://IP:PORT`, `off`.
|
||||
#[arg(
|
||||
long = "listen",
|
||||
value_name = "URL",
|
||||
|
||||
@@ -19,10 +19,12 @@ use crate::outgoing_message::ConnectionRequestId;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::outgoing_message::RequestContext;
|
||||
use crate::transport::AppServerTransport;
|
||||
use crate::transport::RemoteControlHandle;
|
||||
use async_trait::async_trait;
|
||||
use codex_analytics::AnalyticsEventsClient;
|
||||
use codex_analytics::AppServerRpcTransport;
|
||||
use codex_app_server_protocol::AppListUpdatedNotification;
|
||||
use codex_app_server_protocol::AuthMode as LoginAuthMode;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshParams;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshReason;
|
||||
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
|
||||
@@ -61,12 +63,10 @@ use codex_core::ThreadManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config_loader::CloudRequirementsLoader;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
|
||||
use codex_exec_server::EnvironmentManager;
|
||||
use codex_features::Feature;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_login::AuthManager;
|
||||
use codex_login::AuthMode as LoginAuthMode;
|
||||
use codex_login::auth::ExternalAuth;
|
||||
use codex_login::auth::ExternalAuthRefreshContext;
|
||||
use codex_login::auth::ExternalAuthRefreshReason;
|
||||
@@ -76,6 +76,7 @@ use codex_login::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_login::default_client::get_codex_user_agent;
|
||||
use codex_login::default_client::set_default_client_residency_requirement;
|
||||
use codex_login::default_client::set_default_originator;
|
||||
use codex_models_manager::collaboration_mode_presets::CollaborationModesConfig;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::W3cTraceContext;
|
||||
@@ -170,6 +171,7 @@ pub(crate) struct MessageProcessor {
|
||||
config: Arc<Config>,
|
||||
config_warnings: Arc<Vec<ConfigWarningNotification>>,
|
||||
rpc_transport: AppServerRpcTransport,
|
||||
remote_control_handle: Option<RemoteControlHandle>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@@ -193,8 +195,9 @@ pub(crate) struct MessageProcessorArgs {
|
||||
pub(crate) log_db: Option<LogDbLayer>,
|
||||
pub(crate) config_warnings: Vec<ConfigWarningNotification>,
|
||||
pub(crate) session_source: SessionSource,
|
||||
pub(crate) enable_codex_api_key_env: bool,
|
||||
pub(crate) auth_manager: Arc<AuthManager>,
|
||||
pub(crate) rpc_transport: AppServerRpcTransport,
|
||||
pub(crate) remote_control_handle: Option<RemoteControlHandle>,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
@@ -213,17 +216,13 @@ impl MessageProcessor {
|
||||
log_db,
|
||||
config_warnings,
|
||||
session_source,
|
||||
enable_codex_api_key_env,
|
||||
auth_manager,
|
||||
rpc_transport,
|
||||
remote_control_handle,
|
||||
} = args;
|
||||
let auth_manager = AuthManager::shared_with_external_auth(
|
||||
config.codex_home.clone(),
|
||||
enable_codex_api_key_env,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
Arc::new(ExternalAuthRefreshBridge {
|
||||
outgoing: outgoing.clone(),
|
||||
}),
|
||||
);
|
||||
auth_manager.set_external_auth(Arc::new(ExternalAuthRefreshBridge {
|
||||
outgoing: outgoing.clone(),
|
||||
}));
|
||||
let thread_manager = Arc::new(ThreadManager::new(
|
||||
config.as_ref(),
|
||||
auth_manager.clone(),
|
||||
@@ -235,7 +234,6 @@ impl MessageProcessor {
|
||||
},
|
||||
environment_manager,
|
||||
));
|
||||
auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
|
||||
let analytics_events_client = AnalyticsEventsClient::new(
|
||||
Arc::clone(&auth_manager),
|
||||
config.chatgpt_base_url.trim_end_matches('/').to_string(),
|
||||
@@ -291,6 +289,7 @@ impl MessageProcessor {
|
||||
config,
|
||||
config_warnings: Arc::new(config_warnings),
|
||||
rpc_transport,
|
||||
remote_control_handle,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -850,6 +849,7 @@ impl MessageProcessor {
|
||||
connection_id,
|
||||
other,
|
||||
session.app_server_client_name.clone(),
|
||||
session.client_version.clone(),
|
||||
request_context,
|
||||
)
|
||||
.boxed()
|
||||
@@ -870,16 +870,8 @@ impl MessageProcessor {
|
||||
request_id: ConnectionRequestId,
|
||||
params: ConfigValueWriteParams,
|
||||
) {
|
||||
match self.config_api.write_value(params).await {
|
||||
Ok(response) => {
|
||||
self.codex_message_processor.clear_plugin_related_caches();
|
||||
self.codex_message_processor
|
||||
.maybe_start_plugin_startup_tasks_for_latest_config()
|
||||
.await;
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
let result = self.config_api.write_value(params).await;
|
||||
self.handle_config_mutation_result(request_id, result).await
|
||||
}
|
||||
|
||||
async fn handle_config_batch_write(
|
||||
@@ -887,8 +879,8 @@ impl MessageProcessor {
|
||||
request_id: ConnectionRequestId,
|
||||
params: ConfigBatchWriteParams,
|
||||
) {
|
||||
self.handle_config_mutation_result(request_id, self.config_api.batch_write(params).await)
|
||||
.await;
|
||||
let result = self.config_api.batch_write(params).await;
|
||||
self.handle_config_mutation_result(request_id, result).await;
|
||||
}
|
||||
|
||||
async fn handle_experimental_feature_enablement_set(
|
||||
@@ -897,23 +889,15 @@ impl MessageProcessor {
|
||||
params: ExperimentalFeatureEnablementSetParams,
|
||||
) {
|
||||
let should_refresh_apps_list = params.enablement.get("apps").copied() == Some(true);
|
||||
match self
|
||||
let result = self
|
||||
.config_api
|
||||
.set_experimental_feature_enablement(params)
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
self.codex_message_processor.clear_plugin_related_caches();
|
||||
self.codex_message_processor
|
||||
.maybe_start_plugin_startup_tasks_for_latest_config()
|
||||
.await;
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
if should_refresh_apps_list {
|
||||
self.refresh_apps_list_after_experimental_feature_enablement_set()
|
||||
.await;
|
||||
}
|
||||
}
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
.await;
|
||||
let is_ok = result.is_ok();
|
||||
self.handle_config_mutation_result(request_id, result).await;
|
||||
if should_refresh_apps_list && is_ok {
|
||||
self.refresh_apps_list_after_experimental_feature_enablement_set()
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -932,7 +916,11 @@ impl MessageProcessor {
|
||||
return;
|
||||
}
|
||||
};
|
||||
if !config.features.apps_enabled(Some(&self.auth_manager)).await {
|
||||
let auth = self.auth_manager.auth().await;
|
||||
if !config.features.apps_enabled_for_auth(
|
||||
auth.as_ref()
|
||||
.is_some_and(codex_login::CodexAuth::is_chatgpt_auth),
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -986,16 +974,36 @@ impl MessageProcessor {
|
||||
) {
|
||||
match result {
|
||||
Ok(response) => {
|
||||
self.codex_message_processor.clear_plugin_related_caches();
|
||||
self.codex_message_processor
|
||||
.maybe_start_plugin_startup_tasks_for_latest_config()
|
||||
.await;
|
||||
self.handle_config_mutation().await;
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_mutation(&self) {
|
||||
self.codex_message_processor.handle_config_mutation();
|
||||
let Some(remote_control_handle) = &self.remote_control_handle else {
|
||||
return;
|
||||
};
|
||||
|
||||
match self
|
||||
.config_api
|
||||
.load_latest_config(/*fallback_cwd*/ None)
|
||||
.await
|
||||
{
|
||||
Ok(config) => {
|
||||
remote_control_handle.set_enabled(config.features.enabled(Feature::RemoteControl));
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"failed to load config for remote control enablement refresh after config mutation: {}",
|
||||
error.message
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_requirements_read(&self, request_id: ConnectionRequestId) {
|
||||
match self.config_api.config_requirements_read().await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
|
||||
@@ -27,6 +27,7 @@ use codex_core::config_loader::CloudRequirementsLoader;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_exec_server::EnvironmentManager;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_login::AuthManager;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::W3cTraceContext;
|
||||
use opentelemetry::global;
|
||||
@@ -234,6 +235,8 @@ fn build_test_processor(
|
||||
) {
|
||||
let (outgoing_tx, outgoing_rx) = mpsc::channel(16);
|
||||
let outgoing = Arc::new(OutgoingMessageSender::new(outgoing_tx));
|
||||
let auth_manager =
|
||||
AuthManager::shared_from_config(config.as_ref(), /*enable_codex_api_key_env*/ false);
|
||||
let processor = MessageProcessor::new(MessageProcessorArgs {
|
||||
outgoing,
|
||||
arg0_paths: Arg0DispatchPaths::default(),
|
||||
@@ -246,8 +249,9 @@ fn build_test_processor(
|
||||
log_db: None,
|
||||
config_warnings: Vec::new(),
|
||||
session_source: SessionSource::VSCode,
|
||||
enable_codex_api_key_env: false,
|
||||
auth_manager,
|
||||
rpc_transport: AppServerRpcTransport::Stdio,
|
||||
remote_control_handle: None,
|
||||
});
|
||||
(processor, outgoing_rx)
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ModelUpgradeInfo;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::models_manager::manager::RefreshStrategy;
|
||||
use codex_models_manager::manager::RefreshStrategy;
|
||||
use codex_protocol::openai_models::ModelPreset;
|
||||
use codex_protocol::openai_models::ReasoningEffortPreset;
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ use std::sync::Weak;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tracing::error;
|
||||
|
||||
type PendingInterruptQueue = Vec<(
|
||||
ConnectionRequestId,
|
||||
@@ -44,6 +45,7 @@ pub(crate) enum ThreadListenerCommand {
|
||||
/// Per-conversation accumulation of the latest states e.g. error message while a turn runs.
|
||||
#[derive(Default, Clone)]
|
||||
pub(crate) struct TurnSummary {
|
||||
pub(crate) started_at: Option<i64>,
|
||||
pub(crate) file_change_started: HashSet<String>,
|
||||
pub(crate) command_execution_started: HashSet<String>,
|
||||
pub(crate) last_error: Option<TurnError>,
|
||||
@@ -109,13 +111,50 @@ impl ThreadState {
|
||||
}
|
||||
|
||||
pub(crate) fn track_current_turn_event(&mut self, event: &EventMsg) {
|
||||
if let EventMsg::TurnStarted(payload) = event {
|
||||
self.turn_summary.started_at = payload.started_at;
|
||||
}
|
||||
self.current_turn_history.handle_event(event);
|
||||
if !self.current_turn_history.has_active_turn() {
|
||||
if matches!(event, EventMsg::TurnAborted(_) | EventMsg::TurnComplete(_))
|
||||
&& !self.current_turn_history.has_active_turn()
|
||||
{
|
||||
self.current_turn_history.reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn resolve_server_request_on_thread_listener(
|
||||
thread_state: &Arc<Mutex<ThreadState>>,
|
||||
request_id: RequestId,
|
||||
) {
|
||||
let (completion_tx, completion_rx) = oneshot::channel();
|
||||
let listener_command_tx = {
|
||||
let state = thread_state.lock().await;
|
||||
state.listener_command_tx()
|
||||
};
|
||||
let Some(listener_command_tx) = listener_command_tx else {
|
||||
error!("failed to remove pending client request: thread listener is not running");
|
||||
return;
|
||||
};
|
||||
|
||||
if listener_command_tx
|
||||
.send(ThreadListenerCommand::ResolveServerRequest {
|
||||
request_id,
|
||||
completion_tx,
|
||||
})
|
||||
.is_err()
|
||||
{
|
||||
error!(
|
||||
"failed to remove pending client request: thread listener command channel is closed"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) = completion_rx.await {
|
||||
error!("failed to remove pending client request: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
struct ThreadEntry {
|
||||
state: Arc<Mutex<ThreadState>>,
|
||||
connection_ids: HashSet<ConnectionId>,
|
||||
|
||||
@@ -17,6 +17,7 @@ use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::sync::RwLock;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::AtomicU64;
|
||||
use std::sync::atomic::Ordering;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
@@ -28,9 +29,12 @@ use tracing::warn;
|
||||
/// plenty for an interactive CLI.
|
||||
pub(crate) const CHANNEL_CAPACITY: usize = 128;
|
||||
|
||||
mod remote_control;
|
||||
mod stdio;
|
||||
mod websocket;
|
||||
|
||||
pub(crate) use remote_control::RemoteControlHandle;
|
||||
pub(crate) use remote_control::start_remote_control;
|
||||
pub(crate) use stdio::start_stdio_connection;
|
||||
pub(crate) use websocket::start_websocket_acceptor;
|
||||
|
||||
@@ -38,6 +42,7 @@ pub(crate) use websocket::start_websocket_acceptor;
|
||||
pub enum AppServerTransport {
|
||||
Stdio,
|
||||
WebSocket { bind_address: SocketAddr },
|
||||
Off,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
@@ -51,7 +56,7 @@ impl std::fmt::Display for AppServerTransportParseError {
|
||||
match self {
|
||||
AppServerTransportParseError::UnsupportedListenUrl(listen_url) => write!(
|
||||
f,
|
||||
"unsupported --listen URL `{listen_url}`; expected `stdio://` or `ws://IP:PORT`"
|
||||
"unsupported --listen URL `{listen_url}`; expected `stdio://`, `ws://IP:PORT`, or `off`"
|
||||
),
|
||||
AppServerTransportParseError::InvalidWebSocketListenUrl(listen_url) => write!(
|
||||
f,
|
||||
@@ -71,6 +76,10 @@ impl AppServerTransport {
|
||||
return Ok(Self::Stdio);
|
||||
}
|
||||
|
||||
if listen_url == "off" {
|
||||
return Ok(Self::Off);
|
||||
}
|
||||
|
||||
if let Some(socket_addr) = listen_url.strip_prefix("ws://") {
|
||||
let bind_address = socket_addr.parse::<SocketAddr>().map_err(|_| {
|
||||
AppServerTransportParseError::InvalidWebSocketListenUrl(listen_url.to_string())
|
||||
@@ -166,6 +175,12 @@ impl OutboundConnectionState {
|
||||
}
|
||||
}
|
||||
|
||||
static CONNECTION_ID_COUNTER: AtomicU64 = AtomicU64::new(0);
|
||||
|
||||
fn next_connection_id() -> ConnectionId {
|
||||
ConnectionId(CONNECTION_ID_COUNTER.fetch_add(1, Ordering::Relaxed))
|
||||
}
|
||||
|
||||
async fn forward_incoming_message(
|
||||
transport_event_tx: &mpsc::Sender<TransportEvent>,
|
||||
writer: &mpsc::Sender<QueuedOutgoingMessage>,
|
||||
@@ -378,8 +393,11 @@ pub(crate) async fn route_outgoing_envelope(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::error_code::OVERLOADED_ERROR_CODE;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -393,41 +411,10 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn app_server_transport_parses_stdio_listen_url() {
|
||||
let transport = AppServerTransport::from_listen_url(AppServerTransport::DEFAULT_LISTEN_URL)
|
||||
.expect("stdio listen URL should parse");
|
||||
assert_eq!(transport, AppServerTransport::Stdio);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn app_server_transport_parses_websocket_listen_url() {
|
||||
let transport = AppServerTransport::from_listen_url("ws://127.0.0.1:1234")
|
||||
.expect("websocket listen URL should parse");
|
||||
fn listen_off_parses_as_off_transport() {
|
||||
assert_eq!(
|
||||
transport,
|
||||
AppServerTransport::WebSocket {
|
||||
bind_address: "127.0.0.1:1234".parse().expect("valid socket address"),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn app_server_transport_rejects_invalid_websocket_listen_url() {
|
||||
let err = AppServerTransport::from_listen_url("ws://localhost:1234")
|
||||
.expect_err("hostname bind address should be rejected");
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"invalid websocket --listen URL `ws://localhost:1234`; expected `ws://IP:PORT`"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn app_server_transport_rejects_unsupported_listen_url() {
|
||||
let err = AppServerTransport::from_listen_url("http://127.0.0.1:1234")
|
||||
.expect_err("unsupported scheme should fail");
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"unsupported --listen URL `http://127.0.0.1:1234`; expected `stdio://` or `ws://IP:PORT`"
|
||||
AppServerTransport::from_listen_url("off"),
|
||||
Ok(AppServerTransport::Off)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -437,11 +424,10 @@ mod tests {
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(1);
|
||||
let (writer_tx, mut writer_rx) = mpsc::channel(1);
|
||||
|
||||
let first_message =
|
||||
JSONRPCMessage::Notification(codex_app_server_protocol::JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
});
|
||||
let first_message = JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
});
|
||||
transport_event_tx
|
||||
.send(TransportEvent::IncomingMessage {
|
||||
connection_id,
|
||||
@@ -450,8 +436,8 @@ mod tests {
|
||||
.await
|
||||
.expect("queue should accept first message");
|
||||
|
||||
let request = JSONRPCMessage::Request(codex_app_server_protocol::JSONRPCRequest {
|
||||
id: codex_app_server_protocol::RequestId::Integer(7),
|
||||
let request = JSONRPCMessage::Request(JSONRPCRequest {
|
||||
id: RequestId::Integer(7),
|
||||
method: "config/read".to_string(),
|
||||
params: Some(json!({ "includeLayers": false })),
|
||||
trace: None,
|
||||
@@ -499,11 +485,10 @@ mod tests {
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(1);
|
||||
let (writer_tx, _writer_rx) = mpsc::channel(1);
|
||||
|
||||
let first_message =
|
||||
JSONRPCMessage::Notification(codex_app_server_protocol::JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
});
|
||||
let first_message = JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
});
|
||||
transport_event_tx
|
||||
.send(TransportEvent::IncomingMessage {
|
||||
connection_id,
|
||||
@@ -512,8 +497,8 @@ mod tests {
|
||||
.await
|
||||
.expect("queue should accept first message");
|
||||
|
||||
let response = JSONRPCMessage::Response(codex_app_server_protocol::JSONRPCResponse {
|
||||
id: codex_app_server_protocol::RequestId::Integer(7),
|
||||
let response = JSONRPCMessage::Response(JSONRPCResponse {
|
||||
id: RequestId::Integer(7),
|
||||
result: json!({"ok": true}),
|
||||
});
|
||||
let transport_event_tx_for_enqueue = transport_event_tx.clone();
|
||||
@@ -553,11 +538,10 @@ mod tests {
|
||||
match forwarded_event {
|
||||
TransportEvent::IncomingMessage {
|
||||
connection_id: queued_connection_id,
|
||||
message:
|
||||
JSONRPCMessage::Response(codex_app_server_protocol::JSONRPCResponse { id, result }),
|
||||
message: JSONRPCMessage::Response(JSONRPCResponse { id, result }),
|
||||
} => {
|
||||
assert_eq!(queued_connection_id, connection_id);
|
||||
assert_eq!(id, codex_app_server_protocol::RequestId::Integer(7));
|
||||
assert_eq!(id, RequestId::Integer(7));
|
||||
assert_eq!(result, json!({"ok": true}));
|
||||
}
|
||||
_ => panic!("expected forwarded response message"),
|
||||
@@ -573,12 +557,10 @@ mod tests {
|
||||
transport_event_tx
|
||||
.send(TransportEvent::IncomingMessage {
|
||||
connection_id,
|
||||
message: JSONRPCMessage::Notification(
|
||||
codex_app_server_protocol::JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
},
|
||||
),
|
||||
message: JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
}),
|
||||
})
|
||||
.await
|
||||
.expect("transport queue should accept first message");
|
||||
@@ -597,15 +579,15 @@ mod tests {
|
||||
.await
|
||||
.expect("writer queue should accept first message");
|
||||
|
||||
let request = JSONRPCMessage::Request(codex_app_server_protocol::JSONRPCRequest {
|
||||
id: codex_app_server_protocol::RequestId::Integer(7),
|
||||
let request = JSONRPCMessage::Request(JSONRPCRequest {
|
||||
id: RequestId::Integer(7),
|
||||
method: "config/read".to_string(),
|
||||
params: Some(json!({ "includeLayers": false })),
|
||||
trace: None,
|
||||
});
|
||||
|
||||
let enqueue_result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(100),
|
||||
let enqueue_result = timeout(
|
||||
Duration::from_millis(100),
|
||||
enqueue_incoming_message(&transport_event_tx, &writer_tx, connection_id, request),
|
||||
)
|
||||
.await
|
||||
@@ -781,7 +763,7 @@ mod tests {
|
||||
OutgoingEnvelope::ToConnection {
|
||||
connection_id,
|
||||
message: OutgoingMessage::Request(ServerRequest::CommandExecutionRequestApproval {
|
||||
request_id: codex_app_server_protocol::RequestId::Integer(1),
|
||||
request_id: RequestId::Integer(1),
|
||||
params: codex_app_server_protocol::CommandExecutionRequestApprovalParams {
|
||||
thread_id: "thr_123".to_string(),
|
||||
turn_id: "turn_123".to_string(),
|
||||
@@ -843,7 +825,7 @@ mod tests {
|
||||
OutgoingEnvelope::ToConnection {
|
||||
connection_id,
|
||||
message: OutgoingMessage::Request(ServerRequest::CommandExecutionRequestApproval {
|
||||
request_id: codex_app_server_protocol::RequestId::Integer(1),
|
||||
request_id: RequestId::Integer(1),
|
||||
params: codex_app_server_protocol::CommandExecutionRequestApprovalParams {
|
||||
thread_id: "thr_123".to_string(),
|
||||
turn_id: "turn_123".to_string(),
|
||||
|
||||
@@ -0,0 +1,568 @@
|
||||
use super::CHANNEL_CAPACITY;
|
||||
use super::TransportEvent;
|
||||
use super::next_connection_id;
|
||||
use super::protocol::ClientEnvelope;
|
||||
pub use super::protocol::ClientEvent;
|
||||
pub use super::protocol::ClientId;
|
||||
use super::protocol::PongStatus;
|
||||
use super::protocol::ServerEvent;
|
||||
use super::protocol::StreamId;
|
||||
use crate::outgoing_message::ConnectionId;
|
||||
use crate::outgoing_message::QueuedOutgoingMessage;
|
||||
use crate::transport::remote_control::QueuedServerEnvelope;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use std::collections::HashMap;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::watch;
|
||||
use tokio::task::JoinSet;
|
||||
use tokio::time::Duration;
|
||||
use tokio::time::Instant;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
const REMOTE_CONTROL_CLIENT_IDLE_TIMEOUT: Duration = Duration::from_secs(10 * 60);
|
||||
pub(crate) const REMOTE_CONTROL_IDLE_SWEEP_INTERVAL: Duration = Duration::from_secs(30);
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Stopped;
|
||||
|
||||
struct ClientState {
|
||||
connection_id: ConnectionId,
|
||||
disconnect_token: CancellationToken,
|
||||
last_activity_at: Instant,
|
||||
last_inbound_seq_id: Option<u64>,
|
||||
status_tx: watch::Sender<PongStatus>,
|
||||
}
|
||||
|
||||
pub(crate) struct ClientTracker {
|
||||
clients: HashMap<(ClientId, StreamId), ClientState>,
|
||||
legacy_stream_ids: HashMap<ClientId, StreamId>,
|
||||
join_set: JoinSet<(ClientId, StreamId)>,
|
||||
server_event_tx: mpsc::Sender<QueuedServerEnvelope>,
|
||||
transport_event_tx: mpsc::Sender<TransportEvent>,
|
||||
shutdown_token: CancellationToken,
|
||||
}
|
||||
|
||||
impl ClientTracker {
|
||||
pub(crate) fn new(
|
||||
server_event_tx: mpsc::Sender<QueuedServerEnvelope>,
|
||||
transport_event_tx: mpsc::Sender<TransportEvent>,
|
||||
shutdown_token: &CancellationToken,
|
||||
) -> Self {
|
||||
Self {
|
||||
clients: HashMap::new(),
|
||||
legacy_stream_ids: HashMap::new(),
|
||||
join_set: JoinSet::new(),
|
||||
server_event_tx,
|
||||
transport_event_tx,
|
||||
shutdown_token: shutdown_token.child_token(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn bookkeep_join_set(&mut self) -> Option<(ClientId, StreamId)> {
|
||||
while let Some(join_result) = self.join_set.join_next().await {
|
||||
let Ok(client_key) = join_result else {
|
||||
continue;
|
||||
};
|
||||
return Some(client_key);
|
||||
}
|
||||
futures::future::pending().await
|
||||
}
|
||||
|
||||
pub(crate) async fn shutdown(&mut self) {
|
||||
self.shutdown_token.cancel();
|
||||
|
||||
while let Some(client_key) = self.clients.keys().next().cloned() {
|
||||
let _ = self.close_client(&client_key).await;
|
||||
}
|
||||
|
||||
self.drain_join_set().await;
|
||||
}
|
||||
|
||||
async fn drain_join_set(&mut self) {
|
||||
while self.join_set.join_next().await.is_some() {}
|
||||
}
|
||||
|
||||
pub(crate) async fn handle_message(
|
||||
&mut self,
|
||||
client_envelope: ClientEnvelope,
|
||||
) -> Result<(), Stopped> {
|
||||
let ClientEnvelope {
|
||||
client_id,
|
||||
event,
|
||||
stream_id,
|
||||
seq_id,
|
||||
cursor: _,
|
||||
} = client_envelope;
|
||||
let is_legacy_stream_id = stream_id.is_none();
|
||||
let is_initialize = matches!(&event, ClientEvent::ClientMessage { message } if remote_control_message_starts_connection(message));
|
||||
let stream_id = match stream_id {
|
||||
Some(stream_id) => stream_id,
|
||||
None if is_initialize => {
|
||||
// TODO(ruslan): delete this fallback once all clients are updated to send stream_id.
|
||||
self.legacy_stream_ids
|
||||
.remove(&client_id)
|
||||
.unwrap_or_else(StreamId::new_random)
|
||||
}
|
||||
None => self
|
||||
.legacy_stream_ids
|
||||
.get(&client_id)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| {
|
||||
if matches!(&event, ClientEvent::Ping) {
|
||||
StreamId::new_random()
|
||||
} else {
|
||||
StreamId(String::new())
|
||||
}
|
||||
}),
|
||||
};
|
||||
if stream_id.0.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
let client_key = (client_id.clone(), stream_id.clone());
|
||||
match event {
|
||||
ClientEvent::ClientMessage { message } => {
|
||||
if let Some(seq_id) = seq_id
|
||||
&& let Some(client) = self.clients.get(&client_key)
|
||||
&& client
|
||||
.last_inbound_seq_id
|
||||
.is_some_and(|last_seq_id| last_seq_id >= seq_id)
|
||||
&& !is_initialize
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if is_initialize && self.clients.contains_key(&client_key) {
|
||||
self.close_client(&client_key).await?;
|
||||
}
|
||||
|
||||
if let Some(connection_id) = self.clients.get_mut(&client_key).map(|client| {
|
||||
client.last_activity_at = Instant::now();
|
||||
if let Some(seq_id) = seq_id {
|
||||
client.last_inbound_seq_id = Some(seq_id);
|
||||
}
|
||||
client.connection_id
|
||||
}) {
|
||||
self.send_transport_event(TransportEvent::IncomingMessage {
|
||||
connection_id,
|
||||
message,
|
||||
})
|
||||
.await?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !is_initialize {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let connection_id = next_connection_id();
|
||||
let (writer_tx, writer_rx) =
|
||||
mpsc::channel::<QueuedOutgoingMessage>(CHANNEL_CAPACITY);
|
||||
let disconnect_token = self.shutdown_token.child_token();
|
||||
self.send_transport_event(TransportEvent::ConnectionOpened {
|
||||
connection_id,
|
||||
writer: writer_tx,
|
||||
disconnect_sender: Some(disconnect_token.clone()),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let (status_tx, status_rx) = watch::channel(PongStatus::Active);
|
||||
self.join_set.spawn(Self::run_client_outbound(
|
||||
client_id.clone(),
|
||||
stream_id.clone(),
|
||||
self.server_event_tx.clone(),
|
||||
writer_rx,
|
||||
status_rx,
|
||||
disconnect_token.clone(),
|
||||
));
|
||||
self.clients.insert(
|
||||
client_key,
|
||||
ClientState {
|
||||
connection_id,
|
||||
disconnect_token,
|
||||
last_activity_at: Instant::now(),
|
||||
last_inbound_seq_id: if is_legacy_stream_id { None } else { seq_id },
|
||||
status_tx,
|
||||
},
|
||||
);
|
||||
if is_legacy_stream_id {
|
||||
self.legacy_stream_ids.insert(client_id.clone(), stream_id);
|
||||
}
|
||||
self.send_transport_event(TransportEvent::IncomingMessage {
|
||||
connection_id,
|
||||
message,
|
||||
})
|
||||
.await
|
||||
}
|
||||
ClientEvent::Ack => Ok(()),
|
||||
ClientEvent::Ping => {
|
||||
if let Some(client) = self.clients.get_mut(&client_key) {
|
||||
client.last_activity_at = Instant::now();
|
||||
let _ = client.status_tx.send(PongStatus::Active);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let server_event_tx = self.server_event_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let server_envelope = QueuedServerEnvelope {
|
||||
event: ServerEvent::Pong {
|
||||
status: PongStatus::Unknown,
|
||||
},
|
||||
client_id,
|
||||
stream_id,
|
||||
write_complete_tx: None,
|
||||
};
|
||||
let _ = server_event_tx.send(server_envelope).await;
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
ClientEvent::ClientClosed => self.close_client(&client_key).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn run_client_outbound(
|
||||
client_id: ClientId,
|
||||
stream_id: StreamId,
|
||||
server_event_tx: mpsc::Sender<QueuedServerEnvelope>,
|
||||
mut writer_rx: mpsc::Receiver<QueuedOutgoingMessage>,
|
||||
mut status_rx: watch::Receiver<PongStatus>,
|
||||
disconnect_token: CancellationToken,
|
||||
) -> (ClientId, StreamId) {
|
||||
loop {
|
||||
let (event, write_complete_tx) = tokio::select! {
|
||||
_ = disconnect_token.cancelled() => {
|
||||
break;
|
||||
}
|
||||
queued_message = writer_rx.recv() => {
|
||||
let Some(queued_message) = queued_message else {
|
||||
break;
|
||||
};
|
||||
let event = ServerEvent::ServerMessage {
|
||||
message: Box::new(queued_message.message),
|
||||
};
|
||||
(event, queued_message.write_complete_tx)
|
||||
}
|
||||
changed = status_rx.changed() => {
|
||||
if changed.is_err() {
|
||||
break;
|
||||
}
|
||||
let event = ServerEvent::Pong { status: status_rx.borrow().clone() };
|
||||
(event, None)
|
||||
}
|
||||
};
|
||||
let send_result = tokio::select! {
|
||||
_ = disconnect_token.cancelled() => {
|
||||
break;
|
||||
}
|
||||
send_result = server_event_tx.send(QueuedServerEnvelope {
|
||||
event,
|
||||
client_id: client_id.clone(),
|
||||
stream_id: stream_id.clone(),
|
||||
write_complete_tx,
|
||||
}) => send_result,
|
||||
};
|
||||
if send_result.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
(client_id, stream_id)
|
||||
}
|
||||
|
||||
pub(crate) async fn close_expired_clients(
|
||||
&mut self,
|
||||
) -> Result<Vec<(ClientId, StreamId)>, Stopped> {
|
||||
let now = Instant::now();
|
||||
let expired_client_ids: Vec<(ClientId, StreamId)> = self
|
||||
.clients
|
||||
.iter()
|
||||
.filter_map(|(client_key, client)| {
|
||||
(!remote_control_client_is_alive(client, now)).then_some(client_key.clone())
|
||||
})
|
||||
.collect();
|
||||
for client_key in &expired_client_ids {
|
||||
self.close_client(client_key).await?;
|
||||
}
|
||||
Ok(expired_client_ids)
|
||||
}
|
||||
|
||||
pub(super) async fn close_client(
|
||||
&mut self,
|
||||
client_key: &(ClientId, StreamId),
|
||||
) -> Result<(), Stopped> {
|
||||
let Some(client) = self.clients.remove(client_key) else {
|
||||
return Ok(());
|
||||
};
|
||||
if self
|
||||
.legacy_stream_ids
|
||||
.get(&client_key.0)
|
||||
.is_some_and(|stream_id| stream_id == &client_key.1)
|
||||
{
|
||||
self.legacy_stream_ids.remove(&client_key.0);
|
||||
}
|
||||
client.disconnect_token.cancel();
|
||||
self.send_transport_event(TransportEvent::ConnectionClosed {
|
||||
connection_id: client.connection_id,
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn send_transport_event(&self, event: TransportEvent) -> Result<(), Stopped> {
|
||||
self.transport_event_tx
|
||||
.send(event)
|
||||
.await
|
||||
.map_err(|_| Stopped)
|
||||
}
|
||||
}
|
||||
|
||||
fn remote_control_message_starts_connection(message: &JSONRPCMessage) -> bool {
|
||||
matches!(
|
||||
message,
|
||||
JSONRPCMessage::Request(codex_app_server_protocol::JSONRPCRequest { method, .. })
|
||||
if method == "initialize"
|
||||
)
|
||||
}
|
||||
|
||||
fn remote_control_client_is_alive(client: &ClientState, now: Instant) -> bool {
|
||||
now.duration_since(client.last_activity_at) < REMOTE_CONTROL_CLIENT_IDLE_TIMEOUT
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::transport::remote_control::protocol::ClientEnvelope;
|
||||
use crate::transport::remote_control::protocol::ClientEvent;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tokio::time::timeout;
|
||||
|
||||
fn initialize_envelope(client_id: &str) -> ClientEnvelope {
|
||||
initialize_envelope_with_stream_id(client_id, /*stream_id*/ None)
|
||||
}
|
||||
|
||||
fn initialize_envelope_with_stream_id(
|
||||
client_id: &str,
|
||||
stream_id: Option<&str>,
|
||||
) -> ClientEnvelope {
|
||||
ClientEnvelope {
|
||||
event: ClientEvent::ClientMessage {
|
||||
message: JSONRPCMessage::Request(JSONRPCRequest {
|
||||
id: RequestId::Integer(1),
|
||||
method: "initialize".to_string(),
|
||||
params: Some(json!({
|
||||
"clientInfo": {
|
||||
"name": "remote-test-client",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
})),
|
||||
trace: None,
|
||||
}),
|
||||
},
|
||||
client_id: ClientId(client_id.to_string()),
|
||||
stream_id: stream_id.map(|stream_id| StreamId(stream_id.to_string())),
|
||||
seq_id: Some(0),
|
||||
cursor: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn cancelled_outbound_task_emits_connection_closed() {
|
||||
let (server_event_tx, _server_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let mut client_tracker =
|
||||
ClientTracker::new(server_event_tx, transport_event_tx, &shutdown_token);
|
||||
|
||||
client_tracker
|
||||
.handle_message(initialize_envelope("client-1"))
|
||||
.await
|
||||
.expect("initialize should open client");
|
||||
|
||||
let (connection_id, disconnect_sender) = match transport_event_rx
|
||||
.recv()
|
||||
.await
|
||||
.expect("connection opened should be sent")
|
||||
{
|
||||
TransportEvent::ConnectionOpened {
|
||||
connection_id,
|
||||
disconnect_sender: Some(disconnect_sender),
|
||||
..
|
||||
} => (connection_id, disconnect_sender),
|
||||
other => panic!("expected connection opened, got {other:?}"),
|
||||
};
|
||||
match transport_event_rx
|
||||
.recv()
|
||||
.await
|
||||
.expect("initialize should be forwarded")
|
||||
{
|
||||
TransportEvent::IncomingMessage {
|
||||
connection_id: incoming_connection_id,
|
||||
..
|
||||
} => assert_eq!(incoming_connection_id, connection_id),
|
||||
other => panic!("expected incoming initialize, got {other:?}"),
|
||||
}
|
||||
|
||||
disconnect_sender.cancel();
|
||||
let closed_client_id = timeout(Duration::from_secs(1), client_tracker.bookkeep_join_set())
|
||||
.await
|
||||
.expect("bookkeeping should process the closed task")
|
||||
.expect("closed task should return client id");
|
||||
assert_eq!(closed_client_id.0, ClientId("client-1".to_string()));
|
||||
client_tracker
|
||||
.close_client(&closed_client_id)
|
||||
.await
|
||||
.expect("closed client should emit connection closed");
|
||||
|
||||
match transport_event_rx
|
||||
.recv()
|
||||
.await
|
||||
.expect("connection closed should be sent")
|
||||
{
|
||||
TransportEvent::ConnectionClosed {
|
||||
connection_id: closed_connection_id,
|
||||
} => assert_eq!(closed_connection_id, connection_id),
|
||||
other => panic!("expected connection closed, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn shutdown_cancels_blocked_outbound_forwarding() {
|
||||
let (server_event_tx, _server_event_rx) = mpsc::channel(1);
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let mut client_tracker =
|
||||
ClientTracker::new(server_event_tx.clone(), transport_event_tx, &shutdown_token);
|
||||
|
||||
server_event_tx
|
||||
.send(QueuedServerEnvelope {
|
||||
event: ServerEvent::Pong {
|
||||
status: PongStatus::Unknown,
|
||||
},
|
||||
client_id: ClientId("queued-client".to_string()),
|
||||
stream_id: StreamId("queued-stream".to_string()),
|
||||
write_complete_tx: None,
|
||||
})
|
||||
.await
|
||||
.expect("server event queue should accept prefill");
|
||||
|
||||
client_tracker
|
||||
.handle_message(initialize_envelope("client-1"))
|
||||
.await
|
||||
.expect("initialize should open client");
|
||||
|
||||
let writer = match transport_event_rx
|
||||
.recv()
|
||||
.await
|
||||
.expect("connection opened should be sent")
|
||||
{
|
||||
TransportEvent::ConnectionOpened { writer, .. } => writer,
|
||||
other => panic!("expected connection opened, got {other:?}"),
|
||||
};
|
||||
let _ = transport_event_rx
|
||||
.recv()
|
||||
.await
|
||||
.expect("initialize should be forwarded");
|
||||
|
||||
writer
|
||||
.send(QueuedOutgoingMessage::new(
|
||||
OutgoingMessage::AppServerNotification(ServerNotification::ConfigWarning(
|
||||
ConfigWarningNotification {
|
||||
summary: "test".to_string(),
|
||||
details: None,
|
||||
path: None,
|
||||
range: None,
|
||||
},
|
||||
)),
|
||||
))
|
||||
.await
|
||||
.expect("writer should accept queued message");
|
||||
|
||||
timeout(Duration::from_secs(1), client_tracker.shutdown())
|
||||
.await
|
||||
.expect("shutdown should not hang on blocked server forwarding");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn initialize_with_new_stream_id_opens_new_connection_for_same_client() {
|
||||
let (server_event_tx, _server_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let mut client_tracker =
|
||||
ClientTracker::new(server_event_tx, transport_event_tx, &shutdown_token);
|
||||
|
||||
client_tracker
|
||||
.handle_message(initialize_envelope_with_stream_id(
|
||||
"client-1",
|
||||
Some("stream-1"),
|
||||
))
|
||||
.await
|
||||
.expect("first initialize should open client");
|
||||
let first_connection_id = match transport_event_rx.recv().await.expect("open event") {
|
||||
TransportEvent::ConnectionOpened { connection_id, .. } => connection_id,
|
||||
other => panic!("expected connection opened, got {other:?}"),
|
||||
};
|
||||
let _ = transport_event_rx.recv().await.expect("initialize event");
|
||||
|
||||
client_tracker
|
||||
.handle_message(initialize_envelope_with_stream_id(
|
||||
"client-1",
|
||||
Some("stream-2"),
|
||||
))
|
||||
.await
|
||||
.expect("second initialize should open client");
|
||||
let second_connection_id = match transport_event_rx.recv().await.expect("open event") {
|
||||
TransportEvent::ConnectionOpened { connection_id, .. } => connection_id,
|
||||
other => panic!("expected connection opened, got {other:?}"),
|
||||
};
|
||||
|
||||
assert_ne!(first_connection_id, second_connection_id);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn legacy_initialize_without_stream_id_resets_inbound_seq_id() {
|
||||
let (server_event_tx, _server_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let (transport_event_tx, mut transport_event_rx) = mpsc::channel(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let mut client_tracker =
|
||||
ClientTracker::new(server_event_tx, transport_event_tx, &shutdown_token);
|
||||
|
||||
client_tracker
|
||||
.handle_message(initialize_envelope("client-1"))
|
||||
.await
|
||||
.expect("initialize should open client");
|
||||
let connection_id = match transport_event_rx.recv().await.expect("open event") {
|
||||
TransportEvent::ConnectionOpened { connection_id, .. } => connection_id,
|
||||
other => panic!("expected connection opened, got {other:?}"),
|
||||
};
|
||||
let _ = transport_event_rx.recv().await.expect("initialize event");
|
||||
|
||||
client_tracker
|
||||
.handle_message(ClientEnvelope {
|
||||
event: ClientEvent::ClientMessage {
|
||||
message: JSONRPCMessage::Notification(
|
||||
codex_app_server_protocol::JSONRPCNotification {
|
||||
method: "initialized".to_string(),
|
||||
params: None,
|
||||
},
|
||||
),
|
||||
},
|
||||
client_id: ClientId("client-1".to_string()),
|
||||
stream_id: None,
|
||||
seq_id: Some(0),
|
||||
cursor: None,
|
||||
})
|
||||
.await
|
||||
.expect("legacy followup should be forwarded");
|
||||
|
||||
match transport_event_rx.recv().await.expect("followup event") {
|
||||
TransportEvent::IncomingMessage {
|
||||
connection_id: incoming_connection_id,
|
||||
..
|
||||
} => assert_eq!(incoming_connection_id, connection_id),
|
||||
other => panic!("expected incoming message, got {other:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
503
codex-rs/app-server/src/transport/remote_control/enroll.rs
Normal file
503
codex-rs/app-server/src/transport/remote_control/enroll.rs
Normal file
@@ -0,0 +1,503 @@
|
||||
use super::protocol::EnrollRemoteServerRequest;
|
||||
use super::protocol::EnrollRemoteServerResponse;
|
||||
use super::protocol::RemoteControlTarget;
|
||||
use axum::http::HeaderMap;
|
||||
use codex_login::default_client::build_reqwest_client;
|
||||
use codex_state::RemoteControlEnrollmentRecord;
|
||||
use codex_state::StateRuntime;
|
||||
use gethostname::gethostname;
|
||||
use std::io;
|
||||
use std::io::ErrorKind;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
|
||||
const REMOTE_CONTROL_ENROLL_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);
|
||||
const REMOTE_CONTROL_RESPONSE_BODY_MAX_BYTES: usize = 4096;
|
||||
|
||||
const REQUEST_ID_HEADER: &str = "x-request-id";
|
||||
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
|
||||
const CF_RAY_HEADER: &str = "cf-ray";
|
||||
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) struct RemoteControlEnrollment {
|
||||
pub(super) account_id: String,
|
||||
pub(super) environment_id: String,
|
||||
pub(super) server_id: String,
|
||||
pub(super) server_name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) struct RemoteControlConnectionAuth {
|
||||
pub(super) bearer_token: String,
|
||||
pub(super) account_id: String,
|
||||
}
|
||||
|
||||
pub(super) async fn load_persisted_remote_control_enrollment(
|
||||
state_db: Option<&StateRuntime>,
|
||||
remote_control_target: &RemoteControlTarget,
|
||||
account_id: &str,
|
||||
app_server_client_name: Option<&str>,
|
||||
) -> Option<RemoteControlEnrollment> {
|
||||
let Some(state_db) = state_db else {
|
||||
info!(
|
||||
"remote control enrollment cache unavailable because sqlite state db is disabled: websocket_url={}, account_id={}, app_server_client_name={:?}",
|
||||
remote_control_target.websocket_url, account_id, app_server_client_name
|
||||
);
|
||||
return None;
|
||||
};
|
||||
let enrollment = match state_db
|
||||
.get_remote_control_enrollment(
|
||||
&remote_control_target.websocket_url,
|
||||
account_id,
|
||||
app_server_client_name,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(enrollment) => enrollment,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to load persisted remote control enrollment: websocket_url={}, account_id={}, app_server_client_name={:?}, err={err}",
|
||||
remote_control_target.websocket_url, account_id, app_server_client_name
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
match enrollment {
|
||||
Some(enrollment) => {
|
||||
info!(
|
||||
"reusing persisted remote control enrollment: websocket_url={}, account_id={}, app_server_client_name={:?}, server_id={}, environment_id={}",
|
||||
remote_control_target.websocket_url,
|
||||
account_id,
|
||||
app_server_client_name,
|
||||
enrollment.server_id,
|
||||
enrollment.environment_id
|
||||
);
|
||||
Some(RemoteControlEnrollment {
|
||||
account_id: enrollment.account_id,
|
||||
environment_id: enrollment.environment_id,
|
||||
server_id: enrollment.server_id,
|
||||
server_name: enrollment.server_name,
|
||||
})
|
||||
}
|
||||
None => {
|
||||
info!(
|
||||
"no persisted remote control enrollment found: websocket_url={}, account_id={}, app_server_client_name={:?}",
|
||||
remote_control_target.websocket_url, account_id, app_server_client_name
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn update_persisted_remote_control_enrollment(
|
||||
state_db: Option<&StateRuntime>,
|
||||
remote_control_target: &RemoteControlTarget,
|
||||
account_id: &str,
|
||||
app_server_client_name: Option<&str>,
|
||||
enrollment: Option<&RemoteControlEnrollment>,
|
||||
) -> io::Result<()> {
|
||||
let Some(state_db) = state_db else {
|
||||
info!(
|
||||
"skipping remote control enrollment persistence because sqlite state db is disabled: websocket_url={}, account_id={}, app_server_client_name={:?}, has_enrollment={}",
|
||||
remote_control_target.websocket_url,
|
||||
account_id,
|
||||
app_server_client_name,
|
||||
enrollment.is_some()
|
||||
);
|
||||
return Ok(());
|
||||
};
|
||||
if let &Some(enrollment) = &enrollment
|
||||
&& enrollment.account_id != account_id
|
||||
{
|
||||
return Err(io::Error::other(format!(
|
||||
"enrollment account_id does not match expected account_id `{account_id}`"
|
||||
)));
|
||||
}
|
||||
|
||||
if let Some(enrollment) = enrollment {
|
||||
state_db
|
||||
.upsert_remote_control_enrollment(&RemoteControlEnrollmentRecord {
|
||||
websocket_url: remote_control_target.websocket_url.clone(),
|
||||
account_id: account_id.to_string(),
|
||||
app_server_client_name: app_server_client_name.map(str::to_string),
|
||||
server_id: enrollment.server_id.clone(),
|
||||
environment_id: enrollment.environment_id.clone(),
|
||||
server_name: enrollment.server_name.clone(),
|
||||
})
|
||||
.await
|
||||
.map_err(io::Error::other)?;
|
||||
info!(
|
||||
"persisted remote control enrollment: websocket_url={}, account_id={}, app_server_client_name={:?}, server_id={}, environment_id={}",
|
||||
remote_control_target.websocket_url,
|
||||
account_id,
|
||||
app_server_client_name,
|
||||
enrollment.server_id,
|
||||
enrollment.environment_id
|
||||
);
|
||||
Ok(())
|
||||
} else {
|
||||
let rows_affected = state_db
|
||||
.delete_remote_control_enrollment(
|
||||
&remote_control_target.websocket_url,
|
||||
account_id,
|
||||
app_server_client_name,
|
||||
)
|
||||
.await
|
||||
.map_err(io::Error::other)?;
|
||||
info!(
|
||||
"cleared persisted remote control enrollment: websocket_url={}, account_id={}, app_server_client_name={:?}, rows_affected={rows_affected}",
|
||||
remote_control_target.websocket_url, account_id, app_server_client_name
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn preview_remote_control_response_body(body: &[u8]) -> String {
|
||||
let body = String::from_utf8_lossy(body);
|
||||
let trimmed = body.trim();
|
||||
if trimmed.is_empty() {
|
||||
return "<empty>".to_string();
|
||||
}
|
||||
if trimmed.len() <= REMOTE_CONTROL_RESPONSE_BODY_MAX_BYTES {
|
||||
return trimmed.to_string();
|
||||
}
|
||||
|
||||
let mut cut = REMOTE_CONTROL_RESPONSE_BODY_MAX_BYTES;
|
||||
while !trimmed.is_char_boundary(cut) {
|
||||
cut = cut.saturating_sub(1);
|
||||
}
|
||||
let mut truncated = trimmed[..cut].to_string();
|
||||
truncated.push_str("...");
|
||||
truncated
|
||||
}
|
||||
|
||||
pub(crate) fn format_headers(headers: &HeaderMap) -> String {
|
||||
let request_id_str = headers
|
||||
.get(REQUEST_ID_HEADER)
|
||||
.or_else(|| headers.get(OAI_REQUEST_ID_HEADER))
|
||||
.map(|value| value.to_str().unwrap_or("<invalid utf-8>").to_owned())
|
||||
.unwrap_or_else(|| "<none>".to_owned());
|
||||
let cf_ray_str = headers
|
||||
.get(CF_RAY_HEADER)
|
||||
.map(|value| value.to_str().unwrap_or("<invalid utf-8>").to_owned())
|
||||
.unwrap_or_else(|| "<none>".to_owned());
|
||||
format!("request-id: {request_id_str}, cf-ray: {cf_ray_str}")
|
||||
}
|
||||
|
||||
/// Registers this machine as a remote-control server with the backend at
/// `remote_control_target.enroll_url`.
///
/// Sends an authenticated POST describing this host (hostname, OS,
/// architecture, app-server version) and parses the backend's JSON reply into
/// a [`RemoteControlEnrollment`].
///
/// # Errors
/// Returns an [`io::Error`] when the request cannot be sent, the response
/// body cannot be read, the backend replies with a non-success status
/// (401/403 map to `ErrorKind::PermissionDenied`), or the body fails to
/// deserialize. Error messages embed the URL, HTTP status, correlation
/// headers, and a body preview to aid debugging; tests assert these exact
/// strings, so do not reword them casually.
pub(super) async fn enroll_remote_control_server(
    remote_control_target: &RemoteControlTarget,
    auth: &RemoteControlConnectionAuth,
) -> io::Result<RemoteControlEnrollment> {
    let enroll_url = &remote_control_target.enroll_url;
    // Advertise the machine's hostname as the server name (lossily converted
    // from the OS string, then trimmed).
    let server_name = gethostname().to_string_lossy().trim().to_string();
    let request = EnrollRemoteServerRequest {
        name: server_name.clone(),
        os: std::env::consts::OS,
        arch: std::env::consts::ARCH,
        app_server_version: env!("CARGO_PKG_VERSION"),
    };
    let client = build_reqwest_client();
    let http_request = client
        .post(enroll_url)
        .timeout(REMOTE_CONTROL_ENROLL_TIMEOUT)
        .bearer_auth(&auth.bearer_token)
        .header(REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)
        .json(&request);

    let response = http_request.send().await.map_err(|err| {
        io::Error::other(format!(
            "failed to enroll remote control server at `{enroll_url}`: {err}"
        ))
    })?;
    // Capture status/headers before consuming the body so they remain
    // available for the error messages below.
    let headers = response.headers().clone();
    let status = response.status();
    let body = response.bytes().await.map_err(|err| {
        io::Error::other(format!(
            "failed to read remote control enrollment response from `{enroll_url}`: {err}"
        ))
    })?;
    let body_preview = preview_remote_control_response_body(&body);
    if !status.is_success() {
        let headers_str = format_headers(&headers);
        // Auth failures get a distinct error kind so callers can surface them
        // as permission problems rather than generic I/O failures.
        let error_kind = if matches!(status.as_u16(), 401 | 403) {
            ErrorKind::PermissionDenied
        } else {
            ErrorKind::Other
        };
        return Err(io::Error::new(
            error_kind,
            format!(
                "remote control server enrollment failed at `{enroll_url}`: HTTP {status}, {headers_str}, body: {body_preview}"
            ),
        ));
    }

    let enrollment = serde_json::from_slice::<EnrollRemoteServerResponse>(&body).map_err(|err| {
        let headers_str = format_headers(&headers);
        io::Error::other(format!(
            "failed to parse remote control enrollment response from `{enroll_url}`: HTTP {status}, {headers_str}, body: {body_preview}, decode error: {err}"
        ))
    })?;

    Ok(RemoteControlEnrollment {
        account_id: auth.account_id.clone(),
        environment_id: enrollment.environment_id,
        server_id: enrollment.server_id,
        server_name,
    })
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::transport::remote_control::protocol::normalize_remote_control_url;
    use codex_state::StateRuntime;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use std::sync::Arc;
    use tempfile::TempDir;
    use tokio::io::AsyncBufReadExt;
    use tokio::io::AsyncWriteExt;
    use tokio::io::BufReader;
    use tokio::net::TcpListener;
    use tokio::net::TcpStream;
    use tokio::time::Duration;
    use tokio::time::timeout;

    /// Builds a `StateRuntime` rooted in the given temp dir so enrollment
    /// persistence tests run against an isolated state database.
    async fn remote_control_state_runtime(codex_home: &TempDir) -> Arc<StateRuntime> {
        StateRuntime::init(codex_home.path().to_path_buf(), "test-provider".to_string())
            .await
            .expect("state runtime should initialize")
    }

    /// Persisted enrollments are keyed by (target, account): lookups with a
    /// different account return nothing, and distinct targets keep distinct
    /// enrollments.
    #[tokio::test]
    async fn persisted_remote_control_enrollment_round_trips_by_target_and_account() {
        let codex_home = TempDir::new().expect("temp dir should create");
        let state_db = remote_control_state_runtime(&codex_home).await;
        let first_target = normalize_remote_control_url("https://chatgpt.com/remote/control")
            .expect("first target should parse");
        let second_target =
            normalize_remote_control_url("https://api.chatgpt-staging.com/other/control")
                .expect("second target should parse");
        let first_enrollment = RemoteControlEnrollment {
            account_id: "account-a".to_string(),
            environment_id: "env_first".to_string(),
            server_id: "srv_e_first".to_string(),
            server_name: "first-server".to_string(),
        };
        let second_enrollment = RemoteControlEnrollment {
            account_id: "account-a".to_string(),
            environment_id: "env_second".to_string(),
            server_id: "srv_e_second".to_string(),
            server_name: "second-server".to_string(),
        };

        // Persist one enrollment per target under the same account.
        update_persisted_remote_control_enrollment(
            Some(state_db.as_ref()),
            &first_target,
            "account-a",
            Some("desktop-client"),
            Some(&first_enrollment),
        )
        .await
        .expect("first enrollment should persist");
        update_persisted_remote_control_enrollment(
            Some(state_db.as_ref()),
            &second_target,
            "account-a",
            Some("desktop-client"),
            Some(&second_enrollment),
        )
        .await
        .expect("second enrollment should persist");

        // Exact (target, account) key round-trips.
        assert_eq!(
            load_persisted_remote_control_enrollment(
                Some(state_db.as_ref()),
                &first_target,
                "account-a",
                Some("desktop-client"),
            )
            .await,
            Some(first_enrollment.clone())
        );
        // Same target but a different account must miss.
        assert_eq!(
            load_persisted_remote_control_enrollment(
                Some(state_db.as_ref()),
                &first_target,
                "account-b",
                Some("desktop-client"),
            )
            .await,
            None
        );
        // The other target resolves to its own enrollment.
        assert_eq!(
            load_persisted_remote_control_enrollment(
                Some(state_db.as_ref()),
                &second_target,
                "account-a",
                Some("desktop-client"),
            )
            .await,
            Some(second_enrollment)
        );
    }

    /// Clearing (passing `None` as the enrollment) removes only the entry for
    /// the matching target; enrollments for other targets survive.
    #[tokio::test]
    async fn clearing_persisted_remote_control_enrollment_removes_only_matching_entry() {
        let codex_home = TempDir::new().expect("temp dir should create");
        let state_db = remote_control_state_runtime(&codex_home).await;
        let first_target = normalize_remote_control_url("https://chatgpt.com/remote/control")
            .expect("first target should parse");
        let second_target =
            normalize_remote_control_url("https://api.chatgpt-staging.com/other/control")
                .expect("second target should parse");
        let first_enrollment = RemoteControlEnrollment {
            account_id: "account-a".to_string(),
            environment_id: "env_first".to_string(),
            server_id: "srv_e_first".to_string(),
            server_name: "first-server".to_string(),
        };
        let second_enrollment = RemoteControlEnrollment {
            account_id: "account-a".to_string(),
            environment_id: "env_second".to_string(),
            server_id: "srv_e_second".to_string(),
            server_name: "second-server".to_string(),
        };

        update_persisted_remote_control_enrollment(
            Some(state_db.as_ref()),
            &first_target,
            "account-a",
            /*app_server_client_name*/ None,
            Some(&first_enrollment),
        )
        .await
        .expect("first enrollment should persist");
        update_persisted_remote_control_enrollment(
            Some(state_db.as_ref()),
            &second_target,
            "account-a",
            /*app_server_client_name*/ None,
            Some(&second_enrollment),
        )
        .await
        .expect("second enrollment should persist");

        // Clear only the first target's enrollment.
        update_persisted_remote_control_enrollment(
            Some(state_db.as_ref()),
            &first_target,
            "account-a",
            /*app_server_client_name*/ None,
            /*enrollment*/ None,
        )
        .await
        .expect("matching enrollment should clear");

        // First target is gone; second target is untouched.
        assert_eq!(
            load_persisted_remote_control_enrollment(
                Some(state_db.as_ref()),
                &first_target,
                "account-a",
                /*app_server_client_name*/ None,
            )
            .await,
            None
        );
        assert_eq!(
            load_persisted_remote_control_enrollment(
                Some(state_db.as_ref()),
                &second_target,
                "account-a",
                /*app_server_client_name*/ None,
            )
            .await,
            Some(second_enrollment)
        );
    }

    /// When the backend returns HTTP 200 with a body that fails to
    /// deserialize, the error message includes the URL, status, correlation
    /// headers, the body preview, and the serde decode error.
    #[tokio::test]
    async fn enroll_remote_control_server_parse_failure_includes_response_body() {
        // Stand up a one-shot local HTTP server that answers with JSON
        // missing the required `server_id` field.
        let listener = TcpListener::bind("127.0.0.1:0")
            .await
            .expect("listener should bind");
        let remote_control_url = format!(
            "http://127.0.0.1:{}/backend-api/",
            listener
                .local_addr()
                .expect("listener should have a local addr")
                .port()
        );
        let remote_control_target =
            normalize_remote_control_url(&remote_control_url).expect("target should parse");
        let enroll_url = remote_control_target.enroll_url.clone();
        let response_body = json!({
            "error": "not enrolled",
        });
        let expected_body = response_body.to_string();
        let server_task = tokio::spawn(async move {
            let stream = accept_http_request(&listener).await;
            respond_with_json(stream, response_body).await;
        });

        let err = enroll_remote_control_server(
            &remote_control_target,
            &RemoteControlConnectionAuth {
                bearer_token: "Access Token".to_string(),
                account_id: "account_id".to_string(),
            },
        )
        .await
        .expect_err("invalid response should fail to parse");

        server_task.await.expect("server task should succeed");
        // Pin the exact error string produced by enroll_remote_control_server.
        assert_eq!(
            err.to_string(),
            format!(
                "failed to parse remote control enrollment response from `{enroll_url}`: HTTP 200 OK, request-id: <none>, cf-ray: <none>, body: {expected_body}, decode error: missing field `server_id` at line 1 column {}",
                expected_body.len()
            )
        );
    }

    /// Accepts one connection and consumes the request line plus headers
    /// (up to the blank CRLF line), returning the raw stream for the reply.
    async fn accept_http_request(listener: &TcpListener) -> TcpStream {
        let (stream, _) = timeout(Duration::from_secs(5), listener.accept())
            .await
            .expect("HTTP request should arrive in time")
            .expect("listener accept should succeed");
        let mut reader = BufReader::new(stream);

        let mut request_line = String::new();
        reader
            .read_line(&mut request_line)
            .await
            .expect("request line should read");
        // Drain headers until the blank line that terminates them.
        loop {
            let mut line = String::new();
            reader
                .read_line(&mut line)
                .await
                .expect("header line should read");
            if line == "\r\n" {
                break;
            }
        }

        reader.into_inner()
    }

    /// Writes a minimal `HTTP/1.1 200 OK` JSON response and flushes it.
    async fn respond_with_json(mut stream: TcpStream, body: serde_json::Value) {
        let body = body.to_string();
        let response = format!(
            "HTTP/1.1 200 OK\r\ncontent-type: application/json\r\ncontent-length: {}\r\nconnection: close\r\n\r\n{body}",
            body.len()
        );
        stream
            .write_all(response.as_bytes())
            .await
            .expect("response should write");
        stream.flush().await.expect("response should flush");
    }
}
|
||||
100
codex-rs/app-server/src/transport/remote_control/mod.rs
Normal file
100
codex-rs/app-server/src/transport/remote_control/mod.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
mod client_tracker;
|
||||
mod enroll;
|
||||
mod protocol;
|
||||
mod websocket;
|
||||
|
||||
use crate::transport::remote_control::websocket::RemoteControlWebsocket;
|
||||
use crate::transport::remote_control::websocket::load_remote_control_auth;
|
||||
|
||||
pub use self::protocol::ClientId;
|
||||
use self::protocol::ServerEvent;
|
||||
use self::protocol::StreamId;
|
||||
use self::protocol::normalize_remote_control_url;
|
||||
use super::CHANNEL_CAPACITY;
|
||||
use super::TransportEvent;
|
||||
use super::next_connection_id;
|
||||
use codex_login::AuthManager;
|
||||
use codex_state::StateRuntime;
|
||||
use std::io;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::sync::watch;
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
/// A [`ServerEvent`] queued for delivery to a specific client/stream pair.
pub(super) struct QueuedServerEnvelope {
    pub(super) event: ServerEvent,
    pub(super) client_id: ClientId,
    pub(super) stream_id: StreamId,
    // Optional completion signal fired once the envelope has been written.
    // NOTE(review): the exact firing point lives in the websocket module —
    // confirm there.
    pub(super) write_complete_tx: Option<oneshot::Sender<()>>,
}
|
||||
|
||||
/// Cheap, cloneable handle for toggling remote control on or off while the
/// background websocket task is running.
#[derive(Clone)]
pub(crate) struct RemoteControlHandle {
    // Watch channel observed by the websocket task; `set_enabled` publishes
    // the desired state through it.
    enabled_tx: Arc<watch::Sender<bool>>,
}
|
||||
|
||||
impl RemoteControlHandle {
|
||||
pub(crate) fn set_enabled(&self, enabled: bool) {
|
||||
self.enabled_tx.send_if_modified(|state| {
|
||||
let changed = *state != enabled;
|
||||
*state = enabled;
|
||||
changed
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Spawns the background task that drives the remote-control websocket and
/// returns its join handle together with a [`RemoteControlHandle`] for
/// toggling the feature at runtime.
///
/// When `initial_enabled` is true, the URL is normalized and the current auth
/// state is validated up front so misconfiguration fails fast; when false,
/// the target stays `None` and those checks are skipped here.
///
/// # Errors
/// Propagates URL-normalization failures and auth-validation failures; once
/// the task is spawned this function cannot fail.
pub(crate) async fn start_remote_control(
    remote_control_url: String,
    state_db: Option<Arc<StateRuntime>>,
    auth_manager: Arc<AuthManager>,
    transport_event_tx: mpsc::Sender<TransportEvent>,
    shutdown_token: CancellationToken,
    app_server_client_name_rx: Option<oneshot::Receiver<String>>,
    initial_enabled: bool,
) -> io::Result<(JoinHandle<()>, RemoteControlHandle)> {
    // Only resolve/validate the target when the feature starts enabled.
    let remote_control_target = if initial_enabled {
        Some(normalize_remote_control_url(&remote_control_url)?)
    } else {
        None
    };
    if initial_enabled {
        validate_remote_control_auth(&auth_manager).await?;
    }

    // The watch channel lets `RemoteControlHandle::set_enabled` flip the
    // feature while the websocket task is running.
    let (enabled_tx, enabled_rx) = watch::channel(initial_enabled);
    let join_handle = tokio::spawn(async move {
        RemoteControlWebsocket::new(
            remote_control_url,
            remote_control_target,
            state_db,
            auth_manager,
            transport_event_tx,
            shutdown_token,
            enabled_rx,
        )
        .run(app_server_client_name_rx)
        .await;
    });

    Ok((
        join_handle,
        RemoteControlHandle {
            enabled_tx: Arc::new(enabled_tx),
        },
    ))
}
|
||||
|
||||
pub(crate) async fn validate_remote_control_auth(
|
||||
auth_manager: &Arc<AuthManager>,
|
||||
) -> io::Result<()> {
|
||||
match load_remote_control_auth(auth_manager).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) if err.kind() == io::ErrorKind::WouldBlock => Ok(()),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
252
codex-rs/app-server/src/transport/remote_control/protocol.rs
Normal file
252
codex-rs/app-server/src/transport/remote_control/protocol.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::io;
|
||||
use std::io::ErrorKind;
|
||||
use url::Host;
|
||||
use url::Url;
|
||||
|
||||
/// Resolved remote-control endpoints derived from a single base URL by
/// [`normalize_remote_control_url`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) struct RemoteControlTarget {
    // `ws`/`wss` endpoint for the long-lived control connection.
    pub(super) websocket_url: String,
    // `http`/`https` endpoint used for the one-shot enrollment request.
    pub(super) enroll_url: String,
}
|
||||
|
||||
/// JSON payload sent to the enrollment endpoint describing this server.
#[derive(Debug, Serialize)]
pub(super) struct EnrollRemoteServerRequest {
    // Human-readable server name (the machine's hostname at call sites).
    pub(super) name: String,
    // Compile-time OS identifier (`std::env::consts::OS`).
    pub(super) os: &'static str,
    // Compile-time architecture identifier (`std::env::consts::ARCH`).
    pub(super) arch: &'static str,
    // This crate's version, baked in at build time.
    pub(super) app_server_version: &'static str,
}
|
||||
|
||||
/// JSON payload returned by the enrollment endpoint.
#[derive(Debug, Deserialize)]
pub(super) struct EnrollRemoteServerResponse {
    // Backend-assigned identifier for this server.
    pub(super) server_id: String,
    // Backend-assigned identifier for the environment the server joined.
    pub(super) environment_id: String,
}
|
||||
|
||||
/// Opaque identifier for a remote client; serialized transparently as its
/// inner string.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ClientId(pub String);
|
||||
|
||||
/// Opaque identifier for a message stream; serialized transparently as its
/// inner string.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct StreamId(pub String);
|
||||
|
||||
impl StreamId {
|
||||
pub fn new_random() -> Self {
|
||||
Self(uuid::Uuid::now_v7().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Events arriving over the remote-control connection, tagged by `type` in
/// snake_case on the wire.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ClientEvent {
    /// A JSON-RPC message forwarded from a remote client.
    ClientMessage {
        message: JSONRPCMessage,
    },
    /// Backend-generated acknowledgement for all server envelopes addressed to
    /// `client_id` whose envelope `seq_id` is less than or equal to this ack's
    /// `seq_id`. This cursor is client-scoped, not stream-scoped, so receivers
    /// must not use `stream_id` to partition acks.
    Ack,
    /// Keepalive ping.
    Ping,
    // NOTE(review): presumably signals the remote client disconnected —
    // confirm handling in the websocket module.
    ClientClosed,
}
|
||||
|
||||
/// Wire envelope wrapping a [`ClientEvent`] with routing metadata; the event
/// fields are flattened alongside the envelope fields in the JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ClientEnvelope {
    #[serde(flatten)]
    pub(crate) event: ClientEvent,
    // Remote client this envelope concerns.
    #[serde(rename = "client_id")]
    pub(crate) client_id: ClientId,
    // Stream the event belongs to; omitted from JSON when absent.
    #[serde(rename = "stream_id", skip_serializing_if = "Option::is_none")]
    pub(crate) stream_id: Option<StreamId>,
    /// For `Ack`, this is the backend-generated per-client cursor over
    /// `ServerEnvelope.seq_id`.
    #[serde(rename = "seq_id", skip_serializing_if = "Option::is_none")]
    pub(crate) seq_id: Option<u64>,
    // Opaque cursor string; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) cursor: Option<String>,
}
|
||||
|
||||
/// Status carried in a [`ServerEvent::Pong`] reply, serialized in snake_case.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PongStatus {
    Active,
    Unknown,
}
|
||||
|
||||
/// Events emitted by this server over the remote-control connection, tagged
/// by `type` in snake_case. Serialize-only: these are never deserialized
/// here.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ServerEvent {
    /// An outgoing JSON-RPC message destined for a client; boxed to keep the
    /// enum small.
    ServerMessage {
        message: Box<OutgoingMessage>,
    },
    /// Acknowledgement variant; currently unused (hence the dead_code allow).
    #[allow(dead_code)]
    Ack,
    /// Reply to a ping, carrying a [`PongStatus`].
    Pong {
        status: PongStatus,
    },
}
|
||||
|
||||
/// Wire envelope wrapping a [`ServerEvent`] with routing metadata; the event
/// fields are flattened alongside the envelope fields in the JSON.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ServerEnvelope {
    #[serde(flatten)]
    pub(crate) event: ServerEvent,
    // Remote client this envelope is addressed to.
    #[serde(rename = "client_id")]
    pub(crate) client_id: ClientId,
    // Stream the event belongs to.
    #[serde(rename = "stream_id")]
    pub(crate) stream_id: StreamId,
    // Per-client sequence number; acknowledged via `ClientEvent::Ack`, whose
    // cursor covers all envelopes with `seq_id` at or below the acked value.
    #[serde(rename = "seq_id")]
    pub(crate) seq_id: u64,
}
|
||||
|
||||
fn is_allowed_chatgpt_host(host: &Option<Host<&str>>) -> bool {
|
||||
let Some(Host::Domain(host)) = *host else {
|
||||
return false;
|
||||
};
|
||||
host == "chatgpt.com"
|
||||
|| host == "chatgpt-staging.com"
|
||||
|| host.ends_with(".chatgpt.com")
|
||||
|| host.ends_with(".chatgpt-staging.com")
|
||||
}
|
||||
|
||||
fn is_localhost(host: &Option<Host<&str>>) -> bool {
|
||||
match host {
|
||||
Some(Host::Domain("localhost")) => true,
|
||||
Some(Host::Ipv4(ip)) => ip.is_loopback(),
|
||||
Some(Host::Ipv6(ip)) => ip.is_loopback(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Validates `remote_control_url` and derives the enrollment and websocket
/// endpoints from it.
///
/// Accepted combinations: HTTPS for `chatgpt.com`/`chatgpt-staging.com`
/// (including subdomains) or localhost, and plain HTTP for localhost only.
/// The websocket URL's scheme is rewritten to `wss`/`ws` accordingly.
///
/// # Errors
/// Returns `ErrorKind::InvalidInput` when the URL cannot be parsed, the
/// derived URLs cannot be joined, or the scheme/host combination is not
/// allowed.
pub(super) fn normalize_remote_control_url(
    remote_control_url: &str,
) -> io::Result<RemoteControlTarget> {
    let map_url_parse_error = |err: url::ParseError| -> io::Error {
        io::Error::new(
            ErrorKind::InvalidInput,
            format!("invalid remote control URL `{remote_control_url}`: {err}"),
        )
    };
    let map_scheme_error = |_: ()| -> io::Error {
        io::Error::new(
            ErrorKind::InvalidInput,
            format!(
                "invalid remote control URL `{remote_control_url}`; expected HTTPS URL for chatgpt.com or chatgpt-staging.com, or HTTP/HTTPS URL for localhost"
            ),
        )
    };

    let mut remote_control_url = Url::parse(remote_control_url).map_err(map_url_parse_error)?;
    // `Url::join` drops the final path segment unless the base path ends in
    // `/`, so normalize the path before joining the endpoint suffixes.
    if !remote_control_url.path().ends_with('/') {
        let normalized_path = format!("{}/", remote_control_url.path());
        remote_control_url.set_path(&normalized_path);
    }

    let enroll_url = remote_control_url
        .join("wham/remote/control/server/enroll")
        .map_err(map_url_parse_error)?;
    let mut websocket_url = remote_control_url
        .join("wham/remote/control/server")
        .map_err(map_url_parse_error)?;
    let host = enroll_url.host();
    // Security allow-list: TLS is mandatory except for localhost development
    // targets; everything else is rejected.
    match enroll_url.scheme() {
        "https" if is_localhost(&host) || is_allowed_chatgpt_host(&host) => {
            websocket_url.set_scheme("wss").map_err(map_scheme_error)?;
        }
        "http" if is_localhost(&host) => {
            websocket_url.set_scheme("ws").map_err(map_scheme_error)?;
        }
        _ => return Err(map_scheme_error(())),
    }

    Ok(RemoteControlTarget {
        websocket_url: websocket_url.to_string(),
        enroll_url: enroll_url.to_string(),
    })
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    /// HTTPS chatgpt.com and chatgpt-staging.com (subdomains included)
    /// normalize to wss websocket URLs plus https enrollment URLs.
    #[test]
    fn normalize_remote_control_url_accepts_chatgpt_https_urls() {
        assert_eq!(
            normalize_remote_control_url("https://chatgpt.com/backend-api")
                .expect("chatgpt.com URL should normalize"),
            RemoteControlTarget {
                websocket_url: "wss://chatgpt.com/backend-api/wham/remote/control/server"
                    .to_string(),
                enroll_url: "https://chatgpt.com/backend-api/wham/remote/control/server/enroll"
                    .to_string(),
            }
        );
        assert_eq!(
            normalize_remote_control_url("https://api.chatgpt-staging.com/backend-api")
                .expect("chatgpt-staging.com subdomain URL should normalize"),
            RemoteControlTarget {
                websocket_url:
                    "wss://api.chatgpt-staging.com/backend-api/wham/remote/control/server"
                        .to_string(),
                enroll_url:
                    "https://api.chatgpt-staging.com/backend-api/wham/remote/control/server/enroll"
                        .to_string(),
            }
        );
    }

    /// Localhost is accepted over both http (-> ws) and https (-> wss).
    #[test]
    fn normalize_remote_control_url_accepts_localhost_urls() {
        assert_eq!(
            normalize_remote_control_url("http://localhost:8080/backend-api")
                .expect("localhost http URL should normalize"),
            RemoteControlTarget {
                websocket_url: "ws://localhost:8080/backend-api/wham/remote/control/server"
                    .to_string(),
                enroll_url: "http://localhost:8080/backend-api/wham/remote/control/server/enroll"
                    .to_string(),
            }
        );
        assert_eq!(
            normalize_remote_control_url("https://localhost:8443/backend-api")
                .expect("localhost https URL should normalize"),
            RemoteControlTarget {
                websocket_url: "wss://localhost:8443/backend-api/wham/remote/control/server"
                    .to_string(),
                enroll_url: "https://localhost:8443/backend-api/wham/remote/control/server/enroll"
                    .to_string(),
            }
        );
    }

    /// Disallowed scheme/host combinations — including lookalike domains such
    /// as `chatgpt.com.evil.com` — are rejected with `InvalidInput` and the
    /// exact explanatory message.
    #[test]
    fn normalize_remote_control_url_rejects_unsupported_urls() {
        for remote_control_url in [
            "http://chatgpt.com/backend-api",
            "http://example.com/backend-api",
            "https://example.com/backend-api",
            "https://chatgpt.com.evil.com/backend-api",
            "https://evilchatgpt.com/backend-api",
            "https://foo.localhost/backend-api",
        ] {
            let err = normalize_remote_control_url(remote_control_url)
                .expect_err("unsupported URL should be rejected");

            assert_eq!(err.kind(), ErrorKind::InvalidInput);
            assert_eq!(
                err.to_string(),
                format!(
                    "invalid remote control URL `{remote_control_url}`; expected HTTPS URL for chatgpt.com or chatgpt-staging.com, or HTTP/HTTPS URL for localhost"
                )
            );
        }
    }
}
|
||||
1395
codex-rs/app-server/src/transport/remote_control/tests.rs
Normal file
1395
codex-rs/app-server/src/transport/remote_control/tests.rs
Normal file
File diff suppressed because it is too large
Load Diff
1474
codex-rs/app-server/src/transport/remote_control/websocket.rs
Normal file
1474
codex-rs/app-server/src/transport/remote_control/websocket.rs
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user