Compare commits

..

1 Commits

Author SHA1 Message Date
xli-oai
4d8437c244 Regenerate Python SDK artifacts 2026-05-07 21:07:31 -07:00
370 changed files with 5000 additions and 10910 deletions

View File

@@ -2,6 +2,7 @@ name: 💻 CLI Bug
description: Report an issue in the Codex CLI
labels:
- bug
- needs triage
body:
- type: markdown
attributes:
@@ -40,9 +41,9 @@ body:
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
Also note any multiplexer in use (screen / tmux / zellij).
E.g., VS Code, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:

View File

@@ -10,7 +10,7 @@ body:
Before you submit a feature:
1. Search existing issues for similar features. If you find one, 👍 it rather than opening a new one.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex/blob/main/docs/contributing.md) for more details.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex#contributing) for more details.
- type: input
id: variant

View File

@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
description: Tell us if there is missing or incorrect documentation
labels: [documentation]
labels: [docs]
body:
- type: markdown
attributes:
@@ -24,4 +24,4 @@ body:
- type: textarea
attributes:
label: Where did you find it?
description: If possible, please provide the URL(s) where you found this issue.
description: If possible, please provide the URL(s) where you found this issue.

View File

@@ -57,9 +57,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Check rusty_v8 MODULE.bazel checksums
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
@@ -152,9 +149,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Prepare Bazel CI
id: prepare_bazel
@@ -238,9 +232,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Prepare Bazel CI
id: prepare_bazel
@@ -328,9 +319,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Prepare Bazel CI
id: prepare_bazel

View File

@@ -10,17 +10,15 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
fetch-depth: 0
persist-credentials: false
- name: Determine PR comparison range
id: range
shell: bash
run: |
set -euo pipefail
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
echo "head=${{ github.event.pull_request.head.sha }}" >> "$GITHUB_OUTPUT"
echo "base=$(git rev-parse HEAD^1)" >> "$GITHUB_OUTPUT"
echo "head=$(git rev-parse HEAD^2)" >> "$GITHUB_OUTPUT"
- name: Check changed blob sizes
env:

View File

@@ -15,9 +15,6 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0

View File

@@ -13,9 +13,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Verify codex-rs Cargo manifests inherit workspace settings
run: python3 .github/scripts/verify_cargo_workspace_manifests.py

View File

@@ -19,9 +19,6 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1.1.0
- name: Codespell

View File

@@ -20,8 +20,6 @@ jobs:
has_matches: ${{ steps.normalize-all.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Prepare Codex inputs
env:
@@ -158,8 +156,6 @@ jobs:
has_matches: ${{ steps.normalize-open.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Prepare Codex inputs
env:

View File

@@ -18,8 +18,6 @@ jobs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- id: codex
uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7

View File

@@ -7,11 +7,6 @@ on:
workflow_dispatch:
# CI builds in debug (dev) for faster signal.
env:
# Cargo's libgit2 transport has been flaky on macOS when fetching git
# dependencies with nested submodules. Use the system git CLI, which has
# better network/proxy behavior and matches Cargo's own suggested fallback.
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
jobs:
# --- CI that doesn't need specific targets ---------------------------------
@@ -23,8 +18,6 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -39,14 +32,13 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-shear@1.11.2
tool: cargo-shear
version: 1.11.2
- name: cargo shear
run: cargo shear --deny-warnings
run: cargo shear
argument_comment_lint_package:
name: Argument comment lint package
@@ -56,8 +48,6 @@ jobs:
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
toolchain: nightly-2025-09-18
@@ -108,8 +98,6 @@ jobs:
labels: codex-windows-x64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: ./.github/actions/setup-bazel-ci
with:
target: ${{ runner.os }}
@@ -246,8 +234,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -574,8 +560,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -583,7 +567,7 @@ jobs:
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev bubblewrap
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev
fi
# Some integration tests rely on DotSlash being installed.
@@ -738,12 +722,10 @@ jobs:
shell: bash
run: |
set +e
if [[ "${STEPS_TEST_OUTCOME}" != "success" ]]; then
if [[ "${{ steps.test.outcome }}" != "success" ]]; then
docker logs codex-remote-test-env || true
fi
docker rm -f codex-remote-test-env >/dev/null 2>&1 || true
env:
STEPS_TEST_OUTCOME: ${{ steps.test.outcome }}
- name: verify tests passed
if: steps.test.outcome == 'failure'

View File

@@ -16,9 +16,7 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
fetch-depth: 0
persist-credentials: false
- name: Detect changed paths (no external action)
id: detect
shell: bash
@@ -64,9 +62,6 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -83,15 +78,13 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-shear@1.11.2
tool: cargo-shear
version: 1.11.2
- name: cargo shear
run: cargo shear --deny-warnings
run: cargo shear
argument_comment_lint_package:
name: Argument comment lint package
@@ -103,9 +96,6 @@ jobs:
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Install nightly argument-comment-lint toolchain
shell: bash
@@ -182,9 +172,6 @@ jobs:
echo "run=false" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Run argument comment lint on codex-rs via Bazel
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
uses: ./.github/actions/run-argument-comment-lint
@@ -216,25 +203,20 @@ jobs:
# If nothing relevant changed (PR touching only root README, etc.),
# declare success regardless of other jobs.
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_CODEX}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" != 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' != 'true' && '${{ needs.changed.outputs.codex }}' != 'true' && '${{ needs.changed.outputs.workflows }}' != 'true' ]]; then
echo 'No relevant changes -> CI not required.'
exit 0
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint_package }}' == 'true' ]]; then
[[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; }
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
[[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; }
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_CODEX}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.codex }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
fi
env:
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT: ${{ needs.changed.outputs.argument_comment_lint }}
NEEDS_CHANGED_OUTPUTS_CODEX: ${{ needs.changed.outputs.codex }}
NEEDS_CHANGED_OUTPUTS_WORKFLOWS: ${{ needs.changed.outputs.workflows }}
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE: ${{ needs.changed.outputs.argument_comment_lint_package }}

View File

@@ -57,8 +57,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:

View File

@@ -22,7 +22,6 @@ jobs:
with:
ref: main
fetch-depth: 0
persist-credentials: false
- name: Update models.json
env:

View File

@@ -84,8 +84,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Print runner specs (Windows)
shell: powershell
run: |
@@ -168,8 +166,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download prebuilt Windows primary binaries
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1

View File

@@ -46,8 +46,6 @@ jobs:
libncursesw5-dev
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Build, smoke-test, and stage zsh artifact
shell: bash
@@ -84,8 +82,6 @@ jobs:
fi
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Build, smoke-test, and stage zsh artifact
shell: bash

View File

@@ -20,8 +20,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Validate tag matches Cargo.toml version
shell: bash
@@ -121,8 +119,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Print runner specs (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -188,7 +184,6 @@ jobs:
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
with:
version: 0.14.0
use-cache: false
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
@@ -482,8 +477,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Generate release notes from tag commit message
id: release_notes

View File

@@ -18,8 +18,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -72,8 +70,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci

View File

@@ -14,9 +14,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Install Linux bwrap build dependencies
shell: bash

View File

@@ -41,9 +41,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -78,9 +75,6 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci

View File

@@ -26,7 +26,7 @@ In the codex-rs folder where the rust code lives:
- Implementations may still use `async fn foo(&self, ...) -> T` when they satisfy that contract.
- Do not use `#[allow(async_fn_in_trait)]` as a shortcut around spelling the future contract explicitly.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- Do not add general product or user-facing documentation to the `docs/` folder. The official Codex documentation lives elsewhere. The exception is app-server API documentation, which is covered by the app-server guidance below.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- Prefer private modules and explicitly exported public crate API.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls.
@@ -210,7 +210,7 @@ These guidelines apply to app-server protocol work in `codex-rs`, especially:
### Development Workflow
- Update app-server docs/examples when API behavior changes (at minimum `app-server/README.md`).
- Update docs/examples when API behavior changes (at minimum `app-server/README.md`).
- Regenerate schema fixtures when API shapes change:
`just write-app-server-schema`
(and `just write-app-server-schema --experimental` when experimental API fixtures are affected).

11
MODULE.bazel.lock generated

File diff suppressed because one or more lines are too long

186
codex-rs/Cargo.lock generated
View File

@@ -757,7 +757,6 @@ checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-sdk-signin",
"aws-sdk-sso",
"aws-sdk-ssooidc",
"aws-sdk-sts",
@@ -768,20 +767,15 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"base64-simd",
"bytes",
"fastrand",
"hex",
"http 1.4.0",
"p256",
"rand 0.8.5",
"ring",
"sha2",
"time",
"tokio",
"tracing",
"url",
"uuid",
"zeroize",
]
@@ -844,28 +838,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "aws-sdk-signin"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c084bd63941916e1348cb8d9e05ac2e49bdd40a380e9167702683184c6c6be53"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-smithy-async",
"aws-smithy-http",
"aws-smithy-json",
"aws-smithy-runtime",
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"bytes",
"fastrand",
"http 0.2.12",
"regex-lite",
"tracing",
]
[[package]]
name = "aws-sdk-sso"
version = "1.91.0"
@@ -1208,12 +1180,6 @@ dependencies = [
"windows-link",
]
[[package]]
name = "base16ct"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.21.7"
@@ -1900,7 +1866,6 @@ dependencies = [
"codex-features",
"codex-feedback",
"codex-file-search",
"codex-file-watcher",
"codex-git-utils",
"codex-hooks",
"codex-login",
@@ -1980,26 +1945,6 @@ dependencies = [
"url",
]
[[package]]
name = "codex-app-server-daemon"
version = "0.0.0"
dependencies = [
"anyhow",
"codex-app-server-protocol",
"codex-app-server-transport",
"codex-core",
"codex-uds",
"futures",
"libc",
"pretty_assertions",
"reqwest",
"serde",
"serde_json",
"tempfile",
"tokio",
"tokio-tungstenite",
]
[[package]]
name = "codex-app-server-protocol"
version = "0.0.0"
@@ -2227,7 +2172,6 @@ dependencies = [
"clap",
"clap_complete",
"codex-app-server",
"codex-app-server-daemon",
"codex-app-server-protocol",
"codex-app-server-test-client",
"codex-arg0",
@@ -2463,11 +2407,7 @@ dependencies = [
"codex-app-server-protocol",
"pretty_assertions",
"serde",
"serde_json",
"sha1",
"tempfile",
"tokio",
"tracing",
"urlencoding",
]
@@ -2552,6 +2492,7 @@ dependencies = [
"insta",
"libc",
"maplit",
"notify",
"once_cell",
"openssl-sys",
"opentelemetry",
@@ -2758,6 +2699,7 @@ dependencies = [
"serde",
"serde_json",
"serial_test",
"sha2",
"tempfile",
"test-case",
"thiserror 2.0.18",
@@ -2894,17 +2836,6 @@ dependencies = [
"serde",
]
[[package]]
name = "codex-file-watcher"
version = "0.0.0"
dependencies = [
"notify",
"pretty_assertions",
"tempfile",
"tokio",
"tracing",
]
[[package]]
name = "codex-git-utils"
version = "0.0.0"
@@ -3366,6 +3297,7 @@ dependencies = [
"codex-utils-absolute-path",
"codex-utils-image",
"codex-utils-string",
"codex-utils-template",
"encoding_rs",
"globset",
"http 1.4.0",
@@ -4483,18 +4415,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-bigint"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"subtle",
"zeroize",
]
[[package]]
name = "crypto-common"
version = "0.1.7"
@@ -5208,20 +5128,6 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]]
name = "ecdsa"
version = "0.16.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
dependencies = [
"der",
"digest",
"elliptic-curve",
"rfc6979",
"signature",
"spki",
]
[[package]]
name = "ed25519"
version = "2.2.3"
@@ -5255,26 +5161,6 @@ dependencies = [
"serde",
]
[[package]]
name = "elliptic-curve"
version = "0.13.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
dependencies = [
"base16ct",
"crypto-bigint",
"digest",
"ff",
"generic-array",
"group",
"pem-rfc7468",
"pkcs8",
"rand_core 0.6.4",
"sec1",
"subtle",
"zeroize",
]
[[package]]
name = "ena"
version = "0.14.3"
@@ -5528,16 +5414,6 @@ dependencies = [
"simd-adler32",
]
[[package]]
name = "ff"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393"
dependencies = [
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "fiat-crypto"
version = "0.2.9"
@@ -6932,17 +6808,6 @@ dependencies = [
"system-deps",
]
[[package]]
name = "group"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
"ff",
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "gzip-header"
version = "1.0.0"
@@ -9442,18 +9307,6 @@ dependencies = [
"supports-color 3.0.2",
]
[[package]]
name = "p256"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b"
dependencies = [
"ecdsa",
"elliptic-curve",
"primeorder",
"sha2",
]
[[package]]
name = "parking"
version = "2.2.1"
@@ -9883,15 +9736,6 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "primeorder"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
dependencies = [
"elliptic-curve",
]
[[package]]
name = "proc-macro-crate"
version = "3.4.0"
@@ -10842,16 +10686,6 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7"
[[package]]
name = "rfc6979"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
dependencies = [
"hmac",
"subtle",
]
[[package]]
name = "ring"
version = "0.17.14"
@@ -11311,20 +11145,6 @@ version = "3.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca"
[[package]]
name = "sec1"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct",
"der",
"generic-array",
"pkcs8",
"subtle",
"zeroize",
]
[[package]]
name = "seccompiler"
version = "0.5.0"

View File

@@ -11,7 +11,6 @@ members = [
"async-utils",
"app-server",
"app-server-transport",
"app-server-daemon",
"app-server-client",
"app-server-protocol",
"app-server-test-client",
@@ -49,7 +48,6 @@ members = [
"external-agent-sessions",
"keyring-store",
"file-search",
"file-watcher",
"linux-sandbox",
"lmstudio",
"login",
@@ -133,7 +131,6 @@ codex-api = { path = "codex-api" }
codex-aws-auth = { path = "aws-auth" }
codex-app-server = { path = "app-server" }
codex-app-server-transport = { path = "app-server-transport" }
codex-app-server-daemon = { path = "app-server-daemon" }
codex-app-server-client = { path = "app-server-client" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-app-server-test-client = { path = "app-server-test-client" }
@@ -167,7 +164,6 @@ codex-features = { path = "features" }
codex-feedback = { path = "feedback" }
codex-install-context = { path = "install-context" }
codex-file-search = { path = "file-search" }
codex-file-watcher = { path = "file-watcher" }
codex-git-utils = { path = "git-utils" }
codex-hooks = { path = "hooks" }
codex-keyring-store = { path = "keyring-store" }
@@ -468,8 +464,11 @@ unwrap_used = "deny"
[workspace.metadata.cargo-shear]
ignored = [
"codex-agent-graph-store",
"codex-memories-mcp",
"icu_provider",
"openssl-sys",
"codex-utils-readiness",
"codex-utils-template",
"codex-v8-poc",
]

View File

@@ -1,298 +0,0 @@
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::TrackEventRequest;
use crate::facts::AcceptedLineFingerprint;
use codex_git_utils::canonicalize_git_remote_url;
use codex_git_utils::get_git_remote_urls_assume_git_repo;
use sha1::Digest;
use std::path::Path;
// Soft cap on the serialized size of one fingerprint event; fingerprints are
// split into chunks so each event stays under roughly this many bytes.
const ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES: usize = 2 * 1024 * 1024;
// Byte allowance reserved per event for the fixed (non-fingerprint) fields.
const ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES: usize = 1024;

/// Result of scanning a unified diff: added/deleted line counts plus the
/// hashed fingerprints of the added lines that passed normalization.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AcceptedLineFingerprintSummary {
    // Count of `+` lines in the diff (file headers excluded).
    pub accepted_added_lines: u64,
    // Count of `-` lines in the diff (file headers excluded).
    pub accepted_deleted_lines: u64,
    // One entry per added line that survived normalization and filtering.
    pub line_fingerprints: Vec<AcceptedLineFingerprint>,
}

/// Per-turn inputs used to build the `TrackEventRequest`s that report
/// accepted-line fingerprints; fingerprints may be split across several
/// requests (see `accepted_line_fingerprint_event_requests`).
pub(crate) struct AcceptedLineFingerprintEventInput {
    pub(crate) event_type: &'static str,
    pub(crate) turn_id: String,
    pub(crate) thread_id: String,
    pub(crate) product_surface: Option<String>,
    pub(crate) model_slug: Option<String>,
    // Completion timestamp; presumably epoch seconds or millis — TODO confirm
    // against the event schema.
    pub(crate) completed_at: u64,
    pub(crate) repo_hash: Option<String>,
    pub(crate) accepted_added_lines: u64,
    pub(crate) accepted_deleted_lines: u64,
    pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
}
pub fn accepted_line_fingerprints_from_unified_diff(
unified_diff: &str,
) -> AcceptedLineFingerprintSummary {
let mut current_path: Option<String> = None;
let mut in_hunk = false;
let mut accepted_added_lines = 0;
let mut accepted_deleted_lines = 0;
let mut line_fingerprints = Vec::new();
for line in unified_diff.lines() {
if line.starts_with("diff --git ") {
current_path = None;
in_hunk = false;
continue;
}
if line.starts_with("@@ ") {
in_hunk = true;
continue;
}
if !in_hunk && let Some(path) = line.strip_prefix("+++ ") {
current_path = normalize_diff_path(path);
continue;
}
if !in_hunk && line.starts_with("--- ") {
continue;
}
if let Some(added_line) = line.strip_prefix('+') {
accepted_added_lines += 1;
if let Some(path) = current_path.as_deref()
&& let Some(normalized_line) = normalize_effective_line(added_line)
{
line_fingerprints.push(AcceptedLineFingerprint {
path_hash: fingerprint_hash("path", path),
line_hash: fingerprint_hash("line", &normalized_line),
});
}
continue;
}
if line.starts_with('-') {
accepted_deleted_lines += 1;
}
}
AcceptedLineFingerprintSummary {
accepted_added_lines,
accepted_deleted_lines,
line_fingerprints,
}
}
/// Hex-encoded SHA-1 over a versioned, domain-separated encoding of `value`.
///
/// The digest input is `"file-line-v1\0" + domain + "\0" + value`, which
/// keeps the "path" and "line" hash domains separate from each other.
pub fn fingerprint_hash(domain: &str, value: &str) -> String {
    let mut digest = sha1::Sha1::new();
    for part in [
        &b"file-line-v1\0"[..],
        domain.as_bytes(),
        b"\0",
        value.as_bytes(),
    ] {
        digest.update(part);
    }
    format!("{:x}", digest.finalize())
}
/// Builds one `TrackEventRequest` per fingerprint chunk.
///
/// Fingerprints are split by `accepted_line_fingerprint_chunks` to respect
/// the per-event size target. The added/deleted counts are reported only on
/// the first chunk (zero on the rest) so totals are not double-counted.
pub(crate) fn accepted_line_fingerprint_event_requests(
    input: AcceptedLineFingerprintEventInput,
) -> Vec<TrackEventRequest> {
    let mut requests = Vec::new();
    let chunks = accepted_line_fingerprint_chunks(input.line_fingerprints);
    for (chunk_index, fingerprints) in chunks.into_iter().enumerate() {
        let first = chunk_index == 0;
        let event_params = CodexAcceptedLineFingerprintsEventParams {
            event_type: input.event_type,
            turn_id: input.turn_id.clone(),
            thread_id: input.thread_id.clone(),
            product_surface: input.product_surface.clone(),
            model_slug: input.model_slug.clone(),
            completed_at: input.completed_at,
            repo_hash: input.repo_hash.clone(),
            accepted_added_lines: if first { input.accepted_added_lines } else { 0 },
            accepted_deleted_lines: if first { input.accepted_deleted_lines } else { 0 },
            line_fingerprints: fingerprints,
        };
        requests.push(TrackEventRequest::AcceptedLineFingerprints(Box::new(
            CodexAcceptedLineFingerprintsEventRequest {
                event_type: "codex_accepted_line_fingerprints",
                event_params,
            },
        )));
    }
    requests
}
/// Computes a privacy-preserving hash identifying the git repo at `cwd`.
///
/// Prefers the `origin` remote, falling back to any other remote; returns
/// `None` when the remote list cannot be read or is empty. The remote URL is
/// canonicalized when possible before hashing so equivalent URL spellings
/// produce the same hash.
pub async fn accepted_line_repo_hash_for_cwd(cwd: &Path) -> Option<String> {
    let remotes = get_git_remote_urls_assume_git_repo(cwd).await?;
    let remote_url = match remotes.get("origin") {
        Some(url) => url,
        None => remotes.values().next()?,
    };
    let canonical_remote_url = match canonicalize_git_remote_url(remote_url) {
        Some(url) => url,
        None => remote_url.to_string(),
    };
    Some(fingerprint_hash("repo", &canonical_remote_url))
}
/// Normalizes a path taken from a `+++`/`---` diff header.
///
/// Trims surrounding whitespace, rejects `/dev/null` (used by git for
/// created/deleted files), and strips git's `b/` or `a/` prefix when present.
fn normalize_diff_path(path: &str) -> Option<String> {
    let trimmed = path.trim();
    match trimmed {
        "/dev/null" => None,
        _ => {
            let stripped = ["b/", "a/"]
                .iter()
                .find_map(|prefix| trimmed.strip_prefix(prefix))
                .unwrap_or(trimmed);
            Some(stripped.to_string())
        }
    }
}
/// Canonicalizes an added line for fingerprinting, or rejects it.
///
/// Collapses all runs of whitespace to single spaces, then drops lines that
/// are too short (<= 3 bytes) or contain no identifier-like character
/// (alphanumeric or `_`) — e.g. bare braces or divider lines.
fn normalize_effective_line(line: &str) -> Option<String> {
    let mut collapsed = String::new();
    for word in line.split_whitespace() {
        if !collapsed.is_empty() {
            collapsed.push(' ');
        }
        collapsed.push_str(word);
    }
    let long_enough = collapsed.len() > 3;
    let has_identifier_char = collapsed
        .chars()
        .any(|ch| ch.is_alphanumeric() || ch == '_');
    (long_enough && has_identifier_char).then_some(collapsed)
}
/// Splits fingerprints into chunks whose estimated serialized size stays
/// under `ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES`.
///
/// Each chunk starts with a fixed-bytes budget for the event's non-array
/// fields and charges one byte per array separator. An empty input still
/// yields a single empty chunk so one event is always emitted.
fn accepted_line_fingerprint_chunks(
    line_fingerprints: Vec<AcceptedLineFingerprint>,
) -> Vec<Vec<AcceptedLineFingerprint>> {
    if line_fingerprints.is_empty() {
        return vec![Vec::new()];
    }
    let mut chunks: Vec<Vec<AcceptedLineFingerprint>> = Vec::new();
    let mut batch: Vec<AcceptedLineFingerprint> = Vec::new();
    let mut batch_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
    for fingerprint in line_fingerprints {
        let item_bytes = accepted_line_fingerprint_json_bytes(&fingerprint);
        // A separator comma is only needed when the batch already has items.
        let comma = if batch.is_empty() { 0 } else { 1 };
        if !batch.is_empty()
            && batch_bytes + comma + item_bytes > ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES
        {
            // Close the current batch and start a fresh one.
            chunks.push(std::mem::take(&mut batch));
            batch_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
        }
        // Recompute the comma cost: the batch may have just been reset.
        batch_bytes += if batch.is_empty() { 0 } else { 1 } + item_bytes;
        batch.push(fingerprint);
    }
    if !batch.is_empty() {
        chunks.push(batch);
    }
    chunks
}
fn accepted_line_fingerprint_json_bytes(fingerprint: &AcceptedLineFingerprint) -> usize {
// {"path_hash":"...","line_hash":"..."} plus one byte of array comma
// accounted for by the caller when needed.
32 + fingerprint.path_hash.len() + fingerprint.line_hash.len()
}
#[cfg(test)]
mod tests {
    use super::*;

    // Counts include every +/- body line, but fingerprints are produced only
    // for "effective" added lines: "+}" normalizes to "}" (3 bytes or fewer)
    // and is skipped, while the indented "+ return user.id;" is kept with
    // its whitespace collapsed.
    #[test]
    fn parses_counts_and_effective_added_fingerprints() {
        // Diff body lines sit at column 0 on purpose: they are literal
        // string content following the "\ line continuation.
        let diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,5 @@
-old line
+fn useful() {
+}
+ return user.id;
 context
";
        let summary = accepted_line_fingerprints_from_unified_diff(diff);
        assert_eq!(
            summary,
            AcceptedLineFingerprintSummary {
                accepted_added_lines: 3,
                accepted_deleted_lines: 1,
                line_fingerprints: vec![
                    AcceptedLineFingerprint {
                        path_hash: fingerprint_hash("path", "src/lib.rs"),
                        line_hash: fingerprint_hash("line", "fn useful() {"),
                    },
                    AcceptedLineFingerprint {
                        path_hash: fingerprint_hash("path", "src/lib.rs"),
                        line_hash: fingerprint_hash("line", "return user.id;"),
                    },
                ],
            }
        );
    }

    // New-file diffs carry metadata lines ("new file mode", "index", the
    // /dev/null side); none of those may be mistaken for content lines.
    #[test]
    fn skips_added_file_metadata_headers() {
        let diff = "\
diff --git a/new.py b/new.py
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/new.py
@@ -0,0 +1 @@
+print('hello')
";
        let summary = accepted_line_fingerprints_from_unified_diff(diff);
        assert_eq!(summary.accepted_added_lines, 1);
        assert_eq!(summary.accepted_deleted_lines, 0);
        assert_eq!(summary.line_fingerprints.len(), 1);
    }

    // Hunk body lines beginning with "--" / "++" must be parsed as a
    // deletion of "-- old value" and an addition of "++ new value", not as
    // file headers.
    #[test]
    fn parses_hunk_lines_that_look_like_file_headers() {
        let diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,2 +1,2 @@
--- old value
+++ new value
";
        let summary = accepted_line_fingerprints_from_unified_diff(diff);
        assert_eq!(
            summary,
            AcceptedLineFingerprintSummary {
                accepted_added_lines: 1,
                accepted_deleted_lines: 1,
                line_fingerprints: vec![AcceptedLineFingerprint {
                    path_hash: fingerprint_hash("path", "src/lib.rs"),
                    line_hash: fingerprint_hash("line", "++ new value"),
                }],
            }
        );
    }
}

View File

@@ -1,7 +1,5 @@
use crate::client::AnalyticsEventsQueue;
use crate::events::AppServerRpcTransport;
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::CodexAppMentionedEventRequest;
use crate::events::CodexAppServerClientMetadata;
use crate::events::CodexAppUsedEventRequest;
@@ -30,7 +28,6 @@ use crate::events::codex_hook_run_metadata;
use crate::events::codex_plugin_metadata;
use crate::events::codex_plugin_used_metadata;
use crate::events::subagent_thread_started_event_request;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AnalyticsFact;
use crate::facts::AnalyticsJsonRpcError;
use crate::facts::AppInvocation;
@@ -92,7 +89,6 @@ use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStatus as AppServerThreadStatus;
use codex_app_server_protocol::Turn;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnDiffUpdatedNotification;
use codex_app_server_protocol::TurnError as AppServerTurnError;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartedNotification;
@@ -640,7 +636,6 @@ fn sample_initialize_fact(connection_id: u64) -> AnalyticsFact {
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -832,206 +827,6 @@ fn app_used_event_serializes_expected_shape() {
);
}
// Golden-shape test: the serde output of an AcceptedLineFingerprints event
// must match the wire format expected by the analytics endpoint, including
// the duplicated outer (snake_case) and inner (dotted) event_type fields.
#[test]
fn accepted_line_fingerprints_event_serializes_expected_shape() {
    let event = TrackEventRequest::AcceptedLineFingerprints(Box::new(
        CodexAcceptedLineFingerprintsEventRequest {
            event_type: "codex_accepted_line_fingerprints",
            event_params: CodexAcceptedLineFingerprintsEventParams {
                event_type: "codex.accepted_line_fingerprints",
                turn_id: "turn-1".to_string(),
                thread_id: "thread-1".to_string(),
                product_surface: Some("codex".to_string()),
                model_slug: Some("gpt-5.1-codex".to_string()),
                completed_at: 1710000000,
                repo_hash: Some("repo-hash-1".to_string()),
                accepted_added_lines: 42,
                accepted_deleted_lines: 40,
                line_fingerprints: vec![AcceptedLineFingerprint {
                    path_hash: "path-hash-1".to_string(),
                    line_hash: "line-hash-1".to_string(),
                }],
            },
        },
    ));
    let payload = serde_json::to_value(&event).expect("serialize accepted line fingerprints event");
    assert_eq!(
        payload,
        json!({
            "event_type": "codex_accepted_line_fingerprints",
            "event_params": {
                "event_type": "codex.accepted_line_fingerprints",
                "turn_id": "turn-1",
                "thread_id": "thread-1",
                "product_surface": "codex",
                "model_slug": "gpt-5.1-codex",
                "completed_at": 1710000000,
                "repo_hash": "repo-hash-1",
                "accepted_added_lines": 42,
                "accepted_deleted_lines": 40,
                "line_fingerprints": [
                    {
                        "path_hash": "path-hash-1",
                        "line_hash": "line-hash-1"
                    }
                ]
            }
        })
    );
}
// A 20k-line accepted diff must be emitted as multiple chunked
// AcceptedLineFingerprints events: the total add/delete counts appear only
// on the first chunk (so downstream sums are not inflated), every chunk
// stays under the ~2.1 MB serialized ceiling, and the fingerprints across
// chunks sum back to 20,000.
#[tokio::test]
async fn reducer_chunks_large_accepted_line_fingerprint_events_without_repeating_counts() {
    let mut reducer = AnalyticsReducer::default();
    let mut events = Vec::new();
    ingest_turn_prerequisites(
        &mut reducer,
        &mut events,
        /*include_initialize*/ true,
        /*include_resolved_config*/ true,
        /*include_started*/ true,
        /*include_token_usage*/ true,
    )
    .await;
    events.clear();
    // Diff body lines live at column 0: they are literal string content.
    let mut diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -0,0 +1,20000 @@
"
    .to_string();
    for index in 0..20_000 {
        diff.push_str(&format!("+let value_{index} = {index};\n"));
    }
    // A diff update alone must not emit anything; fingerprints are only
    // flushed when the turn completes.
    reducer
        .ingest(
            AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
                TurnDiffUpdatedNotification {
                    thread_id: "thread-2".to_string(),
                    turn_id: "turn-2".to_string(),
                    diff,
                },
            ))),
            &mut events,
        )
        .await;
    assert!(events.is_empty());
    reducer
        .ingest(
            AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
                "thread-2",
                "turn-2",
                AppServerTurnStatus::Completed,
                /*codex_error_info*/ None,
            ))),
            &mut events,
        )
        .await;
    let accepted_line_events = events
        .iter()
        .filter_map(|event| match event {
            TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
            _ => None,
        })
        .collect::<Vec<_>>();
    assert!(accepted_line_events.len() > 1);
    let mut total_fingerprints = 0;
    for (index, event) in accepted_line_events.iter().enumerate() {
        assert_eq!(event.event_params.turn_id, "turn-2");
        assert_eq!(event.event_params.thread_id, "thread-2");
        total_fingerprints += event.event_params.line_fingerprints.len();
        if index == 0 {
            assert_eq!(event.event_params.accepted_added_lines, 20_000);
            assert_eq!(event.event_params.accepted_deleted_lines, 0);
        } else {
            // Follow-up chunks report zero counts by design.
            assert_eq!(event.event_params.accepted_added_lines, 0);
            assert_eq!(event.event_params.accepted_deleted_lines, 0);
        }
        assert!(serde_json::to_vec(event).expect("serialize chunk").len() < 2_100_000);
    }
    assert_eq!(total_fingerprints, 20_000);
}
// Two successive diff updates for the same turn: only the *latest* diff is
// fingerprinted, exactly once, when the turn completes — earlier diff
// snapshots must not leak into the emitted event.
#[tokio::test]
async fn reducer_emits_accepted_line_fingerprints_once_from_latest_turn_diff_on_completion() {
    let mut reducer = AnalyticsReducer::default();
    let mut events = Vec::new();
    ingest_turn_prerequisites(
        &mut reducer,
        &mut events,
        /*include_initialize*/ true,
        /*include_resolved_config*/ true,
        /*include_started*/ true,
        /*include_token_usage*/ true,
    )
    .await;
    events.clear();
    // Second iteration overwrites the first diff for turn-2.
    for line in ["let old_value = 1;", "let latest_value = 2;"] {
        let diff = format!(
            "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -0,0 +1 @@
+{line}
"
        );
        reducer
            .ingest(
                AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
                    TurnDiffUpdatedNotification {
                        thread_id: "thread-2".to_string(),
                        turn_id: "turn-2".to_string(),
                        diff,
                    },
                ))),
                &mut events,
            )
            .await;
    }
    assert!(events.is_empty());
    reducer
        .ingest(
            AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
                "thread-2",
                "turn-2",
                AppServerTurnStatus::Completed,
                /*codex_error_info*/ None,
            ))),
            &mut events,
        )
        .await;
    let accepted_line_events = events
        .iter()
        .filter_map(|event| match event {
            TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
            _ => None,
        })
        .collect::<Vec<_>>();
    assert_eq!(accepted_line_events.len(), 1);
    let event = accepted_line_events[0];
    assert_eq!(event.event_params.accepted_added_lines, 1);
    assert_eq!(event.event_params.line_fingerprints.len(), 1);
    // Only the most recent diff's line should have been fingerprinted.
    assert_eq!(
        event.event_params.line_fingerprints[0].line_hash,
        crate::fingerprint_hash("line", "let latest_value = 2;")
    );
}
#[test]
fn compaction_event_serializes_expected_shape() {
let event = TrackEventRequest::Compaction(Box::new(CodexCompactionEventRequest {
@@ -1327,7 +1122,6 @@ async fn initialize_caches_client_and_thread_lifecycle_publishes_once_initialize
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -1475,7 +1269,6 @@ async fn compaction_event_ingests_custom_fact() {
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -1589,7 +1382,6 @@ async fn guardian_review_event_ingests_custom_fact_with_optional_target_item() {
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},

View File

@@ -30,7 +30,6 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ServerResponse;
use codex_login::AuthManager;
use codex_login::CodexAuth;
use codex_login::default_client::create_client;
use codex_plugin::PluginTelemetryMetadata;
use std::collections::HashSet;
@@ -353,7 +352,6 @@ impl AnalyticsEventsClient {
notification,
ServerNotification::TurnStarted(_)
| ServerNotification::TurnCompleted(_)
| ServerNotification::TurnDiffUpdated(_)
| ServerNotification::ItemStarted(_)
| ServerNotification::ItemCompleted(_)
| ServerNotification::ItemGuardianApprovalReviewStarted(_)
@@ -373,7 +371,6 @@ async fn send_track_events(
if events.is_empty() {
return;
}
let Some(auth) = auth_manager.auth().await else {
return;
};
@@ -383,45 +380,12 @@ async fn send_track_events(
let base_url = base_url.trim_end_matches('/');
let url = format!("{base_url}/codex/analytics-events/events");
for events in track_event_request_batches(events) {
send_track_events_request(&auth, &url, events).await;
}
}
/// Groups track events into request batches while preserving order.
/// Events flagged as isolation-required each get a single-element batch of
/// their own; everything else accumulates into shared batches between them.
fn track_event_request_batches(events: Vec<TrackEventRequest>) -> Vec<Vec<TrackEventRequest>> {
    let mut batches: Vec<Vec<TrackEventRequest>> = Vec::new();
    let mut pending: Vec<TrackEventRequest> = Vec::new();
    for event in events {
        if event.should_send_in_isolated_request() {
            // Close out any accumulated regular events first so ordering
            // is preserved, then ship the isolated event alone.
            if !pending.is_empty() {
                batches.push(std::mem::take(&mut pending));
            }
            batches.push(vec![event]);
        } else {
            pending.push(event);
        }
    }
    if !pending.is_empty() {
        batches.push(pending);
    }
    batches
}
async fn send_track_events_request(auth: &CodexAuth, url: &str, events: Vec<TrackEventRequest>) {
if events.is_empty() {
return;
}
let payload = TrackEventsRequest { events };
let response = create_client()
.post(url)
.post(&url)
.timeout(ANALYTICS_EVENTS_TIMEOUT)
.headers(codex_model_provider::auth_provider_from_auth(auth).to_auth_headers())
.headers(codex_model_provider::auth_provider_from_auth(&auth).to_auth_headers())
.header("Content-Type", "application/json")
.json(&payload)
.send()

View File

@@ -1,14 +1,6 @@
use super::AnalyticsEventsClient;
use super::AnalyticsEventsQueue;
use super::track_event_request_batches;
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::SkillInvocationEventParams;
use crate::events::SkillInvocationEventRequest;
use crate::events::TrackEventRequest;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AnalyticsFact;
use crate::facts::InvocationType;
use codex_app_server_protocol::ApprovalsReviewer as AppServerApprovalsReviewer;
use codex_app_server_protocol::AskForApproval as AppServerAskForApproval;
use codex_app_server_protocol::ClientRequest;
@@ -39,47 +31,6 @@ use std::sync::Mutex;
use tokio::sync::mpsc;
use tokio::sync::mpsc::error::TryRecvError;
/// Builds an AcceptedLineFingerprints track event with fixed placeholder
/// values, parameterized only by `thread_id`, for batching tests.
fn sample_accepted_line_fingerprint_event(thread_id: &str) -> TrackEventRequest {
    TrackEventRequest::AcceptedLineFingerprints(Box::new(
        CodexAcceptedLineFingerprintsEventRequest {
            event_type: "codex_accepted_line_fingerprints",
            event_params: CodexAcceptedLineFingerprintsEventParams {
                event_type: "codex.accepted_line_fingerprints",
                turn_id: "turn-1".to_string(),
                thread_id: thread_id.to_string(),
                product_surface: Some("codex".to_string()),
                model_slug: Some("gpt-5.1-codex".to_string()),
                completed_at: 1,
                repo_hash: None,
                accepted_added_lines: 1,
                accepted_deleted_lines: 0,
                line_fingerprints: vec![AcceptedLineFingerprint {
                    path_hash: "path-hash".to_string(),
                    line_hash: "line-hash".to_string(),
                }],
            },
        },
    ))
}
/// Builds a non-isolated (regular) track event — a skill invocation — with
/// `thread_id` folded into both the skill id and the params.
fn sample_regular_track_event(thread_id: &str) -> TrackEventRequest {
    TrackEventRequest::SkillInvocation(SkillInvocationEventRequest {
        event_type: "skill_invocation",
        skill_id: format!("skill-{thread_id}"),
        skill_name: "doc".to_string(),
        event_params: SkillInvocationEventParams {
            product_client_id: None,
            skill_scope: None,
            plugin_id: None,
            repo_url: None,
            thread_id: Some(thread_id.to_string()),
            turn_id: Some("turn-1".to_string()),
            invoke_type: Some(InvocationType::Explicit),
            model_slug: Some("gpt-5.1-codex".to_string()),
        },
    })
}
fn client_with_receiver() -> (AnalyticsEventsClient, mpsc::Receiver<AnalyticsFact>) {
let (sender, receiver) = mpsc::channel(8);
let queue = AnalyticsEventsQueue {
@@ -271,23 +222,3 @@ fn track_response_only_enqueues_analytics_relevant_responses() {
);
assert!(matches!(receiver.try_recv(), Err(TryRecvError::Empty)));
}
// Regular events batch together; each accepted-line-fingerprint event is
// split into its own single-element batch. Expected grouping, in order:
// [2 regular], [fingerprint], [fingerprint], [2 regular].
#[test]
fn track_event_request_batches_only_isolates_accepted_line_fingerprint_events() {
    let batches = track_event_request_batches(vec![
        sample_regular_track_event("thread-1"),
        sample_regular_track_event("thread-2"),
        sample_accepted_line_fingerprint_event("thread-3"),
        sample_accepted_line_fingerprint_event("thread-4"),
        sample_regular_track_event("thread-5"),
        sample_regular_track_event("thread-6"),
    ]);
    assert_eq!(batches.len(), 4);
    assert_eq!(batches[0].len(), 2);
    assert_eq!(batches[1].len(), 1);
    assert_eq!(batches[2].len(), 1);
    assert_eq!(batches[3].len(), 2);
    assert!(batches[1][0].should_send_in_isolated_request());
    assert!(batches[2][0].should_send_in_isolated_request());
}

View File

@@ -1,6 +1,5 @@
use std::time::Instant;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AppInvocation;
use crate::facts::CodexCompactionEvent;
use crate::facts::CompactionImplementation;
@@ -72,7 +71,6 @@ pub(crate) enum TrackEventRequest {
CollabAgentToolCall(CodexCollabAgentToolCallEventRequest),
WebSearch(CodexWebSearchEventRequest),
ImageGeneration(CodexImageGenerationEventRequest),
AcceptedLineFingerprints(Box<CodexAcceptedLineFingerprintsEventRequest>),
#[allow(dead_code)]
ReviewEvent(CodexReviewEventRequest),
PluginUsed(CodexPluginUsedEventRequest),
@@ -82,32 +80,6 @@ pub(crate) enum TrackEventRequest {
PluginDisabled(CodexPluginEventRequest),
}
impl TrackEventRequest {
    /// True when this event must be shipped in its own request batch rather
    /// than grouped with other events; currently only
    /// `AcceptedLineFingerprints` (whose payloads can be very large) is
    /// isolated.
    pub(crate) fn should_send_in_isolated_request(&self) -> bool {
        matches!(self, Self::AcceptedLineFingerprints(_))
    }
}
/// Payload of a `codex_accepted_line_fingerprints` track event.
#[derive(Serialize)]
pub(crate) struct CodexAcceptedLineFingerprintsEventParams {
    /// Inner dotted event name, e.g. "codex.accepted_line_fingerprints".
    pub(crate) event_type: &'static str,
    pub(crate) turn_id: String,
    pub(crate) thread_id: String,
    /// Emitting surface, e.g. "codex".
    pub(crate) product_surface: Option<String>,
    pub(crate) model_slug: Option<String>,
    /// Unix timestamp (seconds) recorded when the event was built.
    pub(crate) completed_at: u64,
    /// Fingerprint of the canonicalized git remote URL, when available.
    pub(crate) repo_hash: Option<String>,
    /// Total added/deleted counts from the accepted diff. When an event is
    /// split into chunks, only the first chunk carries non-zero counts.
    pub(crate) accepted_added_lines: u64,
    pub(crate) accepted_deleted_lines: u64,
    /// Hashed (path, line) pairs for the effective added lines.
    pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
}
/// Track-event envelope: outer snake_case event name plus the params body.
#[derive(Serialize)]
pub(crate) struct CodexAcceptedLineFingerprintsEventRequest {
    /// Outer event name, e.g. "codex_accepted_line_fingerprints".
    pub(crate) event_type: &'static str,
    pub(crate) event_params: CodexAcceptedLineFingerprintsEventParams,
}
#[derive(Serialize)]
pub(crate) struct SkillInvocationEventRequest {
pub(crate) event_type: &'static str,

View File

@@ -28,12 +28,6 @@ use codex_protocol::protocol::TokenUsage;
use serde::Serialize;
use std::path::PathBuf;
/// Hashed identification of a single accepted diff line: a fingerprint of
/// the file path plus a fingerprint of the normalized line content. No raw
/// paths or source text are carried.
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct AcceptedLineFingerprint {
    pub path_hash: String,
    pub line_hash: String,
}
#[derive(Clone)]
pub struct TrackEventsContext {
pub model_slug: String,

View File

@@ -1,4 +1,3 @@
mod accepted_lines;
mod client;
mod events;
mod facts;
@@ -7,8 +6,6 @@ mod reducer;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
pub use accepted_lines::accepted_line_fingerprints_from_unified_diff;
pub use accepted_lines::fingerprint_hash;
pub use client::AnalyticsEventsClient;
pub use events::AppServerRpcTransport;
pub use events::GuardianApprovalRequestSource;
@@ -20,7 +17,6 @@ pub use events::GuardianReviewSessionKind;
pub use events::GuardianReviewTerminalStatus;
pub use events::GuardianReviewTrackContext;
pub use events::GuardianReviewedAction;
pub use facts::AcceptedLineFingerprint;
pub use facts::AnalyticsJsonRpcError;
pub use facts::AppInvocation;
pub use facts::CodexCompactionEvent;

View File

@@ -1,7 +1,3 @@
use crate::accepted_lines::AcceptedLineFingerprintEventInput;
use crate::accepted_lines::accepted_line_fingerprint_event_requests;
use crate::accepted_lines::accepted_line_fingerprints_from_unified_diff;
use crate::accepted_lines::accepted_line_repo_hash_for_cwd;
use crate::events::AppServerRpcTransport;
use crate::events::CodexAppMentionedEventRequest;
use crate::events::CodexAppServerClientMetadata;
@@ -108,7 +104,6 @@ use codex_protocol::protocol::TokenUsage;
use sha1::Digest;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
#[derive(Default)]
pub(crate) struct AnalyticsReducer {
@@ -269,7 +264,6 @@ struct TurnState {
started_at: Option<u64>,
token_usage: Option<TokenUsage>,
completed: Option<CompletedTurnState>,
latest_diff: Option<String>,
steer_count: usize,
}
@@ -311,7 +305,7 @@ impl AnalyticsReducer {
response,
} => {
if let Some(response) = response.into_client_response(request_id) {
self.ingest_response(connection_id, response, out).await;
self.ingest_response(connection_id, response, out);
}
}
AnalyticsFact::ErrorResponse {
@@ -323,7 +317,7 @@ impl AnalyticsReducer {
self.ingest_error_response(connection_id, request_id, error_type, out);
}
AnalyticsFact::Notification(notification) => {
self.ingest_notification(*notification, out).await;
self.ingest_notification(*notification, out);
}
AnalyticsFact::ServerRequest {
connection_id: _connection_id,
@@ -344,10 +338,10 @@ impl AnalyticsReducer {
self.ingest_guardian_review(*input, out);
}
CustomAnalyticsFact::TurnResolvedConfig(input) => {
self.ingest_turn_resolved_config(*input, out).await;
self.ingest_turn_resolved_config(*input, out);
}
CustomAnalyticsFact::TurnTokenUsage(input) => {
self.ingest_turn_token_usage(*input, out).await;
self.ingest_turn_token_usage(*input, out);
}
CustomAnalyticsFact::SkillInvoked(input) => {
self.ingest_skill_invoked(input, out).await;
@@ -479,7 +473,7 @@ impl AnalyticsReducer {
}
}
async fn ingest_turn_resolved_config(
fn ingest_turn_resolved_config(
&mut self,
input: TurnResolvedConfigFact,
out: &mut Vec<TrackEventRequest>,
@@ -495,16 +489,15 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(thread_id);
turn_state.num_input_images = Some(num_input_images);
turn_state.resolved_config = Some(input);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
async fn ingest_turn_token_usage(
fn ingest_turn_token_usage(
&mut self,
input: TurnTokenUsageFact,
out: &mut Vec<TrackEventRequest>,
@@ -518,12 +511,11 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(input.thread_id);
turn_state.token_usage = Some(input.token_usage);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
async fn ingest_skill_invoked(
@@ -630,7 +622,7 @@ impl AnalyticsReducer {
});
}
async fn ingest_response(
fn ingest_response(
&mut self,
connection_id: u64,
response: ClientResponse,
@@ -682,13 +674,12 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.connection_id = Some(connection_id);
turn_state.thread_id = Some(pending_request.thread_id);
turn_state.num_input_images = Some(pending_request.num_input_images);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
ClientResponse::TurnSteer {
request_id,
@@ -750,7 +741,7 @@ impl AnalyticsReducer {
);
}
async fn ingest_notification(
fn ingest_notification(
&mut self,
notification: ServerNotification,
out: &mut Vec<TrackEventRequest>,
@@ -821,7 +812,6 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.started_at = notification
@@ -829,24 +819,6 @@ impl AnalyticsReducer {
.started_at
.and_then(|started_at| u64::try_from(started_at).ok());
}
ServerNotification::TurnDiffUpdated(notification) => {
let turn_state =
self.turns
.entry(notification.turn_id.clone())
.or_insert(TurnState {
connection_id: None,
thread_id: None,
num_input_images: None,
resolved_config: None,
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(notification.thread_id);
turn_state.latest_diff = Some(notification.diff);
}
ServerNotification::TurnCompleted(notification) => {
let turn_state =
self.turns
@@ -859,7 +831,6 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.completed = Some(CompletedTurnState {
@@ -879,7 +850,7 @@ impl AnalyticsReducer {
.and_then(|duration_ms| u64::try_from(duration_ms).ok()),
});
let turn_id = notification.turn.id;
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
_ => {}
}
@@ -1015,7 +986,7 @@ impl AnalyticsReducer {
}));
}
async fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
let Some(turn_state) = self.turns.get(turn_id) else {
return;
};
@@ -1048,23 +1019,18 @@ impl AnalyticsReducer {
warn_missing_analytics_context(&drop_site, MissingAnalyticsContext::ThreadMetadata);
return;
};
let turn_event = TrackEventRequest::TurnEvent(Box::new(CodexTurnEventRequest {
event_type: "codex_turn_event",
event_params: codex_turn_event_params(
connection_state.app_server_client.clone(),
connection_state.runtime.clone(),
turn_id.to_string(),
turn_state,
thread_metadata,
),
}));
let accepted_line_event = accepted_line_event_input(turn_id, turn_state);
out.push(turn_event);
if let Some((mut input, cwd)) = accepted_line_event {
input.repo_hash = accepted_line_repo_hash_for_cwd(cwd.as_path()).await;
out.extend(accepted_line_fingerprint_event_requests(input));
}
out.push(TrackEventRequest::TurnEvent(Box::new(
CodexTurnEventRequest {
event_type: "codex_turn_event",
event_params: codex_turn_event_params(
connection_state.app_server_client.clone(),
connection_state.runtime.clone(),
turn_id.to_string(),
turn_state,
thread_metadata,
),
},
)));
self.turns.remove(turn_id);
}
@@ -1676,36 +1642,6 @@ fn web_search_query_count(query: &str, action: Option<&WebSearchAction>) -> Opti
}
}
/// Builds the accepted-line-fingerprints event input for a finished turn,
/// plus the cwd used later to resolve the repo hash. Returns `None` when the
/// turn has no diff, the diff changed nothing, or the thread id / resolved
/// config needed for attribution are missing.
fn accepted_line_event_input(
    turn_id: &str,
    turn_state: &TurnState,
) -> Option<(AcceptedLineFingerprintEventInput, PathBuf)> {
    let diff = turn_state.latest_diff.as_deref()?;
    let summary = accepted_line_fingerprints_from_unified_diff(diff);
    let has_accepted_lines =
        summary.accepted_added_lines > 0 || summary.accepted_deleted_lines > 0;
    if !has_accepted_lines {
        return None;
    }
    let thread_id = turn_state.thread_id.clone()?;
    let config = turn_state.resolved_config.clone()?;
    let input = AcceptedLineFingerprintEventInput {
        event_type: "codex.accepted_line_fingerprints",
        turn_id: turn_id.to_string(),
        thread_id,
        product_surface: Some("codex".to_string()),
        model_slug: Some(config.model.clone()),
        completed_at: now_unix_seconds(),
        // Filled in by the caller via accepted_line_repo_hash_for_cwd.
        repo_hash: None,
        accepted_added_lines: summary.accepted_added_lines,
        accepted_deleted_lines: summary.accepted_deleted_lines,
        line_fingerprints: summary.line_fingerprints,
    };
    Some((input, config.permission_profile_cwd))
}
fn codex_turn_event_params(
app_server_client: CodexAppServerClientMetadata,
runtime: CodexRuntimeMetadata,

View File

@@ -49,6 +49,7 @@ use codex_config::RemoteThreadConfigLoader;
use codex_config::ThreadConfigLoader;
use codex_core::config::Config;
pub use codex_exec_server::EnvironmentManager;
pub use codex_exec_server::EnvironmentManagerArgs;
pub use codex_exec_server::ExecServerRuntimePaths;
use codex_feedback::CodexFeedback;
use codex_protocol::protocol::SessionSource;
@@ -374,7 +375,6 @@ impl InProcessClientStartArgs {
pub fn initialize_params(&self) -> InitializeParams {
let capabilities = InitializeCapabilities {
experimental_api: self.experimental_api,
request_attestation: false,
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
None
} else {

View File

@@ -73,7 +73,6 @@ impl RemoteAppServerConnectArgs {
fn initialize_params(&self) -> InitializeParams {
let capabilities = InitializeCapabilities {
experimental_api: self.experimental_api,
request_attestation: false,
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
None
} else {

View File

@@ -1,6 +0,0 @@
# Bazel target for the codex-app-server-daemon crate.
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server-daemon",
    crate_name = "codex_app_server_daemon",
)

View File

@@ -1,39 +0,0 @@
# Unix-only daemon crate backing the `codex app-server daemon` lifecycle
# commands (see README.md in this directory).
[package]
name = "codex-app-server-daemon"
version.workspace = true
edition.workspace = true
license.workspace = true

[lib]
name = "codex_app_server_daemon"
path = "src/lib.rs"
# Skip doctest collection for this library.
doctest = false

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-app-server-transport = { workspace = true }
codex-core = { workspace = true }
codex-uds = { workspace = true }
futures = { workspace = true }
libc = { workspace = true }
reqwest = { workspace = true, features = ["rustls-tls"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
    "fs",
    "io-util",
    "macros",
    "process",
    "rt-multi-thread",
    "signal",
    "time",
] }
tokio-tungstenite = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -1,104 +0,0 @@
# codex-app-server-daemon
> `codex-app-server-daemon` is experimental and its lifecycle contract may
> change while the remote-management flow is still being developed.
`codex-app-server-daemon` backs the machine-readable `codex app-server`
lifecycle commands used by remote clients such as the desktop and mobile apps.
It is intended for Codex instances launched over SSH, including fresh developer
machines that should expose app-server with `remote_control` enabled.
## Platform support
The current daemon implementation is Unix-only. It uses pidfile-backed
daemonization plus Unix process and file-locking primitives, and does not yet
support Windows lifecycle management.
## Commands
```sh
codex app-server daemon start
codex app-server daemon restart
codex app-server daemon enable-remote-control
codex app-server daemon disable-remote-control
codex app-server daemon stop
codex app-server daemon version
codex app-server daemon bootstrap --remote-control
```
On success, every command writes exactly one JSON object to stdout. Consumers
should parse that JSON rather than relying on human-readable text. Lifecycle
responses report the resolved backend, socket path, local CLI version, and
running app-server version when applicable.
## Bootstrap flow
For a new remote machine:
```sh
curl -fsSL https://chatgpt.com/codex/install.sh | sh
$HOME/.codex/packages/standalone/current/codex app-server daemon bootstrap --remote-control
```
`bootstrap` requires the standalone managed install. It records the daemon
settings under `CODEX_HOME/app-server-daemon/`, starts app-server as a
pidfile-backed detached process, and launches a detached updater loop.
## Installation and update cases
The daemon assumes Codex is installed through `install.sh` and always launches
the standalone managed binary under `CODEX_HOME`.
| Situation | What starts | Does this daemon fetch new binaries? | Does a running app-server eventually move to a newer binary on its own? |
| --- | --- | --- | --- |
| `install.sh` has run, but only `start` is used | `start` uses `CODEX_HOME/packages/standalone/current/codex` | No | No. The managed path is used when starting or restarting, but no updater is installed. |
| `install.sh` has run, then `bootstrap` is used | The pidfile backend uses `CODEX_HOME/packages/standalone/current/codex` | Yes. Bootstrap launches a detached updater loop that runs `install.sh` hourly. | Yes, while that updater process is alive. After a successful fetch, it restarts a currently running app-server only when the managed binary reports a different version. |
| Some other tool updates the managed binary path | The next fresh start or restart uses the updated file at that path | No | Not automatically. The existing process keeps the old executable image until an explicit `restart`. |
### Standalone installs
For installs created by `install.sh`:
- lifecycle commands always use the standalone managed binary path
- `bootstrap` is supported
- `bootstrap` starts a detached pid-backed updater loop that fetches via
`install.sh`, then restarts app-server if it is running on a different version
- the updater loop is not reboot-persistent; it must be started again by
rerunning `bootstrap` after a reboot
### Out-of-band updates
This daemon does not watch arbitrary executable files for replacement. If some
other tool updates a binary that the daemon would use on its next launch:
- a currently running app-server remains on the old executable image
- `restart` will launch the updated binary
- for bootstrapped daemons, the detached updater loop only reacts to updates it
fetched itself; it does not watch arbitrary file replacement
## Lifecycle semantics
`start` is idempotent and returns after app-server is ready to answer the normal
JSON-RPC initialize handshake on the Unix control socket.
`restart` stops any managed daemon and starts it again.
`enable-remote-control` and `disable-remote-control` persist the launch setting
for future starts. If a managed app-server is already running, they restart it
so the new setting takes effect immediately.
`stop` sends a graceful termination request first, then sends a second
termination signal after the grace window if the process is still alive.
All mutating lifecycle commands are serialized per `CODEX_HOME`, so a concurrent
`start`, `restart`, `enable-remote-control`, `disable-remote-control`, `stop`,
or `bootstrap` does not race another in-flight lifecycle operation.
## State
The daemon stores its local state under `CODEX_HOME/app-server-daemon/`:
- `settings.json` for persisted launch settings
- `app-server.pid` for the app-server process record
- `app-server-updater.pid` for the pid-backed standalone updater loop
- `daemon.lock` for daemon-wide lifecycle serialization

View File

@@ -1,33 +0,0 @@
mod pid;
use std::path::PathBuf;
use serde::Serialize;
pub(crate) use pid::PidBackend;
/// Process-management backend selected for the daemon; serialized in
/// camelCase in lifecycle command output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BackendKind {
    /// Pidfile-backed detached process management.
    Pid,
}
/// Filesystem inputs needed to construct a pid backend.
#[derive(Debug, Clone)]
pub(crate) struct BackendPaths {
    // Codex binary to launch.
    pub(crate) codex_bin: PathBuf,
    // Pid file for the app-server process.
    pub(crate) pid_file: PathBuf,
    // Pid file for the detached updater loop process.
    pub(crate) update_pid_file: PathBuf,
    // Whether app-server is launched with remote control enabled.
    pub(crate) remote_control_enabled: bool,
}
pub(crate) fn pid_backend(paths: BackendPaths) -> PidBackend {
PidBackend::new(
paths.codex_bin,
paths.pid_file,
paths.remote_control_enabled,
)
}
pub(crate) fn pid_update_loop_backend(paths: BackendPaths) -> PidBackend {
PidBackend::new_update_loop(paths.codex_bin, paths.update_pid_file)
}

View File

@@ -1,600 +0,0 @@
use std::path::Path;
use std::path::PathBuf;
#[cfg(unix)]
use std::process::Stdio;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use serde::Deserialize;
use serde::Serialize;
use tokio::fs;
#[cfg(unix)]
use tokio::process::Command;
use tokio::time::sleep;
/// How often to re-check process state while waiting for a stop to complete.
const STOP_POLL_INTERVAL: Duration = Duration::from_millis(50);
/// How long to wait after graceful termination before escalating to a forced kill.
const STOP_GRACE_PERIOD: Duration = Duration::from_secs(60);
/// Overall deadline for one stop attempt (grace period plus forced-kill slack).
const STOP_TIMEOUT: Duration = Duration::from_secs(70);
/// How long to wait for a pid-file reservation to finish initializing.
const START_TIMEOUT: Duration = Duration::from_secs(10);

/// Pid-file based process manager for either the app-server or the updater loop.
///
/// State is coordinated through two files: the pid file (holding a JSON
/// [`PidRecord`]) and an adjacent flock-based reservation lock file.
#[derive(Debug)]
#[cfg_attr(not(unix), allow(dead_code))]
pub(crate) struct PidBackend {
    codex_bin: PathBuf,
    pid_file: PathBuf,
    lock_file: PathBuf,
    command_kind: PidCommandKind,
}

/// Identity of a spawned process as persisted in the pid file.
///
/// The start time disambiguates pid reuse: a record only matches a live
/// process when both the pid and its recorded start time agree.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PidRecord {
    pid: u32,
    /// Output of `ps -o lstart=` for the pid, captured right after spawn.
    process_start_time: String,
}

/// Logical state derived from the pid file and the reservation lock.
#[derive(Debug, Clone, PartialEq, Eq)]
enum PidFileState {
    /// No pid file and no active reservation.
    Missing,
    /// A reservation is in flight but no record has been published yet.
    Starting,
    /// A record is published (it may still describe a dead process).
    Running(PidRecord),
}

/// Which process variant this backend spawns and signals.
#[derive(Debug, Clone, Copy)]
#[cfg_attr(not(unix), allow(dead_code))]
enum PidCommandKind {
    /// The main app-server listening on the Unix control socket.
    AppServer { remote_control_enabled: bool },
    /// The detached auto-update loop.
    UpdateLoop,
}
impl PidBackend {
    /// Creates a backend managing the main app-server process.
    pub(crate) fn new(codex_bin: PathBuf, pid_file: PathBuf, remote_control_enabled: bool) -> Self {
        // The lock file sits next to the pid file (e.g. `app-server.pid.lock`).
        let lock_file = pid_file.with_extension("pid.lock");
        Self {
            codex_bin,
            pid_file,
            lock_file,
            command_kind: PidCommandKind::AppServer {
                remote_control_enabled,
            },
        }
    }

    /// Creates a backend managing the detached updater loop process.
    pub(crate) fn new_update_loop(codex_bin: PathBuf, pid_file: PathBuf) -> Self {
        let lock_file = pid_file.with_extension("pid.lock");
        Self {
            codex_bin,
            pid_file,
            lock_file,
            command_kind: PidCommandKind::UpdateLoop,
        }
    }

    /// Returns true when a managed process is starting up or currently alive.
    ///
    /// Stale `Running` records (dead or reused pid) are cleaned up under the
    /// reservation lock, then the state is re-read until it settles.
    pub(crate) async fn is_starting_or_running(&self) -> Result<bool> {
        loop {
            match self.read_pid_file_state().await? {
                PidFileState::Missing => return Ok(false),
                PidFileState::Starting => return Ok(true),
                PidFileState::Running(record) => {
                    if self.record_is_active(&record).await? {
                        return Ok(true);
                    }
                    // Record is stale; remove it (if unchanged) and re-evaluate.
                    match self.refresh_after_stale_record(&record).await? {
                        PidFileState::Missing => return Ok(false),
                        PidFileState::Starting | PidFileState::Running(_) => continue,
                    }
                }
            }
        }
    }

    /// Spawns the managed process detached from the current session.
    ///
    /// Returns `Ok(None)` when a live process already owns the pid file, and
    /// `Ok(Some(pid))` for a newly spawned process. The pid file is reserved
    /// with `create_new` under the reservation lock, then published atomically
    /// via a temp-file rename once the pid record is known; every failure path
    /// tears down both the process and the files it created.
    #[cfg(unix)]
    pub(crate) async fn start(&self) -> Result<Option<u32>> {
        if let Some(parent) = self.pid_file.parent() {
            fs::create_dir_all(parent)
                .await
                .with_context(|| format!("failed to create pid directory {}", parent.display()))?;
        }
        let reservation_lock = self.acquire_reservation_lock().await?;
        // An empty reserved pid file marks the backend as "starting" to readers.
        let _pid_file = loop {
            match fs::OpenOptions::new()
                .create_new(true)
                .write(true)
                .open(&self.pid_file)
                .await
            {
                Ok(pid_file) => break pid_file,
                Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
                    match self.read_pid_file_state_with_lock_held().await? {
                        PidFileState::Missing => continue,
                        PidFileState::Running(record) => {
                            if self.record_is_active(&record).await? {
                                // A live process already owns the pid file.
                                return Ok(None);
                            }
                            // Dead owner: clear the stale file and retry.
                            let _ = fs::remove_file(&self.pid_file).await;
                            continue;
                        }
                        PidFileState::Starting => {
                            // "Starting" is inferred from the lock we currently hold.
                            unreachable!("lock holder cannot observe starting")
                        }
                    }
                }
                Err(err) => {
                    return Err(err).with_context(|| {
                        format!("failed to reserve pid file {}", self.pid_file.display())
                    });
                }
            }
        };
        let mut command = Command::new(&self.codex_bin);
        command
            .args(self.command_args())
            .stdin(Stdio::null())
            .stdout(Stdio::null())
            .stderr(Stdio::null());
        #[cfg(unix)]
        {
            // SAFETY: setsid is async-signal-safe; detach the child into its own
            // session so it outlives this process and its controlling terminal.
            unsafe {
                command.pre_exec(|| {
                    if libc::setsid() == -1 {
                        return Err(std::io::Error::last_os_error());
                    }
                    Ok(())
                });
            }
        }
        let child = match command.spawn() {
            Ok(child) => child,
            Err(err) => {
                let _ = fs::remove_file(&self.pid_file).await;
                return Err(err).with_context(|| {
                    format!(
                        "failed to spawn detached app-server process using {}",
                        self.codex_bin.display()
                    )
                });
            }
        };
        let pid = child
            .id()
            .context("spawned app-server process has no pid")?;
        // Record the start time so later pid reuse is not mistaken for our child.
        let record = match read_process_start_time(pid).await {
            Ok(process_start_time) => PidRecord {
                pid,
                process_start_time,
            },
            Err(err) => {
                let _ = self.terminate_process(pid);
                let _ = fs::remove_file(&self.pid_file).await;
                return Err(err);
            }
        };
        let contents = serde_json::to_vec(&record).context("failed to serialize pid record")?;
        // Write then rename so readers never observe a partially written record.
        let temp_pid_file = self.pid_file.with_extension("pid.tmp");
        if let Err(err) = fs::write(&temp_pid_file, &contents).await {
            let _ = self.terminate_process(pid);
            let _ = fs::remove_file(&self.pid_file).await;
            return Err(err).with_context(|| {
                format!("failed to write pid temp file {}", temp_pid_file.display())
            });
        }
        if let Err(err) = fs::rename(&temp_pid_file, &self.pid_file).await {
            let _ = self.terminate_process(pid);
            let _ = fs::remove_file(&temp_pid_file).await;
            let _ = fs::remove_file(&self.pid_file).await;
            return Err(err).with_context(|| {
                format!("failed to publish pid file {}", self.pid_file.display())
            });
        }
        drop(reservation_lock);
        Ok(Some(pid))
    }

    /// Non-Unix fallback: starting detached processes is unsupported.
    #[cfg(not(unix))]
    pub(crate) async fn start(&self) -> Result<Option<u32>> {
        bail!("pid-managed app-server startup is unsupported on this platform")
    }

    /// Stops the managed process: graceful signal first, forced kill after
    /// [`STOP_GRACE_PERIOD`], failing once [`STOP_TIMEOUT`] elapses.
    pub(crate) async fn stop(&self) -> Result<()> {
        loop {
            let Some(record) = self.wait_for_pid_start().await? else {
                return Ok(());
            };
            if !self.record_is_active(&record).await? {
                match self.refresh_after_stale_record(&record).await? {
                    PidFileState::Missing => return Ok(()),
                    PidFileState::Starting | PidFileState::Running(_) => continue,
                }
            }
            let pid = record.pid;
            self.terminate_process(pid)?;
            let started_at = tokio::time::Instant::now();
            let deadline = tokio::time::Instant::now() + STOP_TIMEOUT;
            let mut forced = false;
            while tokio::time::Instant::now() < deadline {
                if !self.record_is_active(&record).await? {
                    match self.refresh_after_stale_record(&record).await? {
                        PidFileState::Missing => return Ok(()),
                        // Someone replaced the record; restart the outer loop.
                        PidFileState::Starting | PidFileState::Running(_) => break,
                    }
                }
                if !forced && started_at.elapsed() >= STOP_GRACE_PERIOD {
                    self.force_terminate_process(pid)?;
                    forced = true;
                }
                sleep(STOP_POLL_INTERVAL).await;
            }
            if self.record_is_active(&record).await? {
                bail!("timed out waiting for pid-managed app server {pid} to stop");
            }
        }
    }

    /// Waits for an in-flight reservation to resolve into a pid record.
    ///
    /// Returns `None` when no process is recorded, `Some(record)` once one is
    /// published, and errors if the `Starting` state persists past
    /// [`START_TIMEOUT`].
    async fn wait_for_pid_start(&self) -> Result<Option<PidRecord>> {
        let deadline = tokio::time::Instant::now() + START_TIMEOUT;
        loop {
            match self.read_pid_file_state().await? {
                PidFileState::Missing => return Ok(None),
                PidFileState::Running(record) => return Ok(Some(record)),
                PidFileState::Starting if tokio::time::Instant::now() < deadline => {
                    sleep(STOP_POLL_INTERVAL).await;
                }
                PidFileState::Starting => {
                    bail!(
                        "timed out waiting for pid reservation in {} to finish initializing",
                        self.pid_file.display()
                    );
                }
            }
        }
    }

    /// Reads the pid-file state without assuming the reservation lock is held.
    ///
    /// An empty pid file is ambiguous: it means `Starting` while another
    /// process holds the reservation lock, and `Missing` (stale) otherwise.
    async fn read_pid_file_state(&self) -> Result<PidFileState> {
        let contents = match fs::read_to_string(&self.pid_file).await {
            Ok(contents) => contents,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                return if reservation_lock_is_active(&self.lock_file).await? {
                    Ok(PidFileState::Starting)
                } else {
                    Ok(PidFileState::Missing)
                };
            }
            Err(err) => {
                return Err(err).with_context(|| {
                    format!("failed to read pid file {}", self.pid_file.display())
                });
            }
        };
        if contents.trim().is_empty() {
            match inspect_empty_pid_reservation(&self.pid_file, &self.lock_file).await? {
                EmptyPidReservation::Active => {
                    return Ok(PidFileState::Starting);
                }
                EmptyPidReservation::Stale => {
                    return Ok(PidFileState::Missing);
                }
                EmptyPidReservation::Record(record) => return Ok(PidFileState::Running(record)),
            }
        }
        let record = serde_json::from_str(&contents)
            .with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
        Ok(PidFileState::Running(record))
    }

    /// Reads the pid-file state while the caller holds the reservation lock.
    ///
    /// With the lock held an empty pid file can only be a stale leftover, so
    /// it is deleted and reported as `Missing`; `Starting` is never returned.
    async fn read_pid_file_state_with_lock_held(&self) -> Result<PidFileState> {
        let contents = match fs::read_to_string(&self.pid_file).await {
            Ok(contents) => contents,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                return Ok(PidFileState::Missing);
            }
            Err(err) => {
                return Err(err).with_context(|| {
                    format!("failed to read pid file {}", self.pid_file.display())
                });
            }
        };
        if contents.trim().is_empty() {
            let _ = fs::remove_file(&self.pid_file).await;
            return Ok(PidFileState::Missing);
        }
        let record = serde_json::from_str(&contents)
            .with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
        Ok(PidFileState::Running(record))
    }

    /// Removes the pid file if it still holds `expected`, under the lock.
    ///
    /// Returns the state observed under the lock so callers can tell whether a
    /// concurrent writer replaced the record in the meantime.
    async fn refresh_after_stale_record(&self, expected: &PidRecord) -> Result<PidFileState> {
        let reservation_lock = self.acquire_reservation_lock().await?;
        let state = match self.read_pid_file_state_with_lock_held().await? {
            PidFileState::Running(record) if record == *expected => {
                let _ = fs::remove_file(&self.pid_file).await;
                PidFileState::Missing
            }
            state => state,
        };
        drop(reservation_lock);
        Ok(state)
    }

    /// Acquires the flock-based reservation lock, polling for up to
    /// [`START_TIMEOUT`]. The lock is released when the returned file drops.
    async fn acquire_reservation_lock(&self) -> Result<fs::File> {
        let reservation_lock = fs::OpenOptions::new()
            .create(true)
            .truncate(false)
            .write(true)
            .open(&self.lock_file)
            .await
            .with_context(|| {
                format!("failed to open pid lock file {}", self.lock_file.display())
            })?;
        let lock_deadline = tokio::time::Instant::now() + START_TIMEOUT;
        while !try_lock_file(&reservation_lock)? {
            if tokio::time::Instant::now() >= lock_deadline {
                bail!(
                    "timed out waiting for pid lock {}",
                    self.lock_file.display()
                );
            }
            sleep(STOP_POLL_INTERVAL).await;
        }
        Ok(reservation_lock)
    }

    /// Command-line arguments for the managed process variant.
    #[cfg(unix)]
    fn command_args(&self) -> Vec<&'static str> {
        match self.command_kind {
            PidCommandKind::AppServer {
                remote_control_enabled: true,
            } => vec![
                "--enable",
                "remote_control",
                "app-server",
                "--listen",
                "unix://",
            ],
            PidCommandKind::AppServer {
                remote_control_enabled: false,
            } => vec!["app-server", "--listen", "unix://"],
            PidCommandKind::UpdateLoop => vec!["app-server", "daemon", "pid-update-loop"],
        }
    }

    /// Graceful termination; both variants currently use the same plain
    /// per-process signal (the match is kept for symmetry with the forced path).
    fn terminate_process(&self, pid: u32) -> Result<()> {
        match self.command_kind {
            PidCommandKind::AppServer { .. } => terminate_process(pid),
            PidCommandKind::UpdateLoop => terminate_process(pid),
        }
    }

    /// Forced termination; the updater loop kills its whole process group.
    fn force_terminate_process(&self, pid: u32) -> Result<()> {
        match self.command_kind {
            PidCommandKind::AppServer { .. } => force_terminate_process(pid),
            PidCommandKind::UpdateLoop => force_terminate_process_group(pid),
        }
    }

    /// Whether `record` still describes a live process (pid plus start time).
    async fn record_is_active(&self, record: &PidRecord) -> Result<bool> {
        process_matches_record(record).await
    }
}
/// Returns true when signalling `pid` with signal 0 succeeds, or is denied
/// with EPERM (the process exists but belongs to another user).
#[cfg(unix)]
fn process_exists(pid: u32) -> bool {
    let Ok(pid) = libc::pid_t::try_from(pid) else {
        return false;
    };
    // SAFETY: kill with signal 0 only performs the existence/permission check.
    let result = unsafe { libc::kill(pid, 0) };
    result == 0 || std::io::Error::last_os_error().raw_os_error() == Some(libc::EPERM)
}

/// Sends SIGTERM to `pid`; a missing process (ESRCH) counts as success.
#[cfg(unix)]
fn terminate_process(pid: u32) -> Result<()> {
    let raw_pid = libc::pid_t::try_from(pid)
        .with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
    // SAFETY: kill has no memory-safety preconditions.
    let result = unsafe { libc::kill(raw_pid, libc::SIGTERM) };
    if result == 0 {
        return Ok(());
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::ESRCH) {
        return Ok(());
    }
    Err(err).with_context(|| format!("failed to terminate pid-managed app server {pid}"))
}

/// Sends SIGKILL to `pid`; a missing process (ESRCH) counts as success.
#[cfg(unix)]
fn force_terminate_process(pid: u32) -> Result<()> {
    let raw_pid = libc::pid_t::try_from(pid)
        .with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
    // SAFETY: kill has no memory-safety preconditions.
    let result = unsafe { libc::kill(raw_pid, libc::SIGKILL) };
    if result == 0 {
        return Ok(());
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::ESRCH) {
        return Ok(());
    }
    Err(err).with_context(|| format!("failed to force terminate pid-managed app server {pid}"))
}

/// Sends SIGKILL to the whole process group led by `pid` (negative pid).
#[cfg(unix)]
fn force_terminate_process_group(pid: u32) -> Result<()> {
    let raw_pid = libc::pid_t::try_from(pid)
        .with_context(|| format!("pid-managed updater pid {pid} is out of range"))?;
    // SAFETY: kill has no memory-safety preconditions; -pid targets the group.
    let result = unsafe { libc::kill(-raw_pid, libc::SIGKILL) };
    if result == 0 {
        return Ok(());
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::ESRCH) {
        return Ok(());
    }
    Err(err).with_context(|| format!("failed to force terminate pid-managed updater group {pid}"))
}
/// Non-Unix fallback: signal-based shutdown is unavailable.
#[cfg(not(unix))]
fn terminate_process(_pid: u32) -> Result<()> {
    Err(anyhow::Error::msg(
        "pid-managed app-server shutdown is unsupported on this platform",
    ))
}

/// Non-Unix fallback: signal-based shutdown is unavailable.
#[cfg(not(unix))]
fn force_terminate_process(_pid: u32) -> Result<()> {
    Err(anyhow::Error::msg(
        "pid-managed app-server shutdown is unsupported on this platform",
    ))
}

/// Non-Unix fallback: signal-based shutdown is unavailable.
#[cfg(not(unix))]
fn force_terminate_process_group(_pid: u32) -> Result<()> {
    Err(anyhow::Error::msg(
        "pid-managed updater shutdown is unsupported on this platform",
    ))
}
/// Checks that the process described by `record` is still the same
/// incarnation: the pid is alive and its start time matches the recorded one.
#[cfg(unix)]
async fn process_matches_record(record: &PidRecord) -> Result<bool> {
    if !process_exists(record.pid) {
        return Ok(false);
    }
    match read_process_start_time(record.pid).await {
        Ok(start_time) => Ok(start_time == record.process_start_time),
        // The process may have exited between the two checks; treat that race
        // as "no match" rather than an error.
        Err(_err) if !process_exists(record.pid) => Ok(false),
        Err(err) => Err(err),
    }
}

/// Non-Unix fallback: no process can match, so records are always stale.
#[cfg(not(unix))]
async fn process_matches_record(_record: &PidRecord) -> Result<bool> {
    Ok(false)
}

/// Interpretation of an empty pid file inspected under the reservation lock.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(not(unix), allow(dead_code))]
enum EmptyPidReservation {
    /// Another process holds the lock: a start is in flight.
    Active,
    /// Nobody holds the lock and the file is still empty: stale leftover.
    Stale,
    /// A record was published between the first read and the locked re-read.
    Record(PidRecord),
}
/// Attempts a non-blocking exclusive flock; `Ok(false)` means the lock is held
/// elsewhere. The lock is released when `file` is closed.
#[cfg(unix)]
fn try_lock_file(file: &fs::File) -> Result<bool> {
    use std::os::fd::AsRawFd;
    // SAFETY: the fd is valid for the lifetime of `file`.
    let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
    if result == 0 {
        return Ok(true);
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
        return Ok(false);
    }
    Err(err).context("failed to lock pid reservation")
}

/// Non-Unix fallback: flock-based reservations are unavailable.
#[cfg(not(unix))]
fn try_lock_file(_file: &fs::File) -> Result<bool> {
    bail!("pid-managed app-server startup is unsupported on this platform")
}
/// Returns true when some process currently holds the reservation lock.
///
/// Opens (creating if needed) the lock file and checks whether it can be
/// locked; failing to acquire the lock means another holder is active.
#[cfg(unix)]
async fn reservation_lock_is_active(path: &Path) -> Result<bool> {
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(path)
        .await
    {
        Ok(file) => file,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            // The containing directory is gone, so no lock can be held.
            return Ok(false);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to inspect pid lock file {}", path.display()));
        }
    };
    Ok(!try_lock_file(&file)?)
}

/// Non-Unix fallback: reservations never exist.
#[cfg(not(unix))]
async fn reservation_lock_is_active(_path: &Path) -> Result<bool> {
    Ok(false)
}
/// Resolves the meaning of an empty pid file by taking the reservation lock.
///
/// If the lock is held elsewhere the reservation is an in-flight start. After
/// acquiring the lock the pid file is re-read: it may have been filled in,
/// removed, or still be empty (stale, and deleted here).
#[cfg(unix)]
async fn inspect_empty_pid_reservation(
    pid_path: &Path,
    lock_path: &Path,
) -> Result<EmptyPidReservation> {
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(lock_path)
        .await
    {
        Ok(file) => file,
        Err(err) => {
            return Err(err).with_context(|| {
                format!("failed to inspect pid lock file {}", lock_path.display())
            });
        }
    };
    if !try_lock_file(&file)? {
        return Ok(EmptyPidReservation::Active);
    }
    let contents = match fs::read_to_string(pid_path).await {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Ok(EmptyPidReservation::Stale);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to reread pid file {}", pid_path.display()));
        }
    };
    if contents.trim().is_empty() {
        // Nobody is starting and the file never got a record: clean it up.
        let _ = fs::remove_file(pid_path).await;
        return Ok(EmptyPidReservation::Stale);
    }
    let record = serde_json::from_str(&contents)
        .with_context(|| format!("invalid pid file contents in {}", pid_path.display()))?;
    Ok(EmptyPidReservation::Record(record))
}

/// Non-Unix fallback: empty reservations are always stale.
#[cfg(not(unix))]
async fn inspect_empty_pid_reservation(
    _pid_path: &Path,
    _lock_path: &Path,
) -> Result<EmptyPidReservation> {
    Ok(EmptyPidReservation::Stale)
}
/// Reads a process's start time via `ps -o lstart=`, used as a stable
/// identity token to detect pid reuse.
#[cfg(unix)]
async fn read_process_start_time(pid: u32) -> Result<String> {
    let output = Command::new("ps")
        .args(["-p", &pid.to_string(), "-o", "lstart="])
        .output()
        .await
        .context("failed to invoke ps for pid-managed app server")?;
    if !output.status.success() {
        // ps exits non-zero when the pid does not exist.
        bail!("failed to read start time for pid-managed app server {pid}");
    }
    let start_time = String::from_utf8(output.stdout)
        .context("pid-managed app server start time was not utf-8")?;
    let start_time = start_time.trim();
    if start_time.is_empty() {
        bail!("pid-managed app server {pid} has no recorded start time");
    }
    Ok(start_time.to_string())
}
#[cfg(all(test, unix))]
#[path = "pid_tests.rs"]
mod tests;

View File

@@ -1,158 +0,0 @@
use std::time::Duration;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use super::PidBackend;
use super::PidCommandKind;
use super::PidFileState;
use super::PidRecord;
use super::try_lock_file;
/// An empty pid file whose reservation lock is held reads as `Starting`.
#[tokio::test]
async fn locked_empty_pid_file_is_treated_as_active_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    // Hold the reservation lock to simulate an in-flight start.
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Starting
    );
    // An active reservation must not be cleaned up.
    assert!(pid_file.exists());
}

/// An empty pid file with no lock holder reads as `Missing` and is removed.
#[tokio::test]
async fn unlocked_empty_pid_file_is_treated_as_stale_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Missing
    );
    assert!(!pid_file.exists());
}

/// `stop` blocks on a live reservation until the holder resolves it.
#[tokio::test]
async fn stop_waits_for_live_reservation_to_resolve() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    // Release the lock and remove the pid file shortly after stop starts.
    let cleanup = tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(50)).await;
        drop(reservation);
        tokio::fs::remove_file(pid_file)
            .await
            .expect("remove pid file");
    });
    backend.stop().await.expect("stop");
    cleanup.await.expect("cleanup task");
}

/// `start` clears a stale empty pid file and proceeds to the spawn attempt
/// (which then fails because the binary does not exist).
#[tokio::test]
async fn start_retries_stale_empty_pid_file_under_its_own_lock() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("missing-codex"),
        pid_file,
        /*remote_control_enabled*/ false,
    );
    let err = backend.start().await.expect_err("start");
    assert!(
        err.to_string()
            .starts_with("failed to spawn detached app-server process using ")
    );
}

/// Cleaning up a stale record must not delete a record written by someone else.
#[tokio::test]
async fn stale_record_cleanup_preserves_replacement_record() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let stale = PidRecord {
        pid: 1,
        process_start_time: "old".to_string(),
    };
    let replacement = PidRecord {
        pid: 2,
        process_start_time: "new".to_string(),
    };
    tokio::fs::write(
        &pid_file,
        serde_json::to_vec(&replacement).expect("serialize replacement"),
    )
    .await
    .expect("write replacement pid file");
    assert_eq!(
        backend
            .refresh_after_stale_record(&stale)
            .await
            .expect("cleanup"),
        PidFileState::Running(replacement)
    );
}

/// The updater variant launches via the hidden `daemon pid-update-loop` args.
#[test]
fn update_loop_uses_hidden_app_server_subcommand() {
    let backend = PidBackend {
        codex_bin: "codex".into(),
        pid_file: "updater.pid".into(),
        lock_file: "updater.pid.lock".into(),
        command_kind: PidCommandKind::UpdateLoop,
    };
    assert_eq!(
        backend.command_args(),
        vec!["app-server", "daemon", "pid-update-loop"]
    );
}

View File

@@ -1,131 +0,0 @@
use std::path::Path;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::RequestId;
use codex_uds::UnixStream;
use futures::SinkExt;
use futures::StreamExt;
use tokio::time::timeout;
use tokio_tungstenite::client_async;
use tokio_tungstenite::tungstenite::Message;
/// Upper bound on the whole probe handshake.
const PROBE_TIMEOUT: Duration = Duration::from_secs(2);
/// Client name reported in the JSON-RPC initialize request.
const CLIENT_NAME: &str = "codex_app_server_daemon";

/// Result of a successful control-socket probe.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ProbeInfo {
    /// Version parsed from the server's user-agent string.
    pub(crate) app_server_version: String,
}
/// Probes the app-server control socket, bounded by [`PROBE_TIMEOUT`].
pub(crate) async fn probe(socket_path: &Path) -> Result<ProbeInfo> {
    timeout(PROBE_TIMEOUT, probe_inner(socket_path))
        .await
        .with_context(|| {
            format!(
                "timed out probing app-server control socket {}",
                socket_path.display()
            )
        })?
}
/// Performs the JSON-RPC `initialize`/`initialized` handshake over a
/// websocket upgraded on the Unix control socket, returning the server
/// version extracted from the initialize response's user-agent.
async fn probe_inner(socket_path: &Path) -> Result<ProbeInfo> {
    let stream = UnixStream::connect(socket_path)
        .await
        .with_context(|| format!("failed to connect to {}", socket_path.display()))?;
    // The host in the websocket URL is irrelevant over a Unix socket.
    let (mut websocket, _response) = client_async("ws://localhost/", stream)
        .await
        .with_context(|| format!("failed to upgrade {}", socket_path.display()))?;
    let initialize = JSONRPCMessage::Request(JSONRPCRequest {
        id: RequestId::Integer(1),
        method: "initialize".to_string(),
        params: Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: CLIENT_NAME.to_string(),
                title: Some("Codex App Server Daemon".to_string()),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            capabilities: None,
        })?),
        trace: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialize)?.into()))
        .await
        .context("failed to send initialize request")?;
    // Skip non-text frames and unrelated messages until our response arrives.
    let response = loop {
        let frame = websocket
            .next()
            .await
            .ok_or_else(|| anyhow!("app-server closed before initialize response"))??;
        let Message::Text(payload) = frame else {
            continue;
        };
        let message = serde_json::from_str::<JSONRPCMessage>(&payload)?;
        if let JSONRPCMessage::Response(response) = message
            && response.id == RequestId::Integer(1)
        {
            break response;
        }
    };
    let initialize_response = serde_json::from_value::<InitializeResponse>(response.result)?;
    let initialized = JSONRPCMessage::Notification(JSONRPCNotification {
        method: "initialized".to_string(),
        params: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialized)?.into()))
        .await
        .context("failed to send initialized notification")?;
    // Best-effort close; the probe result does not depend on it.
    websocket.close(None).await.ok();
    Ok(ProbeInfo {
        app_server_version: parse_version_from_user_agent(&initialize_response.user_agent)?,
    })
}
/// Extracts the version token from an `originator/version ...` user-agent.
///
/// The version is everything after the first `/` up to the first whitespace;
/// a missing separator or an empty version is an error.
fn parse_version_from_user_agent(user_agent: &str) -> Result<String> {
    let rest = match user_agent.split_once('/') {
        Some((_originator, rest)) => rest,
        None => return Err(anyhow!("app-server user-agent omitted version separator")),
    };
    match rest.split_whitespace().next() {
        Some(version) if !version.is_empty() => Ok(version.to_string()),
        _ => Err(anyhow!("app-server user-agent omitted version")),
    }
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;
    use super::parse_version_from_user_agent;
    /// The version is the token immediately after the first '/'.
    #[test]
    fn parses_version_from_codex_user_agent() {
        assert_eq!(
            parse_version_from_user_agent(
                "codex_app_server_daemon/1.2.3 (Linux 6.8.0; x86_64) codex_cli_rs/1.2.3",
            )
            .expect("version"),
            "1.2.3"
        );
    }
    /// A user-agent without '/' has no version separator and must fail.
    #[test]
    fn rejects_user_agent_without_version() {
        assert!(parse_version_from_user_agent("codex_app_server_daemon").is_err());
    }
}

View File

@@ -1,630 +0,0 @@
mod backend;
mod client;
mod managed_install;
mod settings;
mod update_loop;
use std::path::PathBuf;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
pub use backend::BackendKind;
use backend::BackendPaths;
use codex_app_server_transport::app_server_control_socket_path;
use codex_core::config::find_codex_home;
use managed_install::managed_codex_bin;
#[cfg(unix)]
use managed_install::managed_codex_version;
use serde::Serialize;
use settings::DaemonSettings;
use tokio::time::sleep;
/// How often to poll the control socket while waiting for readiness.
const START_POLL_INTERVAL: Duration = Duration::from_millis(50);
/// How long to wait for the control socket to answer after a start.
const START_TIMEOUT: Duration = Duration::from_secs(10);
// NOTE(review): presumably the wait bound used by acquire_operation_lock
// (defined outside this view) — confirm against that implementation.
const OPERATION_LOCK_TIMEOUT: Duration = Duration::from_secs(75);
/// Pid file for the managed app-server process.
const PID_FILE_NAME: &str = "app-server.pid";
/// Pid file for the detached updater loop.
const UPDATE_PID_FILE_NAME: &str = "app-server-updater.pid";
/// Lock file serializing mutating lifecycle operations per CODEX_HOME.
const OPERATION_LOCK_FILE_NAME: &str = "daemon.lock";
/// Persisted daemon launch settings.
const SETTINGS_FILE_NAME: &str = "settings.json";
/// Directory under CODEX_HOME holding all daemon state files.
const STATE_DIR_NAME: &str = "app-server-daemon";
/// Lifecycle commands the daemon can execute.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LifecycleCommand {
    Start,
    Restart,
    Stop,
    /// Read-only: probe the running server and report its version.
    Version,
}

/// Outcome reported by a lifecycle command (serialized in CLI output).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum LifecycleStatus {
    AlreadyRunning,
    Started,
    Restarted,
    Stopped,
    NotRunning,
    Running,
}

/// JSON-serializable result of a lifecycle command.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LifecycleOutput {
    pub status: LifecycleStatus,
    /// Which backend manages the process, when one is known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    /// Pid of a newly spawned process, when one was spawned.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<u32>,
    pub socket_path: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cli_version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}
/// Options controlling daemon bootstrap.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BootstrapOptions {
    pub remote_control_enabled: bool,
}

/// Outcome of a bootstrap run (currently always `Bootstrapped`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BootstrapStatus {
    Bootstrapped,
}

/// JSON-serializable result of a bootstrap run.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BootstrapOutput {
    pub status: BootstrapStatus,
    pub backend: BackendKind,
    pub auto_update_enabled: bool,
    pub remote_control_enabled: bool,
    /// Path of the managed codex binary spawned by the daemon.
    pub managed_codex_path: PathBuf,
    pub socket_path: PathBuf,
    pub cli_version: String,
    pub app_server_version: String,
}
/// Requested remote-control launch setting.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteControlMode {
    Enabled,
    Disabled,
}
impl RemoteControlMode {
    /// Returns true when remote control should be enabled.
    fn is_enabled(self) -> bool {
        match self {
            Self::Enabled => true,
            Self::Disabled => false,
        }
    }
}
/// Outcome of an enable/disable remote-control request.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum RemoteControlStatus {
    Enabled,
    Disabled,
    /// Requested value was already persisted; nothing was restarted.
    AlreadyEnabled,
    AlreadyDisabled,
}

/// JSON-serializable result of a remote-control request.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RemoteControlOutput {
    pub status: RemoteControlStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    pub remote_control_enabled: bool,
    pub socket_path: PathBuf,
    pub cli_version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}

/// Result of an opportunistic restart attempt from the updater loop.
#[cfg(unix)]
pub(crate) enum RestartIfRunningOutcome {
    /// The restart finished (or was unnecessary).
    Completed,
    /// Another lifecycle operation held the lock; try again later.
    Busy,
}
/// Entry point for lifecycle commands; errors on non-Unix platforms.
pub async fn run(command: LifecycleCommand) -> Result<LifecycleOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.run(command).await
}

/// Entry point for daemon bootstrap; errors on non-Unix platforms.
pub async fn bootstrap(options: BootstrapOptions) -> Result<BootstrapOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.bootstrap(options).await
}

/// Entry point for enable/disable remote control; errors on non-Unix platforms.
pub async fn set_remote_control(mode: RemoteControlMode) -> Result<RemoteControlOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.set_remote_control(mode).await
}

/// Entry point for the hidden pid-update-loop subcommand.
pub async fn run_pid_update_loop() -> Result<()> {
    ensure_supported_platform()?;
    update_loop::run().await
}
/// On Unix the daemon lifecycle is always supported.
#[cfg(unix)]
fn ensure_supported_platform() -> Result<()> {
    Ok(())
}

/// Non-Unix platforms cannot run the daemon lifecycle at all.
#[cfg(not(unix))]
fn ensure_supported_platform() -> Result<()> {
    Err(anyhow!(
        "codex app-server daemon lifecycle is only supported on Unix platforms"
    ))
}
/// Resolved filesystem layout for one CODEX_HOME's daemon instance.
struct Daemon {
    /// Unix control socket the app-server listens on.
    socket_path: PathBuf,
    /// Pid file for the managed app-server.
    pid_file: PathBuf,
    /// Pid file for the detached updater loop.
    update_pid_file: PathBuf,
    /// Lock serializing mutating lifecycle operations.
    operation_lock_file: PathBuf,
    /// Persisted launch settings.
    settings_file: PathBuf,
    /// Managed codex binary spawned by this daemon.
    managed_codex_bin: PathBuf,
}
impl Daemon {
    /// Resolves all daemon paths from `CODEX_HOME`.
    fn from_environment() -> Result<Self> {
        let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
        let socket_path = app_server_control_socket_path(codex_home.as_path())?
            .as_path()
            .to_path_buf();
        // All daemon-local state lives under `CODEX_HOME/app-server-daemon/`.
        let state_dir = codex_home.as_path().join(STATE_DIR_NAME);
        Ok(Self {
            socket_path,
            pid_file: state_dir.join(PID_FILE_NAME),
            update_pid_file: state_dir.join(UPDATE_PID_FILE_NAME),
            operation_lock_file: state_dir.join(OPERATION_LOCK_FILE_NAME),
            settings_file: state_dir.join(SETTINGS_FILE_NAME),
            managed_codex_bin: managed_codex_bin(codex_home.as_path()),
        })
    }
    /// Dispatches a lifecycle command, serializing mutating commands behind
    /// the per-CODEX_HOME operation lock; `Version` is read-only and runs
    /// without the lock.
    async fn run(&self, command: LifecycleCommand) -> Result<LifecycleOutput> {
        match command {
            LifecycleCommand::Start => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.start().await
            }
            LifecycleCommand::Restart => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.restart().await
            }
            LifecycleCommand::Stop => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.stop().await
            }
            LifecycleCommand::Version => self.version().await,
        }
    }
    /// Starts the managed app-server if it is not already serving.
    ///
    /// Reports `AlreadyRunning` when the control socket answers a probe or a
    /// managed backend instance is alive; otherwise spawns the managed binary
    /// and waits until the socket becomes ready.
    async fn start(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if let Ok(info) = client::probe(&self.socket_path).await {
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                self.running_backend(&settings).await?,
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }
        if self.running_backend_instance(&settings).await?.is_some() {
            // A backend process exists but the socket did not answer yet.
            let info = self.wait_until_ready().await?;
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                Some(BackendKind::Pid),
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }
        self.ensure_managed_codex_bin()?;
        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Started,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }
    /// Stops any managed app-server and starts it again.
    ///
    /// Refuses to act when something unmanaged is already serving the socket.
    async fn restart(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        self.ensure_managed_codex_bin()?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }
        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Restarted,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }
    /// Restarts the managed app-server only when it is running a version
    /// different from the managed binary. Returns `Busy` instead of blocking
    /// when another lifecycle operation holds the operation lock.
    #[cfg(unix)]
    pub(crate) async fn try_restart_if_running(&self) -> Result<RestartIfRunningOutcome> {
        let operation_lock = self.open_operation_lock_file().await?;
        if !try_lock_file(&operation_lock)? {
            return Ok(RestartIfRunningOutcome::Busy);
        }
        let settings = self.load_settings().await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            let Ok(info) = client::probe(&self.socket_path).await else {
                // The backend exists but is not answering; leave it alone.
                return Ok(RestartIfRunningOutcome::Completed);
            };
            let managed_version = managed_codex_version(&self.managed_codex_bin).await?;
            if info.app_server_version == managed_version {
                // Already running the managed version; nothing to do.
                return Ok(RestartIfRunningOutcome::Completed);
            }
            backend.stop().await?;
            let _ = self.start_managed_backend(&settings).await?;
            self.wait_until_ready().await?;
            return Ok(RestartIfRunningOutcome::Completed);
        }
        if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        Ok(RestartIfRunningOutcome::Completed)
    }
    /// Stops the managed app-server, or reports `NotRunning` when nothing
    /// managed is alive. Errors when an unmanaged server owns the socket.
    async fn stop(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
            return Ok(self.output(
                LifecycleStatus::Stopped,
                Some(BackendKind::Pid),
                /*pid*/ None,
                /*app_server_version*/ None,
            ));
        }
        if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        Ok(self.output(
            LifecycleStatus::NotRunning,
            /*backend*/ None,
            /*pid*/ None,
            /*app_server_version*/ None,
        ))
    }
    /// Probes the running app-server and reports its version; errors when the
    /// control socket does not answer.
    async fn version(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        let info = client::probe(&self.socket_path).await?;
        Ok(self.output(
            LifecycleStatus::Running,
            self.running_backend(&settings).await?,
            /*pid*/ None,
            Some(info.app_server_version),
        ))
    }
    /// Polls the control socket until a probe succeeds or [`START_TIMEOUT`]
    /// elapses, returning the final probe error with context on timeout.
    async fn wait_until_ready(&self) -> Result<client::ProbeInfo> {
        let deadline = tokio::time::Instant::now() + START_TIMEOUT;
        loop {
            match client::probe(&self.socket_path).await {
                Ok(info) => return Ok(info),
                Err(err) if tokio::time::Instant::now() < deadline => {
                    // Intermediate errors are expected while the server boots.
                    let _ = err;
                    sleep(START_POLL_INTERVAL).await;
                }
                Err(err) => {
                    return Err(err).with_context(|| {
                        format!(
                            "app server did not become ready on {}",
                            self.socket_path.display()
                        )
                    });
                }
            }
        }
    }
    /// Runs bootstrap with the daemon-wide operation lock held.
    async fn bootstrap(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        let _operation_lock = self.acquire_operation_lock().await?;
        self.bootstrap_locked(options).await
    }
    /// Enables or disables remote control, persisting the setting and
    /// restarting the managed backend when the value actually changes.
    async fn set_remote_control(&self, mode: RemoteControlMode) -> Result<RemoteControlOutput> {
        // Serialize with other daemon lifecycle operations.
        let _operation_lock = self.acquire_operation_lock().await?;
        let previous_settings = self.load_settings().await?;
        let mut settings = previous_settings.clone();
        let remote_control_enabled = mode.is_enabled();
        // Look up the backend using the settings it was originally started with.
        let backend = self.running_backend_instance(&previous_settings).await?;
        // A listener we did not start is a hard error rather than adopted.
        if backend.is_none() && client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        if settings.remote_control_enabled == remote_control_enabled {
            // No-op change: report current state; still wait for readiness
            // (and pick up the server version) when a backend is running.
            let info = if backend.is_some() {
                Some(self.wait_until_ready().await?)
            } else {
                None
            };
            return Ok(self.remote_control_output(
                already_remote_control_status(mode),
                backend.map(|_| BackendKind::Pid),
                remote_control_enabled,
                info.map(|info| info.app_server_version),
            ));
        }
        // Persist the new setting before restarting so the backend picks it up.
        settings.remote_control_enabled = remote_control_enabled;
        settings.save(&self.settings_file).await?;
        let app_server_version = if let Some(backend) = backend {
            self.ensure_managed_codex_bin()?;
            // Restart so the new remote-control setting takes effect.
            backend.stop().await?;
            let _ = self.start_managed_backend(&settings).await?;
            Some(self.wait_until_ready().await?.app_server_version)
        } else {
            None
        };
        Ok(self.remote_control_output(
            remote_control_status(mode),
            app_server_version.as_ref().map(|_| BackendKind::Pid),
            remote_control_enabled,
            app_server_version,
        ))
    }
    /// Performs the bootstrap sequence; the caller must already hold the
    /// operation lock.
    ///
    /// Saves settings, restarts any managed backend, (re)starts the update
    /// loop, and waits for the server to answer on the socket.
    async fn bootstrap_locked(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        self.ensure_managed_codex_bin()?;
        let settings = DaemonSettings {
            remote_control_enabled: options.remote_control_enabled,
        };
        // Refuse to take over a listener that this daemon does not manage.
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        // Persist settings first so the freshly started backend reads them.
        settings.save(&self.settings_file).await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }
        let backend = backend::pid_backend(self.backend_paths(&settings));
        backend.start().await?;
        // Restart the auto-update loop so it runs against the new settings.
        let updater = backend::pid_update_loop_backend(self.backend_paths(&settings));
        if updater.is_starting_or_running().await? {
            updater.stop().await?;
        }
        updater.start().await?;
        let info = self.wait_until_ready().await?;
        Ok(BootstrapOutput {
            status: BootstrapStatus::Bootstrapped,
            backend: BackendKind::Pid,
            auto_update_enabled: true,
            remote_control_enabled: settings.remote_control_enabled,
            managed_codex_path: self.managed_codex_bin.clone(),
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version: info.app_server_version,
        })
    }
async fn running_backend(&self, settings: &DaemonSettings) -> Result<Option<BackendKind>> {
Ok(self
.running_backend_instance(settings)
.await?
.map(|_| BackendKind::Pid))
}
async fn running_backend_instance(
&self,
settings: &DaemonSettings,
) -> Result<Option<backend::PidBackend>> {
let backend = backend::pid_backend(self.backend_paths(settings));
if backend.is_starting_or_running().await? {
return Ok(Some(backend));
}
Ok(None)
}
async fn start_managed_backend(&self, settings: &DaemonSettings) -> Result<Option<u32>> {
let backend = backend::pid_backend(self.backend_paths(settings));
backend.start().await
}
fn ensure_managed_codex_bin(&self) -> Result<()> {
if self.managed_codex_bin.is_file() {
return Ok(());
}
Err(anyhow!(
"managed standalone Codex install not found at {}; install Codex first",
self.managed_codex_bin.display()
))
}
    /// Assembles the path/config bundle handed to the pid-managed backend.
    fn backend_paths(&self, settings: &DaemonSettings) -> BackendPaths {
        BackendPaths {
            codex_bin: self.managed_codex_bin.clone(),
            pid_file: self.pid_file.clone(),
            update_pid_file: self.update_pid_file.clone(),
            remote_control_enabled: settings.remote_control_enabled,
        }
    }
    /// Reads daemon settings from disk (defaults when the file is absent).
    async fn load_settings(&self) -> Result<DaemonSettings> {
        DaemonSettings::load(&self.settings_file).await
    }
async fn acquire_operation_lock(&self) -> Result<tokio::fs::File> {
let operation_lock = self.open_operation_lock_file().await?;
let deadline = tokio::time::Instant::now() + OPERATION_LOCK_TIMEOUT;
while !try_lock_file(&operation_lock)? {
if tokio::time::Instant::now() >= deadline {
return Err(anyhow!(
"timed out waiting for daemon operation lock {}",
self.operation_lock_file.display()
));
}
sleep(START_POLL_INTERVAL).await;
}
Ok(operation_lock)
}
async fn open_operation_lock_file(&self) -> Result<tokio::fs::File> {
if let Some(parent) = self.operation_lock_file.parent() {
tokio::fs::create_dir_all(parent).await.with_context(|| {
format!(
"failed to create daemon state directory {}",
parent.display()
)
})?;
}
tokio::fs::OpenOptions::new()
.create(true)
.truncate(false)
.write(true)
.open(&self.operation_lock_file)
.await
.with_context(|| {
format!(
"failed to open daemon operation lock {}",
self.operation_lock_file.display()
)
})
}
    /// Builds a `LifecycleOutput` for status-reporting commands, filling in
    /// the socket path and this CLI's compile-time version.
    fn output(
        &self,
        status: LifecycleStatus,
        backend: Option<BackendKind>,
        pid: Option<u32>,
        app_server_version: Option<String>,
    ) -> LifecycleOutput {
        LifecycleOutput {
            status,
            backend,
            pid,
            socket_path: self.socket_path.clone(),
            cli_version: Some(env!("CARGO_PKG_VERSION").to_string()),
            app_server_version,
        }
    }
    /// Builds a `RemoteControlOutput`, filling in the socket path and this
    /// CLI's compile-time version.
    fn remote_control_output(
        &self,
        status: RemoteControlStatus,
        backend: Option<BackendKind>,
        remote_control_enabled: bool,
        app_server_version: Option<String>,
    ) -> RemoteControlOutput {
        RemoteControlOutput {
            status,
            backend,
            remote_control_enabled,
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version,
        }
    }
}
/// Status reported when the requested remote-control mode was newly applied.
fn remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
    match mode {
        RemoteControlMode::Enabled => RemoteControlStatus::Enabled,
        RemoteControlMode::Disabled => RemoteControlStatus::Disabled,
    }
}
/// Status reported when the requested remote-control mode was already active.
fn already_remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
    match mode {
        RemoteControlMode::Enabled => RemoteControlStatus::AlreadyEnabled,
        RemoteControlMode::Disabled => RemoteControlStatus::AlreadyDisabled,
    }
}
#[cfg(unix)]
/// Attempts a non-blocking exclusive `flock` on `file`.
///
/// Returns `Ok(true)` when the lock was acquired, `Ok(false)` when another
/// process currently holds it, and an error for any other OS failure.
fn try_lock_file(file: &tokio::fs::File) -> Result<bool> {
    use std::os::fd::AsRawFd;
    // SAFETY: `as_raw_fd` yields a valid, open descriptor that `file` owns
    // and keeps alive for the duration of this call; `flock` does not retain
    // the descriptor beyond the call.
    let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
    if result == 0 {
        return Ok(true);
    }
    let err = std::io::Error::last_os_error();
    // EWOULDBLOCK means the lock is held elsewhere: report "not acquired".
    if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
        return Ok(false);
    }
    Err(err).context("failed to lock daemon operation")
}
#[cfg(not(unix))]
/// Fallback for non-Unix platforms: always reports the lock as acquired, so
/// daemon operations are not serialized there.
fn try_lock_file(_file: &tokio::fs::File) -> Result<bool> {
    Ok(true)
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;
    use super::BootstrapStatus;
    use super::LifecycleStatus;
    use super::RemoteControlStatus;
    // These tests pin the wire format: the status enums must serialize as
    // camelCase strings because clients match on the JSON values.
    #[test]
    fn lifecycle_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&LifecycleStatus::AlreadyRunning).expect("serialize"),
            "\"alreadyRunning\""
        );
    }
    #[test]
    fn bootstrap_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&BootstrapStatus::Bootstrapped).expect("serialize"),
            "\"bootstrapped\""
        );
    }
    #[test]
    fn remote_control_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&RemoteControlStatus::AlreadyEnabled).expect("serialize"),
            "\"alreadyEnabled\""
        );
    }
}

View File

@@ -1,66 +0,0 @@
use std::path::Path;
use std::path::PathBuf;
#[cfg(unix)]
use anyhow::Context;
#[cfg(unix)]
use anyhow::Result;
#[cfg(unix)]
use anyhow::anyhow;
#[cfg(unix)]
use tokio::process::Command;
/// Location of the managed standalone Codex binary under `codex_home`:
/// `<codex_home>/packages/standalone/current/<binary name>`.
pub(crate) fn managed_codex_bin(codex_home: &Path) -> PathBuf {
    let mut path = codex_home.join("packages");
    path.push("standalone");
    path.push("current");
    path.push(managed_codex_file_name());
    path
}
#[cfg(unix)]
/// Runs `<codex_bin> --version` and extracts the reported version string.
///
/// # Errors
/// Fails when the binary cannot be spawned, exits with a non-zero status,
/// prints non-UTF-8 output, or prints output `parse_codex_version` rejects.
pub(crate) async fn managed_codex_version(codex_bin: &Path) -> Result<String> {
    let output = Command::new(codex_bin)
        .arg("--version")
        .output()
        .await
        .with_context(|| {
            format!(
                "failed to invoke managed Codex binary {}",
                codex_bin.display()
            )
        })?;
    if !output.status.success() {
        return Err(anyhow!(
            "managed Codex binary {} exited with status {}",
            codex_bin.display(),
            output.status
        ));
    }
    let stdout = String::from_utf8(output.stdout).with_context(|| {
        format!(
            "managed Codex version was not utf-8: {}",
            codex_bin.display()
        )
    })?;
    parse_codex_version(&stdout)
}
/// File name of the managed Codex binary for the current platform.
fn managed_codex_file_name() -> &'static str {
    match cfg!(windows) {
        true => "codex.exe",
        false => "codex",
    }
}
#[cfg(unix)]
/// Extracts the version (the second whitespace-separated token) from
/// `codex --version` output, e.g. `"codex 1.2.3\n"` -> `"1.2.3"`.
///
/// # Errors
/// Fails when the output has fewer than two tokens.
fn parse_codex_version(output: &str) -> Result<String> {
    // `split_whitespace` never yields empty tokens, so no emptiness check is
    // needed: the second token, when present, is the version.
    output
        .split_whitespace()
        .nth(1)
        .map(str::to_string)
        .ok_or_else(|| anyhow!("managed Codex version output was malformed"))
}
#[cfg(all(test, unix))]
#[path = "managed_install_tests.rs"]
mod tests;

View File

@@ -1,16 +0,0 @@
use pretty_assertions::assert_eq;
use super::parse_codex_version;
#[test]
fn parses_codex_cli_version_output() {
    // Happy path: the version is the second whitespace-separated token.
    assert_eq!(
        parse_codex_version("codex 1.2.3\n").expect("version"),
        "1.2.3"
    );
}
#[test]
fn rejects_malformed_codex_cli_version_output() {
    // Output with no version token must be reported as malformed.
    assert!(parse_codex_version("codex\n").is_err());
}

View File

@@ -1,63 +0,0 @@
use std::path::Path;
use anyhow::Context;
use anyhow::Result;
use serde::Deserialize;
use serde::Serialize;
use tokio::fs;
/// Persisted daemon configuration, stored as pretty-printed camelCase JSON.
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct DaemonSettings {
    // Whether the managed app server should accept remote control.
    pub(crate) remote_control_enabled: bool,
}
impl DaemonSettings {
    /// Loads settings from `path`; a missing file yields the defaults.
    ///
    /// # Errors
    /// Fails on any read error other than `NotFound`, or on invalid JSON.
    pub(crate) async fn load(path: &Path) -> Result<Self> {
        let contents = match fs::read_to_string(path).await {
            Ok(contents) => contents,
            // An absent settings file is not an error: fall back to defaults.
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Self::default()),
            Err(err) => {
                return Err(err)
                    .with_context(|| format!("failed to read daemon settings {}", path.display()));
            }
        };
        serde_json::from_str(&contents)
            .with_context(|| format!("failed to parse daemon settings {}", path.display()))
    }
    /// Writes settings to `path` as pretty JSON, creating parent directories
    /// as needed.
    pub(crate) async fn save(&self, path: &Path) -> Result<()> {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create daemon settings directory {}",
                    parent.display()
                )
            })?;
        }
        let contents = serde_json::to_vec_pretty(self).context("failed to serialize settings")?;
        fs::write(path, contents)
            .await
            .with_context(|| format!("failed to write daemon settings {}", path.display()))
    }
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;
    use super::DaemonSettings;
    // Pins the on-disk format: field names must serialize as camelCase.
    #[test]
    fn daemon_settings_use_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&DaemonSettings {
                remote_control_enabled: true,
            })
            .expect("serialize"),
            r#"{"remoteControlEnabled":true}"#
        );
    }
}

View File

@@ -1,132 +0,0 @@
#[cfg(unix)]
use std::process::Stdio;
#[cfg(unix)]
use std::time::Duration;
#[cfg(unix)]
use anyhow::Context;
use anyhow::Result;
#[cfg(not(unix))]
use anyhow::bail;
#[cfg(unix)]
use futures::FutureExt;
#[cfg(unix)]
use tokio::io::AsyncWriteExt;
#[cfg(unix)]
use tokio::process::Command;
#[cfg(unix)]
use tokio::signal::unix::Signal;
#[cfg(unix)]
use tokio::signal::unix::SignalKind;
#[cfg(unix)]
use tokio::signal::unix::signal;
#[cfg(unix)]
use tokio::time::sleep;
#[cfg(unix)]
use crate::Daemon;
#[cfg(unix)]
use crate::RestartIfRunningOutcome;
#[cfg(unix)]
/// Delay before the first update attempt after the updater loop starts.
const INITIAL_UPDATE_DELAY: Duration = Duration::from_secs(5 * 60);
#[cfg(unix)]
/// Pause between retries while the daemon reports itself busy.
const RESTART_RETRY_INTERVAL: Duration = Duration::from_millis(50);
#[cfg(unix)]
/// Interval between periodic update attempts.
const UPDATE_INTERVAL: Duration = Duration::from_secs(60 * 60);
#[cfg(unix)]
/// Periodic auto-update loop: waits an initial delay, then once per
/// `UPDATE_INTERVAL` installs the latest standalone Codex and restarts the
/// managed server. Exits cleanly when SIGTERM is received.
pub(crate) async fn run() -> Result<()> {
    let mut terminate =
        signal(SignalKind::terminate()).context("failed to install updater shutdown handler")?;
    if sleep_or_terminate(INITIAL_UPDATE_DELAY, &mut terminate).await {
        return Ok(());
    }
    loop {
        // Update failures are swallowed so one transient error does not kill
        // the loop; the next interval tries again.
        match update_once(&mut terminate).await {
            Ok(UpdateLoopControl::Continue) | Err(_) => {}
            Ok(UpdateLoopControl::Stop) => return Ok(()),
        }
        if sleep_or_terminate(UPDATE_INTERVAL, &mut terminate).await {
            return Ok(());
        }
    }
}
#[cfg(not(unix))]
/// Stub for non-Unix platforms, where the pid-managed updater loop is
/// unsupported.
pub(crate) async fn run() -> Result<()> {
    bail!("pid-managed updater loop is unsupported on this platform")
}
#[cfg(unix)]
/// Sleeps for `duration`, returning `true` if SIGTERM arrived first.
async fn sleep_or_terminate(duration: Duration, terminate: &mut Signal) -> bool {
    tokio::select! {
        _ = sleep(duration) => false,
        _ = terminate.recv() => true,
    }
}
#[cfg(unix)]
/// Whether the updater loop should keep running or exit.
enum UpdateLoopControl {
    Continue,
    Stop,
}
#[cfg(unix)]
/// Installs the latest standalone build, then retries restarting the managed
/// server until the daemon is no longer busy — or stops early on SIGTERM.
async fn update_once(terminate: &mut Signal) -> Result<UpdateLoopControl> {
    install_latest_standalone().await?;
    let daemon = Daemon::from_environment()?;
    loop {
        // Poll for a pending SIGTERM without blocking.
        if terminate.recv().now_or_never().flatten().is_some() {
            return Ok(UpdateLoopControl::Stop);
        }
        match daemon.try_restart_if_running().await? {
            RestartIfRunningOutcome::Completed => return Ok(UpdateLoopControl::Continue),
            RestartIfRunningOutcome::Busy => {
                // The daemon is mid-operation; back off briefly and retry.
                if sleep_or_terminate(RESTART_RETRY_INTERVAL, terminate).await {
                    return Ok(UpdateLoopControl::Stop);
                }
            }
        }
    }
}
#[cfg(unix)]
/// Downloads the standalone Codex install script and pipes it to `/bin/sh`.
///
/// NOTE(review): this executes a remotely fetched shell script with no
/// checksum or signature verification beyond HTTPS transport security —
/// consider verifying the artifact before execution.
async fn install_latest_standalone() -> Result<()> {
    let script = reqwest::get("https://chatgpt.com/codex/install.sh")
        .await
        .context("failed to fetch standalone Codex updater")?
        .error_for_status()
        .context("standalone Codex updater request failed")?
        .bytes()
        .await
        .context("failed to read standalone Codex updater")?;
    // `sh -s` reads the script from stdin; its output is discarded.
    let mut child = Command::new("/bin/sh")
        .arg("-s")
        .stdin(Stdio::piped())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
        .context("failed to invoke standalone Codex updater")?;
    let mut stdin = child
        .stdin
        .take()
        .context("standalone Codex updater stdin was unavailable")?;
    stdin
        .write_all(&script)
        .await
        .context("failed to pass standalone Codex updater to shell")?;
    // Dropping stdin closes the pipe so the shell sees EOF and can finish.
    drop(stdin);
    let status = child
        .wait()
        .await
        .context("failed to wait for standalone Codex updater")?;
    if status.success() {
        Ok(())
    } else {
        anyhow::bail!("standalone Codex updater exited with status {status}")
    }
}

View File

@@ -1,5 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "AttestationGenerateParams",
"type": "object"
}

View File

@@ -1,14 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"token": {
"description": "Opaque client attestation token.",
"type": "string"
}
},
"required": [
"token"
],
"title": "AttestationGenerateResponse",
"type": "object"
}

View File

@@ -1259,11 +1259,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"
@@ -2088,37 +2083,16 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -2130,7 +2104,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -6204,4 +6177,4 @@
}
],
"title": "ClientRequest"
}
}

View File

@@ -2737,7 +2737,7 @@
"type": "string"
},
"RemoteControlStatusChangedNotification": {
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -2745,15 +2745,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"type": "object"

View File

@@ -121,9 +121,6 @@
],
"type": "object"
},
"AttestationGenerateParams": {
"type": "object"
},
"ChatgptAuthTokensRefreshParams": {
"properties": {
"previousAccountId": {
@@ -1921,31 +1918,6 @@
"title": "Account/chatgptAuthTokens/refreshRequest",
"type": "object"
},
{
"description": "Generate a fresh upstream attestation result on demand.",
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"attestation/generate"
],
"title": "Attestation/generateRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/AttestationGenerateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Attestation/generateRequest",
"type": "object"
},
{
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
"properties": {

View File

@@ -83,25 +83,6 @@
"title": "ApplyPatchApprovalResponse",
"type": "object"
},
"AttestationGenerateParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "AttestationGenerateParams",
"type": "object"
},
"AttestationGenerateResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"token": {
"description": "Opaque client attestation token.",
"type": "string"
}
},
"required": [
"token"
],
"title": "AttestationGenerateResponse",
"type": "object"
},
"ChatgptAuthTokensRefreshParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
@@ -2639,11 +2620,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"
@@ -5231,31 +5207,6 @@
"title": "Account/chatgptAuthTokens/refreshRequest",
"type": "object"
},
{
"description": "Generate a fresh upstream attestation result on demand.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
},
"method": {
"enum": [
"attestation/generate"
],
"title": "Attestation/generateRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/AttestationGenerateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Attestation/generateRequest",
"type": "object"
},
{
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
"properties": {
@@ -12270,20 +12221,10 @@
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/v2/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"shareTargets": {
"items": {
"$ref": "#/definitions/v2/PluginSharePrincipal"
},
@@ -12344,10 +12285,14 @@
},
"plugin": {
"$ref": "#/definitions/v2/PluginSummary"
},
"shareUrl": {
"type": "string"
}
},
"required": [
"plugin"
"plugin",
"shareUrl"
],
"type": "object"
},
@@ -12382,27 +12327,15 @@
},
"principalType": {
"$ref": "#/definitions/v2/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/v2/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
@@ -12473,38 +12406,17 @@
},
"principalType": {
"$ref": "#/definitions/v2/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/v2/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/v2/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -12516,7 +12428,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -12526,9 +12437,6 @@
"PluginShareUpdateTargetsResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/v2/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/v2/PluginSharePrincipal"
@@ -12537,7 +12445,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",
@@ -13384,7 +13291,7 @@
},
"RemoteControlStatusChangedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -13392,15 +13299,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/v2/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"title": "RemoteControlStatusChangedNotification",
@@ -18493,4 +18396,4 @@
},
"title": "CodexAppServerProtocol",
"type": "object"
}
}

View File

@@ -6409,11 +6409,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"
@@ -8819,20 +8814,10 @@
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"shareTargets": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
@@ -8893,10 +8878,14 @@
},
"plugin": {
"$ref": "#/definitions/PluginSummary"
},
"shareUrl": {
"type": "string"
}
},
"required": [
"plugin"
"plugin",
"shareUrl"
],
"type": "object"
},
@@ -8931,27 +8920,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
@@ -9022,38 +8999,17 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -9065,7 +9021,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -9075,9 +9030,6 @@
"PluginShareUpdateTargetsResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
@@ -9086,7 +9038,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",
@@ -9933,7 +9884,7 @@
},
"RemoteControlStatusChangedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -9941,15 +9892,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"title": "RemoteControlStatusChangedNotification",
@@ -16316,4 +16263,4 @@
},
"title": "CodexAppServerProtocolV2",
"type": "object"
}
}

View File

@@ -39,11 +39,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"

View File

@@ -246,20 +246,10 @@
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"shareTargets": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
@@ -280,14 +270,6 @@
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
@@ -298,27 +280,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",

View File

@@ -300,20 +300,10 @@
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"shareTargets": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
@@ -334,14 +324,6 @@
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
@@ -352,27 +334,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",

View File

@@ -181,20 +181,10 @@
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"shareTargets": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
@@ -215,14 +205,6 @@
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareListItem": {
"properties": {
"localPluginPath": {
@@ -237,10 +219,14 @@
},
"plugin": {
"$ref": "#/definitions/PluginSummary"
},
"shareUrl": {
"type": "string"
}
},
"required": [
"plugin"
"plugin",
"shareUrl"
],
"type": "object"
},
@@ -254,27 +240,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",

View File

@@ -28,24 +28,13 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
}
},
"properties": {

View File

@@ -16,37 +16,16 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -58,7 +37,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],

View File

@@ -1,14 +1,6 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
@@ -19,27 +11,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
@@ -50,9 +30,6 @@
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
@@ -61,7 +38,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",

View File

@@ -11,7 +11,7 @@
"type": "string"
}
},
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -19,15 +19,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"title": "RemoteControlStatusChangedNotification",

View File

@@ -10,10 +10,6 @@ export type InitializeCapabilities = {
* Opt into receiving experimental API methods and fields.
*/
experimentalApi: boolean,
/**
* Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
*/
requestAttestation: boolean,
/**
* Exact notification method names that should be suppressed for this
* connection (for example `thread/started`).

View File

@@ -4,7 +4,6 @@
import type { ApplyPatchApprovalParams } from "./ApplyPatchApprovalParams";
import type { ExecCommandApprovalParams } from "./ExecCommandApprovalParams";
import type { RequestId } from "./RequestId";
import type { AttestationGenerateParams } from "./v2/AttestationGenerateParams";
import type { ChatgptAuthTokensRefreshParams } from "./v2/ChatgptAuthTokensRefreshParams";
import type { CommandExecutionRequestApprovalParams } from "./v2/CommandExecutionRequestApprovalParams";
import type { DynamicToolCallParams } from "./v2/DynamicToolCallParams";
@@ -16,4 +15,4 @@ import type { ToolRequestUserInputParams } from "./v2/ToolRequestUserInputParams
/**
* Request initiated from the server and sent to the client.
*/
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "attestation/generate", id: RequestId, params: AttestationGenerateParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };

View File

@@ -1,5 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AttestationGenerateParams = Record<string, never>;

View File

@@ -1,9 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AttestationGenerateResponse = {
/**
* Opaque client attestation token.
*/
token: string, };

View File

@@ -1,7 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
export type PluginShareContext = { remotePluginId: string, discoverability: PluginShareDiscoverability | null, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, sharePrincipals: Array<PluginSharePrincipal> | null, };
export type PluginShareContext = { remotePluginId: string, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, shareTargets: Array<PluginSharePrincipal> | null, };

View File

@@ -4,4 +4,4 @@
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { PluginSummary } from "./PluginSummary";
export type PluginShareListItem = { plugin: PluginSummary, localPluginPath: AbsolutePathBuf | null, };
export type PluginShareListItem = { plugin: PluginSummary, shareUrl: string, localPluginPath: AbsolutePathBuf | null, };

View File

@@ -1,7 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, role: PluginSharePrincipalRole, name: string, };
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, name: string, };

View File

@@ -1,5 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PluginSharePrincipalRole = "reader" | "editor" | "owner";

View File

@@ -2,6 +2,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
import type { PluginShareTargetRole } from "./PluginShareTargetRole";
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, role: PluginShareTargetRole, };
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, };

View File

@@ -1,5 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PluginShareTargetRole = "reader" | "editor";

View File

@@ -1,5 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PluginShareUpdateDiscoverability = "UNLISTED" | "PRIVATE";

View File

@@ -2,6 +2,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginShareTarget } from "./PluginShareTarget";
import type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
export type PluginShareUpdateTargetsParams = { remotePluginId: string, discoverability: PluginShareUpdateDiscoverability, shareTargets: Array<PluginShareTarget>, };
export type PluginShareUpdateTargetsParams = { remotePluginId: string, shareTargets: Array<PluginShareTarget>, };

View File

@@ -1,7 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, discoverability: PluginShareDiscoverability, };
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, };

View File

@@ -4,6 +4,6 @@
import type { RemoteControlConnectionStatus } from "./RemoteControlConnectionStatus";
/**
* Current remote-control connection status and remote identity exposed to clients.
* Current remote-control connection status and environment id exposed to clients.
*/
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, installationId: string, environmentId: string | null, };
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, environmentId: string | null, };

View File

@@ -28,8 +28,6 @@ export type { AppsDefaultConfig } from "./AppsDefaultConfig";
export type { AppsListParams } from "./AppsListParams";
export type { AppsListResponse } from "./AppsListResponse";
export type { AskForApproval } from "./AskForApproval";
export type { AttestationGenerateParams } from "./AttestationGenerateParams";
export type { AttestationGenerateResponse } from "./AttestationGenerateResponse";
export type { AutoReviewDecisionSource } from "./AutoReviewDecisionSource";
export type { ByteRange } from "./ByteRange";
export type { CancelLoginAccountParams } from "./CancelLoginAccountParams";
@@ -285,13 +283,10 @@ export type { PluginShareListItem } from "./PluginShareListItem";
export type { PluginShareListParams } from "./PluginShareListParams";
export type { PluginShareListResponse } from "./PluginShareListResponse";
export type { PluginSharePrincipal } from "./PluginSharePrincipal";
export type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
export type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
export type { PluginShareSaveParams } from "./PluginShareSaveParams";
export type { PluginShareSaveResponse } from "./PluginShareSaveResponse";
export type { PluginShareTarget } from "./PluginShareTarget";
export type { PluginShareTargetRole } from "./PluginShareTargetRole";
export type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
export type { PluginShareUpdateTargetsParams } from "./PluginShareUpdateTargetsParams";
export type { PluginShareUpdateTargetsResponse } from "./PluginShareUpdateTargetsResponse";
export type { PluginSkillReadParams } from "./PluginSkillReadParams";

View File

@@ -808,13 +808,6 @@ client_request_definitions! {
serialization: None,
response: v2::MockExperimentalMethodResponse,
},
#[experimental("environment/add")]
/// Adds or replaces a remote environment by id for later selection.
EnvironmentAdd => "environment/add" {
params: v2::EnvironmentAddParams,
serialization: global("environment"),
response: v2::EnvironmentAddResponse,
},
McpServerOauthLogin => "mcpServer/oauth/login" {
params: v2::McpServerOauthLoginParams,
@@ -1319,12 +1312,6 @@ server_request_definitions! {
response: v2::ChatgptAuthTokensRefreshResponse,
},
/// Generate a fresh upstream attestation result on demand.
AttestationGenerate => "attestation/generate" {
params: v2::AttestationGenerateParams,
response: v2::AttestationGenerateResponse,
},
/// DEPRECATED APIs below
/// Request to approve a patch.
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
@@ -1803,18 +1790,6 @@ mod tests {
add_credits_nudge.serialization_scope(),
Some(ClientRequestSerializationScope::Global("account-auth"))
);
let environment_add = ClientRequest::EnvironmentAdd {
request_id: request_id(),
params: v2::EnvironmentAddParams {
environment_id: "remote-a".to_string(),
exec_server_url: "ws://127.0.0.1:8765".to_string(),
},
};
assert_eq!(
environment_add.serialization_scope(),
Some(ClientRequestSerializationScope::Global("environment"))
);
}
#[test]
@@ -1935,7 +1910,6 @@ mod tests {
},
capabilities: Some(v1::InitializeCapabilities {
experimental_api: true,
request_attestation: true,
opt_out_notification_methods: Some(vec![
"thread/started".to_string(),
"item/agentMessage/delta".to_string(),
@@ -1956,7 +1930,6 @@ mod tests {
},
"capabilities": {
"experimentalApi": true,
"requestAttestation": true,
"optOutNotificationMethods": [
"thread/started",
"item/agentMessage/delta"
@@ -1982,7 +1955,6 @@ mod tests {
},
"capabilities": {
"experimentalApi": true,
"requestAttestation": true,
"optOutNotificationMethods": [
"thread/started",
"item/agentMessage/delta"
@@ -2003,7 +1975,6 @@ mod tests {
},
capabilities: Some(v1::InitializeCapabilities {
experimental_api: true,
request_attestation: true,
opt_out_notification_methods: Some(vec![
"thread/started".to_string(),
"item/agentMessage/delta".to_string(),
@@ -2120,28 +2091,6 @@ mod tests {
Ok(())
}
#[test]
fn serialize_attestation_generate_request() -> Result<()> {
let params = v2::AttestationGenerateParams {};
let request = ServerRequest::AttestationGenerate {
request_id: RequestId::Integer(9),
params: params.clone(),
};
assert_eq!(
json!({
"method": "attestation/generate",
"id": 9,
"params": {}
}),
serde_json::to_value(&request)?,
);
let payload = ServerRequestPayload::AttestationGenerate(params);
assert_eq!(request.id(), &RequestId::Integer(9));
assert_eq!(payload.request_with_id(RequestId::Integer(9)), request);
Ok(())
}
#[test]
fn serialize_server_response() -> Result<()> {
let response = ServerResponse::CommandExecutionRequestApproval {
@@ -2597,33 +2546,10 @@ mod tests {
Ok(())
}
#[test]
fn serialize_environment_add() -> Result<()> {
let request = ClientRequest::EnvironmentAdd {
request_id: RequestId::Integer(9),
params: v2::EnvironmentAddParams {
environment_id: "remote-a".to_string(),
exec_server_url: "ws://127.0.0.1:8765".to_string(),
},
};
assert_eq!(
json!({
"method": "environment/add",
"id": 9,
"params": {
"environmentId": "remote-a",
"execServerUrl": "ws://127.0.0.1:8765"
}
}),
serde_json::to_value(&request)?,
);
Ok(())
}
#[test]
fn serialize_fs_get_metadata() -> Result<()> {
let request = ClientRequest::FsGetMetadata {
request_id: RequestId::Integer(10),
request_id: RequestId::Integer(9),
params: v2::FsGetMetadataParams {
path: absolute_path("tmp/example"),
},
@@ -2631,7 +2557,7 @@ mod tests {
assert_eq!(
json!({
"method": "fs/getMetadata",
"id": 10,
"id": 9,
"params": {
"path": absolute_path_string("tmp/example")
}
@@ -2892,19 +2818,6 @@ mod tests {
assert_eq!(reason, Some("mock/experimentalMethod"));
}
#[test]
fn environment_add_is_marked_experimental() {
let request = ClientRequest::EnvironmentAdd {
request_id: RequestId::Integer(1),
params: v2::EnvironmentAddParams {
environment_id: "remote-a".to_string(),
exec_server_url: "ws://127.0.0.1:8765".to_string(),
},
};
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("environment/add"));
}
#[test]
fn command_exec_permission_profile_is_marked_experimental() {
let request = ClientRequest::OneOffCommandExec {

View File

@@ -46,9 +46,6 @@ pub struct InitializeCapabilities {
/// Opt into receiving experimental API methods and fields.
#[serde(default)]
pub experimental_api: bool,
/// Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
#[serde(default)]
pub request_attestation: bool,
/// Exact notification method names that should be suppressed for this
/// connection (for example `thread/started`).
#[ts(optional = nullable)]

View File

@@ -1,17 +0,0 @@
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AttestationGenerateParams {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AttestationGenerateResponse {
/// Opaque client attestation token.
pub token: String,
}

View File

@@ -1,17 +0,0 @@
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct EnvironmentAddParams {
pub environment_id: String,
pub exec_server_url: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct EnvironmentAddResponse {}

View File

@@ -2,11 +2,9 @@ mod shared;
mod account;
mod apps;
mod attestation;
mod collaboration_mode;
mod command_exec;
mod config;
mod environment;
mod experimental_feature;
mod feedback;
mod fs;
@@ -28,11 +26,9 @@ mod windows_sandbox;
pub use account::*;
pub use apps::*;
pub use attestation::*;
pub use collaboration_mode::*;
pub use command_exec::*;
pub use config::*;
pub use environment::*;
pub use experimental_feature::*;
pub use feedback::*;
pub use fs::*;

View File

@@ -218,7 +218,6 @@ pub struct PluginShareSaveResponse {
#[ts(export_to = "v2/")]
pub struct PluginShareUpdateTargetsParams {
pub remote_plugin_id: String,
pub discoverability: PluginShareUpdateDiscoverability,
pub share_targets: Vec<PluginShareTarget>,
}
@@ -227,7 +226,6 @@ pub struct PluginShareUpdateTargetsParams {
#[ts(export_to = "v2/")]
pub struct PluginShareUpdateTargetsResponse {
pub principals: Vec<PluginSharePrincipal>,
pub discoverability: PluginShareDiscoverability,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -259,6 +257,7 @@ pub struct PluginShareDeleteResponse {}
#[ts(export_to = "v2/")]
pub struct PluginShareListItem {
pub plugin: PluginSummary,
pub share_url: String,
pub local_plugin_path: Option<AbsolutePathBuf>,
}
@@ -276,17 +275,6 @@ pub enum PluginShareDiscoverability {
Private,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[ts(export_to = "v2/")]
pub enum PluginShareUpdateDiscoverability {
#[serde(rename = "UNLISTED")]
#[ts(rename = "UNLISTED")]
Unlisted,
#[serde(rename = "PRIVATE")]
#[ts(rename = "PRIVATE")]
Private,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[ts(export_to = "v2/")]
pub enum PluginSharePrincipalType {
@@ -307,7 +295,6 @@ pub enum PluginSharePrincipalType {
pub struct PluginShareTarget {
pub principal_type: PluginSharePrincipalType,
pub principal_id: String,
pub role: PluginShareTargetRole,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
@@ -316,29 +303,9 @@ pub struct PluginShareTarget {
pub struct PluginSharePrincipal {
pub principal_type: PluginSharePrincipalType,
pub principal_id: String,
pub role: PluginSharePrincipalRole,
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
#[ts(rename_all = "lowercase")]
#[ts(export_to = "v2/")]
pub enum PluginShareTargetRole {
Reader,
Editor,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
#[ts(rename_all = "lowercase")]
#[ts(export_to = "v2/")]
pub enum PluginSharePrincipalRole {
Reader,
Editor,
Owner,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case")]
@@ -559,11 +526,10 @@ pub struct PluginSummary {
#[ts(export_to = "v2/")]
pub struct PluginShareContext {
pub remote_plugin_id: String,
pub discoverability: Option<PluginShareDiscoverability>,
pub share_url: Option<String>,
pub creator_account_user_id: Option<String>,
pub creator_name: Option<String>,
pub share_principals: Option<Vec<PluginSharePrincipal>>,
pub share_targets: Option<Vec<PluginSharePrincipal>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View File

@@ -3,13 +3,12 @@ use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
/// Current remote-control connection status and remote identity exposed to clients.
/// Current remote-control connection status and environment id exposed to clients.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct RemoteControlStatusChangedNotification {
pub status: RemoteControlConnectionStatus,
pub installation_id: String,
pub environment_id: Option<String>,
}

View File

@@ -2896,12 +2896,10 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
PluginShareTarget {
principal_type: PluginSharePrincipalType::User,
principal_id: "user-1".to_string(),
role: PluginShareTargetRole::Reader,
},
PluginShareTarget {
principal_type: PluginSharePrincipalType::Group,
principal_id: "group-1".to_string(),
role: PluginShareTargetRole::Reader,
principal_type: PluginSharePrincipalType::Workspace,
principal_id: "workspace-1".to_string(),
},
]),
})
@@ -2914,12 +2912,10 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
{
"principalType": "user",
"principalId": "user-1",
"role": "reader",
},
{
"principalType": "group",
"principalId": "group-1",
"role": "reader",
"principalType": "workspace",
"principalId": "workspace-1",
},
],
}),
@@ -2940,21 +2936,17 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
assert_eq!(
serde_json::to_value(PluginShareUpdateTargetsParams {
remote_plugin_id: "plugins~Plugin_00000000000000000000000000000000".to_string(),
discoverability: PluginShareUpdateDiscoverability::Unlisted,
share_targets: vec![PluginShareTarget {
principal_type: PluginSharePrincipalType::Group,
principal_id: "group-1".to_string(),
role: PluginShareTargetRole::Editor,
}],
})
.unwrap(),
json!({
"remotePluginId": "plugins~Plugin_00000000000000000000000000000000",
"discoverability": "UNLISTED",
"shareTargets": [{
"principalType": "group",
"principalId": "group-1",
"role": "editor",
}],
}),
);
@@ -2964,20 +2956,16 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
principals: vec![PluginSharePrincipal {
principal_type: PluginSharePrincipalType::User,
principal_id: "user-1".to_string(),
role: PluginSharePrincipalRole::Owner,
name: "Gavin".to_string(),
}],
discoverability: PluginShareDiscoverability::Unlisted,
})
.unwrap(),
json!({
"principals": [{
"principalType": "user",
"principalId": "user-1",
"role": "owner",
"name": "Gavin",
}],
"discoverability": "UNLISTED",
}),
);
@@ -3015,6 +3003,7 @@ fn plugin_share_list_response_serializes_share_items() {
interface: None,
keywords: Vec::new(),
},
share_url: "https://chatgpt.example/plugins/share/share-key-1".to_string(),
local_plugin_path: None,
}],
})
@@ -3034,6 +3023,7 @@ fn plugin_share_list_response_serializes_share_items() {
"interface": null,
"keywords": [],
},
"shareUrl": "https://chatgpt.example/plugins/share/share-key-1",
"localPluginPath": null,
}],
}),

View File

@@ -1551,7 +1551,6 @@ impl CodexClient {
},
capabilities: Some(InitializeCapabilities {
experimental_api,
request_attestation: false,
opt_out_notification_methods: Some(
NOTIFICATIONS_TO_OPT_OUT
.iter()

View File

@@ -11,7 +11,6 @@ pub use transport::AppServerTransportParseError;
pub use transport::CHANNEL_CAPACITY;
pub use transport::ConnectionOrigin;
pub use transport::RemoteControlHandle;
pub use transport::RemoteControlStartConfig;
pub use transport::TransportEvent;
pub use transport::app_server_control_socket_path;
pub use transport::auth;

View File

@@ -31,7 +31,6 @@ mod unix_socket_tests;
mod websocket;
pub use remote_control::RemoteControlHandle;
pub use remote_control::RemoteControlStartConfig;
pub use remote_control::start_remote_control;
pub use stdio::start_stdio_connection;
pub use unix_socket::start_control_socket_acceptor;

View File

@@ -19,7 +19,6 @@ const REQUEST_ID_HEADER: &str = "x-request-id";
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
const CF_RAY_HEADER: &str = "cf-ray";
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) struct RemoteControlEnrollment {
@@ -194,7 +193,6 @@ pub(crate) fn format_headers(headers: &HeaderMap) -> String {
pub(super) async fn enroll_remote_control_server(
remote_control_target: &RemoteControlTarget,
auth: &RemoteControlConnectionAuth,
installation_id: &str,
) -> io::Result<RemoteControlEnrollment> {
let enroll_url = &remote_control_target.enroll_url;
let server_name = gethostname().to_string_lossy().trim().to_string();
@@ -203,7 +201,6 @@ pub(super) async fn enroll_remote_control_server(
os: std::env::consts::OS,
arch: std::env::consts::ARCH,
app_server_version: env!("CARGO_PKG_VERSION"),
installation_id: installation_id.to_string(),
};
let client = build_reqwest_client();
let mut auth_headers = HeaderMap::new();
@@ -213,7 +210,6 @@ pub(super) async fn enroll_remote_control_server(
.timeout(REMOTE_CONTROL_ENROLL_TIMEOUT)
.headers(auth_headers)
.header(REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)
.header(REMOTE_CONTROL_INSTALLATION_ID_HEADER, installation_id)
.json(&request);
let response = http_request.send().await.map_err(|err| {
@@ -463,7 +459,6 @@ mod tests {
auth_provider: codex_model_provider::unauthenticated_auth_provider(),
account_id: "account_id".to_string(),
},
"11111111-1111-4111-8111-111111111111",
)
.await
.expect_err("invalid response should fail to parse");

View File

@@ -28,11 +28,6 @@ use tokio::task::JoinHandle;
use tokio_util::sync::CancellationToken;
use tracing::warn;
pub struct RemoteControlStartConfig {
pub remote_control_url: String,
pub installation_id: String,
}
pub(super) struct QueuedServerEnvelope {
pub(super) event: ServerEvent,
pub(super) client_id: ClientId,
@@ -67,7 +62,7 @@ impl RemoteControlHandle {
}
pub async fn start_remote_control(
config: RemoteControlStartConfig,
remote_control_url: String,
state_db: Option<Arc<StateRuntime>>,
auth_manager: Arc<AuthManager>,
transport_event_tx: mpsc::Sender<TransportEvent>,
@@ -82,7 +77,7 @@ pub async fn start_remote_control(
warn!("remote control disabled because sqlite state db is unavailable");
}
let remote_control_target = if initial_enabled {
Some(normalize_remote_control_url(&config.remote_control_url)?)
Some(normalize_remote_control_url(&remote_control_url)?)
} else {
None
};
@@ -94,18 +89,14 @@ pub async fn start_remote_control(
} else {
RemoteControlConnectionStatus::Disabled
},
installation_id: config.installation_id.clone(),
environment_id: None,
};
let (status_tx, _status_rx) = watch::channel(initial_status);
let status_publisher = RemoteControlStatusPublisher::new(status_tx.clone());
let join_handle = tokio::spawn(async move {
RemoteControlWebsocket::new(
websocket::RemoteControlWebsocketConfig {
remote_control_url: config.remote_control_url,
installation_id: config.installation_id,
remote_control_target,
},
remote_control_url,
remote_control_target,
state_db,
auth_manager,
RemoteControlChannels {

View File

@@ -19,7 +19,6 @@ pub(super) struct EnrollRemoteServerRequest {
pub(super) os: &'static str,
pub(super) arch: &'static str,
pub(super) app_server_version: &'static str,
pub(super) installation_id: String,
}
#[derive(Debug, Deserialize)]

View File

@@ -1,5 +1,4 @@
use super::enroll::REMOTE_CONTROL_ACCOUNT_ID_HEADER;
use super::enroll::REMOTE_CONTROL_INSTALLATION_ID_HEADER;
use super::enroll::RemoteControlEnrollment;
use super::enroll::load_persisted_remote_control_enrollment;
use super::enroll::update_persisted_remote_control_enrollment;
@@ -57,8 +56,6 @@ use tokio_tungstenite::accept_hdr_async;
use tokio_tungstenite::tungstenite;
use tokio_util::sync::CancellationToken;
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
fn remote_control_auth_manager() -> Arc<AuthManager> {
auth_manager_from_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
}
@@ -134,7 +131,6 @@ async fn expect_remote_control_status(
if let Some(expected_status) = expected_status {
assert_eq!(status.status, expected_status);
}
assert_eq!(status.installation_id, TEST_INSTALLATION_ID);
assert_eq!(status.environment_id.as_deref(), expected_environment_id);
}
@@ -177,10 +173,7 @@ async fn remote_control_transport_manages_virtual_clients_and_routes_messages()
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
remote_control_auth_manager(),
transport_event_tx,
@@ -456,10 +449,7 @@ async fn remote_control_transport_reconnects_after_disconnect() {
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
remote_control_auth_manager(),
transport_event_tx,
@@ -538,10 +528,7 @@ async fn remote_control_start_allows_remote_control_invalid_url_when_disabled()
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, _remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url: "https://internal.example.com/backend-api/".to_string(),
installation_id: TEST_INSTALLATION_ID.to_string(),
},
"https://internal.example.com/backend-api/".to_string(),
/*state_db*/ None,
remote_control_auth_manager(),
transport_event_tx,
@@ -577,10 +564,7 @@ async fn remote_control_start_allows_missing_auth_when_enabled() {
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, _remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
auth_manager,
transport_event_tx,
@@ -612,10 +596,7 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
/*state_db*/ None,
remote_control_auth_manager(),
transport_event_tx,
@@ -630,7 +611,6 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Disabled,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
}
);
@@ -665,10 +645,7 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
remote_control_auth_manager(),
transport_event_tx,
@@ -695,7 +672,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
&mut status_rx,
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connected,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_test".to_string()),
},
)
@@ -706,7 +682,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
&mut status_rx,
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Disabled,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
},
)
@@ -723,7 +698,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
&mut status_rx,
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_test".to_string()),
},
)
@@ -755,10 +729,7 @@ async fn remote_control_transport_clears_outgoing_buffer_when_backend_acks() {
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
remote_control_auth_manager(),
transport_event_tx,
@@ -933,10 +904,7 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
let expected_server_name = gethostname().to_string_lossy().trim().to_string();
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(remote_control_state_runtime(&codex_home).await),
remote_control_auth_manager(),
transport_event_tx,
@@ -961,12 +929,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
enroll_request.headers.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
Some(&"account_id".to_string())
);
assert_eq!(
enroll_request
.headers
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
Some(&TEST_INSTALLATION_ID.to_string())
);
assert_eq!(
serde_json::from_str::<serde_json::Value>(&enroll_request.body)
.expect("enroll body should deserialize"),
@@ -975,7 +937,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
"os": std::env::consts::OS,
"arch": std::env::consts::ARCH,
"app_server_version": env!("CARGO_PKG_VERSION"),
"installation_id": TEST_INSTALLATION_ID,
})
);
respond_with_json(
@@ -1006,12 +967,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
Some(&"account_id".to_string())
);
assert_eq!(
handshake_request
.headers
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
Some(&TEST_INSTALLATION_ID.to_string())
);
assert_eq!(
handshake_request.headers.get("x-codex-server-id"),
Some(&"srv_e_test".to_string())
@@ -1173,10 +1128,7 @@ async fn remote_control_http_mode_reuses_persisted_enrollment_before_reenrolling
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, _remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(state_db.clone()),
remote_control_auth_manager_with_home(&codex_home),
transport_event_tx,
@@ -1244,10 +1196,7 @@ async fn remote_control_stdio_mode_waits_for_client_name_before_connecting() {
let (app_server_client_name_tx, app_server_client_name_rx) = oneshot::channel::<String>();
let shutdown_token = CancellationToken::new();
let (remote_task, _remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(state_db.clone()),
remote_control_auth_manager_with_home(&codex_home),
transport_event_tx,
@@ -1306,10 +1255,7 @@ async fn remote_control_waits_for_account_id_before_enrolling() {
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, _remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(state_db.clone()),
auth_manager,
transport_event_tx,
@@ -1392,10 +1338,7 @@ async fn remote_control_http_mode_clears_stale_persisted_enrollment_after_404()
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let shutdown_token = CancellationToken::new();
let (remote_task, remote_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
},
remote_control_url,
Some(state_db.clone()),
remote_control_auth_manager_with_home(&codex_home),
transport_event_tx,

View File

@@ -55,7 +55,6 @@ use tracing::warn;
pub(super) const REMOTE_CONTROL_PROTOCOL_VERSION: &str = "3";
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
const REMOTE_CONTROL_SUBSCRIBE_CURSOR_HEADER: &str = "x-codex-subscribe-cursor";
const REMOTE_CONTROL_WEBSOCKET_PING_INTERVAL: std::time::Duration =
std::time::Duration::from_secs(10);
@@ -215,7 +214,6 @@ impl WebsocketState {
pub(crate) struct RemoteControlWebsocket {
remote_control_url: String,
installation_id: String,
remote_control_target: Option<RemoteControlTarget>,
state_db: Option<Arc<StateRuntime>>,
auth_manager: Arc<AuthManager>,
@@ -231,12 +229,6 @@ pub(crate) struct RemoteControlWebsocket {
enabled_rx: watch::Receiver<bool>,
}
pub(crate) struct RemoteControlWebsocketConfig {
pub(crate) remote_control_url: String,
pub(crate) installation_id: String,
pub(crate) remote_control_target: Option<RemoteControlTarget>,
}
enum ConnectOutcome {
Connected(Box<WebSocketStream<MaybeTlsStream<TcpStream>>>),
Disabled,
@@ -262,7 +254,6 @@ impl RemoteControlStatusPublisher {
self.tx.send_if_modified(|status| {
let next_status = RemoteControlStatusChangedNotification {
status: connection_status,
installation_id: status.installation_id.clone(),
environment_id: if connection_status == RemoteControlConnectionStatus::Disabled {
None
} else {
@@ -285,7 +276,6 @@ impl RemoteControlStatusPublisher {
}
let next_status = RemoteControlStatusChangedNotification {
status: status.status,
installation_id: status.installation_id.clone(),
environment_id,
};
if *status == next_status {
@@ -300,14 +290,14 @@ impl RemoteControlStatusPublisher {
#[derive(Clone, Copy)]
pub(super) struct RemoteControlConnectOptions<'a> {
installation_id: &'a str,
subscribe_cursor: Option<&'a str>,
app_server_client_name: Option<&'a str>,
}
impl RemoteControlWebsocket {
pub(crate) fn new(
config: RemoteControlWebsocketConfig,
remote_control_url: String,
remote_control_target: Option<RemoteControlTarget>,
state_db: Option<Arc<StateRuntime>>,
auth_manager: Arc<AuthManager>,
channels: RemoteControlChannels,
@@ -325,9 +315,8 @@ impl RemoteControlWebsocket {
let auth_recovery = auth_manager.unauthorized_recovery();
Self {
remote_control_url: config.remote_control_url,
installation_id: config.installation_id,
remote_control_target: config.remote_control_target,
remote_control_url,
remote_control_target,
state_db,
auth_manager,
status_publisher: channels.status_publisher,
@@ -453,7 +442,6 @@ impl RemoteControlWebsocket {
loop {
let subscribe_cursor = self.state.lock().await.subscribe_cursor.clone();
let connect_options = RemoteControlConnectOptions {
installation_id: &self.installation_id,
subscribe_cursor: subscribe_cursor.as_deref(),
app_server_client_name,
};
@@ -930,7 +918,6 @@ fn build_remote_control_websocket_request(
websocket_url: &str,
enrollment: &RemoteControlEnrollment,
auth: &RemoteControlConnectionAuth,
installation_id: &str,
subscribe_cursor: Option<&str>,
) -> io::Result<tungstenite::http::Request<()>> {
let mut request = websocket_url.into_client_request().map_err(|err| {
@@ -955,11 +942,6 @@ fn build_remote_control_websocket_request(
auth.auth_provider.add_auth_headers(&mut auth_headers);
headers.extend(auth_headers);
set_remote_control_header(headers, REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)?;
set_remote_control_header(
headers,
REMOTE_CONTROL_INSTALLATION_ID_HEADER,
installation_id,
)?;
if let Some(subscribe_cursor) = subscribe_cursor {
set_remote_control_header(
headers,
@@ -1084,12 +1066,7 @@ pub(super) async fn connect_remote_control_websocket(
"creating new remote control enrollment: websocket_url={}, enroll_url={}, account_id={}",
remote_control_target.websocket_url, remote_control_target.enroll_url, auth.account_id
);
let new_enrollment = match enroll_remote_control_server(
remote_control_target,
&auth,
connect_options.installation_id,
)
.await
let new_enrollment = match enroll_remote_control_server(remote_control_target, &auth).await
{
Ok(new_enrollment) => new_enrollment,
Err(err)
@@ -1133,7 +1110,6 @@ pub(super) async fn connect_remote_control_websocket(
&remote_control_target.websocket_url,
enrollment_ref,
&auth,
connect_options.installation_id,
connect_options.subscribe_cursor,
)?;
@@ -1271,7 +1247,6 @@ mod tests {
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(30);
#[cfg(not(windows))]
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(5);
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
fn remote_control_status_channel() -> (
RemoteControlStatusPublisher,
@@ -1279,7 +1254,6 @@ mod tests {
) {
let (status_tx, status_rx) = watch::channel(RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
});
(RemoteControlStatusPublisher::new(status_tx), status_rx)
@@ -1385,7 +1359,6 @@ mod tests {
&mut auth_recovery,
&mut enrollment,
RemoteControlConnectOptions {
installation_id: TEST_INSTALLATION_ID,
subscribe_cursor: None,
app_server_client_name: None,
},
@@ -1403,7 +1376,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_test".to_string()),
}
);
@@ -1463,7 +1435,6 @@ mod tests {
&mut auth_recovery,
&mut enrollment,
RemoteControlConnectOptions {
installation_id: TEST_INSTALLATION_ID,
subscribe_cursor: None,
app_server_client_name: None,
},
@@ -1477,7 +1448,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_test".to_string()),
}
);
@@ -1545,7 +1515,6 @@ mod tests {
&mut auth_recovery,
&mut enrollment,
RemoteControlConnectOptions {
installation_id: TEST_INSTALLATION_ID,
subscribe_cursor: None,
app_server_client_name: None,
},
@@ -1598,7 +1567,6 @@ mod tests {
&mut auth_recovery,
&mut enrollment,
RemoteControlConnectOptions {
installation_id: TEST_INSTALLATION_ID,
subscribe_cursor: None,
app_server_client_name: None,
},
@@ -1646,7 +1614,6 @@ mod tests {
&mut auth_recovery,
&mut enrollment,
RemoteControlConnectOptions {
installation_id: TEST_INSTALLATION_ID,
subscribe_cursor: None,
app_server_client_name: None,
},
@@ -1665,7 +1632,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
}
);
@@ -1690,11 +1656,8 @@ mod tests {
let shutdown_token = shutdown_token.clone();
async move {
RemoteControlWebsocket::new(
RemoteControlWebsocketConfig {
remote_control_url,
installation_id: TEST_INSTALLATION_ID.to_string(),
remote_control_target: Some(remote_control_target),
},
remote_control_url,
Some(remote_control_target),
/*state_db*/ None,
remote_control_auth_manager(),
RemoteControlChannels {
@@ -1738,7 +1701,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connecting,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_first".to_string()),
}
);
@@ -1759,7 +1721,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connected,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: Some("env_first".to_string()),
}
);
@@ -1773,7 +1734,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Connected,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
}
);
@@ -1788,7 +1748,6 @@ mod tests {
status_rx.borrow().clone(),
RemoteControlStatusChangedNotification {
status: RemoteControlConnectionStatus::Disabled,
installation_id: TEST_INSTALLATION_ID.to_string(),
environment_id: None,
}
);

View File

@@ -41,7 +41,6 @@ codex-external-agent-migration = { workspace = true }
codex-external-agent-sessions = { workspace = true }
codex-features = { workspace = true }
codex-git-utils = { workspace = true }
codex-file-watcher = { workspace = true }
codex-hooks = { workspace = true }
codex-otel = { workspace = true }
codex-plugin = { workspace = true }

View File

@@ -202,7 +202,6 @@ Example with notification opt-out:
- `modelProvider/capabilities/read` — read provider-level capabilities for the currently configured model provider.
- `experimentalFeature/list` — list feature flags with stage metadata (`beta`, `underDevelopment`, `stable`, etc.), enabled/default-enabled state, and cursor pagination. For non-beta flags, `displayName`/`description`/`announcement` are `null`.
- `experimentalFeature/enablement/set` — patch the in-memory process-wide runtime feature enablement for the currently supported feature keys (`apps`, `memories`, `plugins`, `remote_control`, `tool_search`, `tool_suggest`, `tool_call_mcp_elicitation`). For each feature, precedence is: cloud requirements > --enable <feature_name> > config.toml > experimentalFeature/enablement/set (new) > code default.
- `environment/add` — experimental; add or replace a named remote environment by `environmentId` and `execServerUrl` for later selection by `thread/start` or `turn/start`; returns `{}` and does not change the default environment.
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination). Built-in presets do not select a model; the Plan preset selects medium reasoning effort. This response omits built-in developer instructions; clients should either pass `settings.developer_instructions: null` when setting a mode to use Codex's built-in instructions, or provide their own instructions explicitly.
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `hooks/list` — list discovered hooks for one or more `cwd` values.
@@ -1338,10 +1337,6 @@ UI guidance for IDEs: surface an approval dialog as soon as the request arrives.
When the client responds to `item/tool/requestUserInput`, the server emits `serverRequest/resolved` with `{ threadId, requestId }`. If the pending request is cleared by turn start, turn completion, or turn interruption before the client answers, the server emits the same notification for that cleanup.
### Attestation generation
Desktop hosts that provide upstream attestation should set `capabilities.requestAttestation` during `initialize` and handle the server-initiated `attestation/generate` request. App-server issues it just in time before ChatGPT Codex requests that forward `x-oai-attestation`; the client responds with `{ "token": "v1.<opaque>" }`, where `token` is an opaque client-owned value. When app-server receives a client response, it forwards a consistent outer envelope such as `{ "v": 1, "s": 0, "t": "v1.<opaque>" }`, where `t` contains the client token unchanged. If app-server attempts attestation but fails within its own boundary, it sends the same envelope shape with an app-server status code and without `t` (`1 = timeout`, `2 = request failed`, `3 = request canceled`, `4 = malformed response`). If no initialized client opted into attestation, app-server omits `x-oai-attestation` for that upstream request.
### MCP server elicitations
MCP servers can interrupt a turn and ask the client for structured input via `mcpServer/elicitation/request`.

View File

@@ -1,217 +0,0 @@
use std::sync::Arc;
use axum::http::HeaderValue;
use codex_app_server_protocol::AttestationGenerateParams;
use codex_app_server_protocol::AttestationGenerateResponse;
use codex_app_server_protocol::ServerRequestPayload;
use codex_core::AttestationContext;
use codex_core::AttestationProvider;
use codex_core::GenerateAttestationFuture;
use serde::Serialize;
use tokio::time::Duration;
use tokio::time::timeout;
use tracing::warn;
use crate::outgoing_message::OutgoingMessageSender;
use crate::thread_state::ThreadStateManager;
// How long to wait for a client to answer `attestation/generate` before
// giving up, cancelling the request, and reporting a timeout status.
const ATTESTATION_GENERATE_TIMEOUT: Duration = Duration::from_millis(100);
/// Builds the app-server-backed [`AttestationProvider`].
///
/// The returned provider forwards attestation generation to a connected,
/// attestation-capable client over `outgoing`, using `thread_state_manager`
/// to pick a suitable connection for each thread.
pub(crate) fn app_server_attestation_provider(
    outgoing: Arc<OutgoingMessageSender>,
    thread_state_manager: ThreadStateManager,
) -> Arc<dyn AttestationProvider> {
    let provider = AppServerAttestationProvider {
        outgoing,
        thread_state_manager,
    };
    Arc::new(provider)
}
/// [`AttestationProvider`] implementation that asks a connected client to
/// generate attestation tokens on the app-server's behalf.
struct AppServerAttestationProvider {
    // Sends the `attestation/generate` server request to client connections.
    outgoing: Arc<OutgoingMessageSender>,
    // Locates an attestation-capable connection for a given thread.
    thread_state_manager: ThreadStateManager,
}
// Manual `Debug` impl that prints only the type name — presumably the fields
// do not implement `Debug` (or should not appear in logs); confirm before
// switching to `#[derive(Debug)]`.
impl std::fmt::Debug for AppServerAttestationProvider {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        formatter
            .debug_struct("AppServerAttestationProvider")
            .finish()
    }
}
impl AttestationProvider for AppServerAttestationProvider {
fn header_for_request(&self, context: AttestationContext) -> GenerateAttestationFuture<'_> {
let outgoing = self.outgoing.clone();
let thread_state_manager = self.thread_state_manager.clone();
Box::pin(async move {
request_attestation_header_value_with_timeout(
outgoing,
thread_state_manager,
context.thread_id,
ATTESTATION_GENERATE_TIMEOUT,
)
.await
.and_then(|value| HeaderValue::from_bytes(value.as_bytes()).ok())
})
}
}
/// Asks one attestation-capable client connection for a token and wraps the
/// outcome in the serialized app-server envelope.
///
/// Returns `None` only when no attestation-capable connection exists for the
/// thread (or envelope serialization itself fails); every other outcome —
/// success, client error, cancellation, timeout, malformed response — is
/// reported as an envelope with the corresponding status code.
async fn request_attestation_header_value_with_timeout(
    outgoing: Arc<OutgoingMessageSender>,
    thread_state_manager: ThreadStateManager,
    thread_id: codex_protocol::ThreadId,
    timeout_duration: Duration,
) -> Option<String> {
    // Bail out (header omitted entirely) when no connected client opted into
    // attestation for this thread.
    let connection_id = thread_state_manager
        .first_attestation_capable_connection_for_thread(thread_id)
        .await?;
    let connection_ids = [connection_id];
    let (request_id, rx) = outgoing
        .send_request_to_connections(
            Some(&connection_ids),
            ServerRequestPayload::AttestationGenerate(AttestationGenerateParams {}),
            /*thread_id*/ None,
        )
        .await;
    // Three nested layers: outer = timeout, middle = channel delivery,
    // inner = the client's JSON-RPC result.
    let result = match timeout(timeout_duration, rx).await {
        Ok(Ok(Ok(result))) => result,
        Ok(Ok(Err(err))) => {
            // Client answered with a JSON-RPC error.
            warn!(
                code = err.code,
                message = %err.message,
                "attestation generation request failed"
            );
            return app_server_attestation_header_value(
                AppServerAttestationStatus::RequestFailed,
                /*token*/ None,
            );
        }
        Ok(Err(err)) => {
            // Response channel dropped before an answer arrived.
            warn!("attestation generation request canceled: {err}");
            return app_server_attestation_header_value(
                AppServerAttestationStatus::RequestCanceled,
                /*token*/ None,
            );
        }
        Err(_) => {
            // Timed out: cancel the in-flight request so the client does not
            // answer into the void, then report the timeout status.
            let _canceled = outgoing.cancel_request(&request_id).await;
            warn!(
                timeout_seconds = timeout_duration.as_secs(),
                "attestation generation request timed out"
            );
            return app_server_attestation_header_value(
                AppServerAttestationStatus::Timeout,
                /*token*/ None,
            );
        }
    };
    match serde_json::from_value::<AttestationGenerateResponse>(result) {
        // Forward the client's opaque token unchanged inside the envelope.
        Ok(response) => app_server_attestation_header_value(
            AppServerAttestationStatus::Ok,
            Some(&response.token),
        ),
        Err(err) => {
            warn!("failed to deserialize attestation generation response: {err}");
            app_server_attestation_header_value(
                AppServerAttestationStatus::MalformedResponse,
                /*token*/ None,
            )
        }
    }
}
/// Outcome of an attestation generation attempt, encoded as the `s` status
/// code of the outer attestation envelope (see `code()` for the mapping).
#[derive(Clone, Copy)]
enum AppServerAttestationStatus {
    // 0: the client returned a token.
    Ok,
    // 1: the client did not answer within the timeout.
    Timeout,
    // 2: the client answered with a JSON-RPC error.
    RequestFailed,
    // 3: the response channel was dropped before an answer arrived.
    RequestCanceled,
    // 4: the client's response could not be deserialized.
    MalformedResponse,
}
impl AppServerAttestationStatus {
    /// Numeric wire code for this status, forwarded as `s` in the attestation
    /// envelope. The mapping is part of the upstream contract — keep it
    /// stable even if variants are reordered.
    const fn code(self) -> u8 {
        match self {
            Self::Ok => 0,
            Self::Timeout => 1,
            Self::RequestFailed => 2,
            Self::RequestCanceled => 3,
            Self::MalformedResponse => 4,
        }
    }
}
/// Outer envelope serialized as the attestation header value: `v` is the
/// envelope version, `s` the app-server status code, and `t` the opaque
/// client token (omitted from the JSON when generation did not succeed).
#[derive(Serialize)]
struct AppServerAttestationEnvelope<'a> {
    v: u8,
    s: u8,
    #[serde(skip_serializing_if = "Option::is_none")]
    t: Option<&'a str>,
}
/// Serializes the attestation envelope for `status` and an optional client
/// `token` into its JSON header value.
///
/// Returns `None` (with a warning) only if serialization fails, which drops
/// the header rather than sending a malformed value upstream.
fn app_server_attestation_header_value(
    status: AppServerAttestationStatus,
    token: Option<&str>,
) -> Option<String> {
    let envelope = AppServerAttestationEnvelope {
        v: 1,
        s: status.code(),
        t: token,
    };
    match serde_json::to_string(&envelope) {
        Ok(serialized) => Some(serialized),
        Err(err) => {
            warn!("failed to serialize app-server attestation envelope: {err}");
            None
        }
    }
}
#[cfg(test)]
mod tests {
    use super::AppServerAttestationStatus;
    use super::app_server_attestation_header_value;
    use pretty_assertions::assert_eq;

    // Successful generation wraps the client token unchanged in the envelope.
    #[test]
    fn app_server_attestation_header_value_wraps_opaque_client_payloads() {
        let header = app_server_attestation_header_value(
            AppServerAttestationStatus::Ok,
            Some("v1.opaque-client-payload"),
        );
        assert_eq!(
            header,
            Some(r#"{"v":1,"s":0,"t":"v1.opaque-client-payload"}"#.to_string())
        );
    }

    // Every failure status serializes its wire code with the token omitted.
    #[test]
    fn app_server_attestation_header_value_reports_app_server_failures() {
        let failure_cases = [
            (AppServerAttestationStatus::Timeout, r#"{"v":1,"s":1}"#),
            (AppServerAttestationStatus::RequestFailed, r#"{"v":1,"s":2}"#),
            (AppServerAttestationStatus::RequestCanceled, r#"{"v":1,"s":3}"#),
            (AppServerAttestationStatus::MalformedResponse, r#"{"v":1,"s":4}"#),
        ];
        for (status, expected) in failure_cases {
            assert_eq!(
                app_server_attestation_header_value(status, /*token*/ None),
                Some(expected.to_string())
            );
        }
    }
}

View File

@@ -49,6 +49,7 @@ use codex_app_server_protocol::RawResponseItemCompletedNotification;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::SkillsChangedNotification;
use codex_app_server_protocol::ThreadGoalUpdatedNotification;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadRealtimeClosedNotification;
@@ -193,6 +194,13 @@ pub(crate) async fn apply_bespoke_event_handling(
)
.await;
}
EventMsg::SkillsUpdateAvailable => {
outgoing
.send_server_notification(ServerNotification::SkillsChanged(
SkillsChangedNotification {},
))
.await;
}
EventMsg::McpStartupUpdate(update) => {
let (status, error) = match update.status {
codex_protocol::protocol::McpStartupStatus::Starting => {

View File

@@ -8,12 +8,12 @@ use codex_app_server_protocol::FsWatchParams;
use codex_app_server_protocol::FsWatchResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::ServerNotification;
use codex_file_watcher::FileWatcher;
use codex_file_watcher::FileWatcherEvent;
use codex_file_watcher::FileWatcherSubscriber;
use codex_file_watcher::Receiver;
use codex_file_watcher::WatchPath;
use codex_file_watcher::WatchRegistration;
use codex_core::file_watcher::FileWatcher;
use codex_core::file_watcher::FileWatcherEvent;
use codex_core::file_watcher::FileWatcherSubscriber;
use codex_core::file_watcher::Receiver;
use codex_core::file_watcher::WatchPath;
use codex_core::file_watcher::WatchRegistration;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::hash_map::Entry;

View File

@@ -8,6 +8,7 @@ use codex_config::RemoteThreadConfigLoader;
use codex_config::ThreadConfigLoader;
use codex_core::config::Config;
use codex_core::resolve_installation_id;
use codex_exec_server::EnvironmentManagerArgs;
use codex_features::Feature;
use codex_login::AuthManager;
use codex_utils_cli::CliConfigOverrides;
@@ -30,7 +31,6 @@ use crate::outgoing_message::QueuedOutgoingMessage;
use crate::transport::CHANNEL_CAPACITY;
use crate::transport::ConnectionState;
use crate::transport::OutboundConnectionState;
use crate::transport::RemoteControlStartConfig;
use crate::transport::TransportEvent;
use crate::transport::auth::policy_from_settings;
use crate::transport::route_outgoing_envelope;
@@ -74,7 +74,6 @@ use tracing_subscriber::util::SubscriberInitExt;
mod analytics_utils;
mod app_server_tracing;
mod attestation;
mod bespoke_event_handling;
mod command_exec;
mod config;
@@ -94,7 +93,6 @@ mod outgoing_message;
mod request_processors;
mod request_serialization;
mod server_request_error;
mod skills_watcher;
mod thread_state;
mod thread_status;
mod transport;
@@ -161,33 +159,22 @@ enum ShutdownAction {
Finish,
}
#[derive(Clone, Copy)]
enum ShutdownSignal {
Forceable,
#[cfg(unix)]
GracefulOnly,
}
async fn shutdown_signal() -> IoResult<ShutdownSignal> {
async fn shutdown_signal() -> IoResult<()> {
#[cfg(unix)]
{
use tokio::signal::unix::SignalKind;
use tokio::signal::unix::signal;
let mut term = signal(SignalKind::terminate())?;
let mut hangup = signal(SignalKind::hangup())?;
tokio::select! {
ctrl_c_result = tokio::signal::ctrl_c() => ctrl_c_result.map(|_| ShutdownSignal::Forceable),
_ = term.recv() => Ok(ShutdownSignal::Forceable),
_ = hangup.recv() => Ok(ShutdownSignal::GracefulOnly),
ctrl_c_result = tokio::signal::ctrl_c() => ctrl_c_result,
_ = term.recv() => Ok(()),
}
}
#[cfg(not(unix))]
{
tokio::signal::ctrl_c()
.await
.map(|_| ShutdownSignal::Forceable)
tokio::signal::ctrl_c().await
}
}
@@ -200,16 +187,9 @@ impl ShutdownState {
self.forced
}
fn on_signal(
&mut self,
signal: ShutdownSignal,
connection_count: usize,
running_turn_count: usize,
) {
fn on_signal(&mut self, connection_count: usize, running_turn_count: usize) {
if self.requested {
if matches!(signal, ShutdownSignal::Forceable) {
self.forced = true;
}
self.forced = true;
return;
}
@@ -439,6 +419,15 @@ pub async fn run_main_with_transport_options(
auth: AppServerWebsocketAuthSettings,
runtime_options: AppServerRuntimeOptions,
) -> IoResult<()> {
let environment_manager = Arc::new(
EnvironmentManager::new(EnvironmentManagerArgs::new(
ExecServerRuntimePaths::from_optional_paths(
arg0_paths.codex_self_exe.clone(),
arg0_paths.codex_linux_sandbox_exe.clone(),
)?,
))
.await,
);
let (transport_event_tx, mut transport_event_rx) =
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingEnvelope>(CHANNEL_CAPACITY);
@@ -454,17 +443,6 @@ pub async fn run_main_with_transport_options(
)
})?;
let codex_home = find_codex_home()?;
let local_runtime_paths = ExecServerRuntimePaths::from_optional_paths(
arg0_paths.codex_self_exe.clone(),
arg0_paths.codex_linux_sandbox_exe.clone(),
)?;
let environment_manager = if loader_overrides.ignore_user_config {
EnvironmentManager::from_env(local_runtime_paths).await
} else {
EnvironmentManager::from_codex_home(codex_home.clone(), local_runtime_paths).await
}
.map(Arc::new)
.map_err(std::io::Error::other)?;
let config_manager = ConfigManager::new(
codex_home.to_path_buf(),
cli_kv_overrides.clone(),
@@ -708,10 +686,7 @@ pub async fn run_main_with_transport_options(
}
let (remote_control_accept_handle, remote_control_handle) = start_remote_control(
RemoteControlStartConfig {
remote_control_url: config.chatgpt_base_url.clone(),
installation_id: installation_id.clone(),
},
config.chatgpt_base_url.clone(),
state_db.clone(),
auth_manager.clone(),
transport_event_tx.clone(),
@@ -833,15 +808,11 @@ pub async fn run_main_with_transport_options(
tokio::select! {
shutdown_signal_result = shutdown_signal(), if graceful_signal_restart_enabled && !shutdown_state.forced() => {
let signal = match shutdown_signal_result {
Ok(signal) => signal,
Err(err) => {
warn!("failed to listen for shutdown signal during graceful restart drain: {err}");
continue;
}
};
if let Err(err) = shutdown_signal_result {
warn!("failed to listen for shutdown signal during graceful restart drain: {err}");
}
let running_turn_count = *running_turn_count_rx.borrow();
shutdown_state.on_signal(signal, connections.len(), running_turn_count);
shutdown_state.on_signal(connections.len(), running_turn_count);
}
changed = running_turn_count_rx.changed(), if graceful_signal_restart_enabled && shutdown_state.requested() => {
if changed.is_err() {
@@ -962,14 +933,7 @@ pub async fn run_main_with_transport_options(
),
)
.await;
processor
.connection_initialized(
connection_id,
connection_state
.session
.request_attestation(),
)
.await;
processor.connection_initialized(connection_id).await;
connection_state
.outbound_initialized
.store(true, std::sync::atomic::Ordering::Release);
@@ -1013,7 +977,6 @@ pub async fn run_main_with_transport_options(
.send_server_notification(ServerNotification::RemoteControlStatusChanged(
RemoteControlStatusChangedNotification {
status: status.status,
installation_id: status.installation_id,
environment_id: status.environment_id,
},
))

View File

@@ -187,7 +187,6 @@ mod tests {
thread_store,
Some(state_db.clone()),
"11111111-1111-4111-8111-111111111111".to_string(),
/*attestation_provider*/ None,
));
thread_manager.start_thread(good_config).await?;
thread_manager.start_thread(bad_config).await?;

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use std::sync::OnceLock;
use std::sync::atomic::AtomicBool;
use crate::attestation::app_server_attestation_provider;
use crate::config_manager::ConfigManager;
use crate::connection_rpc_gate::ConnectionRpcGate;
use crate::error_code::invalid_request;
@@ -18,7 +17,6 @@ use crate::request_processors::AppsRequestProcessor;
use crate::request_processors::CatalogRequestProcessor;
use crate::request_processors::CommandExecRequestProcessor;
use crate::request_processors::ConfigRequestProcessor;
use crate::request_processors::EnvironmentRequestProcessor;
use crate::request_processors::ExternalAgentConfigRequestProcessor;
use crate::request_processors::FeedbackRequestProcessor;
use crate::request_processors::FsRequestProcessor;
@@ -36,8 +34,6 @@ use crate::request_processors::WindowsSandboxRequestProcessor;
use crate::request_serialization::QueuedInitializedRequest;
use crate::request_serialization::RequestSerializationQueueKey;
use crate::request_serialization::RequestSerializationQueues;
use crate::skills_watcher::SkillsWatcher;
use crate::thread_state::ConnectionCapabilities;
use crate::thread_state::ThreadStateManager;
use crate::transport::AppServerTransport;
use crate::transport::RemoteControlHandle;
@@ -86,7 +82,6 @@ use tokio::time::timeout;
use tracing::Instrument;
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
#[derive(Clone)]
struct ExternalAuthRefreshBridge {
outgoing: Arc<OutgoingMessageSender>,
@@ -163,7 +158,6 @@ pub(crate) struct MessageProcessor {
command_exec_processor: CommandExecRequestProcessor,
process_exec_processor: ProcessExecRequestProcessor,
config_processor: ConfigRequestProcessor,
environment_processor: EnvironmentRequestProcessor,
external_agent_config_processor: ExternalAgentConfigRequestProcessor,
feedback_processor: FeedbackRequestProcessor,
fs_processor: FsRequestProcessor,
@@ -192,7 +186,6 @@ pub(crate) struct InitializedConnectionSessionState {
pub(crate) opted_out_notification_methods: HashSet<String>,
pub(crate) app_server_client_name: String,
pub(crate) client_version: String,
pub(crate) request_attestation: bool,
}
impl Default for ConnectionSessionState {
@@ -238,12 +231,6 @@ impl ConnectionSessionState {
.map(|session| session.client_version.as_str())
}
pub(crate) fn request_attestation(&self) -> bool {
self.initialized
.get()
.is_some_and(|session| session.request_attestation)
}
pub(crate) fn initialize(&self, session: InitializedConnectionSessionState) -> Result<(), ()> {
self.initialized.set(session).map_err(|_| ())
}
@@ -293,7 +280,6 @@ impl MessageProcessor {
auth_manager.set_external_auth(Arc::new(ExternalAuthRefreshBridge {
outgoing: outgoing.clone(),
}));
let thread_state_manager = ThreadStateManager::new();
// The thread store is intentionally process-scoped. Config reloads can
// affect per-thread behavior, but they must not move newly started,
// resumed, or forked threads to a different persistence backend/root.
@@ -307,17 +293,13 @@ impl MessageProcessor {
Arc::clone(&thread_store),
state_db.clone(),
installation_id,
Some(app_server_attestation_provider(
outgoing.clone(),
thread_state_manager.clone(),
)),
));
thread_manager
.plugins_manager()
.set_analytics_events_client(analytics_events_client.clone());
let skills_watcher = SkillsWatcher::new(thread_manager.skills_manager(), outgoing.clone());
let pending_thread_unloads = Arc::new(Mutex::new(HashSet::new()));
let thread_state_manager = ThreadStateManager::new();
let thread_watch_manager =
crate::thread_status::ThreadWatchManager::new_with_outgoing(outgoing.clone());
let thread_list_state_permit = Arc::new(Semaphore::new(/*permits*/ 1));
@@ -407,7 +389,6 @@ impl MessageProcessor {
Arc::clone(&thread_list_state_permit),
thread_goal_processor.clone(),
state_db,
Arc::clone(&skills_watcher),
);
let turn_processor = TurnRequestProcessor::new(
auth_manager.clone(),
@@ -421,7 +402,6 @@ impl MessageProcessor {
thread_state_manager,
thread_watch_manager,
thread_list_state_permit,
Arc::clone(&skills_watcher),
);
if matches!(plugin_startup_tasks, crate::PluginStartupTasks::Start) {
// Keep plugin startup warmups aligned at app-server startup.
@@ -452,8 +432,6 @@ impl MessageProcessor {
arg0_paths,
config.codex_home.to_path_buf(),
);
let environment_processor =
EnvironmentRequestProcessor::new(thread_manager.environment_manager());
let fs_processor = FsRequestProcessor::new(
thread_manager
.environment_manager()
@@ -475,7 +453,6 @@ impl MessageProcessor {
command_exec_processor,
process_exec_processor,
config_processor,
environment_processor,
external_agent_config_processor,
feedback_processor,
fs_processor,
@@ -643,18 +620,9 @@ impl MessageProcessor {
.await;
}
pub(crate) async fn connection_initialized(
&self,
connection_id: ConnectionId,
request_attestation: bool,
) {
pub(crate) async fn connection_initialized(&self, connection_id: ConnectionId) {
self.thread_processor
.connection_initialized(
connection_id,
ConnectionCapabilities {
request_attestation,
},
)
.connection_initialized(connection_id)
.await;
}
@@ -750,12 +718,7 @@ impl MessageProcessor {
.await?;
if connection_initialized {
self.thread_processor
.connection_initialized(
connection_id,
ConnectionCapabilities {
request_attestation: session.request_attestation(),
},
)
.connection_initialized(connection_id)
.await;
}
return Ok(());
@@ -887,9 +850,6 @@ impl MessageProcessor {
.config_requirements_read()
.await
.map(|response| Some(response.into())),
ClientRequest::EnvironmentAdd { params, .. } => {
self.environment_processor.environment_add(params).await
}
ClientRequest::FsReadFile { params, .. } => self
.fs_processor
.read_file(params)

Some files were not shown because too many files have changed in this diff Show More