Compare commits

..

10 Commits

Author SHA1 Message Date
Felipe Coury
c0a6795347 refactor(worktree): route interactive flow through app-server 2026-05-09 21:36:06 -03:00
Felipe Coury
5dc9cf6907 fix(worktree): preserve remote dirty transfers 2026-05-09 16:30:58 -03:00
Felipe Coury
93317c151d feat(worktree): fill remaining worktree gaps 2026-05-09 12:52:13 -03:00
Felipe Coury
6e460f31cd feat(worktree): add move-all dirty policy 2026-05-08 14:13:46 -03:00
Felipe Coury
700f1e4a38 fix(tui): make current worktree selection a no-op 2026-05-08 10:08:26 -03:00
Felipe Coury
1c604c0be6 fix(worktree): name siblings from primary checkout 2026-05-07 20:47:52 -03:00
Felipe Coury
1b31e12444 feat(tui): create worktrees from slash command 2026-05-07 20:41:34 -03:00
Felipe Coury
4f3955ff91 fix(tui): keep worktree switching responsive 2026-05-06 23:58:02 -03:00
Felipe Coury
250390cb76 feat(tui): add worktree slash command 2026-05-06 21:07:37 -03:00
Felipe Coury
5a6efcf183 feat(cli): add managed worktree workflow 2026-05-06 19:44:04 -03:00
827 changed files with 26836 additions and 29964 deletions

View File

@@ -2,6 +2,7 @@ name: 💻 CLI Bug
description: Report an issue in the Codex CLI
labels:
- bug
- needs triage
body:
- type: markdown
attributes:
@@ -40,9 +41,9 @@ body:
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
Also note any multiplexer in use (screen / tmux / zellij).
E.g., VS Code, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
E.g., VS Code, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:

View File

@@ -10,7 +10,7 @@ body:
Before you submit a feature:
1. Search existing issues for similar features. If you find one, 👍 it rather than opening a new one.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex/blob/main/docs/contributing.md) for more details.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex#contributing) for more details.
- type: input
id: variant

View File

@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
description: Tell us if there is missing or incorrect documentation
labels: [documentation]
labels: [docs]
body:
- type: markdown
attributes:
@@ -24,4 +24,4 @@ body:
- type: textarea
attributes:
label: Where did you find it?
description: If possible, please provide the URL(s) where you found this issue.
description: If possible, please provide the URL(s) where you found this issue.

View File

@@ -50,7 +50,7 @@ runs:
- name: Restore bazel repository cache
id: cache_bazel_repository_restore
continue-on-error: true
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
key: ${{ steps.cache_bazel_repository_key.outputs.repository-cache-key }}

View File

@@ -30,7 +30,7 @@ runs:
using: composite
steps:
- name: Azure login for Trusted Signing (OIDC)
uses: azure/login@a457da9ea143d694b1b9c7c869ebb04ebe844ef5 # v2.3.0
uses: azure/login@a457da9ea143d694b1b9c7c869ebb04ebe844ef5 # v2
with:
client-id: ${{ inputs.client-id }}
tenant-id: ${{ inputs.tenant-id }}
@@ -54,7 +54,7 @@ runs:
} >> "$GITHUB_OUTPUT"
- name: Sign Windows binaries with Azure Trusted Signing
uses: azure/trusted-signing-action@1d365fec12862c4aa68fcac418143d73f0cea293 # v0.5.11
uses: azure/trusted-signing-action@1d365fec12862c4aa68fcac418143d73f0cea293 # v0
with:
endpoint: ${{ inputs.endpoint }}
trusted-signing-account-name: ${{ inputs.account-name }}

View File

@@ -6,37 +6,25 @@ updates:
directory: .github/actions/codex
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: cargo
directories:
- codex-rs
- codex-rs/*
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: devcontainers
directory: /
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: docker
directory: codex-cli
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: rust-toolchain
directory: codex-rs
schedule:
interval: weekly
cooldown:
default-days: 7

View File

@@ -56,10 +56,7 @@ jobs:
name: Bazel test on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check rusty_v8 MODULE.bazel checksums
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
@@ -125,7 +122,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-test-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -136,7 +133,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -151,10 +148,7 @@ jobs:
name: Bazel test on windows-latest for x86_64-pc-windows-gnullvm (native main)
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -201,7 +195,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-test-windows-native-x86_64-pc-windows-gnullvm
path: ${{ runner.temp }}/bazel-execution-logs
@@ -212,7 +206,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -237,10 +231,7 @@ jobs:
name: Bazel clippy on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -295,7 +286,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-clippy-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -306,7 +297,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -327,10 +318,7 @@ jobs:
name: Verify release build on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -402,7 +390,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-verify-release-build-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -413,7 +401,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}

View File

@@ -8,19 +8,17 @@ jobs:
name: Blob size policy
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
fetch-depth: 0
persist-credentials: false
- name: Determine PR comparison range
id: range
shell: bash
run: |
set -euo pipefail
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
echo "head=${{ github.event.pull_request.head.sha }}" >> "$GITHUB_OUTPUT"
echo "base=$(git rev-parse HEAD^1)" >> "$GITHUB_OUTPUT"
echo "head=$(git rev-parse HEAD^2)" >> "$GITHUB_OUTPUT"
- name: Check changed blob sizes
env:

View File

@@ -14,10 +14,7 @@ jobs:
working-directory: ./codex-rs
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0

View File

@@ -12,10 +12,7 @@ jobs:
NODE_OPTIONS: --max-old-space-size=4096
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Verify codex-rs Cargo manifests inherit workspace settings
run: python3 .github/scripts/verify_cargo_workspace_manifests.py
@@ -32,7 +29,7 @@ jobs:
run_install: false
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
@@ -66,7 +63,7 @@ jobs:
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
- name: Upload staged npm package artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-npm-staging
path: ${{ steps.stage_npm_package.outputs.pack_output }}

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Close inactive PRs from contributors
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@@ -18,12 +18,9 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1.1.0
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
with:

View File

@@ -19,9 +19,7 @@ jobs:
reason: ${{ steps.normalize-all.outputs.reason }}
has_matches: ${{ steps.normalize-all.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Codex inputs
env:
@@ -157,9 +155,7 @@ jobs:
reason: ${{ steps.normalize-open.outputs.reason }}
has_matches: ${{ steps.normalize-open.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Codex inputs
env:
@@ -346,7 +342,7 @@ jobs:
issues: write
steps:
- name: Comment on issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
CODEX_OUTPUT: ${{ needs.select-final.outputs.codex_output }}
with:

View File

@@ -17,9 +17,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- id: codex
uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7

View File

@@ -7,11 +7,6 @@ on:
workflow_dispatch:
# CI builds in debug (dev) for faster signal.
env:
# Cargo's libgit2 transport has been flaky on macOS when fetching git
# dependencies with nested submodules. Use the system git CLI, which has
# better network/proxy behavior and matches Cargo's own suggested fallback.
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
jobs:
# --- CI that doesn't need specific targets ---------------------------------
@@ -22,9 +17,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -38,15 +31,14 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear@1.11.2
tool: cargo-shear
version: 1.5.1
- name: cargo shear
run: cargo shear --deny-warnings
run: cargo shear
argument_comment_lint_package:
name: Argument comment lint package
@@ -55,16 +47,14 @@ jobs:
CARGO_DYLINT_VERSION: 5.0.0
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
toolchain: nightly-2025-09-18
components: llvm-tools-preview, rustc-dev, rust-src
- name: Cache cargo-dylint tooling
id: cargo_dylint_cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/cargo-dylint
@@ -107,9 +97,7 @@ jobs:
group: codex-runners
labels: codex-windows-x64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: ./.github/actions/setup-bazel-ci
with:
target: ${{ runner.os }}
@@ -245,9 +233,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -290,7 +276,7 @@ jobs:
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -308,7 +294,7 @@ jobs:
# Install and restore sccache cache
- name: Install sccache
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -335,7 +321,7 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -362,7 +348,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Restore APT cache (musl)
id: cache_apt_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
/var/cache/apt
@@ -370,7 +356,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
with:
version: 0.14.0
@@ -444,7 +430,7 @@ jobs:
- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-chef
version: 0.1.71
@@ -463,7 +449,7 @@ jobs:
- name: Upload Cargo timings (clippy)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-ci-clippy-${{ matrix.target }}-${{ matrix.profile }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -474,7 +460,7 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -490,7 +476,7 @@ jobs:
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -515,7 +501,7 @@ jobs:
- name: Save APT cache (musl)
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
/var/cache/apt
@@ -573,9 +559,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -583,7 +567,7 @@ jobs:
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev bubblewrap
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev
fi
# Some integration tests rely on DotSlash being installed.
@@ -606,7 +590,7 @@ jobs:
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -619,7 +603,7 @@ jobs:
- name: Install sccache
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -646,7 +630,7 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -654,7 +638,7 @@ jobs:
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: nextest
version: 0.9.103
@@ -690,7 +674,7 @@ jobs:
- name: Upload Cargo timings (nextest)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-ci-nextest-${{ matrix.target }}-${{ matrix.profile }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -699,7 +683,7 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -711,7 +695,7 @@ jobs:
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -738,12 +722,10 @@ jobs:
shell: bash
run: |
set +e
if [[ "${STEPS_TEST_OUTCOME}" != "success" ]]; then
if [[ "${{ steps.test.outcome }}" != "success" ]]; then
docker logs codex-remote-test-env || true
fi
docker rm -f codex-remote-test-env >/dev/null 2>&1 || true
env:
STEPS_TEST_OUTCOME: ${{ steps.test.outcome }}
- name: verify tests passed
if: steps.test.outcome == 'failure'

View File

@@ -14,11 +14,9 @@ jobs:
codex: ${{ steps.detect.outputs.codex }}
workflows: ${{ steps.detect.outputs.workflows }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
fetch-depth: 0
persist-credentials: false
- name: Detect changed paths (no external action)
id: detect
shell: bash
@@ -63,10 +61,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -82,16 +77,14 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear@1.11.2
tool: cargo-shear
version: 1.5.1
- name: cargo shear
run: cargo shear --deny-warnings
run: cargo shear
argument_comment_lint_package:
name: Argument comment lint package
@@ -102,10 +95,7 @@ jobs:
CARGO_DYLINT_VERSION: 5.0.0
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Install nightly argument-comment-lint toolchain
shell: bash
@@ -119,7 +109,7 @@ jobs:
rustup default nightly-2025-09-18
- name: Cache cargo-dylint tooling
id: cargo_dylint_cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/cargo-dylint
@@ -180,11 +170,8 @@ jobs:
echo "No argument-comment-lint relevant changes."
echo "run=false" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Run argument comment lint on codex-rs via Bazel
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
uses: ./.github/actions/run-argument-comment-lint
@@ -216,25 +203,20 @@ jobs:
# If nothing relevant changed (PR touching only root README, etc.),
# declare success regardless of other jobs.
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_CODEX}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" != 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' != 'true' && '${{ needs.changed.outputs.codex }}' != 'true' && '${{ needs.changed.outputs.workflows }}' != 'true' ]]; then
echo 'No relevant changes -> CI not required.'
exit 0
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint_package }}' == 'true' ]]; then
[[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; }
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
[[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; }
fi
if [[ "${NEEDS_CHANGED_OUTPUTS_CODEX}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
if [[ '${{ needs.changed.outputs.codex }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
fi
env:
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT: ${{ needs.changed.outputs.argument_comment_lint }}
NEEDS_CHANGED_OUTPUTS_CODEX: ${{ needs.changed.outputs.codex }}
NEEDS_CHANGED_OUTPUTS_WORKFLOWS: ${{ needs.changed.outputs.workflows }}
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE: ${{ needs.changed.outputs.argument_comment_lint_package }}

View File

@@ -56,9 +56,7 @@ jobs:
labels: codex-windows-x64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
@@ -102,7 +100,7 @@ jobs:
(cd "${RUNNER_TEMP}" && tar -czf "$GITHUB_WORKSPACE/$archive_path" argument-comment-lint)
fi
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: argument-comment-lint-${{ matrix.target }}
path: dist/argument-comment-lint/${{ matrix.target }}/*

View File

@@ -18,11 +18,10 @@ jobs:
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: main
fetch-depth: 0
persist-credentials: false
- name: Update models.json
env:
@@ -44,7 +43,7 @@ jobs:
curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/models-manager/models.json
- name: Open pull request (if changed)
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8
with:
commit-message: "Update models.json"
title: "Update models.json"

View File

@@ -83,9 +83,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Print runner specs (Windows)
shell: powershell
run: |
@@ -114,7 +112,7 @@ jobs:
cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}"
- name: Upload Cargo timings
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-release-windows-${{ matrix.target }}-${{ matrix.bundle }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -130,7 +128,7 @@ jobs:
done
- name: Upload Windows binaries
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: windows-binaries-${{ matrix.target }}-${{ matrix.bundle }}
path: |
@@ -167,24 +165,22 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Download prebuilt Windows primary binaries
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-primary
path: codex-rs/target/${{ matrix.target }}/release
- name: Download prebuilt Windows helper binaries
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-helpers
path: codex-rs/target/${{ matrix.target }}/release
- name: Download prebuilt Windows app-server binary
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-app-server
path: codex-rs/target/${{ matrix.target }}/release
@@ -220,48 +216,6 @@ jobs:
"$dest/${binary}-${{ matrix.target }}.exe"
done
- name: Build Python runtime wheel
shell: bash
run: |
set -euo pipefail
case "${{ matrix.target }}" in
aarch64-pc-windows-msvc)
platform_tag="win_arm64"
;;
x86_64-pc-windows-msvc)
platform_tag="win_amd64"
;;
*)
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
exit 1
;;
esac
python -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m pip install build
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
# Keep the helpers next to codex.exe in the runtime wheel so Windows
# sandbox/elevation lookup matches the standalone release zip.
python "${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py" \
stage-runtime \
"$stage_dir" \
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex.exe" \
--codex-version "${GITHUB_REF_NAME}" \
--platform-tag "$platform_tag" \
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-command-runner.exe" \
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe"
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
- name: Upload Python runtime wheel
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: python-runtime-wheel-${{ matrix.target }}
path: python-runtime-dist/${{ matrix.target }}/*.whl
if-no-files-found: error
- name: Install DotSlash
uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2
@@ -327,7 +281,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/workflows/zstd" -T0 -19 "$dest/$base"
done
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: ${{ matrix.target }}
path: |

View File

@@ -45,9 +45,7 @@ jobs:
git \
libncursesw5-dev
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Build, smoke-test, and stage zsh artifact
shell: bash
@@ -55,7 +53,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \
"dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}"
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-zsh-${{ matrix.target }}
path: dist/zsh/${{ matrix.target }}/*
@@ -83,9 +81,7 @@ jobs:
brew install autoconf
fi
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Build, smoke-test, and stage zsh artifact
shell: bash
@@ -93,7 +89,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \
"dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}"
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-zsh-${{ matrix.target }}
path: dist/zsh/${{ matrix.target }}/*

View File

@@ -19,9 +19,7 @@ jobs:
tag-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Validate tag matches Cargo.toml version
shell: bash
@@ -120,9 +118,7 @@ jobs:
build_dmg: "false"
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Print runner specs (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -185,10 +181,9 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
with:
version: 0.14.0
use-cache: false
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
@@ -289,7 +284,7 @@ jobs:
cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}"
- name: Upload Cargo timings
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-release-${{ matrix.target }}-${{ matrix.bundle }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -399,65 +394,6 @@ jobs:
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
fi
- name: Build Python runtime wheel
if: ${{ matrix.bundle == 'primary' }}
shell: bash
run: |
set -euo pipefail
case "${{ matrix.target }}" in
aarch64-apple-darwin)
platform_tag="macosx_11_0_arm64"
;;
x86_64-apple-darwin)
platform_tag="macosx_10_9_x86_64"
;;
aarch64-unknown-linux-musl)
platform_tag="musllinux_1_1_aarch64"
;;
x86_64-unknown-linux-musl)
platform_tag="musllinux_1_1_x86_64"
;;
*)
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
exit 1
;;
esac
python3 -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
# Do not install into the runner's system Python; macOS runners mark
# the Homebrew Python as externally managed under PEP 668.
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m pip install build
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
stage_runtime_args=(
"${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py"
stage-runtime
"$stage_dir"
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex"
--codex-version "${GITHUB_REF_NAME}"
--platform-tag "$platform_tag"
)
if [[ "${{ matrix.target }}" == *linux* ]]; then
# Keep bwrap in the runtime wheel so Linux sandbox fallback behavior
# matches the standalone release bundle on hosts without system bwrap.
stage_runtime_args+=(
--resource-binary
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/bwrap"
)
fi
python3 "${stage_runtime_args[@]}"
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
- name: Upload Python runtime wheel
if: ${{ matrix.bundle == 'primary' }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: python-runtime-wheel-${{ matrix.target }}
path: python-runtime-dist/${{ matrix.target }}/*.whl
if-no-files-found: error
- name: Compress artifacts
shell: bash
run: |
@@ -494,7 +430,7 @@ jobs:
zstd -T0 -19 --rm "$dest/$base"
done
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: ${{ matrix.artifact_name }}
# Upload the per-binary .zst files, .tar.gz equivalents, and any
@@ -537,13 +473,10 @@ jobs:
tag: ${{ github.ref_name }}
should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
should_publish_python_runtime: ${{ steps.python_runtime_publish_settings.outputs.should_publish }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Generate release notes from tag commit message
id: release_notes
@@ -565,7 +498,7 @@ jobs:
echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
path: dist
@@ -614,29 +547,13 @@ jobs:
echo "npm_tag=" >> "$GITHUB_OUTPUT"
fi
- name: Determine Python runtime publish settings
id: python_runtime_publish_settings
env:
VERSION: ${{ steps.release_name.outputs.name }}
run: |
set -euo pipefail
version="${VERSION}"
if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
else
echo "should_publish=false" >> "$GITHUB_OUTPUT"
fi
- name: Setup pnpm
uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5
with:
run_install: false
- name: Setup Node.js for npm packaging
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
@@ -662,7 +579,7 @@ jobs:
cp scripts/install/install.ps1 dist/install.ps1
- name: Create GitHub Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
@@ -721,7 +638,7 @@ jobs:
steps:
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
# Node 24 bundles npm >= 11.5.1, which trusted publishing requires.
node-version: 24
@@ -863,48 +780,6 @@ jobs:
exit "${publish_status}"
done
# Publish the platform-specific Python runtime wheels using PyPI trusted publishing.
# PyPI project configuration must trust this workflow and job. Keep this
# non-blocking while the Python runtime publishing path is new; failures still
# need release follow-up, but should not invalidate the Rust release itself.
publish-python-runtime:
# Publish to PyPI for stable releases and alpha pre-releases with numeric suffixes.
if: ${{ needs.release.outputs.should_publish_python_runtime == 'true' }}
name: publish-python-runtime
needs: release
runs-on: ubuntu-latest
continue-on-error: true
environment: pypi
permissions:
id-token: write # Required for PyPI trusted publishing.
contents: read
steps:
- name: Download Python runtime wheels from release
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_TAG: ${{ needs.release.outputs.tag }}
RELEASE_VERSION: ${{ needs.release.outputs.version }}
run: |
set -euo pipefail
python_version="$RELEASE_VERSION"
python_version="${python_version/-alpha./a}"
python_version="${python_version/-beta./b}"
python_version="${python_version/-rc./rc}"
mkdir -p dist/python-runtime
gh release download "$RELEASE_TAG" \
--repo "${GITHUB_REPOSITORY}" \
--pattern "openai_codex_cli_bin-${python_version}-*.whl" \
--dir dist/python-runtime
ls -lh dist/python-runtime
- name: Publish Python runtime wheels to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
packages-dir: dist/python-runtime
skip-existing: true
winget:
name: winget
needs: release

View File

@@ -17,12 +17,10 @@ jobs:
v8_version: ${{ steps.v8_version.outputs.version }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -71,9 +69,7 @@ jobs:
target: aarch64-unknown-linux-musl
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci
@@ -81,7 +77,7 @@ jobs:
target: ${{ matrix.target }}
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -137,7 +133,7 @@ jobs:
--output-dir "dist/${TARGET}"
- name: Upload staged musl artifacts
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: rusty-v8-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }}
path: dist/${{ matrix.target }}/*
@@ -165,12 +161,12 @@ jobs:
exit 1
fi
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
path: dist
- name: Create GitHub Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2
with:
tag_name: ${{ needs.metadata.outputs.release_tag }}
name: ${{ needs.metadata.outputs.release_tag }}

View File

@@ -6,39 +6,6 @@ on:
pull_request: {}
jobs:
python-sdk:
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- name: Test Python SDK
shell: bash
run: |
set -euo pipefail
# Run inside Alpine so dependency resolution exercises the pinned
# runtime wheel on the same Linux wheel family that CI installs.
docker run --rm \
--user "$(id -u):$(id -g)" \
-e HOME=/tmp/codex-python-sdk-home \
-e UV_LINK_MODE=copy \
-v "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}" \
-w "${GITHUB_WORKSPACE}/sdk/python" \
python:3.12-alpine \
sh -euxc '
python -m venv /tmp/uv
/tmp/uv/bin/python -m pip install uv==0.11.3
/tmp/uv/bin/uv sync --extra dev --frozen
/tmp/uv/bin/uv run --extra dev pytest
'
sdks:
runs-on:
group: codex-runners
@@ -46,10 +13,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux bwrap build dependencies
shell: bash
@@ -64,7 +28,7 @@ jobs:
run_install: false
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
cache: pnpm
@@ -151,7 +115,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.setup_bazel.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cache/bazel-repo-cache

View File

@@ -40,13 +40,10 @@ jobs:
v8_version: ${{ steps.v8_version.outputs.version }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -77,10 +74,7 @@ jobs:
target: aarch64-unknown-linux-musl
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci
@@ -88,7 +82,7 @@ jobs:
target: ${{ matrix.target }}
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -138,7 +132,7 @@ jobs:
--output-dir "dist/${TARGET}"
- name: Upload staged musl artifacts
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: v8-canary-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }}
path: dist/${{ matrix.target }}/*

View File

@@ -26,7 +26,7 @@ In the codex-rs folder where the rust code lives:
- Implementations may still use `async fn foo(&self, ...) -> T` when they satisfy that contract.
- Do not use `#[allow(async_fn_in_trait)]` as a shortcut around spelling the future contract explicitly.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- Do not add general product or user-facing documentation to the `docs/` folder. The official Codex documentation lives elsewhere. The exception is app-server API documentation, which is covered by the app-server guidance below.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- Prefer private modules and explicitly exported public crate API.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls.
@@ -130,7 +130,7 @@ When UI or text output changes intentionally, update the snapshots as follows:
If you dont have the tool:
- `cargo install --locked cargo-insta`
- `cargo install cargo-insta`
### Test assertions
@@ -210,7 +210,7 @@ These guidelines apply to app-server protocol work in `codex-rs`, especially:
### Development Workflow
- Update app-server docs/examples when API behavior changes (at minimum `app-server/README.md`).
- Update docs/examples when API behavior changes (at minimum `app-server/README.md`).
- Regenerate schema fixtures when API shapes change:
`just write-app-server-schema`
(and `just write-app-server-schema --experimental` when experimental API fixtures are affected).

1
MODULE.bazel.lock generated
View File

@@ -665,7 +665,6 @@
"aws-lc-rs_1.16.2": "{\"dependencies\":[{\"name\":\"aws-lc-fips-sys\",\"optional\":true,\"req\":\"^0.13.1\"},{\"default_features\":false,\"name\":\"aws-lc-sys\",\"optional\":true,\"req\":\"^0.39.0\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"clap\",\"req\":\"^4.4\"},{\"kind\":\"dev\",\"name\":\"hex\",\"req\":\"^0.4.3\"},{\"kind\":\"dev\",\"name\":\"lazy_static\",\"req\":\"^1.5.0\"},{\"kind\":\"dev\",\"name\":\"paste\",\"req\":\"^1.0.15\"},{\"kind\":\"dev\",\"name\":\"regex\",\"req\":\"^1.11.1\"},{\"name\":\"untrusted\",\"optional\":true,\"req\":\"^0.7.1\"},{\"name\":\"zeroize\",\"req\":\"^1.8.1\"}],\"features\":{\"alloc\":[],\"asan\":[\"aws-lc-sys?/asan\",\"aws-lc-fips-sys?/asan\"],\"bindgen\":[\"aws-lc-sys?/bindgen\",\"aws-lc-fips-sys?/bindgen\"],\"default\":[\"aws-lc-sys\",\"alloc\",\"ring-io\",\"ring-sig-verify\"],\"dev-tests-only\":[],\"fips\":[\"dep:aws-lc-fips-sys\"],\"non-fips\":[\"aws-lc-sys\"],\"prebuilt-nasm\":[\"aws-lc-sys?/prebuilt-nasm\"],\"ring-io\":[\"dep:untrusted\"],\"ring-sig-verify\":[\"dep:untrusted\"],\"test_logging\":[],\"unstable\":[]}}",
"aws-lc-sys_0.39.0": "{\"dependencies\":[{\"kind\":\"build\",\"name\":\"bindgen\",\"optional\":true,\"req\":\"^0.72.0\"},{\"features\":[\"parallel\"],\"kind\":\"build\",\"name\":\"cc\",\"req\":\"^1.2.26\"},{\"kind\":\"build\",\"name\":\"cmake\",\"req\":\"^0.1.54\"},{\"kind\":\"build\",\"name\":\"dunce\",\"req\":\"^1.0.5\"},{\"kind\":\"build\",\"name\":\"fs_extra\",\"req\":\"^1.3.0\"}],\"features\":{\"all-bindings\":[],\"asan\":[],\"bindgen\":[\"dep:bindgen\"],\"default\":[\"all-bindings\"],\"disable-prebuilt-nasm\":[],\"fips\":[\"dep:bindgen\"],\"prebuilt-nasm\":[],\"ssl\":[\"bindgen\",\"all-bindings\"]}}",
"aws-runtime_1.5.17": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"arbitrary\",\"req\":\"^1.3\"},{\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"http0-compat\"],\"name\":\"aws-sigv4\",\"req\":\"^1.3.7\"},{\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"name\":\"aws-smithy-eventstream\",\"optional\":true,\"req\":\"^0.60.14\"},{\"name\":\"aws-smithy-http\",\"req\":\"^0.62.6\"},{\"kind\":\"dev\",\"name\":\"aws-smithy-protocol-test\",\"req\":\"^0.63.7\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"name\":\"aws-types\",\"req\":\"^1.3.11\"},{\"name\":\"bytes\",\"req\":\"^1.10.0\"},{\"kind\":\"dev\",\"name\":\"bytes-utils\",\"req\":\"^0.1.2\"},{\"kind\":\"dev\",\"name\":\"convert_case\",\"req\":\"^0.6.0\"},{\"name\":\"fastrand\",\"req\":\"^2.3.0\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3.29\"},{\"name\":\"http-02x\",\"package\":\"http\",\"req\":\"^0.2.9\"},{\"name\":\"http-1x\",\"optional\":true,\"package\":\"http\",\"req\":\"^1.1.0\"},{\"name\":\"http-body-04x\",\"package\":\"http-body\",\"req\":\"^0.4.5\"},{\"name\":\"http-body-1x\",\"optional\":true,\"package\":\"http-body\",\"req\":\"^1.0.0\"},{\"name\":\"percent-encoding\",\"req\":\"^2.3.1\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.14\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1.2\"},{\"name\":\"regex-lite\",\"optional\":true,\"req\":\"^0.1.5\"},{\"features\":[\"
derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1\"},{\"features\":[\"macros\",\"rt\",\"time\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.23.1\"},{\"name\":\"tracing\",\"req\":\"^0.1.40\"},{\"features\":[\"env-filter\"],\"kind\":\"dev\",\"name\":\"tracing-subscriber\",\"req\":\"^0.3.17\"},{\"kind\":\"dev\",\"name\":\"tracing-test\",\"req\":\"^0.2.4\"},{\"name\":\"uuid\",\"req\":\"^1\"}],\"features\":{\"event-stream\":[\"dep:aws-smithy-eventstream\",\"aws-sigv4/sign-eventstream\"],\"http-02x\":[],\"http-1x\":[\"dep:http-1x\",\"dep:http-body-1x\"],\"sigv4a\":[\"aws-sigv4/sigv4a\"],\"test-util\":[\"dep:regex-lite\"]}}",
"aws-sdk-signin_1.2.0": "{\"dependencies\":[{\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"name\":\"aws-runtime\",\"req\":\"^1.5.17\"},{\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"name\":\"aws-smithy-http\",\"req\":\"^0.62.6\"},{\"name\":\"aws-smithy-json\",\"req\":\"^0.61.8\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"client\",\"http-02x\"],\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"name\":\"aws-types\",\"req\":\"^1.3.11\"},{\"name\":\"bytes\",\"req\":\"^1.4.0\"},{\"name\":\"fastrand\",\"req\":\"^2.0.0\"},{\"name\":\"http\",\"req\":\"^0.2.9\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\"},{\"name\":\"regex-lite\",\"req\":\"^0.1.5\"},{\"features\":[\"macros\",\"test-util\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.23.1\"},{\"name\":\"tracing\",\"req\":\"^0.1\"}],\"features\":{\"behavior-version-latest\":[],\"default\":[\"rustls\",\"default-https-client\",\"rt-tokio\"],\"default-https-client\":[\"aws-smithy-runtime/default-https-client\"],\"gated-tests\":[],\"rt-tokio\":[\"aws-smithy-async/rt-tokio\",\"aws-smithy-types/rt-tokio\"],\"rustls\":[\"aws-smithy-runtime/tls-rustls\"],\"test-util\":[\"aws-credential-types/test-util\",\"aws-smithy-runtime/test-util\"]}}",
"aws-sdk-sso_1.91.0": "{\"dependencies\":[{\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"name\":\"aws-runtime\",\"req\":\"^1.5.17\"},{\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"name\":\"aws-smithy-http\",\"req\":\"^0.62.6\"},{\"name\":\"aws-smithy-json\",\"req\":\"^0.61.8\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"client\",\"http-02x\"],\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"name\":\"aws-types\",\"req\":\"^1.3.11\"},{\"name\":\"bytes\",\"req\":\"^1.4.0\"},{\"name\":\"fastrand\",\"req\":\"^2.0.0\"},{\"name\":\"http\",\"req\":\"^0.2.9\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\"},{\"name\":\"regex-lite\",\"req\":\"^0.1.5\"},{\"features\":[\"macros\",\"test-util\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.23.1\"},{\"name\":\"tracing\",\"req\":\"^0.1\"}],\"features\":{\"behavior-version-latest\":[],\"default\":[\"rustls\",\"default-https-client\",\"rt-tokio\"],\"default-https-client\":[\"aws-smithy-runtime/default-https-client\"],\"gated-tests\":[],\"rt-tokio\":[\"aws-smithy-async/rt-tokio\",\"aws-smithy-types/rt-tokio\"],\"rustls\":[\"aws-smithy-runtime/tls-rustls\"],\"test-util\":[\"aws-credential-types/test-util\",\"aws-smithy-runtime/test-util\"]}}",
"aws-sdk-ssooidc_1.93.0": "{\"dependencies\":[{\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"name\":\"aws-runtime\",\"req\":\"^1.5.17\"},{\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"name\":\"aws-smithy-http\",\"req\":\"^0.62.6\"},{\"name\":\"aws-smithy-json\",\"req\":\"^0.61.8\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"client\",\"http-02x\"],\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"name\":\"aws-types\",\"req\":\"^1.3.11\"},{\"name\":\"bytes\",\"req\":\"^1.4.0\"},{\"name\":\"fastrand\",\"req\":\"^2.0.0\"},{\"name\":\"http\",\"req\":\"^0.2.9\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\"},{\"name\":\"regex-lite\",\"req\":\"^0.1.5\"},{\"features\":[\"macros\",\"test-util\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.23.1\"},{\"name\":\"tracing\",\"req\":\"^0.1\"}],\"features\":{\"behavior-version-latest\":[],\"default\":[\"rustls\",\"default-https-client\",\"rt-tokio\"],\"default-https-client\":[\"aws-smithy-runtime/default-https-client\"],\"gated-tests\":[],\"rt-tokio\":[\"aws-smithy-async/rt-tokio\",\"aws-smithy-types/rt-tokio\"],\"rustls\":[\"aws-smithy-runtime/tls-rustls\"],\"test-util\":[\"aws-credential-types/test-util\",\"aws-smithy-runtime/test-util\"]}}",
"aws-sdk-sts_1.95.0": "{\"dependencies\":[{\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-credential-types\",\"req\":\"^1.2.11\"},{\"name\":\"aws-runtime\",\"req\":\"^1.5.17\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-runtime\",\"req\":\"^1.5.17\"},{\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-async\",\"req\":\"^1.2.7\"},{\"name\":\"aws-smithy-http\",\"req\":\"^0.62.6\"},{\"features\":[\"test-util\",\"wire-mock\"],\"kind\":\"dev\",\"name\":\"aws-smithy-http-client\",\"req\":\"^1.1.5\"},{\"name\":\"aws-smithy-json\",\"req\":\"^0.61.8\"},{\"kind\":\"dev\",\"name\":\"aws-smithy-protocol-test\",\"req\":\"^0.63.7\"},{\"name\":\"aws-smithy-query\",\"req\":\"^0.60.9\"},{\"features\":[\"client\"],\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-runtime\",\"req\":\"^1.9.5\"},{\"features\":[\"client\",\"http-02x\"],\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-runtime-api\",\"req\":\"^1.9.3\"},{\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"features\":[\"test-util\"],\"kind\":\"dev\",\"name\":\"aws-smithy-types\",\"req\":\"^1.3.5\"},{\"name\":\"aws-smithy-xml\",\"req\":\"^0.60.13\"},{\"name\":\"aws-types\",\"req\":\"^1.3.11\"},{\"name\":\"fastrand\",\"req\":\"^2.0.0\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3.25\"},{\"name\":\"http\",\"req\":\"^0.2.9\"},{\"kind\":\"dev\",\"name\":\"http-1x\",\"package\":\"http\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\"},{\"name\":\"regex-lite\",\"req\":\"^0.1.5\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.0\"},{\"features\":[\"macros\",\"test-util\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":
\"^1.23.1\"},{\"name\":\"tracing\",\"req\":\"^0.1\"},{\"features\":[\"env-filter\",\"json\"],\"kind\":\"dev\",\"name\":\"tracing-subscriber\",\"req\":\"^0.3.16\"}],\"features\":{\"behavior-version-latest\":[],\"default\":[\"rustls\",\"default-https-client\",\"rt-tokio\"],\"default-https-client\":[\"aws-smithy-runtime/default-https-client\"],\"gated-tests\":[],\"rt-tokio\":[\"aws-smithy-async/rt-tokio\",\"aws-smithy-types/rt-tokio\"],\"rustls\":[\"aws-smithy-runtime/tls-rustls\"],\"test-util\":[\"aws-credential-types/test-util\",\"aws-smithy-runtime/test-util\"]}}",

113
codex-rs/Cargo.lock generated
View File

@@ -757,7 +757,6 @@ checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-sdk-signin",
"aws-sdk-sso",
"aws-sdk-ssooidc",
"aws-sdk-sts",
@@ -768,20 +767,15 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"base64-simd",
"bytes",
"fastrand",
"hex",
"http 1.4.0",
"p256",
"rand 0.8.5",
"ring",
"sha2",
"time",
"tokio",
"tracing",
"url",
"uuid",
"zeroize",
]
@@ -844,28 +838,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "aws-sdk-signin"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c084bd63941916e1348cb8d9e05ac2e49bdd40a380e9167702683184c6c6be53"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-smithy-async",
"aws-smithy-http",
"aws-smithy-json",
"aws-smithy-runtime",
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"bytes",
"fastrand",
"http 0.2.12",
"regex-lite",
"tracing",
]
[[package]]
name = "aws-sdk-sso"
version = "1.91.0"
@@ -1894,13 +1866,13 @@ dependencies = [
"codex-config",
"codex-core",
"codex-core-plugins",
"codex-device-key",
"codex-exec-server",
"codex-external-agent-migration",
"codex-external-agent-sessions",
"codex-features",
"codex-feedback",
"codex-file-search",
"codex-file-watcher",
"codex-git-utils",
"codex-hooks",
"codex-login",
@@ -1924,6 +1896,7 @@ dependencies = [
"codex-utils-cli",
"codex-utils-json-to-toml",
"codex-utils-pty",
"codex-worktree",
"core_test_support",
"flate2",
"futures",
@@ -1980,26 +1953,6 @@ dependencies = [
"url",
]
[[package]]
name = "codex-app-server-daemon"
version = "0.0.0"
dependencies = [
"anyhow",
"codex-app-server-protocol",
"codex-app-server-transport",
"codex-core",
"codex-uds",
"futures",
"libc",
"pretty_assertions",
"reqwest",
"serde",
"serde_json",
"tempfile",
"tokio",
"tokio-tungstenite",
]
[[package]]
name = "codex-app-server-protocol"
version = "0.0.0"
@@ -2179,10 +2132,10 @@ name = "codex-builtin-mcps"
version = "0.0.0"
dependencies = [
"anyhow",
"codex-config",
"codex-memories-mcp",
"codex-utils-absolute-path",
"pretty_assertions",
"tokio",
]
[[package]]
@@ -2227,10 +2180,10 @@ dependencies = [
"clap",
"clap_complete",
"codex-app-server",
"codex-app-server-daemon",
"codex-app-server-protocol",
"codex-app-server-test-client",
"codex-arg0",
"codex-builtin-mcps",
"codex-chatgpt",
"codex-cloud-tasks",
"codex-config",
@@ -2259,6 +2212,7 @@ dependencies = [
"codex-utils-cli",
"codex-utils-path",
"codex-windows-sandbox",
"codex-worktree",
"libc",
"owo-colors",
"predicates",
@@ -2463,11 +2417,7 @@ dependencies = [
"codex-app-server-protocol",
"pretty_assertions",
"serde",
"serde_json",
"sha1",
"tempfile",
"tokio",
"tracing",
"urlencoding",
]
@@ -2485,6 +2435,7 @@ dependencies = [
"bm25",
"chrono",
"clap",
"codex-agent-graph-store",
"codex-analytics",
"codex-api",
"codex-app-server-protocol",
@@ -2552,6 +2503,7 @@ dependencies = [
"insta",
"libc",
"maplit",
"notify",
"once_cell",
"openssl-sys",
"opentelemetry",
@@ -2620,7 +2572,6 @@ dependencies = [
"codex-core-skills",
"codex-exec-server",
"codex-git-utils",
"codex-hooks",
"codex-login",
"codex-model-provider",
"codex-otel",
@@ -2689,6 +2640,22 @@ dependencies = [
"serde_json",
]
[[package]]
name = "codex-device-key"
version = "0.0.0"
dependencies = [
"async-trait",
"base64 0.22.1",
"p256",
"pretty_assertions",
"rand 0.9.3",
"serde",
"serde_json",
"thiserror 2.0.18",
"tokio",
"url",
]
[[package]]
name = "codex-exec"
version = "0.0.0"
@@ -2713,6 +2680,7 @@ dependencies = [
"codex-utils-cargo-bin",
"codex-utils-cli",
"codex-utils-oss",
"codex-worktree",
"core_test_support",
"libc",
"opentelemetry",
@@ -2758,13 +2726,13 @@ dependencies = [
"serde",
"serde_json",
"serial_test",
"sha2",
"tempfile",
"test-case",
"thiserror 2.0.18",
"tokio",
"tokio-tungstenite",
"tokio-util",
"toml 0.9.11+spec-1.1.0",
"tracing",
"uuid",
"wiremock",
@@ -2894,17 +2862,6 @@ dependencies = [
"serde",
]
[[package]]
name = "codex-file-watcher"
version = "0.0.0"
dependencies = [
"notify",
"pretty_assertions",
"tempfile",
"tokio",
"tracing",
]
[[package]]
name = "codex-git-utils"
version = "0.0.0"
@@ -3366,6 +3323,7 @@ dependencies = [
"codex-utils-absolute-path",
"codex-utils-image",
"codex-utils-string",
"codex-utils-template",
"encoding_rs",
"globset",
"http 1.4.0",
@@ -3672,11 +3630,16 @@ dependencies = [
"codex-rollout",
"codex-state",
"pretty_assertions",
"prost 0.14.3",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tokio-stream",
"tonic",
"tonic-prost",
"tonic-prost-build",
"tracing",
"uuid",
]
@@ -3753,6 +3716,7 @@ dependencies = [
"codex-utils-sleep-inhibitor",
"codex-utils-string",
"codex-windows-sandbox",
"codex-worktree",
"color-eyre",
"cpal",
"crossterm",
@@ -4061,6 +4025,19 @@ dependencies = [
"winres",
]
[[package]]
name = "codex-worktree"
version = "0.0.0"
dependencies = [
"anyhow",
"codex-utils-absolute-path",
"pretty_assertions",
"serde",
"serde_json",
"sha2",
"tempfile",
]
[[package]]
name = "color-eyre"
version = "0.6.5"

View File

@@ -11,7 +11,6 @@ members = [
"async-utils",
"app-server",
"app-server-transport",
"app-server-daemon",
"app-server-client",
"app-server-protocol",
"app-server-test-client",
@@ -31,6 +30,7 @@ members = [
"collaboration-mode-templates",
"connectors",
"config",
"device-key",
"shell-command",
"shell-escalation",
"skills",
@@ -49,7 +49,6 @@ members = [
"external-agent-sessions",
"keyring-store",
"file-search",
"file-watcher",
"linux-sandbox",
"lmstudio",
"login",
@@ -75,6 +74,7 @@ members = [
"otel",
"tui",
"tools",
"worktree",
"v8-poc",
"utils/absolute-path",
"utils/cargo-bin",
@@ -133,7 +133,6 @@ codex-api = { path = "codex-api" }
codex-aws-auth = { path = "aws-auth" }
codex-app-server = { path = "app-server" }
codex-app-server-transport = { path = "app-server-transport" }
codex-app-server-daemon = { path = "app-server-daemon" }
codex-app-server-client = { path = "app-server-client" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-app-server-test-client = { path = "app-server-test-client" }
@@ -156,6 +155,7 @@ codex-core = { path = "core" }
codex-core-api = { path = "core-api" }
codex-core-plugins = { path = "core-plugins" }
codex-core-skills = { path = "core-skills" }
codex-device-key = { path = "device-key" }
codex-exec = { path = "exec" }
codex-file-system = { path = "file-system" }
codex-exec-server = { path = "exec-server" }
@@ -167,7 +167,6 @@ codex-features = { path = "features" }
codex-feedback = { path = "feedback" }
codex-install-context = { path = "install-context" }
codex-file-search = { path = "file-search" }
codex-file-watcher = { path = "file-watcher" }
codex-git-utils = { path = "git-utils" }
codex-hooks = { path = "hooks" }
codex-keyring-store = { path = "keyring-store" }
@@ -208,6 +207,7 @@ codex-thread-store = { path = "thread-store" }
codex-tools = { path = "tools" }
codex-tui = { path = "tui" }
codex-uds = { path = "uds" }
codex-worktree = { path = "worktree" }
codex-utils-absolute-path = { path = "utils/absolute-path" }
codex-utils-approval-presets = { path = "utils/approval-presets" }
codex-utils-cache = { path = "utils/cache" }
@@ -321,6 +321,7 @@ os_info = "3.12.0"
owo-colors = "4.3.0"
path-absolutize = "3.1.1"
pathdiff = "0.2"
p256 = "0.13.2"
portable-pty = "0.9.0"
predicates = "3"
pretty_assertions = "1.4.1"
@@ -468,21 +469,24 @@ unwrap_used = "deny"
[workspace.metadata.cargo-shear]
ignored = [
"codex-agent-graph-store",
"codex-memories-mcp",
"icu_provider",
"openssl-sys",
"codex-utils-readiness",
"codex-utils-template",
"codex-v8-poc",
]
[profile.dev]
# Keep line tables/backtraces while avoiding expensive full variable debug info
# across local dev builds.
debug = "limited"
debug = 1
[profile.dev-small]
inherits = "dev"
opt-level = 0
debug = "none"
strip = "symbols"
debug = 0
strip = true
[profile.release]
lto = "fat"
@@ -494,15 +498,8 @@ strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1
[profile.profiling]
inherits = "release"
debug = "full"
lto = false
strip = false
[profile.ci-test]
# Reduce binary size to reduce disk pressure.
debug = "limited"
debug = 1 # Reduce debug symbol size
inherits = "test"
opt-level = 0

View File

@@ -7,7 +7,6 @@ version.workspace = true
[lib]
name = "codex_agent_graph_store"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -1,298 +0,0 @@
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::TrackEventRequest;
use crate::facts::AcceptedLineFingerprint;
use codex_git_utils::canonicalize_git_remote_url;
use codex_git_utils::get_git_remote_urls_assume_git_repo;
use sha1::Digest;
use std::path::Path;
const ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES: usize = 2 * 1024 * 1024;
const ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES: usize = 1024;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AcceptedLineFingerprintSummary {
pub accepted_added_lines: u64,
pub accepted_deleted_lines: u64,
pub line_fingerprints: Vec<AcceptedLineFingerprint>,
}
pub(crate) struct AcceptedLineFingerprintEventInput {
pub(crate) event_type: &'static str,
pub(crate) turn_id: String,
pub(crate) thread_id: String,
pub(crate) product_surface: Option<String>,
pub(crate) model_slug: Option<String>,
pub(crate) completed_at: u64,
pub(crate) repo_hash: Option<String>,
pub(crate) accepted_added_lines: u64,
pub(crate) accepted_deleted_lines: u64,
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
}
pub fn accepted_line_fingerprints_from_unified_diff(
unified_diff: &str,
) -> AcceptedLineFingerprintSummary {
let mut current_path: Option<String> = None;
let mut in_hunk = false;
let mut accepted_added_lines = 0;
let mut accepted_deleted_lines = 0;
let mut line_fingerprints = Vec::new();
for line in unified_diff.lines() {
if line.starts_with("diff --git ") {
current_path = None;
in_hunk = false;
continue;
}
if line.starts_with("@@ ") {
in_hunk = true;
continue;
}
if !in_hunk && let Some(path) = line.strip_prefix("+++ ") {
current_path = normalize_diff_path(path);
continue;
}
if !in_hunk && line.starts_with("--- ") {
continue;
}
if let Some(added_line) = line.strip_prefix('+') {
accepted_added_lines += 1;
if let Some(path) = current_path.as_deref()
&& let Some(normalized_line) = normalize_effective_line(added_line)
{
line_fingerprints.push(AcceptedLineFingerprint {
path_hash: fingerprint_hash("path", path),
line_hash: fingerprint_hash("line", &normalized_line),
});
}
continue;
}
if line.starts_with('-') {
accepted_deleted_lines += 1;
}
}
AcceptedLineFingerprintSummary {
accepted_added_lines,
accepted_deleted_lines,
line_fingerprints,
}
}
pub fn fingerprint_hash(domain: &str, value: &str) -> String {
let mut hasher = sha1::Sha1::new();
hasher.update(b"file-line-v1\0");
hasher.update(domain.as_bytes());
hasher.update(b"\0");
hasher.update(value.as_bytes());
format!("{:x}", hasher.finalize())
}
pub(crate) fn accepted_line_fingerprint_event_requests(
input: AcceptedLineFingerprintEventInput,
) -> Vec<TrackEventRequest> {
let chunks = accepted_line_fingerprint_chunks(input.line_fingerprints);
chunks
.into_iter()
.enumerate()
.map(|(index, line_fingerprints)| {
let is_first_chunk = index == 0;
TrackEventRequest::AcceptedLineFingerprints(Box::new(
CodexAcceptedLineFingerprintsEventRequest {
event_type: "codex_accepted_line_fingerprints",
event_params: CodexAcceptedLineFingerprintsEventParams {
event_type: input.event_type,
turn_id: input.turn_id.clone(),
thread_id: input.thread_id.clone(),
product_surface: input.product_surface.clone(),
model_slug: input.model_slug.clone(),
completed_at: input.completed_at,
repo_hash: input.repo_hash.clone(),
accepted_added_lines: if is_first_chunk {
input.accepted_added_lines
} else {
0
},
accepted_deleted_lines: if is_first_chunk {
input.accepted_deleted_lines
} else {
0
},
line_fingerprints,
},
},
))
})
.collect()
}
pub async fn accepted_line_repo_hash_for_cwd(cwd: &Path) -> Option<String> {
let remotes = get_git_remote_urls_assume_git_repo(cwd).await?;
remotes
.get("origin")
.or_else(|| remotes.values().next())
.map(|remote_url| {
let canonical_remote_url =
canonicalize_git_remote_url(remote_url).unwrap_or_else(|| remote_url.to_string());
fingerprint_hash("repo", &canonical_remote_url)
})
}
fn normalize_diff_path(path: &str) -> Option<String> {
let path = path.trim();
if path == "/dev/null" {
return None;
}
Some(
path.strip_prefix("b/")
.or_else(|| path.strip_prefix("a/"))
.unwrap_or(path)
.to_string(),
)
}
fn normalize_effective_line(line: &str) -> Option<String> {
let normalized = line.split_whitespace().collect::<Vec<_>>().join(" ");
if normalized.len() <= 3 {
return None;
}
if !normalized
.chars()
.any(|ch| ch.is_alphanumeric() || ch == '_')
{
return None;
}
Some(normalized)
}
fn accepted_line_fingerprint_chunks(
line_fingerprints: Vec<AcceptedLineFingerprint>,
) -> Vec<Vec<AcceptedLineFingerprint>> {
if line_fingerprints.is_empty() {
return vec![Vec::new()];
}
let mut chunks = Vec::new();
let mut current = Vec::new();
let mut current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
for fingerprint in line_fingerprints {
let item_bytes = accepted_line_fingerprint_json_bytes(&fingerprint);
let separator_bytes = usize::from(!current.is_empty());
if !current.is_empty()
&& current_bytes + separator_bytes + item_bytes
> ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES
{
chunks.push(current);
current = Vec::new();
current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
}
current_bytes += usize::from(!current.is_empty()) + item_bytes;
current.push(fingerprint);
}
if !current.is_empty() {
chunks.push(current);
}
chunks
}
fn accepted_line_fingerprint_json_bytes(fingerprint: &AcceptedLineFingerprint) -> usize {
// {"path_hash":"...","line_hash":"..."} plus one byte of array comma
// accounted for by the caller when needed.
32 + fingerprint.path_hash.len() + fingerprint.line_hash.len()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parses_counts_and_effective_added_fingerprints() {
let diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,5 @@
-old line
+fn useful() {
+}
+ return user.id;
context
";
let summary = accepted_line_fingerprints_from_unified_diff(diff);
assert_eq!(
summary,
AcceptedLineFingerprintSummary {
accepted_added_lines: 3,
accepted_deleted_lines: 1,
line_fingerprints: vec![
AcceptedLineFingerprint {
path_hash: fingerprint_hash("path", "src/lib.rs"),
line_hash: fingerprint_hash("line", "fn useful() {"),
},
AcceptedLineFingerprint {
path_hash: fingerprint_hash("path", "src/lib.rs"),
line_hash: fingerprint_hash("line", "return user.id;"),
},
],
}
);
}
#[test]
fn skips_added_file_metadata_headers() {
let diff = "\
diff --git a/new.py b/new.py
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/new.py
@@ -0,0 +1 @@
+print('hello')
";
let summary = accepted_line_fingerprints_from_unified_diff(diff);
assert_eq!(summary.accepted_added_lines, 1);
assert_eq!(summary.accepted_deleted_lines, 0);
assert_eq!(summary.line_fingerprints.len(), 1);
}
#[test]
fn parses_hunk_lines_that_look_like_file_headers() {
let diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,2 +1,2 @@
--- old value
+++ new value
";
let summary = accepted_line_fingerprints_from_unified_diff(diff);
assert_eq!(
summary,
AcceptedLineFingerprintSummary {
accepted_added_lines: 1,
accepted_deleted_lines: 1,
line_fingerprints: vec![AcceptedLineFingerprint {
path_hash: fingerprint_hash("path", "src/lib.rs"),
line_hash: fingerprint_hash("line", "++ new value"),
}],
}
);
}
}

View File

@@ -1,7 +1,5 @@
use crate::client::AnalyticsEventsQueue;
use crate::events::AppServerRpcTransport;
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::CodexAppMentionedEventRequest;
use crate::events::CodexAppServerClientMetadata;
use crate::events::CodexAppUsedEventRequest;
@@ -14,6 +12,7 @@ use crate::events::CodexPluginUsedEventRequest;
use crate::events::CodexRuntimeMetadata;
use crate::events::CodexToolItemEventBase;
use crate::events::CodexTurnEventRequest;
use crate::events::CommandExecutionSource;
use crate::events::GuardianApprovalRequestSource;
use crate::events::GuardianReviewDecision;
use crate::events::GuardianReviewEventParams;
@@ -30,7 +29,6 @@ use crate::events::codex_hook_run_metadata;
use crate::events::codex_plugin_metadata;
use crate::events::codex_plugin_used_metadata;
use crate::events::subagent_thread_started_event_request;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AnalyticsFact;
use crate::facts::AnalyticsJsonRpcError;
use crate::facts::AppInvocation;
@@ -69,13 +67,8 @@ use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ClientResponsePayload;
use codex_app_server_protocol::CodexErrorInfo;
use codex_app_server_protocol::CommandAction;
use codex_app_server_protocol::CommandExecutionSource;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::InitializeCapabilities;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::NonSteerableTurnKind;
use codex_app_server_protocol::RequestId;
@@ -85,14 +78,12 @@ use codex_app_server_protocol::SessionSource as AppServerSessionSource;
use codex_app_server_protocol::Thread;
use codex_app_server_protocol::ThreadArchiveParams;
use codex_app_server_protocol::ThreadArchiveResponse;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadSource as AppServerThreadSource;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStatus as AppServerThreadStatus;
use codex_app_server_protocol::Turn;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnDiffUpdatedNotification;
use codex_app_server_protocol::TurnError as AppServerTurnError;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartedNotification;
@@ -607,91 +598,6 @@ async fn ingest_turn_prerequisites(
}
}
async fn ingest_tool_review_prerequisites(
reducer: &mut AnalyticsReducer,
events: &mut Vec<TrackEventRequest>,
) {
reducer
.ingest(sample_initialize_fact(/*connection_id*/ 7), events)
.await;
reducer
.ingest(
AnalyticsFact::ClientResponse {
connection_id: 7,
request_id: RequestId::Integer(1),
response: Box::new(sample_thread_start_response(
"thread-1", /*ephemeral*/ false, "gpt-5",
)),
},
events,
)
.await;
events.clear();
}
fn sample_initialize_fact(connection_id: u64) -> AnalyticsFact {
AnalyticsFact::Initialize {
connection_id,
params: InitializeParams {
client_info: ClientInfo {
name: "codex-tui".to_string(),
title: None,
version: "1.0.0".to_string(),
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
product_client_id: DEFAULT_ORIGINATOR.to_string(),
runtime: CodexRuntimeMetadata {
codex_rs_version: "0.99.0".to_string(),
runtime_os: "linux".to_string(),
runtime_os_version: "24.04".to_string(),
runtime_arch: "x86_64".to_string(),
},
rpc_transport: AppServerRpcTransport::Websocket,
}
}
fn sample_command_execution_item(
status: CommandExecutionStatus,
exit_code: Option<i32>,
duration_ms: Option<i64>,
) -> ThreadItem {
ThreadItem::CommandExecution {
id: "item-1".to_string(),
command: "echo hi".to_string(),
cwd: test_path_buf("/tmp").abs(),
process_id: Some("pid-1".to_string()),
source: CommandExecutionSource::Agent,
status,
command_actions: Vec::new(),
aggregated_output: None,
exit_code,
duration_ms,
}
}
fn sample_command_execution_item_with_actions(
status: CommandExecutionStatus,
exit_code: Option<i32>,
duration_ms: Option<i64>,
command_actions: Vec<CommandAction>,
) -> ThreadItem {
let mut item = sample_command_execution_item(status, exit_code, duration_ms);
let ThreadItem::CommandExecution {
command_actions: item_command_actions,
..
} = &mut item
else {
unreachable!("sample command execution item should be CommandExecution");
};
*item_command_actions = command_actions;
item
}
fn expected_absolute_path(path: &PathBuf) -> String {
std::fs::canonicalize(path)
.unwrap_or_else(|_| path.to_path_buf())
@@ -832,206 +738,6 @@ fn app_used_event_serializes_expected_shape() {
);
}
#[test]
fn accepted_line_fingerprints_event_serializes_expected_shape() {
let event = TrackEventRequest::AcceptedLineFingerprints(Box::new(
CodexAcceptedLineFingerprintsEventRequest {
event_type: "codex_accepted_line_fingerprints",
event_params: CodexAcceptedLineFingerprintsEventParams {
event_type: "codex.accepted_line_fingerprints",
turn_id: "turn-1".to_string(),
thread_id: "thread-1".to_string(),
product_surface: Some("codex".to_string()),
model_slug: Some("gpt-5.1-codex".to_string()),
completed_at: 1710000000,
repo_hash: Some("repo-hash-1".to_string()),
accepted_added_lines: 42,
accepted_deleted_lines: 40,
line_fingerprints: vec![AcceptedLineFingerprint {
path_hash: "path-hash-1".to_string(),
line_hash: "line-hash-1".to_string(),
}],
},
},
));
let payload = serde_json::to_value(&event).expect("serialize accepted line fingerprints event");
assert_eq!(
payload,
json!({
"event_type": "codex_accepted_line_fingerprints",
"event_params": {
"event_type": "codex.accepted_line_fingerprints",
"turn_id": "turn-1",
"thread_id": "thread-1",
"product_surface": "codex",
"model_slug": "gpt-5.1-codex",
"completed_at": 1710000000,
"repo_hash": "repo-hash-1",
"accepted_added_lines": 42,
"accepted_deleted_lines": 40,
"line_fingerprints": [
{
"path_hash": "path-hash-1",
"line_hash": "line-hash-1"
}
]
}
})
);
}
#[tokio::test]
async fn reducer_chunks_large_accepted_line_fingerprint_events_without_repeating_counts() {
let mut reducer = AnalyticsReducer::default();
let mut events = Vec::new();
ingest_turn_prerequisites(
&mut reducer,
&mut events,
/*include_initialize*/ true,
/*include_resolved_config*/ true,
/*include_started*/ true,
/*include_token_usage*/ true,
)
.await;
events.clear();
let mut diff = "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -0,0 +1,20000 @@
"
.to_string();
for index in 0..20_000 {
diff.push_str(&format!("+let value_{index} = {index};\n"));
}
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
TurnDiffUpdatedNotification {
thread_id: "thread-2".to_string(),
turn_id: "turn-2".to_string(),
diff,
},
))),
&mut events,
)
.await;
assert!(events.is_empty());
reducer
.ingest(
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
"thread-2",
"turn-2",
AppServerTurnStatus::Completed,
/*codex_error_info*/ None,
))),
&mut events,
)
.await;
let accepted_line_events = events
.iter()
.filter_map(|event| match event {
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
_ => None,
})
.collect::<Vec<_>>();
assert!(accepted_line_events.len() > 1);
let mut total_fingerprints = 0;
for (index, event) in accepted_line_events.iter().enumerate() {
assert_eq!(event.event_params.turn_id, "turn-2");
assert_eq!(event.event_params.thread_id, "thread-2");
total_fingerprints += event.event_params.line_fingerprints.len();
if index == 0 {
assert_eq!(event.event_params.accepted_added_lines, 20_000);
assert_eq!(event.event_params.accepted_deleted_lines, 0);
} else {
assert_eq!(event.event_params.accepted_added_lines, 0);
assert_eq!(event.event_params.accepted_deleted_lines, 0);
}
assert!(serde_json::to_vec(event).expect("serialize chunk").len() < 2_100_000);
}
assert_eq!(total_fingerprints, 20_000);
}
#[tokio::test]
async fn reducer_emits_accepted_line_fingerprints_once_from_latest_turn_diff_on_completion() {
let mut reducer = AnalyticsReducer::default();
let mut events = Vec::new();
ingest_turn_prerequisites(
&mut reducer,
&mut events,
/*include_initialize*/ true,
/*include_resolved_config*/ true,
/*include_started*/ true,
/*include_token_usage*/ true,
)
.await;
events.clear();
for line in ["let old_value = 1;", "let latest_value = 2;"] {
let diff = format!(
"\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -0,0 +1 @@
+{line}
"
);
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
TurnDiffUpdatedNotification {
thread_id: "thread-2".to_string(),
turn_id: "turn-2".to_string(),
diff,
},
))),
&mut events,
)
.await;
}
assert!(events.is_empty());
reducer
.ingest(
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
"thread-2",
"turn-2",
AppServerTurnStatus::Completed,
/*codex_error_info*/ None,
))),
&mut events,
)
.await;
let accepted_line_events = events
.iter()
.filter_map(|event| match event {
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
_ => None,
})
.collect::<Vec<_>>();
assert_eq!(accepted_line_events.len(), 1);
let event = accepted_line_events[0];
assert_eq!(event.event_params.accepted_added_lines, 1);
assert_eq!(event.event_params.line_fingerprints.len(), 1);
assert_eq!(
event.event_params.line_fingerprints[0].line_hash,
crate::fingerprint_hash("line", "let latest_value = 2;")
);
}
#[test]
fn compaction_event_serializes_expected_shape() {
let event = TrackEventRequest::Compaction(Box::new(CodexCompactionEventRequest {
@@ -1217,14 +923,13 @@ fn command_execution_event_serializes_expected_shape() {
runtime_os_version: "15.3.1".to_string(),
runtime_arch: "aarch64".to_string(),
},
thread_source: Some(ThreadSource::User),
thread_source: Some("user"),
subagent_source: None,
parent_thread_id: None,
tool_name: "shell".to_string(),
started_at_ms: 123_000,
completed_at_ms: 125_000,
duration_ms: Some(2000),
execution_duration_ms: Some(1900),
review_count: 0,
guardian_review_count: 0,
user_review_count: 0,
@@ -1273,7 +978,6 @@ fn command_execution_event_serializes_expected_shape() {
"started_at_ms": 123000,
"completed_at_ms": 125000,
"duration_ms": 2000,
"execution_duration_ms": 1900,
"review_count": 0,
"guardian_review_count": 0,
"user_review_count": 0,
@@ -1327,7 +1031,6 @@ async fn initialize_caches_client_and_thread_lifecycle_publishes_once_initialize
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -1475,7 +1178,6 @@ async fn compaction_event_ingests_custom_fact() {
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -1589,7 +1291,6 @@ async fn guardian_review_event_ingests_custom_fact_with_optional_target_item() {
},
capabilities: Some(InitializeCapabilities {
experimental_api: false,
request_attestation: false,
opt_out_notification_methods: None,
}),
},
@@ -1703,114 +1404,6 @@ async fn guardian_review_event_ingests_custom_fact_with_optional_target_item() {
assert_eq!(payload[0]["event_params"]["review_timeout_ms"], 90_000);
}
#[tokio::test]
async fn item_lifecycle_notifications_publish_command_execution_event() {
let mut reducer = AnalyticsReducer::default();
let mut events = Vec::new();
ingest_tool_review_prerequisites(&mut reducer, &mut events).await;
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::ItemStarted(
ItemStartedNotification {
thread_id: "thread-1".to_string(),
turn_id: "turn-1".to_string(),
started_at_ms: 1_000,
item: sample_command_execution_item(
CommandExecutionStatus::InProgress,
/*exit_code*/ None,
/*duration_ms*/ None,
),
},
))),
&mut events,
)
.await;
assert!(
events.is_empty(),
"tool item event should emit on completion"
);
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::ItemCompleted(
ItemCompletedNotification {
thread_id: "thread-1".to_string(),
turn_id: "turn-1".to_string(),
completed_at_ms: 1_045,
item: sample_command_execution_item_with_actions(
CommandExecutionStatus::Completed,
Some(0),
Some(42),
vec![
CommandAction::Read {
command: "cat README.md".to_string(),
name: "README.md".to_string(),
path: test_path_buf("/tmp/README.md").abs(),
},
CommandAction::ListFiles {
command: "ls".to_string(),
path: None,
},
CommandAction::Search {
command: "rg TODO".to_string(),
query: Some("TODO".to_string()),
path: None,
},
CommandAction::Unknown {
command: "cargo test".to_string(),
},
],
),
},
))),
&mut events,
)
.await;
let payload = serde_json::to_value(&events).expect("serialize events");
assert_eq!(payload.as_array().expect("events array").len(), 1);
assert_eq!(payload[0]["event_type"], "codex_command_execution_event");
assert_eq!(payload[0]["event_params"]["thread_id"], "thread-1");
assert_eq!(payload[0]["event_params"]["turn_id"], "turn-1");
assert_eq!(payload[0]["event_params"]["item_id"], "item-1");
assert_eq!(payload[0]["event_params"]["tool_name"], "shell");
assert_eq!(
payload[0]["event_params"]["command_execution_source"],
"agent"
);
assert_eq!(payload[0]["event_params"]["terminal_status"], "completed");
assert_eq!(
payload[0]["event_params"]["final_approval_outcome"],
"unknown"
);
assert_eq!(
payload[0]["event_params"]["failure_kind"],
serde_json::Value::Null
);
assert_eq!(payload[0]["event_params"]["exit_code"], 0);
assert_eq!(payload[0]["event_params"]["command_total_action_count"], 4);
assert_eq!(payload[0]["event_params"]["command_read_action_count"], 1);
assert_eq!(
payload[0]["event_params"]["command_list_files_action_count"],
1
);
assert_eq!(payload[0]["event_params"]["command_search_action_count"], 1);
assert_eq!(
payload[0]["event_params"]["command_unknown_action_count"],
1
);
assert_eq!(payload[0]["event_params"]["started_at_ms"], 1_000);
assert_eq!(payload[0]["event_params"]["completed_at_ms"], 1_045);
assert_eq!(payload[0]["event_params"]["duration_ms"], 45);
assert_eq!(payload[0]["event_params"]["execution_duration_ms"], 42);
assert_eq!(
payload[0]["event_params"]["app_server_client"]["client_name"],
"codex-tui"
);
assert_eq!(payload[0]["event_params"]["thread_source"], "user");
}
#[test]
fn subagent_thread_started_review_serializes_expected_shape() {
let event = TrackEventRequest::ThreadInitialized(subagent_thread_started_event_request(
@@ -2094,79 +1687,6 @@ async fn subagent_thread_started_inherits_parent_connection_for_new_thread() {
);
}
#[tokio::test]
async fn subagent_tool_items_inherit_parent_connection_metadata() {
let mut reducer = AnalyticsReducer::default();
let mut events = Vec::new();
ingest_tool_review_prerequisites(&mut reducer, &mut events).await;
reducer
.ingest(
AnalyticsFact::Custom(CustomAnalyticsFact::SubAgentThreadStarted(
SubAgentThreadStartedInput {
thread_id: "thread-subagent".to_string(),
parent_thread_id: Some("thread-1".to_string()),
product_client_id: "codex-tui".to_string(),
client_name: "codex-tui".to_string(),
client_version: "1.0.0".to_string(),
model: "gpt-5".to_string(),
ephemeral: false,
subagent_source: SubAgentSource::Review,
created_at: 128,
},
)),
&mut events,
)
.await;
events.clear();
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::ItemStarted(
ItemStartedNotification {
thread_id: "thread-subagent".to_string(),
turn_id: "turn-subagent".to_string(),
started_at_ms: 1_000,
item: sample_command_execution_item(
CommandExecutionStatus::InProgress,
/*exit_code*/ None,
/*duration_ms*/ None,
),
},
))),
&mut events,
)
.await;
reducer
.ingest(
AnalyticsFact::Notification(Box::new(ServerNotification::ItemCompleted(
ItemCompletedNotification {
thread_id: "thread-subagent".to_string(),
turn_id: "turn-subagent".to_string(),
completed_at_ms: 1_042,
item: sample_command_execution_item(
CommandExecutionStatus::Completed,
Some(0),
Some(42),
),
},
))),
&mut events,
)
.await;
let payload = serde_json::to_value(&events).expect("serialize events");
assert_eq!(payload.as_array().expect("events array").len(), 1);
assert_eq!(payload[0]["event_type"], "codex_command_execution_event");
assert_eq!(payload[0]["event_params"]["thread_source"], "subagent");
assert_eq!(payload[0]["event_params"]["subagent_source"], "review");
assert_eq!(payload[0]["event_params"]["parent_thread_id"], "thread-1");
assert_eq!(
payload[0]["event_params"]["app_server_client"]["client_name"],
"codex-tui"
);
}
#[test]
fn plugin_used_event_serializes_expected_shape() {
let tracking = TrackEventsContext {

View File

@@ -30,7 +30,6 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ServerResponse;
use codex_login::AuthManager;
use codex_login::CodexAuth;
use codex_login::default_client::create_client;
use codex_plugin::PluginTelemetryMetadata;
use std::collections::HashSet;
@@ -334,6 +333,10 @@ impl AnalyticsEventsClient {
});
}
pub fn track_notification(&self, notification: ServerNotification) {
self.record_fact(AnalyticsFact::Notification(Box::new(notification)));
}
pub fn track_server_request(&self, connection_id: u64, request: ServerRequest) {
self.record_fact(AnalyticsFact::ServerRequest {
connection_id,
@@ -341,28 +344,11 @@ impl AnalyticsEventsClient {
});
}
pub fn track_server_response(&self, completed_at_ms: u64, response: ServerResponse) {
pub fn track_server_response(&self, response: ServerResponse) {
self.record_fact(AnalyticsFact::ServerResponse {
completed_at_ms,
response: Box::new(response),
});
}
pub fn track_notification(&self, notification: ServerNotification) {
if !matches!(
notification,
ServerNotification::TurnStarted(_)
| ServerNotification::TurnCompleted(_)
| ServerNotification::TurnDiffUpdated(_)
| ServerNotification::ItemStarted(_)
| ServerNotification::ItemCompleted(_)
| ServerNotification::ItemGuardianApprovalReviewStarted(_)
| ServerNotification::ItemGuardianApprovalReviewCompleted(_)
) {
return;
}
self.record_fact(AnalyticsFact::Notification(Box::new(notification)));
}
}
async fn send_track_events(
@@ -373,7 +359,6 @@ async fn send_track_events(
if events.is_empty() {
return;
}
let Some(auth) = auth_manager.auth().await else {
return;
};
@@ -383,45 +368,12 @@ async fn send_track_events(
let base_url = base_url.trim_end_matches('/');
let url = format!("{base_url}/codex/analytics-events/events");
for events in track_event_request_batches(events) {
send_track_events_request(&auth, &url, events).await;
}
}
fn track_event_request_batches(events: Vec<TrackEventRequest>) -> Vec<Vec<TrackEventRequest>> {
let mut batches = Vec::new();
let mut current_batch = Vec::new();
for event in events {
if event.should_send_in_isolated_request() {
if !current_batch.is_empty() {
batches.push(current_batch);
current_batch = Vec::new();
}
batches.push(vec![event]);
} else {
current_batch.push(event);
}
}
if !current_batch.is_empty() {
batches.push(current_batch);
}
batches
}
async fn send_track_events_request(auth: &CodexAuth, url: &str, events: Vec<TrackEventRequest>) {
if events.is_empty() {
return;
}
let payload = TrackEventsRequest { events };
let response = create_client()
.post(url)
.post(&url)
.timeout(ANALYTICS_EVENTS_TIMEOUT)
.headers(codex_model_provider::auth_provider_from_auth(auth).to_auth_headers())
.headers(codex_model_provider::auth_provider_from_auth(&auth).to_auth_headers())
.header("Content-Type", "application/json")
.json(&payload)
.send()

View File

@@ -1,14 +1,6 @@
use super::AnalyticsEventsClient;
use super::AnalyticsEventsQueue;
use super::track_event_request_batches;
use crate::events::CodexAcceptedLineFingerprintsEventParams;
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
use crate::events::SkillInvocationEventParams;
use crate::events::SkillInvocationEventRequest;
use crate::events::TrackEventRequest;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AnalyticsFact;
use crate::facts::InvocationType;
use codex_app_server_protocol::ApprovalsReviewer as AppServerApprovalsReviewer;
use codex_app_server_protocol::AskForApproval as AppServerAskForApproval;
use codex_app_server_protocol::ClientRequest;
@@ -39,47 +31,6 @@ use std::sync::Mutex;
use tokio::sync::mpsc;
use tokio::sync::mpsc::error::TryRecvError;
fn sample_accepted_line_fingerprint_event(thread_id: &str) -> TrackEventRequest {
TrackEventRequest::AcceptedLineFingerprints(Box::new(
CodexAcceptedLineFingerprintsEventRequest {
event_type: "codex_accepted_line_fingerprints",
event_params: CodexAcceptedLineFingerprintsEventParams {
event_type: "codex.accepted_line_fingerprints",
turn_id: "turn-1".to_string(),
thread_id: thread_id.to_string(),
product_surface: Some("codex".to_string()),
model_slug: Some("gpt-5.1-codex".to_string()),
completed_at: 1,
repo_hash: None,
accepted_added_lines: 1,
accepted_deleted_lines: 0,
line_fingerprints: vec![AcceptedLineFingerprint {
path_hash: "path-hash".to_string(),
line_hash: "line-hash".to_string(),
}],
},
},
))
}
fn sample_regular_track_event(thread_id: &str) -> TrackEventRequest {
TrackEventRequest::SkillInvocation(SkillInvocationEventRequest {
event_type: "skill_invocation",
skill_id: format!("skill-{thread_id}"),
skill_name: "doc".to_string(),
event_params: SkillInvocationEventParams {
product_client_id: None,
skill_scope: None,
plugin_id: None,
repo_url: None,
thread_id: Some(thread_id.to_string()),
turn_id: Some("turn-1".to_string()),
invoke_type: Some(InvocationType::Explicit),
model_slug: Some("gpt-5.1-codex".to_string()),
},
})
}
fn client_with_receiver() -> (AnalyticsEventsClient, mpsc::Receiver<AnalyticsFact>) {
let (sender, receiver) = mpsc::channel(8);
let queue = AnalyticsEventsQueue {
@@ -271,23 +222,3 @@ fn track_response_only_enqueues_analytics_relevant_responses() {
);
assert!(matches!(receiver.try_recv(), Err(TryRecvError::Empty)));
}
#[test]
fn track_event_request_batches_only_isolates_accepted_line_fingerprint_events() {
let batches = track_event_request_batches(vec![
sample_regular_track_event("thread-1"),
sample_regular_track_event("thread-2"),
sample_accepted_line_fingerprint_event("thread-3"),
sample_accepted_line_fingerprint_event("thread-4"),
sample_regular_track_event("thread-5"),
sample_regular_track_event("thread-6"),
]);
assert_eq!(batches.len(), 4);
assert_eq!(batches[0].len(), 2);
assert_eq!(batches[1].len(), 1);
assert_eq!(batches[2].len(), 1);
assert_eq!(batches[3].len(), 2);
assert!(batches[1][0].should_send_in_isolated_request());
assert!(batches[2][0].should_send_in_isolated_request());
}

View File

@@ -1,6 +1,5 @@
use std::time::Instant;
use crate::facts::AcceptedLineFingerprint;
use crate::facts::AppInvocation;
use crate::facts::CodexCompactionEvent;
use crate::facts::CompactionImplementation;
@@ -19,10 +18,8 @@ use crate::facts::TurnStatus;
use crate::facts::TurnSteerRejectionReason;
use crate::facts::TurnSteerResult;
use crate::facts::TurnSubmissionType;
use crate::now_unix_millis;
use crate::now_unix_seconds;
use codex_app_server_protocol::CodexErrorInfo;
use codex_app_server_protocol::CommandExecutionSource;
use codex_login::default_client::originator;
use codex_plugin::PluginTelemetryMetadata;
use codex_protocol::approvals::NetworkApprovalProtocol;
@@ -65,16 +62,20 @@ pub(crate) enum TrackEventRequest {
Compaction(Box<CodexCompactionEventRequest>),
TurnEvent(Box<CodexTurnEventRequest>),
TurnSteer(CodexTurnSteerEventRequest),
CommandExecution(CodexCommandExecutionEventRequest),
FileChange(CodexFileChangeEventRequest),
McpToolCall(CodexMcpToolCallEventRequest),
DynamicToolCall(CodexDynamicToolCallEventRequest),
CollabAgentToolCall(CodexCollabAgentToolCallEventRequest),
WebSearch(CodexWebSearchEventRequest),
ImageGeneration(CodexImageGenerationEventRequest),
AcceptedLineFingerprints(Box<CodexAcceptedLineFingerprintsEventRequest>),
#[allow(dead_code)]
ReviewEvent(CodexReviewEventRequest),
CommandExecution(CodexCommandExecutionEventRequest),
#[allow(dead_code)]
FileChange(CodexFileChangeEventRequest),
#[allow(dead_code)]
McpToolCall(CodexMcpToolCallEventRequest),
#[allow(dead_code)]
DynamicToolCall(CodexDynamicToolCallEventRequest),
#[allow(dead_code)]
CollabAgentToolCall(CodexCollabAgentToolCallEventRequest),
#[allow(dead_code)]
WebSearch(CodexWebSearchEventRequest),
#[allow(dead_code)]
ImageGeneration(CodexImageGenerationEventRequest),
PluginUsed(CodexPluginUsedEventRequest),
PluginInstalled(CodexPluginEventRequest),
PluginUninstalled(CodexPluginEventRequest),
@@ -82,32 +83,6 @@ pub(crate) enum TrackEventRequest {
PluginDisabled(CodexPluginEventRequest),
}
impl TrackEventRequest {
pub(crate) fn should_send_in_isolated_request(&self) -> bool {
matches!(self, Self::AcceptedLineFingerprints(_))
}
}
#[derive(Serialize)]
pub(crate) struct CodexAcceptedLineFingerprintsEventParams {
pub(crate) event_type: &'static str,
pub(crate) turn_id: String,
pub(crate) thread_id: String,
pub(crate) product_surface: Option<String>,
pub(crate) model_slug: Option<String>,
pub(crate) completed_at: u64,
pub(crate) repo_hash: Option<String>,
pub(crate) accepted_added_lines: u64,
pub(crate) accepted_deleted_lines: u64,
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
}
#[derive(Serialize)]
pub(crate) struct CodexAcceptedLineFingerprintsEventRequest {
pub(crate) event_type: &'static str,
pub(crate) event_params: CodexAcceptedLineFingerprintsEventParams,
}
#[derive(Serialize)]
pub(crate) struct SkillInvocationEventRequest {
pub(crate) event_type: &'static str,
@@ -290,7 +265,7 @@ pub struct GuardianReviewTrackContext {
approval_request_source: GuardianApprovalRequestSource,
reviewed_action: GuardianReviewedAction,
review_timeout_ms: u64,
pub started_at_ms: u64,
started_at: u64,
started_instant: Instant,
}
@@ -312,7 +287,7 @@ impl GuardianReviewTrackContext {
approval_request_source,
reviewed_action,
review_timeout_ms,
started_at_ms: now_unix_millis(),
started_at: now_unix_seconds(),
started_instant: Instant::now(),
}
}
@@ -345,7 +320,7 @@ impl GuardianReviewTrackContext {
tool_call_count: None,
time_to_first_token_ms: result.time_to_first_token_ms,
completion_latency_ms: Some(self.started_instant.elapsed().as_millis() as u64),
started_at: self.started_at_ms / 1_000,
started_at: self.started_at,
completed_at: Some(now_unix_seconds()),
input_tokens: result.token_usage.as_ref().map(|usage| usage.input_tokens),
cached_input_tokens: result
@@ -473,16 +448,13 @@ pub(crate) struct CodexToolItemEventBase {
pub(crate) item_id: String,
pub(crate) app_server_client: CodexAppServerClientMetadata,
pub(crate) runtime: CodexRuntimeMetadata,
pub(crate) thread_source: Option<ThreadSource>,
pub(crate) thread_source: Option<&'static str>,
pub(crate) subagent_source: Option<String>,
pub(crate) parent_thread_id: Option<String>,
pub(crate) tool_name: String,
pub(crate) started_at_ms: u64,
pub(crate) completed_at_ms: u64,
// Observed item lifecycle duration. This may undercount end-to-end execution
// for tools where app-server only sees part of the upstream flow.
pub(crate) duration_ms: Option<u64>,
pub(crate) execution_duration_ms: Option<u64>,
pub(crate) review_count: u64,
pub(crate) guardian_review_count: u64,
pub(crate) user_review_count: u64,
@@ -496,79 +468,13 @@ pub(crate) struct CodexToolItemEventBase {
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ReviewSubjectKind {
CommandExecution,
FileChange,
McpToolCall,
Permissions,
NetworkAccess,
pub(crate) enum CommandExecutionSource {
Agent,
UserShell,
UnifiedExecStartup,
UnifiedExecInteraction,
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum Reviewer {
Guardian,
User,
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ReviewTrigger {
Initial,
SandboxDenial,
NetworkPolicyDenial,
ExecveIntercept,
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ReviewStatus {
Approved,
Denied,
Aborted,
TimedOut,
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ReviewResolution {
None,
SessionApproval,
ExecPolicyAmendment,
NetworkPolicyAmendment,
}
#[derive(Serialize)]
pub(crate) struct CodexReviewEventParams {
pub(crate) thread_id: String,
pub(crate) turn_id: String,
pub(crate) item_id: Option<String>,
pub(crate) review_id: String,
pub(crate) app_server_client: CodexAppServerClientMetadata,
pub(crate) runtime: CodexRuntimeMetadata,
pub(crate) thread_source: Option<ThreadSource>,
pub(crate) subagent_source: Option<String>,
pub(crate) parent_thread_id: Option<String>,
pub(crate) tool_kind: ReviewSubjectKind,
pub(crate) tool_name: String,
pub(crate) reviewer: Reviewer,
pub(crate) trigger: ReviewTrigger,
pub(crate) status: ReviewStatus,
pub(crate) resolution: ReviewResolution,
pub(crate) started_at_ms: u64,
pub(crate) completed_at_ms: u64,
pub(crate) duration_ms: Option<u64>,
}
#[derive(Serialize)]
pub(crate) struct CodexReviewEventRequest {
pub(crate) event_type: &'static str,
pub(crate) event_params: CodexReviewEventParams,
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
@@ -686,6 +592,7 @@ pub(crate) struct CodexWebSearchEventRequest {
pub(crate) struct CodexImageGenerationEventParams {
#[serde(flatten)]
pub(crate) base: CodexToolItemEventBase,
pub(crate) image_generation_status: String,
pub(crate) revised_prompt_present: bool,
pub(crate) saved_path_present: bool,
}
@@ -988,8 +895,6 @@ fn analytics_hook_event_name(event_name: HookEventName) -> &'static str {
HookEventName::PreToolUse => "PreToolUse",
HookEventName::PermissionRequest => "PermissionRequest",
HookEventName::PostToolUse => "PostToolUse",
HookEventName::PreCompact => "PreCompact",
HookEventName::PostCompact => "PostCompact",
HookEventName::SessionStart => "SessionStart",
HookEventName::UserPromptSubmit => "UserPromptSubmit",
HookEventName::Stop => "Stop",

View File

@@ -28,12 +28,6 @@ use codex_protocol::protocol::TokenUsage;
use serde::Serialize;
use std::path::PathBuf;
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct AcceptedLineFingerprint {
pub path_hash: String,
pub line_hash: String,
}
#[derive(Clone)]
pub struct TrackEventsContext {
pub model_slug: String,
@@ -302,7 +296,6 @@ pub(crate) enum AnalyticsFact {
request: Box<ServerRequest>,
},
ServerResponse {
completed_at_ms: u64,
response: Box<ServerResponse>,
},
Notification(Box<ServerNotification>),

View File

@@ -1,4 +1,3 @@
mod accepted_lines;
mod client;
mod events;
mod facts;
@@ -7,8 +6,6 @@ mod reducer;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
pub use accepted_lines::accepted_line_fingerprints_from_unified_diff;
pub use accepted_lines::fingerprint_hash;
pub use client::AnalyticsEventsClient;
pub use events::AppServerRpcTransport;
pub use events::GuardianApprovalRequestSource;
@@ -20,7 +17,6 @@ pub use events::GuardianReviewSessionKind;
pub use events::GuardianReviewTerminalStatus;
pub use events::GuardianReviewTrackContext;
pub use events::GuardianReviewedAction;
pub use facts::AcceptedLineFingerprint;
pub use facts::AnalyticsJsonRpcError;
pub use facts::AppInvocation;
pub use facts::CodexCompactionEvent;
@@ -55,27 +51,3 @@ pub fn now_unix_seconds() -> u64 {
.unwrap_or_default()
.as_secs()
}
pub fn now_unix_millis() -> u64 {
u64::try_from(
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_millis(),
)
.unwrap_or(u64::MAX)
}
pub(crate) fn serialize_enum_as_string<T: serde::Serialize>(value: &T) -> Option<String> {
serde_json::to_value(value)
.ok()
.and_then(|value| value.as_str().map(str::to_string))
}
pub(crate) fn usize_to_u64(value: usize) -> u64 {
u64::try_from(value).unwrap_or(u64::MAX)
}
pub(crate) fn option_i64_to_u64(value: Option<i64>) -> Option<u64> {
value.and_then(|value| u64::try_from(value).ok())
}

View File

@@ -1,35 +1,16 @@
use crate::accepted_lines::AcceptedLineFingerprintEventInput;
use crate::accepted_lines::accepted_line_fingerprint_event_requests;
use crate::accepted_lines::accepted_line_fingerprints_from_unified_diff;
use crate::accepted_lines::accepted_line_repo_hash_for_cwd;
use crate::events::AppServerRpcTransport;
use crate::events::CodexAppMentionedEventRequest;
use crate::events::CodexAppServerClientMetadata;
use crate::events::CodexAppUsedEventRequest;
use crate::events::CodexCollabAgentToolCallEventParams;
use crate::events::CodexCollabAgentToolCallEventRequest;
use crate::events::CodexCommandExecutionEventParams;
use crate::events::CodexCommandExecutionEventRequest;
use crate::events::CodexCompactionEventRequest;
use crate::events::CodexDynamicToolCallEventParams;
use crate::events::CodexDynamicToolCallEventRequest;
use crate::events::CodexFileChangeEventParams;
use crate::events::CodexFileChangeEventRequest;
use crate::events::CodexHookRunEventRequest;
use crate::events::CodexImageGenerationEventParams;
use crate::events::CodexImageGenerationEventRequest;
use crate::events::CodexMcpToolCallEventParams;
use crate::events::CodexMcpToolCallEventRequest;
use crate::events::CodexPluginEventRequest;
use crate::events::CodexPluginUsedEventRequest;
use crate::events::CodexRuntimeMetadata;
use crate::events::CodexToolItemEventBase;
use crate::events::CodexTurnEventParams;
use crate::events::CodexTurnEventRequest;
use crate::events::CodexTurnSteerEventParams;
use crate::events::CodexTurnSteerEventRequest;
use crate::events::CodexWebSearchEventParams;
use crate::events::CodexWebSearchEventRequest;
use crate::events::GuardianReviewEventParams;
use crate::events::GuardianReviewEventPayload;
use crate::events::GuardianReviewEventRequest;
@@ -37,11 +18,7 @@ use crate::events::SkillInvocationEventParams;
use crate::events::SkillInvocationEventRequest;
use crate::events::ThreadInitializedEvent;
use crate::events::ThreadInitializedEventParams;
use crate::events::ToolItemFailureKind;
use crate::events::ToolItemFinalApprovalOutcome;
use crate::events::ToolItemTerminalStatus;
use crate::events::TrackEventRequest;
use crate::events::WebSearchActionKind;
use crate::events::codex_app_metadata;
use crate::events::codex_compaction_event_params;
use crate::events::codex_hook_run_metadata;
@@ -70,30 +47,14 @@ use crate::facts::TurnSteerRejectionReason;
use crate::facts::TurnSteerResult;
use crate::facts::TurnTokenUsageFact;
use crate::now_unix_seconds;
use crate::option_i64_to_u64;
use crate::serialize_enum_as_string;
use crate::usize_to_u64;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ClientResponse;
use codex_app_server_protocol::CodexErrorInfo;
use codex_app_server_protocol::CollabAgentStatus;
use codex_app_server_protocol::CollabAgentTool;
use codex_app_server_protocol::CollabAgentToolCallStatus;
use codex_app_server_protocol::CommandAction;
use codex_app_server_protocol::CommandExecutionSource;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::DynamicToolCallOutputContentItem;
use codex_app_server_protocol::DynamicToolCallStatus;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::McpToolCallStatus;
use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::TurnSteerResponse;
use codex_app_server_protocol::UserInput;
use codex_app_server_protocol::WebSearchAction;
use codex_git_utils::collect_git_info;
use codex_git_utils::get_git_repo_root;
use codex_login::default_client::originator;
@@ -108,7 +69,6 @@ use codex_protocol::protocol::TokenUsage;
use sha1::Digest;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
#[derive(Default)]
pub(crate) struct AnalyticsReducer {
@@ -116,7 +76,6 @@ pub(crate) struct AnalyticsReducer {
turns: HashMap<String, TurnState>,
connections: HashMap<u64, ConnectionState>,
threads: HashMap<String, ThreadAnalyticsState>,
tool_items_started_at_ms: HashMap<ToolItemKey, u64>,
}
struct ConnectionState {
@@ -160,19 +119,6 @@ impl<'a> AnalyticsDropSite<'a> {
}
}
fn tool_item(
notification: &'a codex_app_server_protocol::ItemCompletedNotification,
item_id: &'a str,
) -> Self {
Self {
event_name: "tool item",
thread_id: &notification.thread_id,
turn_id: Some(&notification.turn_id),
review_id: None,
item_id: Some(item_id),
}
}
fn turn_steer(thread_id: &'a str) -> Self {
Self {
event_name: "turn steer",
@@ -269,17 +215,9 @@ struct TurnState {
started_at: Option<u64>,
token_usage: Option<TokenUsage>,
completed: Option<CompletedTurnState>,
latest_diff: Option<String>,
steer_count: usize,
}
#[derive(Hash, Eq, PartialEq)]
struct ToolItemKey {
thread_id: String,
turn_id: String,
item_id: String,
}
impl AnalyticsReducer {
pub(crate) async fn ingest(&mut self, input: AnalyticsFact, out: &mut Vec<TrackEventRequest>) {
match input {
@@ -311,7 +249,7 @@ impl AnalyticsReducer {
response,
} => {
if let Some(response) = response.into_client_response(request_id) {
self.ingest_response(connection_id, response, out).await;
self.ingest_response(connection_id, response, out);
}
}
AnalyticsFact::ErrorResponse {
@@ -323,7 +261,7 @@ impl AnalyticsReducer {
self.ingest_error_response(connection_id, request_id, error_type, out);
}
AnalyticsFact::Notification(notification) => {
self.ingest_notification(*notification, out).await;
self.ingest_notification(*notification, out);
}
AnalyticsFact::ServerRequest {
connection_id: _connection_id,
@@ -331,7 +269,6 @@ impl AnalyticsReducer {
} => {}
AnalyticsFact::ServerResponse {
response: _response,
..
} => {}
AnalyticsFact::Custom(input) => match input {
CustomAnalyticsFact::SubAgentThreadStarted(input) => {
@@ -344,10 +281,10 @@ impl AnalyticsReducer {
self.ingest_guardian_review(*input, out);
}
CustomAnalyticsFact::TurnResolvedConfig(input) => {
self.ingest_turn_resolved_config(*input, out).await;
self.ingest_turn_resolved_config(*input, out);
}
CustomAnalyticsFact::TurnTokenUsage(input) => {
self.ingest_turn_token_usage(*input, out).await;
self.ingest_turn_token_usage(*input, out);
}
CustomAnalyticsFact::SkillInvoked(input) => {
self.ingest_skill_invoked(input, out).await;
@@ -479,7 +416,7 @@ impl AnalyticsReducer {
}
}
async fn ingest_turn_resolved_config(
fn ingest_turn_resolved_config(
&mut self,
input: TurnResolvedConfigFact,
out: &mut Vec<TrackEventRequest>,
@@ -495,16 +432,15 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(thread_id);
turn_state.num_input_images = Some(num_input_images);
turn_state.resolved_config = Some(input);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
async fn ingest_turn_token_usage(
fn ingest_turn_token_usage(
&mut self,
input: TurnTokenUsageFact,
out: &mut Vec<TrackEventRequest>,
@@ -518,12 +454,11 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(input.thread_id);
turn_state.token_usage = Some(input.token_usage);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
async fn ingest_skill_invoked(
@@ -630,7 +565,7 @@ impl AnalyticsReducer {
});
}
async fn ingest_response(
fn ingest_response(
&mut self,
connection_id: u64,
response: ClientResponse,
@@ -682,13 +617,12 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.connection_id = Some(connection_id);
turn_state.thread_id = Some(pending_request.thread_id);
turn_state.num_input_images = Some(pending_request.num_input_images);
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
ClientResponse::TurnSteer {
request_id,
@@ -750,68 +684,12 @@ impl AnalyticsReducer {
);
}
async fn ingest_notification(
fn ingest_notification(
&mut self,
notification: ServerNotification,
out: &mut Vec<TrackEventRequest>,
) {
match notification {
ServerNotification::ItemStarted(notification) => {
let Some(item_id) = tracked_tool_item_id(&notification.item) else {
return;
};
let Some(started_at_ms) = option_i64_to_u64(Some(notification.started_at_ms))
else {
return;
};
self.tool_items_started_at_ms.insert(
ToolItemKey {
thread_id: notification.thread_id,
turn_id: notification.turn_id,
item_id: item_id.to_string(),
},
started_at_ms,
);
}
ServerNotification::ItemCompleted(notification) => {
let Some(item_id) = tracked_tool_item_id(&notification.item) else {
return;
};
let key = ToolItemKey {
thread_id: notification.thread_id.clone(),
turn_id: notification.turn_id.clone(),
item_id: item_id.to_string(),
};
let Some(started_at_ms) = self.tool_items_started_at_ms.remove(&key) else {
tracing::warn!(
thread_id = %notification.thread_id,
turn_id = %notification.turn_id,
item_id,
"dropping tool item analytics event: missing item started notification"
);
return;
};
let Some(completed_at_ms) = option_i64_to_u64(Some(notification.completed_at_ms))
else {
return;
};
let Some((connection_state, thread_metadata)) = self
.thread_context_or_warn(AnalyticsDropSite::tool_item(&notification, item_id))
else {
return;
};
if let Some(event) = tool_item_event(
&notification.thread_id,
&notification.turn_id,
&notification.item,
started_at_ms,
completed_at_ms,
connection_state,
thread_metadata,
) {
out.push(event);
}
}
ServerNotification::TurnStarted(notification) => {
let turn_state = self.turns.entry(notification.turn.id).or_insert(TurnState {
connection_id: None,
@@ -821,7 +699,6 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.started_at = notification
@@ -829,24 +706,6 @@ impl AnalyticsReducer {
.started_at
.and_then(|started_at| u64::try_from(started_at).ok());
}
ServerNotification::TurnDiffUpdated(notification) => {
let turn_state =
self.turns
.entry(notification.turn_id.clone())
.or_insert(TurnState {
connection_id: None,
thread_id: None,
num_input_images: None,
resolved_config: None,
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.thread_id = Some(notification.thread_id);
turn_state.latest_diff = Some(notification.diff);
}
ServerNotification::TurnCompleted(notification) => {
let turn_state =
self.turns
@@ -859,7 +718,6 @@ impl AnalyticsReducer {
started_at: None,
token_usage: None,
completed: None,
latest_diff: None,
steer_count: 0,
});
turn_state.completed = Some(CompletedTurnState {
@@ -879,7 +737,7 @@ impl AnalyticsReducer {
.and_then(|duration_ms| u64::try_from(duration_ms).ok()),
});
let turn_id = notification.turn.id;
self.maybe_emit_turn_event(&turn_id, out).await;
self.maybe_emit_turn_event(&turn_id, out);
}
_ => {}
}
@@ -921,7 +779,7 @@ impl AnalyticsReducer {
ephemeral: thread.ephemeral,
thread_source: thread_metadata.thread_source,
initialization_mode,
subagent_source: thread_metadata.subagent_source.clone(),
subagent_source: thread_metadata.subagent_source,
parent_thread_id: thread_metadata.parent_thread_id,
created_at: u64::try_from(thread.created_at).unwrap_or_default(),
},
@@ -1015,7 +873,7 @@ impl AnalyticsReducer {
}));
}
async fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
let Some(turn_state) = self.turns.get(turn_id) else {
return;
};
@@ -1048,23 +906,18 @@ impl AnalyticsReducer {
warn_missing_analytics_context(&drop_site, MissingAnalyticsContext::ThreadMetadata);
return;
};
let turn_event = TrackEventRequest::TurnEvent(Box::new(CodexTurnEventRequest {
event_type: "codex_turn_event",
event_params: codex_turn_event_params(
connection_state.app_server_client.clone(),
connection_state.runtime.clone(),
turn_id.to_string(),
turn_state,
thread_metadata,
),
}));
let accepted_line_event = accepted_line_event_input(turn_id, turn_state);
out.push(turn_event);
if let Some((mut input, cwd)) = accepted_line_event {
input.repo_hash = accepted_line_repo_hash_for_cwd(cwd.as_path()).await;
out.extend(accepted_line_fingerprint_event_requests(input));
}
out.push(TrackEventRequest::TurnEvent(Box::new(
CodexTurnEventRequest {
event_type: "codex_turn_event",
event_params: codex_turn_event_params(
connection_state.app_server_client.clone(),
connection_state.runtime.clone(),
turn_id.to_string(),
turn_state,
thread_metadata,
),
},
)));
self.turns.remove(turn_id);
}
@@ -1130,582 +983,6 @@ fn warn_missing_analytics_context(
);
}
fn tracked_tool_item_id(item: &ThreadItem) -> Option<&str> {
match item {
ThreadItem::CommandExecution { id, .. }
| ThreadItem::FileChange { id, .. }
| ThreadItem::McpToolCall { id, .. }
| ThreadItem::DynamicToolCall { id, .. }
| ThreadItem::CollabAgentToolCall { id, .. }
| ThreadItem::WebSearch { id, .. }
| ThreadItem::ImageGeneration { id, .. } => Some(id),
ThreadItem::UserMessage { .. }
| ThreadItem::HookPrompt { .. }
| ThreadItem::AgentMessage { .. }
| ThreadItem::Plan { .. }
| ThreadItem::Reasoning { .. }
| ThreadItem::ImageView { .. }
| ThreadItem::EnteredReviewMode { .. }
| ThreadItem::ExitedReviewMode { .. }
| ThreadItem::ContextCompaction { .. } => None,
}
}
fn tool_item_event(
thread_id: &str,
turn_id: &str,
item: &ThreadItem,
started_at_ms: u64,
completed_at_ms: u64,
connection_state: &ConnectionState,
thread_metadata: &ThreadMetadataState,
) -> Option<TrackEventRequest> {
let context = ToolItemContext {
started_at_ms,
completed_at_ms,
connection_state,
thread_metadata,
};
match item {
ThreadItem::CommandExecution {
id,
source,
status,
command_actions,
exit_code,
duration_ms,
..
} => {
let (terminal_status, failure_kind) = command_execution_outcome(status)?;
let action_counts = command_action_counts(command_actions);
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
command_execution_tool_name(*source).to_string(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: option_i64_to_u64(*duration_ms),
},
context,
);
Some(TrackEventRequest::CommandExecution(
CodexCommandExecutionEventRequest {
event_type: "codex_command_execution_event",
event_params: CodexCommandExecutionEventParams {
base,
command_execution_source: *source,
exit_code: *exit_code,
command_total_action_count: action_counts.total,
command_read_action_count: action_counts.read,
command_list_files_action_count: action_counts.list_files,
command_search_action_count: action_counts.search,
command_unknown_action_count: action_counts.unknown,
},
},
))
}
ThreadItem::FileChange {
id,
changes,
status,
} => {
let (terminal_status, failure_kind) = patch_apply_outcome(status)?;
let counts = file_change_counts(changes);
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
"apply_patch".to_string(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: None,
},
context,
);
Some(TrackEventRequest::FileChange(CodexFileChangeEventRequest {
event_type: "codex_file_change_event",
event_params: CodexFileChangeEventParams {
base,
file_change_count: usize_to_u64(changes.len()),
file_add_count: counts.add,
file_update_count: counts.update,
file_delete_count: counts.delete,
file_move_count: counts.move_,
},
}))
}
ThreadItem::McpToolCall {
id,
server,
tool,
status,
error,
duration_ms,
..
} => {
let (terminal_status, failure_kind) = mcp_tool_call_outcome(status)?;
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
tool.clone(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: option_i64_to_u64(*duration_ms),
},
context,
);
Some(TrackEventRequest::McpToolCall(
CodexMcpToolCallEventRequest {
event_type: "codex_mcp_tool_call_event",
event_params: CodexMcpToolCallEventParams {
base,
mcp_server_name: server.clone(),
mcp_tool_name: tool.clone(),
mcp_error_present: error.is_some(),
},
},
))
}
ThreadItem::DynamicToolCall {
id,
tool,
status,
content_items,
success,
duration_ms,
..
} => {
let (terminal_status, failure_kind) = dynamic_tool_call_outcome(status)?;
let counts = content_items
.as_ref()
.map(|items| dynamic_content_counts(items));
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
tool.clone(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: option_i64_to_u64(*duration_ms),
},
context,
);
Some(TrackEventRequest::DynamicToolCall(
CodexDynamicToolCallEventRequest {
event_type: "codex_dynamic_tool_call_event",
event_params: CodexDynamicToolCallEventParams {
base,
dynamic_tool_name: tool.clone(),
success: *success,
output_content_item_count: counts.map(|counts| counts.total),
output_text_item_count: counts.map(|counts| counts.text),
output_image_item_count: counts.map(|counts| counts.image),
},
},
))
}
ThreadItem::CollabAgentToolCall {
id,
tool,
status,
sender_thread_id,
receiver_thread_ids,
model,
reasoning_effort,
agents_states,
..
} => {
let (terminal_status, failure_kind) = collab_tool_call_outcome(status)?;
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
collab_agent_tool_name(tool).to_string(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: None,
},
context,
);
Some(TrackEventRequest::CollabAgentToolCall(
CodexCollabAgentToolCallEventRequest {
event_type: "codex_collab_agent_tool_call_event",
event_params: CodexCollabAgentToolCallEventParams {
base,
sender_thread_id: sender_thread_id.clone(),
receiver_thread_count: usize_to_u64(receiver_thread_ids.len()),
receiver_thread_ids: Some(receiver_thread_ids.clone()),
requested_model: model.clone(),
requested_reasoning_effort: reasoning_effort
.as_ref()
.and_then(serialize_enum_as_string),
agent_state_count: Some(usize_to_u64(agents_states.len())),
completed_agent_count: Some(usize_to_u64(
agents_states
.values()
.filter(|state| state.status == CollabAgentStatus::Completed)
.count(),
)),
failed_agent_count: Some(usize_to_u64(
agents_states
.values()
.filter(|state| {
matches!(
state.status,
CollabAgentStatus::Errored
| CollabAgentStatus::Shutdown
| CollabAgentStatus::NotFound
)
})
.count(),
)),
},
},
))
}
ThreadItem::WebSearch { id, query, action } => {
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
"web_search".to_string(),
ToolItemOutcome {
terminal_status: ToolItemTerminalStatus::Completed,
failure_kind: None,
execution_duration_ms: None,
},
context,
);
Some(TrackEventRequest::WebSearch(CodexWebSearchEventRequest {
event_type: "codex_web_search_event",
event_params: CodexWebSearchEventParams {
base,
web_search_action: action.as_ref().map(web_search_action_kind),
query_present: !query.trim().is_empty(),
query_count: web_search_query_count(query, action.as_ref()),
},
}))
}
ThreadItem::ImageGeneration {
id,
status,
revised_prompt,
saved_path,
..
} => {
let (terminal_status, failure_kind) = image_generation_outcome(status.as_str());
let base = tool_item_base(
thread_id,
turn_id,
id.clone(),
"image_generation".to_string(),
ToolItemOutcome {
terminal_status,
failure_kind,
execution_duration_ms: None,
},
context,
);
Some(TrackEventRequest::ImageGeneration(
CodexImageGenerationEventRequest {
event_type: "codex_image_generation_event",
event_params: CodexImageGenerationEventParams {
base,
revised_prompt_present: revised_prompt.is_some(),
saved_path_present: saved_path.is_some(),
},
},
))
}
_ => None,
}
}
/// Terminal outcome of a single tool item, shared by all of the per-tool
/// telemetry event builders above.
struct ToolItemOutcome {
    // Final state the item reached (e.g. completed / failed / rejected).
    terminal_status: ToolItemTerminalStatus,
    // Why the item failed, when `terminal_status` is a failure state.
    failure_kind: Option<ToolItemFailureKind>,
    // Upstream-reported execution time, when the item carries one; distinct
    // from the app-server-observed lifecycle duration.
    execution_duration_ms: Option<u64>,
}
/// Per-kind tallies of the parsed actions inside one command execution.
/// `total` covers every action, including kinds not tallied individually.
#[derive(Default)]
struct CommandActionCounts {
    total: u64,
    read: u64,
    list_files: u64,
    search: u64,
    unknown: u64,
}
/// Tallies the given command actions by kind. `total` is always the slice
/// length, so it equals the sum of the per-kind counters.
fn command_action_counts(command_actions: &[CommandAction]) -> CommandActionCounts {
    command_actions.iter().fold(
        CommandActionCounts {
            total: usize_to_u64(command_actions.len()),
            ..Default::default()
        },
        |mut acc, action| {
            match action {
                CommandAction::Read { .. } => acc.read += 1,
                CommandAction::ListFiles { .. } => acc.list_files += 1,
                CommandAction::Search { .. } => acc.search += 1,
                CommandAction::Unknown { .. } => acc.unknown += 1,
            }
            acc
        },
    )
}
/// Borrowed per-item context threaded into `tool_item_base` when building a
/// tool-item telemetry event.
#[derive(Clone, Copy)]
struct ToolItemContext<'a> {
    // Unix-epoch milliseconds at which the item was observed to start.
    started_at_ms: u64,
    // Unix-epoch milliseconds at which the item was observed to complete.
    completed_at_ms: u64,
    // Connection-level metadata (client and runtime identifiers).
    connection_state: &'a ConnectionState,
    // Thread-level metadata (source, subagent, parent thread).
    thread_metadata: &'a ThreadMetadataState,
}
/// Builds the `CodexToolItemEventBase` shared by every tool-item telemetry
/// event, combining the item's identity, its outcome, and connection/thread
/// metadata from `context`.
///
/// Review- and approval-related fields are emitted as fixed defaults here
/// (zero counts, `Unknown` approval outcome, no permission/network requests);
/// nothing in this builder computes them from the item.
fn tool_item_base(
    thread_id: &str,
    turn_id: &str,
    item_id: String,
    tool_name: String,
    outcome: ToolItemOutcome,
    context: ToolItemContext<'_>,
) -> CodexToolItemEventBase {
    let thread_metadata = context.thread_metadata;
    CodexToolItemEventBase {
        thread_id: thread_id.to_string(),
        turn_id: turn_id.to_string(),
        item_id,
        app_server_client: context.connection_state.app_server_client.clone(),
        runtime: context.connection_state.runtime.clone(),
        thread_source: thread_metadata.thread_source,
        subagent_source: thread_metadata.subagent_source.clone(),
        parent_thread_id: thread_metadata.parent_thread_id.clone(),
        tool_name,
        started_at_ms: context.started_at_ms,
        completed_at_ms: context.completed_at_ms,
        // duration_ms reflects item lifecycle observed by app-server. For web
        // search and image generation in particular, that can be narrower than
        // full upstream execution time.
        duration_ms: observed_duration_ms(context.started_at_ms, context.completed_at_ms),
        execution_duration_ms: outcome.execution_duration_ms,
        review_count: 0,
        guardian_review_count: 0,
        user_review_count: 0,
        final_approval_outcome: ToolItemFinalApprovalOutcome::Unknown,
        terminal_status: outcome.terminal_status,
        failure_kind: outcome.failure_kind,
        requested_additional_permissions: false,
        requested_network_access: false,
    }
}
/// Wall-clock duration between observed start and completion, in
/// milliseconds, or `None` when the completion timestamp precedes the start
/// (i.e. the subtraction would underflow).
fn observed_duration_ms(started_at_ms: u64, completed_at_ms: u64) -> Option<u64> {
    if completed_at_ms < started_at_ms {
        None
    } else {
        Some(completed_at_ms - started_at_ms)
    }
}
/// Maps a command-execution source to the tool name reported in telemetry.
/// Both unified-exec sources (startup and interaction) report the same name.
fn command_execution_tool_name(source: CommandExecutionSource) -> &'static str {
    match source {
        CommandExecutionSource::Agent => "shell",
        CommandExecutionSource::UserShell => "user_shell",
        CommandExecutionSource::UnifiedExecStartup => "unified_exec",
        CommandExecutionSource::UnifiedExecInteraction => "unified_exec",
    }
}
/// Converts a command-execution status into a terminal telemetry outcome.
/// Returns `None` while the command is still in progress (no event emitted).
fn command_execution_outcome(
    status: &CommandExecutionStatus,
) -> Option<(ToolItemTerminalStatus, Option<ToolItemFailureKind>)> {
    let outcome = match status {
        // Not terminal yet: nothing to report.
        CommandExecutionStatus::InProgress => return None,
        CommandExecutionStatus::Completed => (ToolItemTerminalStatus::Completed, None),
        CommandExecutionStatus::Failed => (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        ),
        CommandExecutionStatus::Declined => (
            ToolItemTerminalStatus::Rejected,
            Some(ToolItemFailureKind::ApprovalDenied),
        ),
    };
    Some(outcome)
}
/// Converts a patch-apply status into a terminal telemetry outcome.
/// Returns `None` while the patch is still in progress (no event emitted).
fn patch_apply_outcome(
    status: &PatchApplyStatus,
) -> Option<(ToolItemTerminalStatus, Option<ToolItemFailureKind>)> {
    let outcome = match status {
        // Not terminal yet: nothing to report.
        PatchApplyStatus::InProgress => return None,
        PatchApplyStatus::Completed => (ToolItemTerminalStatus::Completed, None),
        PatchApplyStatus::Failed => (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        ),
        PatchApplyStatus::Declined => (
            ToolItemTerminalStatus::Rejected,
            Some(ToolItemFailureKind::ApprovalDenied),
        ),
    };
    Some(outcome)
}
/// Converts an MCP tool-call status into a terminal telemetry outcome.
/// Returns `None` while the call is still in progress (no event emitted).
fn mcp_tool_call_outcome(
    status: &McpToolCallStatus,
) -> Option<(ToolItemTerminalStatus, Option<ToolItemFailureKind>)> {
    let outcome = match status {
        // Not terminal yet: nothing to report.
        McpToolCallStatus::InProgress => return None,
        McpToolCallStatus::Completed => (ToolItemTerminalStatus::Completed, None),
        McpToolCallStatus::Failed => (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        ),
    };
    Some(outcome)
}
/// Converts a dynamic tool-call status into a terminal telemetry outcome.
/// Returns `None` while the call is still in progress (no event emitted).
fn dynamic_tool_call_outcome(
    status: &DynamicToolCallStatus,
) -> Option<(ToolItemTerminalStatus, Option<ToolItemFailureKind>)> {
    let outcome = match status {
        // Not terminal yet: nothing to report.
        DynamicToolCallStatus::InProgress => return None,
        DynamicToolCallStatus::Completed => (ToolItemTerminalStatus::Completed, None),
        DynamicToolCallStatus::Failed => (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        ),
    };
    Some(outcome)
}
/// Converts a collab-agent tool-call status into a terminal telemetry
/// outcome. Returns `None` while the call is still in progress.
fn collab_tool_call_outcome(
    status: &CollabAgentToolCallStatus,
) -> Option<(ToolItemTerminalStatus, Option<ToolItemFailureKind>)> {
    let outcome = match status {
        // Not terminal yet: nothing to report.
        CollabAgentToolCallStatus::InProgress => return None,
        CollabAgentToolCallStatus::Completed => (ToolItemTerminalStatus::Completed, None),
        CollabAgentToolCallStatus::Failed => (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        ),
    };
    Some(outcome)
}
/// Maps an image-generation status string to a terminal telemetry outcome.
/// Only the literal strings "failed" and "error" count as failures; any
/// other status is reported as completed.
fn image_generation_outcome(status: &str) -> (ToolItemTerminalStatus, Option<ToolItemFailureKind>) {
    if matches!(status, "failed" | "error") {
        (
            ToolItemTerminalStatus::Failed,
            Some(ToolItemFailureKind::ToolError),
        )
    } else {
        (ToolItemTerminalStatus::Completed, None)
    }
}
fn collab_agent_tool_name(tool: &CollabAgentTool) -> &'static str {
match tool {
CollabAgentTool::SpawnAgent => "spawn_agent",
CollabAgentTool::SendInput => "send_input",
CollabAgentTool::ResumeAgent => "resume_agent",
CollabAgentTool::Wait => "wait_agent",
CollabAgentTool::CloseAgent => "close_agent",
}
}
/// Per-kind tallies of the file changes inside one apply-patch item.
#[derive(Default)]
struct FileChangeCounts {
    add: u64,
    update: u64,
    delete: u64,
    // Trailing underscore avoids the `move` keyword; counts updates that
    // carry a move/rename path.
    move_: u64,
}
/// Tallies file changes by kind. An `Update` that carries a `move_path` is
/// counted as a move rather than a plain update.
fn file_change_counts(changes: &[codex_app_server_protocol::FileUpdateChange]) -> FileChangeCounts {
    let mut counts = FileChangeCounts::default();
    for change in changes {
        // Select the counter for this change kind, then bump it.
        let slot = match &change.kind {
            PatchChangeKind::Add => &mut counts.add,
            PatchChangeKind::Delete => &mut counts.delete,
            PatchChangeKind::Update { move_path: Some(_) } => &mut counts.move_,
            PatchChangeKind::Update { move_path: None } => &mut counts.update,
        };
        *slot += 1;
    }
    counts
}
/// Per-kind tallies of the output content items of one dynamic tool call.
#[derive(Clone, Copy)]
struct DynamicContentCounts {
    // Total output items; equals `text + image` given the two content kinds.
    total: u64,
    text: u64,
    image: u64,
}
/// Tallies dynamic tool-call output items by content kind (text vs image).
fn dynamic_content_counts(items: &[DynamicToolCallOutputContentItem]) -> DynamicContentCounts {
    let text_items = items
        .iter()
        .filter(|item| matches!(item, DynamicToolCallOutputContentItem::InputText { .. }))
        .count();
    let image_items = items
        .iter()
        .filter(|item| matches!(item, DynamicToolCallOutputContentItem::InputImage { .. }))
        .count();
    DynamicContentCounts {
        total: usize_to_u64(items.len()),
        text: usize_to_u64(text_items),
        image: usize_to_u64(image_items),
    }
}
fn web_search_action_kind(action: &WebSearchAction) -> WebSearchActionKind {
match action {
WebSearchAction::Search { .. } => WebSearchActionKind::Search,
WebSearchAction::OpenPage { .. } => WebSearchActionKind::OpenPage,
WebSearchAction::FindInPage { .. } => WebSearchActionKind::FindInPage,
WebSearchAction::Other => WebSearchActionKind::Other,
}
}
/// Best-effort count of the queries behind one web-search item.
///
/// For `Search` actions the explicit `queries` list length wins, falling back
/// to 1 when only the action's own `query` field is set. Non-search actions
/// report no count. With no action at all, the item-level `query` string
/// counts as a single query when non-blank.
fn web_search_query_count(query: &str, action: Option<&WebSearchAction>) -> Option<u64> {
    match action {
        // NOTE: the destructured `query` below shadows the outer `query`
        // parameter; this arm reads the action's own optional query field,
        // not the item-level string.
        Some(WebSearchAction::Search { query, queries }) => queries
            .as_ref()
            .map(|queries| usize_to_u64(queries.len()))
            .or_else(|| query.as_ref().map(|_| 1)),
        Some(WebSearchAction::OpenPage { .. })
        | Some(WebSearchAction::FindInPage { .. })
        | Some(WebSearchAction::Other) => None,
        None => (!query.trim().is_empty()).then_some(1),
    }
}
/// Builds the accepted-line fingerprint event for a turn, paired with the
/// cwd used for permission-profile resolution.
///
/// Returns `None` when the turn has no recorded diff, when the diff yields
/// zero accepted added and deleted lines, or when the turn state is missing
/// its thread id or resolved config.
fn accepted_line_event_input(
    turn_id: &str,
    turn_state: &TurnState,
) -> Option<(AcceptedLineFingerprintEventInput, PathBuf)> {
    let latest_diff = turn_state.latest_diff.as_deref()?;
    let summary = accepted_line_fingerprints_from_unified_diff(latest_diff);
    // Skip turns whose diff accepted nothing; no event is worth emitting.
    if summary.accepted_added_lines == 0 && summary.accepted_deleted_lines == 0 {
        return None;
    }
    let thread_id = turn_state.thread_id.clone()?;
    let resolved_config = turn_state.resolved_config.clone()?;
    Some((
        AcceptedLineFingerprintEventInput {
            event_type: "codex.accepted_line_fingerprints",
            turn_id: turn_id.to_string(),
            thread_id,
            product_surface: Some("codex".to_string()),
            model_slug: Some(resolved_config.model.clone()),
            completed_at: now_unix_seconds(),
            // repo_hash is intentionally not computed here.
            repo_hash: None,
            accepted_added_lines: summary.accepted_added_lines,
            accepted_deleted_lines: summary.accepted_deleted_lines,
            line_fingerprints: summary.line_fingerprints,
        },
        resolved_config.permission_profile_cwd,
    ))
}
fn codex_turn_event_params(
app_server_client: CodexAppServerClientMetadata,
runtime: CodexRuntimeMetadata,

View File

@@ -7,8 +7,6 @@ license.workspace = true
[lib]
name = "codex_ansi_escape"
path = "src/lib.rs"
test = false
doctest = false
[lints]
workspace = true

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_app_server_client"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -29,7 +29,6 @@ pub use codex_app_server::in_process::DEFAULT_IN_PROCESS_CHANNEL_CAPACITY;
pub use codex_app_server::in_process::InProcessServerEvent;
use codex_app_server::in_process::InProcessStartArgs;
use codex_app_server::in_process::LogDbLayer;
pub use codex_app_server::in_process::StateDbHandle;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientNotification;
use codex_app_server_protocol::ClientRequest;
@@ -47,8 +46,10 @@ use codex_config::LoaderOverrides;
use codex_config::NoopThreadConfigLoader;
use codex_config::RemoteThreadConfigLoader;
use codex_config::ThreadConfigLoader;
pub use codex_core::StateDbHandle;
use codex_core::config::Config;
pub use codex_exec_server::EnvironmentManager;
pub use codex_exec_server::EnvironmentManagerArgs;
pub use codex_exec_server::ExecServerRuntimePaths;
use codex_feedback::CodexFeedback;
use codex_protocol::protocol::SessionSource;
@@ -374,7 +375,6 @@ impl InProcessClientStartArgs {
pub fn initialize_params(&self) -> InitializeParams {
let capabilities = InitializeCapabilities {
experimental_api: self.experimental_api,
request_attestation: false,
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
None
} else {
@@ -951,7 +951,7 @@ mod tests {
use codex_app_server_protocol::ToolRequestUserInputParams;
use codex_app_server_protocol::ToolRequestUserInputQuestion;
use codex_core::config::ConfigBuilder;
use codex_core::init_state_db;
use codex_core::init_state_db_from_config;
use futures::SinkExt;
use futures::StreamExt;
use pretty_assertions::assert_eq;
@@ -1017,7 +1017,7 @@ mod tests {
) -> TestClient {
let codex_home = TempDir::new().expect("temp dir");
let config = Arc::new(build_test_config_for_codex_home(codex_home.path()).await);
let state_db = init_state_db(config.as_ref())
let state_db = init_state_db_from_config(config.as_ref())
.await
.expect("state db should initialize for in-process test");
let client = InProcessAppServerClient::start(InProcessClientStartArgs {

View File

@@ -73,7 +73,6 @@ impl RemoteAppServerConnectArgs {
fn initialize_params(&self) -> InitializeParams {
let capabilities = InitializeCapabilities {
experimental_api: self.experimental_api,
request_attestation: false,
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
None
} else {

View File

@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "app-server-daemon",
crate_name = "codex_app_server_daemon",
)

View File

@@ -1,39 +0,0 @@
[package]
name = "codex-app-server-daemon"
version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
name = "codex_app_server_daemon"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true
[dependencies]
anyhow = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-app-server-transport = { workspace = true }
codex-core = { workspace = true }
codex-uds = { workspace = true }
futures = { workspace = true }
libc = { workspace = true }
reqwest = { workspace = true, features = ["rustls-tls"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
"fs",
"io-util",
"macros",
"process",
"rt-multi-thread",
"signal",
"time",
] }
tokio-tungstenite = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -1,104 +0,0 @@
# codex-app-server-daemon
> `codex-app-server-daemon` is experimental and its lifecycle contract may
> change while the remote-management flow is still being developed.
`codex-app-server-daemon` backs the machine-readable `codex app-server`
lifecycle commands used by remote clients such as the desktop and mobile apps.
It is intended for Codex instances launched over SSH, including fresh developer
machines that should expose app-server with `remote_control` enabled.
## Platform support
The current daemon implementation is Unix-only. It uses pidfile-backed
daemonization plus Unix process and file-locking primitives, and does not yet
support Windows lifecycle management.
## Commands
```sh
codex app-server daemon start
codex app-server daemon restart
codex app-server daemon enable-remote-control
codex app-server daemon disable-remote-control
codex app-server daemon stop
codex app-server daemon version
codex app-server daemon bootstrap --remote-control
```
On success, every command writes exactly one JSON object to stdout. Consumers
should parse that JSON rather than relying on human-readable text. Lifecycle
responses report the resolved backend, socket path, local CLI version, and
running app-server version when applicable.
## Bootstrap flow
For a new remote machine:
```sh
curl -fsSL https://chatgpt.com/codex/install.sh | sh
$HOME/.codex/packages/standalone/current/codex app-server daemon bootstrap --remote-control
```
`bootstrap` requires the standalone managed install. It records the daemon
settings under `CODEX_HOME/app-server-daemon/`, starts app-server as a
pidfile-backed detached process, and launches a detached updater loop.
## Installation and update cases
The daemon assumes Codex is installed through `install.sh` and always launches
the standalone managed binary under `CODEX_HOME`.
| Situation | What starts | Does this daemon fetch new binaries? | Does a running app-server eventually move to a newer binary on its own? |
| --- | --- | --- | --- |
| `install.sh` has run, but only `start` is used | `start` uses `CODEX_HOME/packages/standalone/current/codex` | No | No. The managed path is used when starting or restarting, but no updater is installed. |
| `install.sh` has run, then `bootstrap` is used | The pidfile backend uses `CODEX_HOME/packages/standalone/current/codex` | Yes. Bootstrap launches a detached updater loop that runs `install.sh` hourly. | Yes, while that updater process is alive. After a successful fetch, it restarts a currently running app-server only when the managed binary reports a different version. |
| Some other tool updates the managed binary path | The next fresh start or restart uses the updated file at that path | No | Not automatically. The existing process keeps the old executable image until an explicit `restart`. |
### Standalone installs
For installs created by `install.sh`:
- lifecycle commands always use the standalone managed binary path
- `bootstrap` is supported
- `bootstrap` starts a detached pid-backed updater loop that fetches via
`install.sh`, then restarts app-server if it is running on a different version
- the updater loop is not reboot-persistent; it must be started again by
rerunning `bootstrap` after a reboot
### Out-of-band updates
This daemon does not watch arbitrary executable files for replacement. If some
other tool updates a binary that the daemon would use on its next launch:
- a currently running app-server remains on the old executable image
- `restart` will launch the updated binary
- for bootstrapped daemons, the detached updater loop only reacts to updates it
fetched itself; it does not watch arbitrary file replacement
## Lifecycle semantics
`start` is idempotent and returns after app-server is ready to answer the normal
JSON-RPC initialize handshake on the Unix control socket.
`restart` stops any managed daemon and starts it again.
`enable-remote-control` and `disable-remote-control` persist the launch setting
for future starts. If a managed app-server is already running, they restart it
so the new setting takes effect immediately.
`stop` sends a graceful termination request first, then sends a second
termination signal after the grace window if the process is still alive.
All mutating lifecycle commands are serialized per `CODEX_HOME`, so a concurrent
`start`, `restart`, `enable-remote-control`, `disable-remote-control`, `stop`,
or `bootstrap` does not race another in-flight lifecycle operation.
## State
The daemon stores its local state under `CODEX_HOME/app-server-daemon/`:
- `settings.json` for persisted launch settings
- `app-server.pid` for the app-server process record
- `app-server-updater.pid` for the pid-backed standalone updater loop
- `daemon.lock` for daemon-wide lifecycle serialization

View File

@@ -1,33 +0,0 @@
mod pid;
use std::path::PathBuf;
use serde::Serialize;
pub(crate) use pid::PidBackend;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BackendKind {
Pid,
}
#[derive(Debug, Clone)]
pub(crate) struct BackendPaths {
pub(crate) codex_bin: PathBuf,
pub(crate) pid_file: PathBuf,
pub(crate) update_pid_file: PathBuf,
pub(crate) remote_control_enabled: bool,
}
pub(crate) fn pid_backend(paths: BackendPaths) -> PidBackend {
PidBackend::new(
paths.codex_bin,
paths.pid_file,
paths.remote_control_enabled,
)
}
pub(crate) fn pid_update_loop_backend(paths: BackendPaths) -> PidBackend {
PidBackend::new_update_loop(paths.codex_bin, paths.update_pid_file)
}

View File

@@ -1,600 +0,0 @@
use std::path::Path;
use std::path::PathBuf;
#[cfg(unix)]
use std::process::Stdio;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use serde::Deserialize;
use serde::Serialize;
use tokio::fs;
#[cfg(unix)]
use tokio::process::Command;
use tokio::time::sleep;
const STOP_POLL_INTERVAL: Duration = Duration::from_millis(50);
const STOP_GRACE_PERIOD: Duration = Duration::from_secs(60);
const STOP_TIMEOUT: Duration = Duration::from_secs(70);
const START_TIMEOUT: Duration = Duration::from_secs(10);
#[derive(Debug)]
#[cfg_attr(not(unix), allow(dead_code))]
pub(crate) struct PidBackend {
codex_bin: PathBuf,
pid_file: PathBuf,
lock_file: PathBuf,
command_kind: PidCommandKind,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PidRecord {
pid: u32,
process_start_time: String,
}
#[derive(Debug, Clone, PartialEq, Eq)]
enum PidFileState {
Missing,
Starting,
Running(PidRecord),
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(not(unix), allow(dead_code))]
enum PidCommandKind {
AppServer { remote_control_enabled: bool },
UpdateLoop,
}
impl PidBackend {
pub(crate) fn new(codex_bin: PathBuf, pid_file: PathBuf, remote_control_enabled: bool) -> Self {
let lock_file = pid_file.with_extension("pid.lock");
Self {
codex_bin,
pid_file,
lock_file,
command_kind: PidCommandKind::AppServer {
remote_control_enabled,
},
}
}
pub(crate) fn new_update_loop(codex_bin: PathBuf, pid_file: PathBuf) -> Self {
let lock_file = pid_file.with_extension("pid.lock");
Self {
codex_bin,
pid_file,
lock_file,
command_kind: PidCommandKind::UpdateLoop,
}
}
pub(crate) async fn is_starting_or_running(&self) -> Result<bool> {
loop {
match self.read_pid_file_state().await? {
PidFileState::Missing => return Ok(false),
PidFileState::Starting => return Ok(true),
PidFileState::Running(record) => {
if self.record_is_active(&record).await? {
return Ok(true);
}
match self.refresh_after_stale_record(&record).await? {
PidFileState::Missing => return Ok(false),
PidFileState::Starting | PidFileState::Running(_) => continue,
}
}
}
}
}
#[cfg(unix)]
pub(crate) async fn start(&self) -> Result<Option<u32>> {
if let Some(parent) = self.pid_file.parent() {
fs::create_dir_all(parent)
.await
.with_context(|| format!("failed to create pid directory {}", parent.display()))?;
}
let reservation_lock = self.acquire_reservation_lock().await?;
let _pid_file = loop {
match fs::OpenOptions::new()
.create_new(true)
.write(true)
.open(&self.pid_file)
.await
{
Ok(pid_file) => break pid_file,
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
match self.read_pid_file_state_with_lock_held().await? {
PidFileState::Missing => continue,
PidFileState::Running(record) => {
if self.record_is_active(&record).await? {
return Ok(None);
}
let _ = fs::remove_file(&self.pid_file).await;
continue;
}
PidFileState::Starting => {
unreachable!("lock holder cannot observe starting")
}
}
}
Err(err) => {
return Err(err).with_context(|| {
format!("failed to reserve pid file {}", self.pid_file.display())
});
}
}
};
let mut command = Command::new(&self.codex_bin);
command
.args(self.command_args())
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null());
#[cfg(unix)]
{
unsafe {
command.pre_exec(|| {
if libc::setsid() == -1 {
return Err(std::io::Error::last_os_error());
}
Ok(())
});
}
}
let child = match command.spawn() {
Ok(child) => child,
Err(err) => {
let _ = fs::remove_file(&self.pid_file).await;
return Err(err).with_context(|| {
format!(
"failed to spawn detached app-server process using {}",
self.codex_bin.display()
)
});
}
};
let pid = child
.id()
.context("spawned app-server process has no pid")?;
let record = match read_process_start_time(pid).await {
Ok(process_start_time) => PidRecord {
pid,
process_start_time,
},
Err(err) => {
let _ = self.terminate_process(pid);
let _ = fs::remove_file(&self.pid_file).await;
return Err(err);
}
};
let contents = serde_json::to_vec(&record).context("failed to serialize pid record")?;
let temp_pid_file = self.pid_file.with_extension("pid.tmp");
if let Err(err) = fs::write(&temp_pid_file, &contents).await {
let _ = self.terminate_process(pid);
let _ = fs::remove_file(&self.pid_file).await;
return Err(err).with_context(|| {
format!("failed to write pid temp file {}", temp_pid_file.display())
});
}
if let Err(err) = fs::rename(&temp_pid_file, &self.pid_file).await {
let _ = self.terminate_process(pid);
let _ = fs::remove_file(&temp_pid_file).await;
let _ = fs::remove_file(&self.pid_file).await;
return Err(err).with_context(|| {
format!("failed to publish pid file {}", self.pid_file.display())
});
}
drop(reservation_lock);
Ok(Some(pid))
}
#[cfg(not(unix))]
pub(crate) async fn start(&self) -> Result<Option<u32>> {
bail!("pid-managed app-server startup is unsupported on this platform")
}
pub(crate) async fn stop(&self) -> Result<()> {
loop {
let Some(record) = self.wait_for_pid_start().await? else {
return Ok(());
};
if !self.record_is_active(&record).await? {
match self.refresh_after_stale_record(&record).await? {
PidFileState::Missing => return Ok(()),
PidFileState::Starting | PidFileState::Running(_) => continue,
}
}
let pid = record.pid;
self.terminate_process(pid)?;
let started_at = tokio::time::Instant::now();
let deadline = tokio::time::Instant::now() + STOP_TIMEOUT;
let mut forced = false;
while tokio::time::Instant::now() < deadline {
if !self.record_is_active(&record).await? {
match self.refresh_after_stale_record(&record).await? {
PidFileState::Missing => return Ok(()),
PidFileState::Starting | PidFileState::Running(_) => break,
}
}
if !forced && started_at.elapsed() >= STOP_GRACE_PERIOD {
self.force_terminate_process(pid)?;
forced = true;
}
sleep(STOP_POLL_INTERVAL).await;
}
if self.record_is_active(&record).await? {
bail!("timed out waiting for pid-managed app server {pid} to stop");
}
}
}
async fn wait_for_pid_start(&self) -> Result<Option<PidRecord>> {
let deadline = tokio::time::Instant::now() + START_TIMEOUT;
loop {
match self.read_pid_file_state().await? {
PidFileState::Missing => return Ok(None),
PidFileState::Running(record) => return Ok(Some(record)),
PidFileState::Starting if tokio::time::Instant::now() < deadline => {
sleep(STOP_POLL_INTERVAL).await;
}
PidFileState::Starting => {
bail!(
"timed out waiting for pid reservation in {} to finish initializing",
self.pid_file.display()
);
}
}
}
}
async fn read_pid_file_state(&self) -> Result<PidFileState> {
let contents = match fs::read_to_string(&self.pid_file).await {
Ok(contents) => contents,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
return if reservation_lock_is_active(&self.lock_file).await? {
Ok(PidFileState::Starting)
} else {
Ok(PidFileState::Missing)
};
}
Err(err) => {
return Err(err).with_context(|| {
format!("failed to read pid file {}", self.pid_file.display())
});
}
};
if contents.trim().is_empty() {
match inspect_empty_pid_reservation(&self.pid_file, &self.lock_file).await? {
EmptyPidReservation::Active => {
return Ok(PidFileState::Starting);
}
EmptyPidReservation::Stale => {
return Ok(PidFileState::Missing);
}
EmptyPidReservation::Record(record) => return Ok(PidFileState::Running(record)),
}
}
let record = serde_json::from_str(&contents)
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
Ok(PidFileState::Running(record))
}
async fn read_pid_file_state_with_lock_held(&self) -> Result<PidFileState> {
let contents = match fs::read_to_string(&self.pid_file).await {
Ok(contents) => contents,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
return Ok(PidFileState::Missing);
}
Err(err) => {
return Err(err).with_context(|| {
format!("failed to read pid file {}", self.pid_file.display())
});
}
};
if contents.trim().is_empty() {
let _ = fs::remove_file(&self.pid_file).await;
return Ok(PidFileState::Missing);
}
let record = serde_json::from_str(&contents)
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
Ok(PidFileState::Running(record))
}
async fn refresh_after_stale_record(&self, expected: &PidRecord) -> Result<PidFileState> {
let reservation_lock = self.acquire_reservation_lock().await?;
let state = match self.read_pid_file_state_with_lock_held().await? {
PidFileState::Running(record) if record == *expected => {
let _ = fs::remove_file(&self.pid_file).await;
PidFileState::Missing
}
state => state,
};
drop(reservation_lock);
Ok(state)
}
async fn acquire_reservation_lock(&self) -> Result<fs::File> {
let reservation_lock = fs::OpenOptions::new()
.create(true)
.truncate(false)
.write(true)
.open(&self.lock_file)
.await
.with_context(|| {
format!("failed to open pid lock file {}", self.lock_file.display())
})?;
let lock_deadline = tokio::time::Instant::now() + START_TIMEOUT;
while !try_lock_file(&reservation_lock)? {
if tokio::time::Instant::now() >= lock_deadline {
bail!(
"timed out waiting for pid lock {}",
self.lock_file.display()
);
}
sleep(STOP_POLL_INTERVAL).await;
}
Ok(reservation_lock)
}
#[cfg(unix)]
fn command_args(&self) -> Vec<&'static str> {
match self.command_kind {
PidCommandKind::AppServer {
remote_control_enabled: true,
} => vec![
"--enable",
"remote_control",
"app-server",
"--listen",
"unix://",
],
PidCommandKind::AppServer {
remote_control_enabled: false,
} => vec!["app-server", "--listen", "unix://"],
PidCommandKind::UpdateLoop => vec!["app-server", "daemon", "pid-update-loop"],
}
}
fn terminate_process(&self, pid: u32) -> Result<()> {
match self.command_kind {
PidCommandKind::AppServer { .. } => terminate_process(pid),
PidCommandKind::UpdateLoop => terminate_process(pid),
}
}
fn force_terminate_process(&self, pid: u32) -> Result<()> {
match self.command_kind {
PidCommandKind::AppServer { .. } => force_terminate_process(pid),
PidCommandKind::UpdateLoop => force_terminate_process_group(pid),
}
}
async fn record_is_active(&self, record: &PidRecord) -> Result<bool> {
process_matches_record(record).await
}
}
#[cfg(unix)]
fn process_exists(pid: u32) -> bool {
let Ok(pid) = libc::pid_t::try_from(pid) else {
return false;
};
let result = unsafe { libc::kill(pid, 0) };
result == 0 || std::io::Error::last_os_error().raw_os_error() == Some(libc::EPERM)
}
#[cfg(unix)]
fn terminate_process(pid: u32) -> Result<()> {
let raw_pid = libc::pid_t::try_from(pid)
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
let result = unsafe { libc::kill(raw_pid, libc::SIGTERM) };
if result == 0 {
return Ok(());
}
let err = std::io::Error::last_os_error();
if err.raw_os_error() == Some(libc::ESRCH) {
return Ok(());
}
Err(err).with_context(|| format!("failed to terminate pid-managed app server {pid}"))
}
#[cfg(unix)]
fn force_terminate_process(pid: u32) -> Result<()> {
let raw_pid = libc::pid_t::try_from(pid)
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
let result = unsafe { libc::kill(raw_pid, libc::SIGKILL) };
if result == 0 {
return Ok(());
}
let err = std::io::Error::last_os_error();
if err.raw_os_error() == Some(libc::ESRCH) {
return Ok(());
}
Err(err).with_context(|| format!("failed to force terminate pid-managed app server {pid}"))
}
/// Delivers SIGKILL to the entire process group led by `pid`.
/// `kill(-pid, sig)` signals every member of the group, which assumes the
/// target was spawned as a group leader. ESRCH is treated as success.
#[cfg(unix)]
fn force_terminate_process_group(pid: u32) -> Result<()> {
    let raw_pid = libc::pid_t::try_from(pid)
        .with_context(|| format!("pid-managed updater pid {pid} is out of range"))?;
    // Negative pid targets the process group rather than a single process.
    let result = unsafe { libc::kill(-raw_pid, libc::SIGKILL) };
    if result == 0 {
        return Ok(());
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::ESRCH) {
        return Ok(());
    }
    Err(err).with_context(|| format!("failed to force terminate pid-managed updater group {pid}"))
}
// Non-Unix builds have no signal delivery, so every shutdown path is
// rejected with an explicit error instead of silently doing nothing.
#[cfg(not(unix))]
fn terminate_process(_pid: u32) -> Result<()> {
    bail!("pid-managed app-server shutdown is unsupported on this platform")
}

#[cfg(not(unix))]
fn force_terminate_process(_pid: u32) -> Result<()> {
    bail!("pid-managed app-server shutdown is unsupported on this platform")
}

#[cfg(not(unix))]
fn force_terminate_process_group(_pid: u32) -> Result<()> {
    bail!("pid-managed updater shutdown is unsupported on this platform")
}
/// Checks that the recorded pid is alive AND is the same process instance
/// that wrote the record, by comparing process start times. This guards
/// against pid reuse after the original process exited.
#[cfg(unix)]
async fn process_matches_record(record: &PidRecord) -> Result<bool> {
    if !process_exists(record.pid) {
        return Ok(false);
    }
    match read_process_start_time(record.pid).await {
        Ok(start_time) => Ok(start_time == record.process_start_time),
        // The process may have exited between the existence check and the
        // `ps` call; re-check before propagating a read failure.
        Err(_err) if !process_exists(record.pid) => Ok(false),
        Err(err) => Err(err),
    }
}

// On non-Unix platforms there is no pid bookkeeping, so records never match.
#[cfg(not(unix))]
async fn process_matches_record(_record: &PidRecord) -> Result<bool> {
    Ok(false)
}
/// Outcome of inspecting a pid file that was empty on first read.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(not(unix), allow(dead_code))]
enum EmptyPidReservation {
    // A starter still holds the flock: the empty file is a live reservation.
    Active,
    // Nobody holds the lock: the empty file is leftover and safe to remove.
    Stale,
    // The file gained contents between reads and now holds a full record.
    Record(PidRecord),
}
/// Attempts a non-blocking exclusive `flock` on `file`.
/// Returns `Ok(true)` when the lock was acquired, `Ok(false)` when another
/// holder has it (EWOULDBLOCK), and an error for anything else.
#[cfg(unix)]
fn try_lock_file(file: &fs::File) -> Result<bool> {
    use std::os::fd::AsRawFd;
    let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
    if result == 0 {
        return Ok(true);
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
        return Ok(false);
    }
    Err(err).context("failed to lock pid reservation")
}

// Non-Unix platforms cannot take the reservation lock at all, so startup
// through this backend is rejected outright.
#[cfg(not(unix))]
fn try_lock_file(_file: &fs::File) -> Result<bool> {
    bail!("pid-managed app-server startup is unsupported on this platform")
}
/// Returns true when some other process currently holds the reservation lock
/// at `path`. Opening with `create(true)` is required because flock needs an
/// open descriptor; `truncate(false)` preserves any existing contents.
#[cfg(unix)]
async fn reservation_lock_is_active(path: &Path) -> Result<bool> {
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(path)
        .await
    {
        Ok(file) => file,
        // A missing lock file means nobody can be holding it.
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Ok(false);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to inspect pid lock file {}", path.display()));
        }
    };
    // If we can take the lock, nobody else held it; the flock is released
    // when `file` drops at the end of this function.
    Ok(!try_lock_file(&file)?)
}

#[cfg(not(unix))]
async fn reservation_lock_is_active(_path: &Path) -> Result<bool> {
    Ok(false)
}
/// Classifies an empty pid file: still being written by a live starter
/// (lock held), stale leftovers (lock free, file still empty — removed here),
/// or already replaced by a complete record (re-read succeeds with contents).
#[cfg(unix)]
async fn inspect_empty_pid_reservation(
    pid_path: &Path,
    lock_path: &Path,
) -> Result<EmptyPidReservation> {
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(lock_path)
        .await
    {
        Ok(file) => file,
        Err(err) => {
            return Err(err).with_context(|| {
                format!("failed to inspect pid lock file {}", lock_path.display())
            });
        }
    };
    // Lock held by someone else: a starter is mid-write, leave it alone.
    if !try_lock_file(&file)? {
        return Ok(EmptyPidReservation::Active);
    }
    // We hold the lock now, so the pid file cannot change under us.
    let contents = match fs::read_to_string(pid_path).await {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Ok(EmptyPidReservation::Stale);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to reread pid file {}", pid_path.display()));
        }
    };
    if contents.trim().is_empty() {
        // Best-effort cleanup of the abandoned empty file.
        let _ = fs::remove_file(pid_path).await;
        return Ok(EmptyPidReservation::Stale);
    }
    let record = serde_json::from_str(&contents)
        .with_context(|| format!("invalid pid file contents in {}", pid_path.display()))?;
    Ok(EmptyPidReservation::Record(record))
}

// Non-Unix builds never see live reservations, so any empty file is stale.
#[cfg(not(unix))]
async fn inspect_empty_pid_reservation(
    _pid_path: &Path,
    _lock_path: &Path,
) -> Result<EmptyPidReservation> {
    Ok(EmptyPidReservation::Stale)
}
/// Reads the process start time via `ps -o lstart=` as an opaque string used
/// only for equality against the value recorded at spawn time.
/// NOTE(review): assumes a `ps` supporting the `lstart` keyword is on PATH
/// (true for procps and BSD/macOS ps) — confirm for minimal containers.
#[cfg(unix)]
async fn read_process_start_time(pid: u32) -> Result<String> {
    let output = Command::new("ps")
        .args(["-p", &pid.to_string(), "-o", "lstart="])
        .output()
        .await
        .context("failed to invoke ps for pid-managed app server")?;
    if !output.status.success() {
        bail!("failed to read start time for pid-managed app server {pid}");
    }
    let start_time = String::from_utf8(output.stdout)
        .context("pid-managed app server start time was not utf-8")?;
    let start_time = start_time.trim();
    if start_time.is_empty() {
        bail!("pid-managed app server {pid} has no recorded start time");
    }
    Ok(start_time.to_string())
}
#[cfg(all(test, unix))]
#[path = "pid_tests.rs"]
mod tests;

View File

@@ -1,158 +0,0 @@
use std::time::Duration;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use super::PidBackend;
use super::PidCommandKind;
use super::PidFileState;
use super::PidRecord;
use super::try_lock_file;
// An empty pid file whose reservation lock is held elsewhere must read as
// Starting (a live starter is mid-write) and must not be cleaned up.
#[tokio::test]
async fn locked_empty_pid_file_is_treated_as_active_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    // Hold the flock ourselves to simulate a concurrent starter.
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Starting
    );
    // The empty file must survive: it belongs to the live reservation.
    assert!(pid_file.exists());
}

// With nobody holding the lock, an empty pid file is stale: it reads as
// Missing and is deleted as a side effect.
#[tokio::test]
async fn unlocked_empty_pid_file_is_treated_as_stale_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Missing
    );
    assert!(!pid_file.exists());
}

// stop() must block while a live reservation exists and complete once the
// holder releases the lock and removes the pid file.
#[tokio::test]
async fn stop_waits_for_live_reservation_to_resolve() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    // Release the reservation shortly after stop() begins waiting.
    let cleanup = tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(50)).await;
        drop(reservation);
        tokio::fs::remove_file(pid_file)
            .await
            .expect("remove pid file");
    });
    backend.stop().await.expect("stop");
    cleanup.await.expect("cleanup task");
}

// A stale empty pid file must not block start(); the failure here comes from
// the deliberately missing codex binary, proving start proceeded to spawn.
#[tokio::test]
async fn start_retries_stale_empty_pid_file_under_its_own_lock() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("missing-codex"),
        pid_file,
        /*remote_control_enabled*/ false,
    );
    let err = backend.start().await.expect_err("start");
    assert!(
        err.to_string()
            .starts_with("failed to spawn detached app-server process using ")
    );
}

// Cleaning up a stale record must not clobber a different, newer record that
// another process wrote in the meantime.
#[tokio::test]
async fn stale_record_cleanup_preserves_replacement_record() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let stale = PidRecord {
        pid: 1,
        process_start_time: "old".to_string(),
    };
    let replacement = PidRecord {
        pid: 2,
        process_start_time: "new".to_string(),
    };
    tokio::fs::write(
        &pid_file,
        serde_json::to_vec(&replacement).expect("serialize replacement"),
    )
    .await
    .expect("write replacement pid file");
    assert_eq!(
        backend
            .refresh_after_stale_record(&stale)
            .await
            .expect("cleanup"),
        PidFileState::Running(replacement)
    );
}

// The updater must launch through the hidden `app-server daemon
// pid-update-loop` subcommand rather than the public listen entry point.
#[test]
fn update_loop_uses_hidden_app_server_subcommand() {
    let backend = PidBackend {
        codex_bin: "codex".into(),
        pid_file: "updater.pid".into(),
        lock_file: "updater.pid.lock".into(),
        command_kind: PidCommandKind::UpdateLoop,
    };
    assert_eq!(
        backend.command_args(),
        vec!["app-server", "daemon", "pid-update-loop"]
    );
}

View File

@@ -1,131 +0,0 @@
use std::path::Path;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::RequestId;
use codex_uds::UnixStream;
use futures::SinkExt;
use futures::StreamExt;
use tokio::time::timeout;
use tokio_tungstenite::client_async;
use tokio_tungstenite::tungstenite::Message;
// Upper bound for the whole probe handshake (connect + initialize round trip).
const PROBE_TIMEOUT: Duration = Duration::from_secs(2);
// Client name reported to the app server in the `initialize` request.
const CLIENT_NAME: &str = "codex_app_server_daemon";

/// Facts learned from a successful control-socket probe.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ProbeInfo {
    // Version parsed from the server's user-agent string.
    pub(crate) app_server_version: String,
}
/// Probes the app-server control socket with a bounded overall deadline.
/// An error means either the handshake failed or `PROBE_TIMEOUT` elapsed.
pub(crate) async fn probe(socket_path: &Path) -> Result<ProbeInfo> {
    timeout(PROBE_TIMEOUT, probe_inner(socket_path))
        .await
        .with_context(|| {
            format!(
                "timed out probing app-server control socket {}",
                socket_path.display()
            )
        })?
}
/// Performs one JSON-RPC `initialize`/`initialized` handshake over a
/// WebSocket-on-UDS connection and extracts the server's version from the
/// user-agent it reports.
async fn probe_inner(socket_path: &Path) -> Result<ProbeInfo> {
    let stream = UnixStream::connect(socket_path)
        .await
        .with_context(|| format!("failed to connect to {}", socket_path.display()))?;
    // The host in the ws URL is arbitrary: routing happens via the socket.
    let (mut websocket, _response) = client_async("ws://localhost/", stream)
        .await
        .with_context(|| format!("failed to upgrade {}", socket_path.display()))?;
    let initialize = JSONRPCMessage::Request(JSONRPCRequest {
        id: RequestId::Integer(1),
        method: "initialize".to_string(),
        params: Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: CLIENT_NAME.to_string(),
                title: Some("Codex App Server Daemon".to_string()),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            capabilities: None,
        })?),
        trace: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialize)?.into()))
        .await
        .context("failed to send initialize request")?;
    // Skip non-text frames and unrelated messages until the response that
    // matches our request id arrives.
    let response = loop {
        let frame = websocket
            .next()
            .await
            .ok_or_else(|| anyhow!("app-server closed before initialize response"))??;
        let Message::Text(payload) = frame else {
            continue;
        };
        let message = serde_json::from_str::<JSONRPCMessage>(&payload)?;
        if let JSONRPCMessage::Response(response) = message
            && response.id == RequestId::Integer(1)
        {
            break response;
        }
    };
    let initialize_response = serde_json::from_value::<InitializeResponse>(response.result)?;
    // Complete the handshake so the server treats this session as clean.
    let initialized = JSONRPCMessage::Notification(JSONRPCNotification {
        method: "initialized".to_string(),
        params: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialized)?.into()))
        .await
        .context("failed to send initialized notification")?;
    // Best-effort close; the probe result is already determined.
    websocket.close(None).await.ok();
    Ok(ProbeInfo {
        app_server_version: parse_version_from_user_agent(&initialize_response.user_agent)?,
    })
}
/// Extracts the version from a user-agent of the form `originator/version …`:
/// the text between the first `/` and the next whitespace.
fn parse_version_from_user_agent(user_agent: &str) -> Result<String> {
    let rest = match user_agent.split_once('/') {
        Some((_originator, rest)) => rest,
        None => return Err(anyhow!("app-server user-agent omitted version separator")),
    };
    match rest.split_whitespace().next() {
        Some(version) if !version.is_empty() => Ok(version.to_string()),
        _ => Err(anyhow!("app-server user-agent omitted version")),
    }
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;

    use super::parse_version_from_user_agent;

    // Happy path: version sits between the first '/' and the next space.
    #[test]
    fn parses_version_from_codex_user_agent() {
        assert_eq!(
            parse_version_from_user_agent(
                "codex_app_server_daemon/1.2.3 (Linux 6.8.0; x86_64) codex_cli_rs/1.2.3",
            )
            .expect("version"),
            "1.2.3"
        );
    }

    // A user-agent with no '/' separator must be rejected, not defaulted.
    #[test]
    fn rejects_user_agent_without_version() {
        assert!(parse_version_from_user_agent("codex_app_server_daemon").is_err());
    }
}

View File

@@ -1,630 +0,0 @@
mod backend;
mod client;
mod managed_install;
mod settings;
mod update_loop;
use std::path::PathBuf;
use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
pub use backend::BackendKind;
use backend::BackendPaths;
use codex_app_server_transport::app_server_control_socket_path;
use codex_core::config::find_codex_home;
use managed_install::managed_codex_bin;
#[cfg(unix)]
use managed_install::managed_codex_version;
use serde::Serialize;
use settings::DaemonSettings;
use tokio::time::sleep;
// Interval between readiness probes while waiting for the server to come up.
const START_POLL_INTERVAL: Duration = Duration::from_millis(50);
// Maximum time to wait for a freshly started server to answer a probe.
const START_TIMEOUT: Duration = Duration::from_secs(10);
// Maximum time to wait for the exclusive daemon operation lock.
const OPERATION_LOCK_TIMEOUT: Duration = Duration::from_secs(75);
// File names under the daemon state directory inside CODEX_HOME.
const PID_FILE_NAME: &str = "app-server.pid";
const UPDATE_PID_FILE_NAME: &str = "app-server-updater.pid";
const OPERATION_LOCK_FILE_NAME: &str = "daemon.lock";
const SETTINGS_FILE_NAME: &str = "settings.json";
const STATE_DIR_NAME: &str = "app-server-daemon";
/// Lifecycle operation requested by the caller.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LifecycleCommand {
    Start,
    Restart,
    Stop,
    Version,
}

/// Result status of a lifecycle operation, serialized as camelCase JSON.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum LifecycleStatus {
    AlreadyRunning,
    Started,
    Restarted,
    Stopped,
    NotRunning,
    Running,
}

/// Machine-readable summary returned by every lifecycle command.
/// Optional fields are omitted from JSON when absent.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LifecycleOutput {
    pub status: LifecycleStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<u32>,
    pub socket_path: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cli_version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}

/// Caller-provided options for the one-shot bootstrap flow.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BootstrapOptions {
    pub remote_control_enabled: bool,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BootstrapStatus {
    Bootstrapped,
}

/// Summary returned by a successful bootstrap.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BootstrapOutput {
    pub status: BootstrapStatus,
    pub backend: BackendKind,
    pub auto_update_enabled: bool,
    pub remote_control_enabled: bool,
    pub managed_codex_path: PathBuf,
    pub socket_path: PathBuf,
    pub cli_version: String,
    pub app_server_version: String,
}

/// Desired remote-control setting requested by the caller.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteControlMode {
    Enabled,
    Disabled,
}

impl RemoteControlMode {
    fn is_enabled(self) -> bool {
        matches!(self, Self::Enabled)
    }
}

/// Result of a remote-control toggle: `Already*` means no change was needed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum RemoteControlStatus {
    Enabled,
    Disabled,
    AlreadyEnabled,
    AlreadyDisabled,
}

/// Summary returned by a remote-control toggle.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RemoteControlOutput {
    pub status: RemoteControlStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    pub remote_control_enabled: bool,
    pub socket_path: PathBuf,
    pub cli_version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}

/// Outcome of the updater's opportunistic restart attempt: `Busy` means the
/// operation lock was held and the caller should retry.
#[cfg(unix)]
pub(crate) enum RestartIfRunningOutcome {
    Completed,
    Busy,
}
/// Public entry point: runs one lifecycle command against the daemon
/// resolved from the current environment (CODEX_HOME).
pub async fn run(command: LifecycleCommand) -> Result<LifecycleOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.run(command).await
}

/// Public entry point for the one-shot bootstrap flow.
pub async fn bootstrap(options: BootstrapOptions) -> Result<BootstrapOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.bootstrap(options).await
}

/// Public entry point for toggling remote control.
pub async fn set_remote_control(mode: RemoteControlMode) -> Result<RemoteControlOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.set_remote_control(mode).await
}

/// Public entry point for the hidden updater subcommand.
pub async fn run_pid_update_loop() -> Result<()> {
    ensure_supported_platform()?;
    update_loop::run().await
}

// Platform gate: the whole lifecycle depends on Unix signals and flock.
#[cfg(unix)]
fn ensure_supported_platform() -> Result<()> {
    Ok(())
}

#[cfg(not(unix))]
fn ensure_supported_platform() -> Result<()> {
    Err(anyhow!(
        "codex app-server daemon lifecycle is only supported on Unix platforms"
    ))
}
/// Resolved filesystem layout for one daemon instance. All paths live under
/// CODEX_HOME except the control socket, which the transport crate resolves.
struct Daemon {
    // WebSocket-over-UDS control socket the app server listens on.
    socket_path: PathBuf,
    // Pid record for the app-server process.
    pid_file: PathBuf,
    // Pid record for the background updater process.
    update_pid_file: PathBuf,
    // Exclusive lock serializing mutating lifecycle operations.
    operation_lock_file: PathBuf,
    // Persisted daemon settings (currently just the remote-control flag).
    settings_file: PathBuf,
    // Managed standalone Codex binary used to spawn the server.
    managed_codex_bin: PathBuf,
}
impl Daemon {
    /// Resolves all on-disk paths from CODEX_HOME.
    fn from_environment() -> Result<Self> {
        let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
        let socket_path = app_server_control_socket_path(codex_home.as_path())?
            .as_path()
            .to_path_buf();
        let state_dir = codex_home.as_path().join(STATE_DIR_NAME);
        Ok(Self {
            socket_path,
            pid_file: state_dir.join(PID_FILE_NAME),
            update_pid_file: state_dir.join(UPDATE_PID_FILE_NAME),
            operation_lock_file: state_dir.join(OPERATION_LOCK_FILE_NAME),
            settings_file: state_dir.join(SETTINGS_FILE_NAME),
            managed_codex_bin: managed_codex_bin(codex_home.as_path()),
        })
    }

    /// Dispatches one lifecycle command. Mutating commands serialize behind
    /// the operation lock; `Version` is read-only and skips it.
    async fn run(&self, command: LifecycleCommand) -> Result<LifecycleOutput> {
        match command {
            LifecycleCommand::Start => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.start().await
            }
            LifecycleCommand::Restart => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.restart().await
            }
            LifecycleCommand::Stop => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.stop().await
            }
            LifecycleCommand::Version => self.version().await,
        }
    }

    /// Starts the managed server if nothing is already answering probes.
    /// A running but unmanaged server is reported as AlreadyRunning here
    /// (only `restart`/`stop` refuse to touch unmanaged servers).
    async fn start(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if let Ok(info) = client::probe(&self.socket_path).await {
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                self.running_backend(&settings).await?,
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }
        // Backend exists but socket not answering yet: it may still be
        // starting, so wait for readiness instead of spawning a duplicate.
        if self.running_backend_instance(&settings).await?.is_some() {
            let info = self.wait_until_ready().await?;
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                Some(BackendKind::Pid),
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }
        self.ensure_managed_codex_bin()?;
        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Started,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }

    /// Stops the managed server (if any) and starts a fresh one. Refuses to
    /// restart a server this daemon does not manage.
    async fn restart(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        self.ensure_managed_codex_bin()?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }
        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Restarted,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }

    /// Updater hook: restarts the managed server only when its version
    /// differs from the managed install. Returns `Busy` (instead of
    /// blocking) when another operation holds the daemon lock.
    #[cfg(unix)]
    pub(crate) async fn try_restart_if_running(&self) -> Result<RestartIfRunningOutcome> {
        let operation_lock = self.open_operation_lock_file().await?;
        if !try_lock_file(&operation_lock)? {
            return Ok(RestartIfRunningOutcome::Busy);
        }
        let settings = self.load_settings().await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            // An unreachable managed server is left alone: restarting a
            // half-started process here could race its own startup.
            let Ok(info) = client::probe(&self.socket_path).await else {
                return Ok(RestartIfRunningOutcome::Completed);
            };
            let managed_version = managed_codex_version(&self.managed_codex_bin).await?;
            if info.app_server_version == managed_version {
                return Ok(RestartIfRunningOutcome::Completed);
            }
            backend.stop().await?;
            let _ = self.start_managed_backend(&settings).await?;
            self.wait_until_ready().await?;
            return Ok(RestartIfRunningOutcome::Completed);
        }
        if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        Ok(RestartIfRunningOutcome::Completed)
    }

    /// Stops the managed server; refuses to stop an unmanaged one.
    async fn stop(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
            return Ok(self.output(
                LifecycleStatus::Stopped,
                Some(BackendKind::Pid),
                /*pid*/ None,
                /*app_server_version*/ None,
            ));
        }
        if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        Ok(self.output(
            LifecycleStatus::NotRunning,
            /*backend*/ None,
            /*pid*/ None,
            /*app_server_version*/ None,
        ))
    }

    /// Read-only query: probes the socket and reports the running version.
    async fn version(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        let info = client::probe(&self.socket_path).await?;
        Ok(self.output(
            LifecycleStatus::Running,
            self.running_backend(&settings).await?,
            /*pid*/ None,
            Some(info.app_server_version),
        ))
    }

    /// Polls the control socket until it answers or START_TIMEOUT elapses;
    /// the last probe error is surfaced with context on timeout.
    async fn wait_until_ready(&self) -> Result<client::ProbeInfo> {
        let deadline = tokio::time::Instant::now() + START_TIMEOUT;
        loop {
            match client::probe(&self.socket_path).await {
                Ok(info) => return Ok(info),
                Err(err) if tokio::time::Instant::now() < deadline => {
                    let _ = err;
                    sleep(START_POLL_INTERVAL).await;
                }
                Err(err) => {
                    return Err(err).with_context(|| {
                        format!(
                            "app server did not become ready on {}",
                            self.socket_path.display()
                        )
                    });
                }
            }
        }
    }

    /// Serializes the bootstrap flow behind the operation lock.
    async fn bootstrap(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        let _operation_lock = self.acquire_operation_lock().await?;
        self.bootstrap_locked(options).await
    }

    /// Toggles remote control. A no-op change returns an `Already*` status;
    /// a real change is persisted and, if the server is managed and running,
    /// the server is restarted so the new setting takes effect.
    async fn set_remote_control(&self, mode: RemoteControlMode) -> Result<RemoteControlOutput> {
        let _operation_lock = self.acquire_operation_lock().await?;
        let previous_settings = self.load_settings().await?;
        let mut settings = previous_settings.clone();
        let remote_control_enabled = mode.is_enabled();
        let backend = self.running_backend_instance(&previous_settings).await?;
        if backend.is_none() && client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        if settings.remote_control_enabled == remote_control_enabled {
            let info = if backend.is_some() {
                Some(self.wait_until_ready().await?)
            } else {
                None
            };
            return Ok(self.remote_control_output(
                already_remote_control_status(mode),
                backend.map(|_| BackendKind::Pid),
                remote_control_enabled,
                info.map(|info| info.app_server_version),
            ));
        }
        // Persist before restarting so the restarted server reads the new
        // setting even if the restart itself fails midway.
        settings.remote_control_enabled = remote_control_enabled;
        settings.save(&self.settings_file).await?;
        let app_server_version = if let Some(backend) = backend {
            self.ensure_managed_codex_bin()?;
            backend.stop().await?;
            let _ = self.start_managed_backend(&settings).await?;
            Some(self.wait_until_ready().await?.app_server_version)
        } else {
            None
        };
        Ok(self.remote_control_output(
            remote_control_status(mode),
            app_server_version.as_ref().map(|_| BackendKind::Pid),
            remote_control_enabled,
            app_server_version,
        ))
    }

    /// Full bootstrap: persist settings, (re)start the server, and ensure
    /// exactly one background updater is running.
    async fn bootstrap_locked(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        self.ensure_managed_codex_bin()?;
        let settings = DaemonSettings {
            remote_control_enabled: options.remote_control_enabled,
        };
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        settings.save(&self.settings_file).await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }
        let backend = backend::pid_backend(self.backend_paths(&settings));
        backend.start().await?;
        // Restart the updater so it picks up the (possibly new) binary.
        let updater = backend::pid_update_loop_backend(self.backend_paths(&settings));
        if updater.is_starting_or_running().await? {
            updater.stop().await?;
        }
        updater.start().await?;
        let info = self.wait_until_ready().await?;
        Ok(BootstrapOutput {
            status: BootstrapStatus::Bootstrapped,
            backend: BackendKind::Pid,
            auto_update_enabled: true,
            remote_control_enabled: settings.remote_control_enabled,
            managed_codex_path: self.managed_codex_bin.clone(),
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version: info.app_server_version,
        })
    }

    /// Kind of backend currently running, if any (always Pid when present).
    async fn running_backend(&self, settings: &DaemonSettings) -> Result<Option<BackendKind>> {
        Ok(self
            .running_backend_instance(settings)
            .await?
            .map(|_| BackendKind::Pid))
    }

    /// Handle to the pid backend when its process is starting or running.
    async fn running_backend_instance(
        &self,
        settings: &DaemonSettings,
    ) -> Result<Option<backend::PidBackend>> {
        let backend = backend::pid_backend(self.backend_paths(settings));
        if backend.is_starting_or_running().await? {
            return Ok(Some(backend));
        }
        Ok(None)
    }

    /// Spawns the managed app-server; returns its pid when known.
    async fn start_managed_backend(&self, settings: &DaemonSettings) -> Result<Option<u32>> {
        let backend = backend::pid_backend(self.backend_paths(settings));
        backend.start().await
    }

    /// Fails fast with an actionable message when the managed standalone
    /// binary has not been installed yet.
    fn ensure_managed_codex_bin(&self) -> Result<()> {
        if self.managed_codex_bin.is_file() {
            return Ok(());
        }
        Err(anyhow!(
            "managed standalone Codex install not found at {}; install Codex first",
            self.managed_codex_bin.display()
        ))
    }

    /// Bundles the paths and settings a backend needs to spawn processes.
    fn backend_paths(&self, settings: &DaemonSettings) -> BackendPaths {
        BackendPaths {
            codex_bin: self.managed_codex_bin.clone(),
            pid_file: self.pid_file.clone(),
            update_pid_file: self.update_pid_file.clone(),
            remote_control_enabled: settings.remote_control_enabled,
        }
    }

    async fn load_settings(&self) -> Result<DaemonSettings> {
        DaemonSettings::load(&self.settings_file).await
    }

    /// Blocks (polling) until the exclusive operation lock is acquired or
    /// OPERATION_LOCK_TIMEOUT elapses. The returned file handle holds the
    /// flock until dropped.
    async fn acquire_operation_lock(&self) -> Result<tokio::fs::File> {
        let operation_lock = self.open_operation_lock_file().await?;
        let deadline = tokio::time::Instant::now() + OPERATION_LOCK_TIMEOUT;
        while !try_lock_file(&operation_lock)? {
            if tokio::time::Instant::now() >= deadline {
                return Err(anyhow!(
                    "timed out waiting for daemon operation lock {}",
                    self.operation_lock_file.display()
                ));
            }
            sleep(START_POLL_INTERVAL).await;
        }
        Ok(operation_lock)
    }

    /// Opens (creating if needed) the operation lock file without locking it.
    async fn open_operation_lock_file(&self) -> Result<tokio::fs::File> {
        if let Some(parent) = self.operation_lock_file.parent() {
            tokio::fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create daemon state directory {}",
                    parent.display()
                )
            })?;
        }
        tokio::fs::OpenOptions::new()
            .create(true)
            .truncate(false)
            .write(true)
            .open(&self.operation_lock_file)
            .await
            .with_context(|| {
                format!(
                    "failed to open daemon operation lock {}",
                    self.operation_lock_file.display()
                )
            })
    }

    /// Assembles a LifecycleOutput with the daemon's socket and CLI version.
    fn output(
        &self,
        status: LifecycleStatus,
        backend: Option<BackendKind>,
        pid: Option<u32>,
        app_server_version: Option<String>,
    ) -> LifecycleOutput {
        LifecycleOutput {
            status,
            backend,
            pid,
            socket_path: self.socket_path.clone(),
            cli_version: Some(env!("CARGO_PKG_VERSION").to_string()),
            app_server_version,
        }
    }

    /// Assembles a RemoteControlOutput with the daemon's socket and version.
    fn remote_control_output(
        &self,
        status: RemoteControlStatus,
        backend: Option<BackendKind>,
        remote_control_enabled: bool,
        app_server_version: Option<String>,
    ) -> RemoteControlOutput {
        RemoteControlOutput {
            status,
            backend,
            remote_control_enabled,
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version,
        }
    }
}
/// Status reported when the remote-control setting actually changed.
fn remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
    if mode.is_enabled() {
        RemoteControlStatus::Enabled
    } else {
        RemoteControlStatus::Disabled
    }
}

/// Status reported when the requested setting was already in effect.
fn already_remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
    if mode.is_enabled() {
        RemoteControlStatus::AlreadyEnabled
    } else {
        RemoteControlStatus::AlreadyDisabled
    }
}
/// Non-blocking exclusive flock on the operation lock file: `Ok(true)` when
/// acquired, `Ok(false)` when another holder has it (EWOULDBLOCK).
#[cfg(unix)]
fn try_lock_file(file: &tokio::fs::File) -> Result<bool> {
    use std::os::fd::AsRawFd;
    let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
    if result == 0 {
        return Ok(true);
    }
    let err = std::io::Error::last_os_error();
    if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
        return Ok(false);
    }
    Err(err).context("failed to lock daemon operation")
}

// Non-Unix: locking always "succeeds" here, but ensure_supported_platform
// rejects the whole lifecycle on these platforms before this is reached.
#[cfg(not(unix))]
fn try_lock_file(_file: &tokio::fs::File) -> Result<bool> {
    Ok(true)
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;

    use super::BootstrapStatus;
    use super::LifecycleStatus;
    use super::RemoteControlStatus;

    // The JSON wire format for every status enum is camelCase; these pin it
    // so a serde attribute change cannot silently break consumers.
    #[test]
    fn lifecycle_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&LifecycleStatus::AlreadyRunning).expect("serialize"),
            "\"alreadyRunning\""
        );
    }

    #[test]
    fn bootstrap_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&BootstrapStatus::Bootstrapped).expect("serialize"),
            "\"bootstrapped\""
        );
    }

    #[test]
    fn remote_control_status_uses_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&RemoteControlStatus::AlreadyEnabled).expect("serialize"),
            "\"alreadyEnabled\""
        );
    }
}

View File

@@ -1,66 +0,0 @@
use std::path::Path;
use std::path::PathBuf;
#[cfg(unix)]
use anyhow::Context;
#[cfg(unix)]
use anyhow::Result;
#[cfg(unix)]
use anyhow::anyhow;
#[cfg(unix)]
use tokio::process::Command;
/// Path of the managed standalone Codex binary:
/// `<codex_home>/packages/standalone/current/<codex | codex.exe>`.
pub(crate) fn managed_codex_bin(codex_home: &Path) -> PathBuf {
    let mut bin = codex_home.to_path_buf();
    for segment in ["packages", "standalone", "current"] {
        bin.push(segment);
    }
    bin.push(managed_codex_file_name());
    bin
}
/// Runs `<codex_bin> --version` and parses the reported version string.
/// Fails with context when the binary cannot be invoked, exits non-zero,
/// or emits non-UTF-8 output.
#[cfg(unix)]
pub(crate) async fn managed_codex_version(codex_bin: &Path) -> Result<String> {
    let output = Command::new(codex_bin)
        .arg("--version")
        .output()
        .await
        .with_context(|| {
            format!(
                "failed to invoke managed Codex binary {}",
                codex_bin.display()
            )
        })?;
    if !output.status.success() {
        return Err(anyhow!(
            "managed Codex binary {} exited with status {}",
            codex_bin.display(),
            output.status
        ));
    }
    let stdout = String::from_utf8(output.stdout).with_context(|| {
        format!(
            "managed Codex version was not utf-8: {}",
            codex_bin.display()
        )
    })?;
    parse_codex_version(&stdout)
}
/// File name of the managed Codex executable for the build target platform.
fn managed_codex_file_name() -> &'static str {
    match cfg!(windows) {
        true => "codex.exe",
        false => "codex",
    }
}
/// Parses `codex X.Y.Z` style `--version` output: the version is the second
/// whitespace-separated token.
#[cfg(unix)]
fn parse_codex_version(output: &str) -> Result<String> {
    let mut tokens = output.split_whitespace();
    let _program_name = tokens.next();
    match tokens.next() {
        Some(version) if !version.is_empty() => Ok(version.to_string()),
        _ => Err(anyhow!("managed Codex version output was malformed")),
    }
}
#[cfg(all(test, unix))]
#[path = "managed_install_tests.rs"]
mod tests;

View File

@@ -1,16 +0,0 @@
use pretty_assertions::assert_eq;
use super::parse_codex_version;
// Version is the second whitespace token of `codex --version` output.
#[test]
fn parses_codex_cli_version_output() {
    assert_eq!(
        parse_codex_version("codex 1.2.3\n").expect("version"),
        "1.2.3"
    );
}

// Output with only the program name (no version token) must be rejected.
#[test]
fn rejects_malformed_codex_cli_version_output() {
    assert!(parse_codex_version("codex\n").is_err());
}

View File

@@ -1,63 +0,0 @@
use std::path::Path;
use anyhow::Context;
use anyhow::Result;
use serde::Deserialize;
use serde::Serialize;
use tokio::fs;
/// Persisted daemon settings, stored as camelCase JSON. `Default` gives
/// remote control disabled, which is what a missing settings file means.
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct DaemonSettings {
    pub(crate) remote_control_enabled: bool,
}
impl DaemonSettings {
    /// Loads settings from `path`; a missing file yields defaults, while an
    /// unreadable or unparsable file is an error (never silently defaulted).
    pub(crate) async fn load(path: &Path) -> Result<Self> {
        let contents = match fs::read_to_string(path).await {
            Ok(contents) => contents,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Self::default()),
            Err(err) => {
                return Err(err)
                    .with_context(|| format!("failed to read daemon settings {}", path.display()));
            }
        };
        serde_json::from_str(&contents)
            .with_context(|| format!("failed to parse daemon settings {}", path.display()))
    }

    /// Writes settings as pretty JSON, creating parent directories as needed.
    pub(crate) async fn save(&self, path: &Path) -> Result<()> {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create daemon settings directory {}",
                    parent.display()
                )
            })?;
        }
        let contents = serde_json::to_vec_pretty(self).context("failed to serialize settings")?;
        fs::write(path, contents)
            .await
            .with_context(|| format!("failed to write daemon settings {}", path.display()))
    }
}
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;

    use super::DaemonSettings;

    // Pins the camelCase wire format of the persisted settings file.
    #[test]
    fn daemon_settings_use_camel_case_json() {
        assert_eq!(
            serde_json::to_string(&DaemonSettings {
                remote_control_enabled: true,
            })
            .expect("serialize"),
            r#"{"remoteControlEnabled":true}"#
        );
    }
}

View File

@@ -1,132 +0,0 @@
#[cfg(unix)]
use std::process::Stdio;
#[cfg(unix)]
use std::time::Duration;
#[cfg(unix)]
use anyhow::Context;
use anyhow::Result;
#[cfg(not(unix))]
use anyhow::bail;
#[cfg(unix)]
use futures::FutureExt;
#[cfg(unix)]
use tokio::io::AsyncWriteExt;
#[cfg(unix)]
use tokio::process::Command;
#[cfg(unix)]
use tokio::signal::unix::Signal;
#[cfg(unix)]
use tokio::signal::unix::SignalKind;
#[cfg(unix)]
use tokio::signal::unix::signal;
#[cfg(unix)]
use tokio::time::sleep;
#[cfg(unix)]
use crate::Daemon;
#[cfg(unix)]
use crate::RestartIfRunningOutcome;
// Delay before the first update attempt after the updater starts.
#[cfg(unix)]
const INITIAL_UPDATE_DELAY: Duration = Duration::from_secs(5 * 60);
// Retry interval while the daemon operation lock is busy.
#[cfg(unix)]
const RESTART_RETRY_INTERVAL: Duration = Duration::from_millis(50);
// Interval between periodic update attempts.
#[cfg(unix)]
const UPDATE_INTERVAL: Duration = Duration::from_secs(60 * 60);
#[cfg(unix)]
/// Periodic updater loop: after an initial delay, install the latest build
/// and restart the daemon once per `UPDATE_INTERVAL`, exiting cleanly as
/// soon as SIGTERM is observed.
pub(crate) async fn run() -> Result<()> {
    let mut shutdown =
        signal(SignalKind::terminate()).context("failed to install updater shutdown handler")?;
    // Initial grace period before the first update pass.
    if sleep_or_terminate(INITIAL_UPDATE_DELAY, &mut shutdown).await {
        return Ok(());
    }
    loop {
        // Only an explicit Stop ends the loop; update errors are ignored and
        // retried on the next interval.
        if let Ok(UpdateLoopControl::Stop) = update_once(&mut shutdown).await {
            return Ok(());
        }
        if sleep_or_terminate(UPDATE_INTERVAL, &mut shutdown).await {
            return Ok(());
        }
    }
}
#[cfg(not(unix))]
/// Non-Unix stub: the updater loop depends on Unix signal handling, so it
/// always fails with a descriptive error on other platforms.
pub(crate) async fn run() -> Result<()> {
    bail!("pid-managed updater loop is unsupported on this platform")
}
#[cfg(unix)]
/// Wait for `duration` to elapse or for the terminate signal to fire,
/// whichever happens first. Returns `true` when the signal won the race
/// (i.e. the caller should shut down).
async fn sleep_or_terminate(duration: Duration, terminate: &mut Signal) -> bool {
    let signal_won = tokio::select! {
        _ = sleep(duration) => false,
        _ = terminate.recv() => true,
    };
    signal_won
}
#[cfg(unix)]
/// Outcome of a single pass of the update loop.
enum UpdateLoopControl {
    // Update pass finished (restart completed); keep looping.
    Continue,
    // Terminate signal observed during the pass; exit the loop.
    Stop,
}
#[cfg(unix)]
/// Run one update pass: install the latest standalone build, then restart
/// the daemon once it stops reporting busy. Returns `Stop` when the
/// terminate signal fires while waiting, `Continue` after a completed
/// restart; install/restart errors propagate via `?`.
async fn update_once(terminate: &mut Signal) -> Result<UpdateLoopControl> {
    install_latest_standalone().await?;
    let daemon = Daemon::from_environment()?;
    loop {
        // Non-blocking poll: exit immediately if a SIGTERM is already pending.
        if terminate.recv().now_or_never().flatten().is_some() {
            return Ok(UpdateLoopControl::Stop);
        }
        match daemon.try_restart_if_running().await? {
            RestartIfRunningOutcome::Completed => return Ok(UpdateLoopControl::Continue),
            RestartIfRunningOutcome::Busy => {
                // Daemon busy: back off briefly, aborting early on SIGTERM.
                if sleep_or_terminate(RESTART_RETRY_INTERVAL, terminate).await {
                    return Ok(UpdateLoopControl::Stop);
                }
            }
        }
    }
}
#[cfg(unix)]
/// Download the standalone Codex install script and run it via `/bin/sh -s`,
/// streaming the script bytes over the shell's stdin. Shell output is
/// discarded; a non-zero exit status becomes an error.
async fn install_latest_standalone() -> Result<()> {
    let script_bytes = reqwest::get("https://chatgpt.com/codex/install.sh")
        .await
        .context("failed to fetch standalone Codex updater")?
        .error_for_status()
        .context("standalone Codex updater request failed")?
        .bytes()
        .await
        .context("failed to read standalone Codex updater")?;
    let mut shell = Command::new("/bin/sh")
        .arg("-s")
        .stdin(Stdio::piped())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
        .context("failed to invoke standalone Codex updater")?;
    {
        // Scoped so stdin is dropped (closed) before waiting, letting the
        // shell observe EOF and finish executing the script.
        let mut pipe = shell
            .stdin
            .take()
            .context("standalone Codex updater stdin was unavailable")?;
        pipe.write_all(&script_bytes)
            .await
            .context("failed to pass standalone Codex updater to shell")?;
    }
    let status = shell
        .wait()
        .await
        .context("failed to wait for standalone Codex updater")?;
    if !status.success() {
        anyhow::bail!("standalone Codex updater exited with status {status}")
    }
    Ok(())
}

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_app_server_protocol"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -1,5 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "AttestationGenerateParams",
"type": "object"
}

View File

@@ -533,6 +533,200 @@
}
]
},
"DeviceKeyCreateParams": {
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"type": "object"
},
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyPublicParams": {
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"keyId": {
"type": "string"
}
},
"required": [
"keyId"
],
"type": "object"
},
"DeviceKeySignParams": {
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"type": "object"
},
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"DynamicToolSpec": {
"properties": {
"deferLoading": {
@@ -1259,11 +1453,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"
@@ -1955,14 +2144,6 @@
],
"type": "object"
},
"PluginListMarketplaceKind": {
"enum": [
"local",
"workspace-directory",
"shared-with-me"
],
"type": "string"
},
"PluginListParams": {
"properties": {
"cwds": {
@@ -1974,16 +2155,6 @@
"array",
"null"
]
},
"marketplaceKinds": {
"description": "Optional marketplace kind filter. When omitted, only local marketplaces are queried, plus the default remote catalog when enabled by feature flag.",
"items": {
"$ref": "#/definitions/PluginListMarketplaceKind"
},
"type": [
"array",
"null"
]
}
},
"type": "object"
@@ -2088,37 +2259,16 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -2130,7 +2280,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -2337,6 +2486,20 @@
}
]
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
},
"RequestId": {
"anyOf": [
{
@@ -3239,6 +3402,24 @@
],
"type": "object"
},
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
},
"SkillsListParams": {
"properties": {
"cwds": {
@@ -3251,6 +3432,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"type": "object"
@@ -4094,31 +4286,6 @@
],
"type": "object"
},
"TurnItemsView": {
"oneOf": [
{
"description": "`items` was not loaded for this turn. The field is intentionally empty.",
"enum": [
"notLoaded"
],
"type": "string"
},
{
"description": "`items` contains only a display summary for this turn.",
"enum": [
"summary"
],
"type": "string"
},
{
"description": "`items` contains every ThreadItem available from persisted app-server history for this turn.",
"enum": [
"full"
],
"type": "string"
}
]
},
"TurnStartParams": {
"properties": {
"approvalPolicy": {
@@ -4397,6 +4564,97 @@
"mode"
],
"type": "object"
},
"WorktreeCreateParams": {
"description": "Create or reuse a managed worktree from the repository containing \\`cwd\\`.",
"properties": {
"baseRef": {
"type": [
"string",
"null"
]
},
"branch": {
"type": "string"
},
"cwd": {
"description": "Repository-relative workspace cwd to use as the source checkout.",
"type": [
"string",
"null"
]
},
"dirtyPolicy": {
"$ref": "#/definitions/WorktreeDirtyPolicy"
}
},
"required": [
"branch",
"dirtyPolicy"
],
"type": "object"
},
"WorktreeDirtyPolicy": {
"enum": [
"fail",
"ignore",
"copyTracked",
"copyAll",
"moveTracked",
"moveAll"
],
"type": "string"
},
"WorktreeInspectSourceParams": {
"description": "Inspect dirty state for the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to inspect. Omitted uses app-server's effective cwd.",
"type": [
"string",
"null"
]
}
},
"type": "object"
},
"WorktreeListParams": {
"description": "Request the managed worktrees associated with the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to inspect. Omitted uses app-server's effective cwd.",
"type": [
"string",
"null"
]
}
},
"type": "object"
},
"WorktreeRemoveParams": {
"description": "Remove a managed worktree in the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to use when resolving \\`name_or_path\\`.",
"type": [
"string",
"null"
]
},
"deleteBranch": {
"type": "boolean"
},
"force": {
"type": "boolean"
},
"nameOrPath": {
"type": "string"
}
},
"required": [
"nameOrPath"
],
"type": "object"
}
},
"description": "Request from the client to the server.",
@@ -5123,6 +5381,78 @@
"title": "App/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/create"
],
"title": "Device/key/createRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyCreateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/createRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/public"
],
"title": "Device/key/publicRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyPublicParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/publicRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/sign"
],
"title": "Device/key/signRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeySignParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/signRequest",
"type": "object"
},
{
"properties": {
"id": {
@@ -5339,6 +5669,102 @@
"title": "Fs/unwatchRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"worktree/list"
],
"title": "Worktree/listRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/WorktreeListParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Worktree/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"worktree/inspectSource"
],
"title": "Worktree/inspectSourceRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/WorktreeInspectSourceParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Worktree/inspectSourceRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"worktree/create"
],
"title": "Worktree/createRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/WorktreeCreateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Worktree/createRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"worktree/remove"
],
"title": "Worktree/removeRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/WorktreeRemoveParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Worktree/removeRequest",
"type": "object"
},
{
"properties": {
"id": {

View File

@@ -593,11 +593,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -607,7 +602,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -18,11 +18,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -32,7 +27,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -297,11 +297,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -313,7 +308,6 @@
"cwd",
"itemId",
"permissions",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -1736,8 +1736,6 @@
"preToolUse",
"permissionRequest",
"postToolUse",
"preCompact",
"postCompact",
"sessionStart",
"userPromptSubmit",
"stop"
@@ -1963,11 +1961,6 @@
"action": {
"$ref": "#/definitions/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/AutoReviewDecisionSource"
},
@@ -1978,11 +1971,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -1999,11 +1987,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -2022,11 +2008,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -2045,7 +2026,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -2737,7 +2717,7 @@
"type": "string"
},
"RemoteControlStatusChangedNotification": {
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -2745,15 +2725,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"type": "object"

View File

@@ -121,9 +121,6 @@
],
"type": "object"
},
"AttestationGenerateParams": {
"type": "object"
},
"ChatgptAuthTokensRefreshParams": {
"properties": {
"previousAccountId": {
@@ -420,11 +417,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -434,7 +426,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -607,11 +598,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -621,7 +607,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -1602,11 +1587,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -1618,7 +1598,6 @@
"cwd",
"itemId",
"permissions",
"startedAtMs",
"threadId",
"turnId"
],
@@ -1921,31 +1900,6 @@
"title": "Account/chatgptAuthTokens/refreshRequest",
"type": "object"
},
{
"description": "Generate a fresh upstream attestation result on demand.",
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"attestation/generate"
],
"title": "Attestation/generateRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/AttestationGenerateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Attestation/generateRequest",
"type": "object"
},
{
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
"properties": {

View File

@@ -39,11 +39,6 @@
"array",
"null"
]
},
"requestAttestation": {
"default": false,
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
"type": "boolean"
}
},
"type": "object"

View File

@@ -213,24 +213,12 @@
},
"type": "array"
},
"PostCompact": {
"items": {
"$ref": "#/definitions/ConfiguredHookMatcherGroup"
},
"type": "array"
},
"PostToolUse": {
"items": {
"$ref": "#/definitions/ConfiguredHookMatcherGroup"
},
"type": "array"
},
"PreCompact": {
"items": {
"$ref": "#/definitions/ConfiguredHookMatcherGroup"
},
"type": "array"
},
"PreToolUse": {
"items": {
"$ref": "#/definitions/ConfiguredHookMatcherGroup"
@@ -270,9 +258,7 @@
},
"required": [
"PermissionRequest",
"PostCompact",
"PostToolUse",
"PreCompact",
"PreToolUse",
"SessionStart",
"Stop",

View File

@@ -0,0 +1,39 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"title": "DeviceKeyCreateParams",
"type": "object"
}

View File

@@ -0,0 +1,45 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Device-key metadata and public key returned by create/public APIs.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyCreateResponse",
"type": "object"
}

View File

@@ -1,14 +1,14 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"token": {
"description": "Opaque client attestation token.",
"keyId": {
"type": "string"
}
},
"required": [
"token"
"keyId"
],
"title": "AttestationGenerateResponse",
"title": "DeviceKeyPublicParams",
"type": "object"
}

View File

@@ -0,0 +1,45 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Device-key public metadata returned by `device/key/public`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyPublicResponse",
"type": "object"
}

View File

@@ -0,0 +1,165 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
}
},
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"title": "DeviceKeySignParams",
"type": "object"
}

View File

@@ -0,0 +1,33 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
}
},
"description": "ASN.1 DER signature returned by `device/key/sign`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"signatureDerBase64": {
"description": "ECDSA signature DER encoded as base64.",
"type": "string"
},
"signedPayloadBase64": {
"description": "Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte string directly and must not reserialize `payload`.",
"type": "string"
}
},
"required": [
"algorithm",
"signatureDerBase64",
"signedPayloadBase64"
],
"title": "DeviceKeySignResponse",
"type": "object"
}

View File

@@ -10,8 +10,6 @@
"preToolUse",
"permissionRequest",
"postToolUse",
"preCompact",
"postCompact",
"sessionStart",
"userPromptSubmit",
"stop"

View File

@@ -10,8 +10,6 @@
"preToolUse",
"permissionRequest",
"postToolUse",
"preCompact",
"postCompact",
"sessionStart",
"userPromptSubmit",
"stop"

View File

@@ -25,8 +25,6 @@
"preToolUse",
"permissionRequest",
"postToolUse",
"preCompact",
"postCompact",
"sessionStart",
"userPromptSubmit",
"stop"

View File

@@ -574,11 +574,6 @@
"action": {
"$ref": "#/definitions/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/AutoReviewDecisionSource"
},
@@ -589,11 +584,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -610,11 +600,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -574,11 +574,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -597,7 +592,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -4,14 +4,6 @@
"AbsolutePathBuf": {
"description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
"type": "string"
},
"PluginListMarketplaceKind": {
"enum": [
"local",
"workspace-directory",
"shared-with-me"
],
"type": "string"
}
},
"properties": {
@@ -24,16 +16,6 @@
"array",
"null"
]
},
"marketplaceKinds": {
"description": "Optional marketplace kind filter. When omitted, only local marketplaces are queried, plus the default remote catalog when enabled by feature flag.",
"items": {
"$ref": "#/definitions/PluginListMarketplaceKind"
},
"type": [
"array",
"null"
]
}
},
"title": "PluginListParams",

View File

@@ -232,101 +232,6 @@
],
"type": "object"
},
"PluginShareContext": {
"properties": {
"creatorAccountUserId": {
"type": [
"string",
"null"
]
},
"creatorName": {
"type": [
"string",
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
"type": [
"array",
"null"
]
},
"shareUrl": {
"type": [
"string",
"null"
]
}
},
"required": [
"remotePluginId"
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
"type": "string"
},
"principalId": {
"type": "string"
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
"group",
"workspace"
],
"type": "string"
},
"PluginSource": {
"oneOf": [
{
@@ -452,17 +357,6 @@
"name": {
"type": "string"
},
"shareContext": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareContext"
},
{
"type": "null"
}
],
"description": "Remote sharing context associated with this plugin when available."
},
"source": {
"$ref": "#/definitions/PluginSource"
}

View File

@@ -37,19 +37,6 @@
],
"type": "object"
},
"HookEventName": {
"enum": [
"preToolUse",
"permissionRequest",
"postToolUse",
"preCompact",
"postCompact",
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"
},
"PluginAuthPolicy": {
"enum": [
"ON_INSTALL",
@@ -88,12 +75,6 @@
"null"
]
},
"hooks": {
"items": {
"$ref": "#/definitions/PluginHookSummary"
},
"type": "array"
},
"marketplaceName": {
"type": "string"
},
@@ -125,7 +106,6 @@
},
"required": [
"apps",
"hooks",
"marketplaceName",
"mcpServers",
"skills",
@@ -133,21 +113,6 @@
],
"type": "object"
},
"PluginHookSummary": {
"properties": {
"eventName": {
"$ref": "#/definitions/HookEventName"
},
"key": {
"type": "string"
}
},
"required": [
"eventName",
"key"
],
"type": "object"
},
"PluginInstallPolicy": {
"enum": [
"NOT_AVAILABLE",
@@ -286,101 +251,6 @@
],
"type": "object"
},
"PluginShareContext": {
"properties": {
"creatorAccountUserId": {
"type": [
"string",
"null"
]
},
"creatorName": {
"type": [
"string",
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
"type": [
"array",
"null"
]
},
"shareUrl": {
"type": [
"string",
"null"
]
}
},
"required": [
"remotePluginId"
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
"type": "string"
},
"principalId": {
"type": "string"
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
"group",
"workspace"
],
"type": "string"
},
"PluginSource": {
"oneOf": [
{
@@ -506,17 +376,6 @@
"name": {
"type": "string"
},
"shareContext": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareContext"
},
{
"type": "null"
}
],
"description": "Remote sharing context associated with this plugin when available."
},
"source": {
"$ref": "#/definitions/PluginSource"
}

View File

@@ -167,62 +167,6 @@
],
"type": "object"
},
"PluginShareContext": {
"properties": {
"creatorAccountUserId": {
"type": [
"string",
"null"
]
},
"creatorName": {
"type": [
"string",
"null"
]
},
"discoverability": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareDiscoverability"
},
{
"type": "null"
}
]
},
"remotePluginId": {
"type": "string"
},
"sharePrincipals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
},
"type": [
"array",
"null"
]
},
"shareUrl": {
"type": [
"string",
"null"
]
}
},
"required": [
"remotePluginId"
],
"type": "object"
},
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareListItem": {
"properties": {
"localPluginPath": {
@@ -237,52 +181,17 @@
},
"plugin": {
"$ref": "#/definitions/PluginSummary"
},
"shareUrl": {
"type": "string"
}
},
"required": [
"plugin"
"plugin",
"shareUrl"
],
"type": "object"
},
"PluginSharePrincipal": {
"properties": {
"name": {
"type": "string"
},
"principalId": {
"type": "string"
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
"group",
"workspace"
],
"type": "string"
},
"PluginSource": {
"oneOf": [
{
@@ -408,17 +317,6 @@
"name": {
"type": "string"
},
"shareContext": {
"anyOf": [
{
"$ref": "#/definitions/PluginShareContext"
},
{
"type": "null"
}
],
"description": "Remote sharing context associated with this plugin when available."
},
"source": {
"$ref": "#/definitions/PluginSource"
}

View File

@@ -28,24 +28,13 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
}
},
"properties": {

View File

@@ -16,37 +16,16 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginShareTargetRole"
}
},
"required": [
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginShareTargetRole": {
"enum": [
"reader",
"editor"
],
"type": "string"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -58,7 +37,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],

View File

@@ -1,14 +1,6 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
@@ -19,27 +11,15 @@
},
"principalType": {
"$ref": "#/definitions/PluginSharePrincipalType"
},
"role": {
"$ref": "#/definitions/PluginSharePrincipalRole"
}
},
"required": [
"name",
"principalId",
"principalType",
"role"
"principalType"
],
"type": "object"
},
"PluginSharePrincipalRole": {
"enum": [
"reader",
"editor",
"owner"
],
"type": "string"
},
"PluginSharePrincipalType": {
"enum": [
"user",
@@ -50,9 +30,6 @@
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
@@ -61,7 +38,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",

View File

@@ -11,7 +11,7 @@
"type": "string"
}
},
"description": "Current remote-control connection status and remote identity exposed to clients.",
"description": "Current remote-control connection status and environment id exposed to clients.",
"properties": {
"environmentId": {
"type": [
@@ -19,15 +19,11 @@
"null"
]
},
"installationId": {
"type": "string"
},
"status": {
"$ref": "#/definitions/RemoteControlConnectionStatus"
}
},
"required": [
"installationId",
"status"
],
"title": "RemoteControlStatusChangedNotification",

View File

@@ -1,5 +1,25 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
}
},
"properties": {
"cwds": {
"description": "When empty, defaults to the current session working directory.",
@@ -11,6 +31,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"title": "SkillsListParams",

View File

@@ -0,0 +1,44 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"WorktreeDirtyPolicy": {
"enum": [
"fail",
"ignore",
"copyTracked",
"copyAll",
"moveTracked",
"moveAll"
],
"type": "string"
}
},
"description": "Create or reuse a managed worktree from the repository containing \\`cwd\\`.",
"properties": {
"baseRef": {
"type": [
"string",
"null"
]
},
"branch": {
"type": "string"
},
"cwd": {
"description": "Repository-relative workspace cwd to use as the source checkout.",
"type": [
"string",
"null"
]
},
"dirtyPolicy": {
"$ref": "#/definitions/WorktreeDirtyPolicy"
}
},
"required": [
"branch",
"dirtyPolicy"
],
"title": "WorktreeCreateParams",
"type": "object"
}

View File

@@ -0,0 +1,152 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"WorktreeDirtyState": {
"properties": {
"hasStagedChanges": {
"type": "boolean"
},
"hasUnstagedChanges": {
"type": "boolean"
},
"hasUntrackedFiles": {
"type": "boolean"
}
},
"required": [
"hasStagedChanges",
"hasUnstagedChanges",
"hasUntrackedFiles"
],
"type": "object"
},
"WorktreeInfo": {
"description": "Server-native representation of a managed worktree.",
"properties": {
"branch": {
"type": [
"string",
"null"
]
},
"commonGitDir": {
"type": "string"
},
"dirty": {
"$ref": "#/definitions/WorktreeDirtyState"
},
"head": {
"type": [
"string",
"null"
]
},
"id": {
"type": "string"
},
"location": {
"$ref": "#/definitions/WorktreeLocation"
},
"metadataPath": {
"type": "string"
},
"name": {
"type": "string"
},
"originalRelativeCwd": {
"type": "string"
},
"ownerThreadId": {
"type": [
"string",
"null"
]
},
"repoName": {
"type": "string"
},
"repoRoot": {
"type": "string"
},
"slug": {
"type": "string"
},
"source": {
"$ref": "#/definitions/WorktreeSource"
},
"workspaceCwd": {
"type": "string"
},
"worktreeGitRoot": {
"type": "string"
}
},
"required": [
"commonGitDir",
"dirty",
"id",
"location",
"metadataPath",
"name",
"originalRelativeCwd",
"repoName",
"repoRoot",
"slug",
"source",
"workspaceCwd",
"worktreeGitRoot"
],
"type": "object"
},
"WorktreeLocation": {
"enum": [
"sibling",
"codexHome",
"external"
],
"type": "string"
},
"WorktreeSource": {
"enum": [
"cli",
"app",
"legacy",
"git"
],
"type": "string"
},
"WorktreeWarning": {
"properties": {
"message": {
"type": "string"
}
},
"required": [
"message"
],
"type": "object"
}
},
"description": "Result returned by \\`worktree/create\\`.",
"properties": {
"info": {
"$ref": "#/definitions/WorktreeInfo"
},
"reused": {
"type": "boolean"
},
"warnings": {
"items": {
"$ref": "#/definitions/WorktreeWarning"
},
"type": "array"
}
},
"required": [
"info",
"reused",
"warnings"
],
"title": "WorktreeCreateResponse",
"type": "object"
}

View File

@@ -0,0 +1,15 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Inspect dirty state for the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to inspect. Omitted uses app-server's effective cwd.",
"type": [
"string",
"null"
]
}
},
"title": "WorktreeInspectSourceParams",
"type": "object"
}

View File

@@ -0,0 +1,35 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"WorktreeDirtyState": {
"properties": {
"hasStagedChanges": {
"type": "boolean"
},
"hasUnstagedChanges": {
"type": "boolean"
},
"hasUntrackedFiles": {
"type": "boolean"
}
},
"required": [
"hasStagedChanges",
"hasUnstagedChanges",
"hasUntrackedFiles"
],
"type": "object"
}
},
"description": "Dirty-state response returned by \\`worktree/inspectSource\\`.",
"properties": {
"dirty": {
"$ref": "#/definitions/WorktreeDirtyState"
}
},
"required": [
"dirty"
],
"title": "WorktreeInspectSourceResponse",
"type": "object"
}

View File

@@ -0,0 +1,15 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Request the managed worktrees associated with the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to inspect. Omitted uses app-server's effective cwd.",
"type": [
"string",
"null"
]
}
},
"title": "WorktreeListParams",
"type": "object"
}

View File

@@ -0,0 +1,133 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"WorktreeDirtyState": {
"properties": {
"hasStagedChanges": {
"type": "boolean"
},
"hasUnstagedChanges": {
"type": "boolean"
},
"hasUntrackedFiles": {
"type": "boolean"
}
},
"required": [
"hasStagedChanges",
"hasUnstagedChanges",
"hasUntrackedFiles"
],
"type": "object"
},
"WorktreeInfo": {
"description": "Server-native representation of a managed worktree.",
"properties": {
"branch": {
"type": [
"string",
"null"
]
},
"commonGitDir": {
"type": "string"
},
"dirty": {
"$ref": "#/definitions/WorktreeDirtyState"
},
"head": {
"type": [
"string",
"null"
]
},
"id": {
"type": "string"
},
"location": {
"$ref": "#/definitions/WorktreeLocation"
},
"metadataPath": {
"type": "string"
},
"name": {
"type": "string"
},
"originalRelativeCwd": {
"type": "string"
},
"ownerThreadId": {
"type": [
"string",
"null"
]
},
"repoName": {
"type": "string"
},
"repoRoot": {
"type": "string"
},
"slug": {
"type": "string"
},
"source": {
"$ref": "#/definitions/WorktreeSource"
},
"workspaceCwd": {
"type": "string"
},
"worktreeGitRoot": {
"type": "string"
}
},
"required": [
"commonGitDir",
"dirty",
"id",
"location",
"metadataPath",
"name",
"originalRelativeCwd",
"repoName",
"repoRoot",
"slug",
"source",
"workspaceCwd",
"worktreeGitRoot"
],
"type": "object"
},
"WorktreeLocation": {
"enum": [
"sibling",
"codexHome",
"external"
],
"type": "string"
},
"WorktreeSource": {
"enum": [
"cli",
"app",
"legacy",
"git"
],
"type": "string"
}
},
"description": "Managed worktrees returned by \\`worktree/list\\`.",
"properties": {
"data": {
"items": {
"$ref": "#/definitions/WorktreeInfo"
},
"type": "array"
}
},
"required": [
"data"
],
"title": "WorktreeListResponse",
"type": "object"
}

View File

@@ -0,0 +1,27 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Remove a managed worktree in the repository containing \\`cwd\\`.",
"properties": {
"cwd": {
"description": "Repository-relative workspace cwd to use when resolving \\`name_or_path\\`.",
"type": [
"string",
"null"
]
},
"deleteBranch": {
"type": "boolean"
},
"force": {
"type": "boolean"
},
"nameOrPath": {
"type": "string"
}
},
"required": [
"nameOrPath"
],
"title": "WorktreeRemoveParams",
"type": "object"
}

View File

@@ -0,0 +1,20 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Result returned by \\`worktree/remove\\`.",
"properties": {
"deletedBranch": {
"type": [
"string",
"null"
]
},
"removedPath": {
"type": "string"
}
},
"required": [
"removedPath"
],
"title": "WorktreeRemoveResponse",
"type": "object"
}

File diff suppressed because one or more lines are too long

View File

@@ -10,10 +10,6 @@ export type InitializeCapabilities = {
* Opt into receiving experimental API methods and fields.
*/
experimentalApi: boolean,
/**
* Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
*/
requestAttestation: boolean,
/**
* Exact notification method names that should be suppressed for this
* connection (for example `thread/started`).

View File

@@ -4,7 +4,6 @@
import type { ApplyPatchApprovalParams } from "./ApplyPatchApprovalParams";
import type { ExecCommandApprovalParams } from "./ExecCommandApprovalParams";
import type { RequestId } from "./RequestId";
import type { AttestationGenerateParams } from "./v2/AttestationGenerateParams";
import type { ChatgptAuthTokensRefreshParams } from "./v2/ChatgptAuthTokensRefreshParams";
import type { CommandExecutionRequestApprovalParams } from "./v2/CommandExecutionRequestApprovalParams";
import type { DynamicToolCallParams } from "./v2/DynamicToolCallParams";
@@ -16,4 +15,4 @@ import type { ToolRequestUserInputParams } from "./v2/ToolRequestUserInputParams
/**
* Request initiated from the server and sent to the client.
*/
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "attestation/generate", id: RequestId, params: AttestationGenerateParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };

View File

@@ -8,9 +8,6 @@ import type { NetworkApprovalContext } from "./NetworkApprovalContext";
import type { NetworkPolicyAmendment } from "./NetworkPolicyAmendment";
export type CommandExecutionRequestApprovalParams = {threadId: string, turnId: string, itemId: string, /**
* Unix timestamp (in milliseconds) when this approval request started.
*/
startedAtMs: number, /**
* Unique identifier for this specific approval callback.
*
* For regular shell/unified_exec approvals, this is null.

View File

@@ -1,6 +1,8 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { HookEventName } from "./HookEventName";
export type PluginHookSummary = { key: string, eventName: HookEventName, };
/**
* Device-key algorithm reported at enrollment and signing boundaries.
*/
export type DeviceKeyAlgorithm = "ecdsa_p256_sha256";

View File

@@ -0,0 +1,13 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyProtectionPolicy } from "./DeviceKeyProtectionPolicy";
/**
* Create a controller-local device key with a random key id.
*/
export type DeviceKeyCreateParams = {
/**
* Defaults to `hardware_only` when omitted.
*/
protectionPolicy?: DeviceKeyProtectionPolicy | null, accountUserId: string, clientId: string, };

View File

@@ -0,0 +1,14 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyAlgorithm } from "./DeviceKeyAlgorithm";
import type { DeviceKeyProtectionClass } from "./DeviceKeyProtectionClass";
/**
* Device-key metadata and public key returned by create/public APIs.
*/
export type DeviceKeyCreateResponse = { keyId: string,
/**
* SubjectPublicKeyInfo DER encoded as base64.
*/
publicKeySpkiDerBase64: string, algorithm: DeviceKeyAlgorithm, protectionClass: DeviceKeyProtectionClass, };

Some files were not shown because too many files have changed in this diff Show More