mirror of
https://github.com/openai/codex.git
synced 2026-05-09 13:52:41 +00:00
Compare commits
70 Commits
dependabot
...
codex/pyth
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7edbdc555c | ||
|
|
d80a43263f | ||
|
|
78c0d5ca3d | ||
|
|
9306e60848 | ||
|
|
8d7a5c27c1 | ||
|
|
692c08faf9 | ||
|
|
8b8e868140 | ||
|
|
2654cc299e | ||
|
|
242ca6d8fd | ||
|
|
b7635f4d77 | ||
|
|
c24694bdb0 | ||
|
|
6e10973c78 | ||
|
|
becbd2a127 | ||
|
|
11e31d7d38 | ||
|
|
1d0023776f | ||
|
|
9b54951688 | ||
|
|
3a3e1b477c | ||
|
|
356c6797b8 | ||
|
|
bd14ac4758 | ||
|
|
d764740e6f | ||
|
|
29e1c96f72 | ||
|
|
ebe75bb683 | ||
|
|
77d9223e9f | ||
|
|
479491ed89 | ||
|
|
c579da41b1 | ||
|
|
408e6218ab | ||
|
|
95ca276373 | ||
|
|
bd42660cb4 | ||
|
|
0c8d42525e | ||
|
|
faa5d4a5e2 | ||
|
|
8f4020846e | ||
|
|
80a408e201 | ||
|
|
1b86906fa1 | ||
|
|
dac108f2f1 | ||
|
|
24111790f0 | ||
|
|
2f3a2d7a86 | ||
|
|
7c9731c9af | ||
|
|
46e2250bcf | ||
|
|
e783341b70 | ||
|
|
cf941ede15 | ||
|
|
5f4d0ec343 | ||
|
|
61142b6169 | ||
|
|
bbb6bf0a37 | ||
|
|
9183503b97 | ||
|
|
8956a928a1 | ||
|
|
772e034594 | ||
|
|
5f2543b74e | ||
|
|
872b8b15b3 | ||
|
|
8bea5d231a | ||
|
|
0a0d09ad21 | ||
|
|
7c0e54bf59 | ||
|
|
47f1d7b40b | ||
|
|
05ffa0b1d0 | ||
|
|
5733e00c79 | ||
|
|
5b87bd2845 | ||
|
|
607b0dd1f0 | ||
|
|
f9bbbafb68 | ||
|
|
bd8fc9adb9 | ||
|
|
e6312d44f0 | ||
|
|
f86d95a242 | ||
|
|
aadcae9f3c | ||
|
|
cce059467a | ||
|
|
317213fd33 | ||
|
|
d9feaffffb | ||
|
|
71d80f9a14 | ||
|
|
d2e71db22a | ||
|
|
c15ce42a12 | ||
|
|
8b1d6875ed | ||
|
|
911841001d | ||
|
|
ae15343243 |
5
.github/ISSUE_TEMPLATE/3-cli.yml
vendored
5
.github/ISSUE_TEMPLATE/3-cli.yml
vendored
@@ -2,7 +2,6 @@ name: 💻 CLI Bug
|
||||
description: Report an issue in the Codex CLI
|
||||
labels:
|
||||
- bug
|
||||
- needs triage
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
@@ -41,9 +40,9 @@ body:
|
||||
id: terminal
|
||||
attributes:
|
||||
label: What terminal emulator and version are you using (if applicable)?
|
||||
description: Also note any multiplexer in use (screen / tmux / zellij)
|
||||
description: |
|
||||
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
|
||||
Also note any multiplexer in use (screen / tmux / zellij).
|
||||
E.g., VS Code, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
|
||||
- type: textarea
|
||||
id: actual
|
||||
attributes:
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/5-feature-request.yml
vendored
2
.github/ISSUE_TEMPLATE/5-feature-request.yml
vendored
@@ -10,7 +10,7 @@ body:
|
||||
|
||||
Before you submit a feature:
|
||||
1. Search existing issues for similar features. If you find one, 👍 it rather than opening a new one.
|
||||
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex#contributing) for more details.
|
||||
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex/blob/main/docs/contributing.md) for more details.
|
||||
|
||||
- type: input
|
||||
id: variant
|
||||
|
||||
4
.github/ISSUE_TEMPLATE/6-docs-issue.yml
vendored
4
.github/ISSUE_TEMPLATE/6-docs-issue.yml
vendored
@@ -1,6 +1,6 @@
|
||||
name: 📗 Documentation Issue
|
||||
description: Tell us if there is missing or incorrect documentation
|
||||
labels: [docs]
|
||||
labels: [documentation]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
@@ -24,4 +24,4 @@ body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Where did you find it?
|
||||
description: If possible, please provide the URL(s) where you found this issue.
|
||||
description: If possible, please provide the URL(s) where you found this issue.
|
||||
|
||||
12
.github/workflows/bazel.yml
vendored
12
.github/workflows/bazel.yml
vendored
@@ -57,6 +57,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Check rusty_v8 MODULE.bazel checksums
|
||||
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
|
||||
@@ -149,6 +152,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
@@ -232,6 +238,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
@@ -319,6 +328,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
|
||||
6
.github/workflows/blob-size-policy.yml
vendored
6
.github/workflows/blob-size-policy.yml
vendored
@@ -10,15 +10,17 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Determine PR comparison range
|
||||
id: range
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "base=$(git rev-parse HEAD^1)" >> "$GITHUB_OUTPUT"
|
||||
echo "head=$(git rev-parse HEAD^2)" >> "$GITHUB_OUTPUT"
|
||||
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
|
||||
echo "head=${{ github.event.pull_request.head.sha }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Check changed blob sizes
|
||||
env:
|
||||
|
||||
3
.github/workflows/cargo-deny.yml
vendored
3
.github/workflows/cargo-deny.yml
vendored
@@ -15,6 +15,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
|
||||
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
@@ -13,6 +13,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Verify codex-rs Cargo manifests inherit workspace settings
|
||||
run: python3 .github/scripts/verify_cargo_workspace_manifests.py
|
||||
|
||||
3
.github/workflows/codespell.yml
vendored
3
.github/workflows/codespell.yml
vendored
@@ -19,6 +19,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- name: Annotate locations with typos
|
||||
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1.1.0
|
||||
- name: Codespell
|
||||
|
||||
4
.github/workflows/issue-deduplicator.yml
vendored
4
.github/workflows/issue-deduplicator.yml
vendored
@@ -20,6 +20,8 @@ jobs:
|
||||
has_matches: ${{ steps.normalize-all.outputs.has_matches }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Codex inputs
|
||||
env:
|
||||
@@ -156,6 +158,8 @@ jobs:
|
||||
has_matches: ${{ steps.normalize-open.outputs.has_matches }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Codex inputs
|
||||
env:
|
||||
|
||||
2
.github/workflows/issue-labeler.yml
vendored
2
.github/workflows/issue-labeler.yml
vendored
@@ -18,6 +18,8 @@ jobs:
|
||||
codex_output: ${{ steps.codex.outputs.final-message }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: codex
|
||||
uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7
|
||||
|
||||
28
.github/workflows/rust-ci-full.yml
vendored
28
.github/workflows/rust-ci-full.yml
vendored
@@ -7,6 +7,11 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
# CI builds in debug (dev) for faster signal.
|
||||
env:
|
||||
# Cargo's libgit2 transport has been flaky on macOS when fetching git
|
||||
# dependencies with nested submodules. Use the system git CLI, which has
|
||||
# better network/proxy behavior and matches Cargo's own suggested fallback.
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
|
||||
|
||||
jobs:
|
||||
# --- CI that doesn't need specific targets ---------------------------------
|
||||
@@ -18,6 +23,8 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
components: rustfmt
|
||||
@@ -32,13 +39,14 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
|
||||
with:
|
||||
tool: cargo-shear
|
||||
version: 1.11.2
|
||||
tool: cargo-shear@1.11.2
|
||||
- name: cargo shear
|
||||
run: cargo shear
|
||||
run: cargo shear --deny-warnings
|
||||
|
||||
argument_comment_lint_package:
|
||||
name: Argument comment lint package
|
||||
@@ -48,6 +56,8 @@ jobs:
|
||||
DYLINT_LINK_VERSION: 5.0.0
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
toolchain: nightly-2025-09-18
|
||||
@@ -98,6 +108,8 @@ jobs:
|
||||
labels: codex-windows-x64
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: ./.github/actions/setup-bazel-ci
|
||||
with:
|
||||
target: ${{ runner.os }}
|
||||
@@ -234,6 +246,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install Linux build dependencies
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -560,6 +574,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install Linux build dependencies
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -567,7 +583,7 @@ jobs:
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev bubblewrap
|
||||
fi
|
||||
|
||||
# Some integration tests rely on DotSlash being installed.
|
||||
@@ -722,10 +738,12 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
set +e
|
||||
if [[ "${{ steps.test.outcome }}" != "success" ]]; then
|
||||
if [[ "${STEPS_TEST_OUTCOME}" != "success" ]]; then
|
||||
docker logs codex-remote-test-env || true
|
||||
fi
|
||||
docker rm -f codex-remote-test-env >/dev/null 2>&1 || true
|
||||
env:
|
||||
STEPS_TEST_OUTCOME: ${{ steps.test.outcome }}
|
||||
|
||||
- name: verify tests passed
|
||||
if: steps.test.outcome == 'failure'
|
||||
|
||||
32
.github/workflows/rust-ci.yml
vendored
32
.github/workflows/rust-ci.yml
vendored
@@ -16,7 +16,9 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
- name: Detect changed paths (no external action)
|
||||
id: detect
|
||||
shell: bash
|
||||
@@ -62,6 +64,9 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
components: rustfmt
|
||||
@@ -78,13 +83,15 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
|
||||
with:
|
||||
tool: cargo-shear
|
||||
version: 1.11.2
|
||||
tool: cargo-shear@1.11.2
|
||||
- name: cargo shear
|
||||
run: cargo shear
|
||||
run: cargo shear --deny-warnings
|
||||
|
||||
argument_comment_lint_package:
|
||||
name: Argument comment lint package
|
||||
@@ -96,6 +103,9 @@ jobs:
|
||||
DYLINT_LINK_VERSION: 5.0.0
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- name: Install nightly argument-comment-lint toolchain
|
||||
shell: bash
|
||||
@@ -172,6 +182,9 @@ jobs:
|
||||
echo "run=false" >> "$GITHUB_OUTPUT"
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- name: Run argument comment lint on codex-rs via Bazel
|
||||
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
|
||||
uses: ./.github/actions/run-argument-comment-lint
|
||||
@@ -203,20 +216,25 @@ jobs:
|
||||
|
||||
# If nothing relevant changed (PR touching only root README, etc.),
|
||||
# declare success regardless of other jobs.
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' != 'true' && '${{ needs.changed.outputs.codex }}' != 'true' && '${{ needs.changed.outputs.workflows }}' != 'true' ]]; then
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_CODEX}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" != 'true' ]]; then
|
||||
echo 'No relevant changes -> CI not required.'
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint_package }}' == 'true' ]]; then
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE}" == 'true' ]]; then
|
||||
[[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; }
|
||||
fi
|
||||
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
|
||||
[[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; }
|
||||
fi
|
||||
|
||||
if [[ '${{ needs.changed.outputs.codex }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_CODEX}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
|
||||
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
|
||||
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
|
||||
fi
|
||||
env:
|
||||
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT: ${{ needs.changed.outputs.argument_comment_lint }}
|
||||
NEEDS_CHANGED_OUTPUTS_CODEX: ${{ needs.changed.outputs.codex }}
|
||||
NEEDS_CHANGED_OUTPUTS_WORKFLOWS: ${{ needs.changed.outputs.workflows }}
|
||||
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE: ${{ needs.changed.outputs.argument_comment_lint_package }}
|
||||
|
||||
@@ -57,6 +57,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
|
||||
1
.github/workflows/rust-release-prepare.yml
vendored
1
.github/workflows/rust-release-prepare.yml
vendored
@@ -22,6 +22,7 @@ jobs:
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Update models.json
|
||||
env:
|
||||
|
||||
46
.github/workflows/rust-release-windows.yml
vendored
46
.github/workflows/rust-release-windows.yml
vendored
@@ -84,6 +84,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Print runner specs (Windows)
|
||||
shell: powershell
|
||||
run: |
|
||||
@@ -166,6 +168,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download prebuilt Windows primary binaries
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
@@ -216,6 +220,48 @@ jobs:
|
||||
"$dest/${binary}-${{ matrix.target }}.exe"
|
||||
done
|
||||
|
||||
- name: Build Python runtime wheel
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
case "${{ matrix.target }}" in
|
||||
aarch64-pc-windows-msvc)
|
||||
platform_tag="win_arm64"
|
||||
;;
|
||||
x86_64-pc-windows-msvc)
|
||||
platform_tag="win_amd64"
|
||||
;;
|
||||
*)
|
||||
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
python -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m pip install build
|
||||
|
||||
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
|
||||
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
|
||||
# Keep the helpers next to codex.exe in the runtime wheel so Windows
|
||||
# sandbox/elevation lookup matches the standalone release zip.
|
||||
python "${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py" \
|
||||
stage-runtime \
|
||||
"$stage_dir" \
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex.exe" \
|
||||
--codex-version "${GITHUB_REF_NAME}" \
|
||||
--platform-tag "$platform_tag" \
|
||||
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-command-runner.exe" \
|
||||
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
|
||||
|
||||
- name: Upload Python runtime wheel
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: python-runtime-wheel-${{ matrix.target }}
|
||||
path: python-runtime-dist/${{ matrix.target }}/*.whl
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Install DotSlash
|
||||
uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2
|
||||
|
||||
|
||||
4
.github/workflows/rust-release-zsh.yml
vendored
4
.github/workflows/rust-release-zsh.yml
vendored
@@ -46,6 +46,8 @@ jobs:
|
||||
libncursesw5-dev
|
||||
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Build, smoke-test, and stage zsh artifact
|
||||
shell: bash
|
||||
@@ -82,6 +84,8 @@ jobs:
|
||||
fi
|
||||
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Build, smoke-test, and stage zsh artifact
|
||||
shell: bash
|
||||
|
||||
125
.github/workflows/rust-release.yml
vendored
125
.github/workflows/rust-release.yml
vendored
@@ -20,6 +20,8 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- name: Validate tag matches Cargo.toml version
|
||||
shell: bash
|
||||
@@ -119,6 +121,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Print runner specs (Linux)
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -184,6 +188,7 @@ jobs:
|
||||
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
|
||||
with:
|
||||
version: 0.14.0
|
||||
use-cache: false
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
@@ -394,6 +399,65 @@ jobs:
|
||||
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
|
||||
fi
|
||||
|
||||
- name: Build Python runtime wheel
|
||||
if: ${{ matrix.bundle == 'primary' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
case "${{ matrix.target }}" in
|
||||
aarch64-apple-darwin)
|
||||
platform_tag="macosx_11_0_arm64"
|
||||
;;
|
||||
x86_64-apple-darwin)
|
||||
platform_tag="macosx_10_9_x86_64"
|
||||
;;
|
||||
aarch64-unknown-linux-musl)
|
||||
platform_tag="musllinux_1_1_aarch64"
|
||||
;;
|
||||
x86_64-unknown-linux-musl)
|
||||
platform_tag="musllinux_1_1_x86_64"
|
||||
;;
|
||||
*)
|
||||
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
python3 -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
|
||||
# Do not install into the runner's system Python; macOS runners mark
|
||||
# the Homebrew Python as externally managed under PEP 668.
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m pip install build
|
||||
|
||||
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
|
||||
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
|
||||
stage_runtime_args=(
|
||||
"${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py"
|
||||
stage-runtime
|
||||
"$stage_dir"
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex"
|
||||
--codex-version "${GITHUB_REF_NAME}"
|
||||
--platform-tag "$platform_tag"
|
||||
)
|
||||
if [[ "${{ matrix.target }}" == *linux* ]]; then
|
||||
# Keep bwrap in the runtime wheel so Linux sandbox fallback behavior
|
||||
# matches the standalone release bundle on hosts without system bwrap.
|
||||
stage_runtime_args+=(
|
||||
--resource-binary
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/bwrap"
|
||||
)
|
||||
fi
|
||||
python3 "${stage_runtime_args[@]}"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
|
||||
|
||||
- name: Upload Python runtime wheel
|
||||
if: ${{ matrix.bundle == 'primary' }}
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: python-runtime-wheel-${{ matrix.target }}
|
||||
path: python-runtime-dist/${{ matrix.target }}/*.whl
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Compress artifacts
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -473,10 +537,13 @@ jobs:
|
||||
tag: ${{ github.ref_name }}
|
||||
should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
|
||||
npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
|
||||
should_publish_python_runtime: ${{ steps.python_runtime_publish_settings.outputs.should_publish }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Generate release notes from tag commit message
|
||||
id: release_notes
|
||||
@@ -547,6 +614,22 @@ jobs:
|
||||
echo "npm_tag=" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Determine Python runtime publish settings
|
||||
id: python_runtime_publish_settings
|
||||
env:
|
||||
VERSION: ${{ steps.release_name.outputs.name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version="${VERSION}"
|
||||
|
||||
if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5
|
||||
with:
|
||||
@@ -780,6 +863,48 @@ jobs:
|
||||
exit "${publish_status}"
|
||||
done
|
||||
|
||||
# Publish the platform-specific Python runtime wheels using PyPI trusted publishing.
|
||||
# PyPI project configuration must trust this workflow and job. Keep this
|
||||
# non-blocking while the Python runtime publishing path is new; failures still
|
||||
# need release follow-up, but should not invalidate the Rust release itself.
|
||||
publish-python-runtime:
|
||||
# Publish to PyPI for stable releases and alpha pre-releases with numeric suffixes.
|
||||
if: ${{ needs.release.outputs.should_publish_python_runtime == 'true' }}
|
||||
name: publish-python-runtime
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
environment: pypi
|
||||
permissions:
|
||||
id-token: write # Required for PyPI trusted publishing.
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Download Python runtime wheels from release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RELEASE_TAG: ${{ needs.release.outputs.tag }}
|
||||
RELEASE_VERSION: ${{ needs.release.outputs.version }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python_version="$RELEASE_VERSION"
|
||||
python_version="${python_version/-alpha./a}"
|
||||
python_version="${python_version/-beta./b}"
|
||||
python_version="${python_version/-rc./rc}"
|
||||
|
||||
mkdir -p dist/python-runtime
|
||||
gh release download "$RELEASE_TAG" \
|
||||
--repo "${GITHUB_REPOSITORY}" \
|
||||
--pattern "openai_codex_cli_bin-${python_version}-*.whl" \
|
||||
--dir dist/python-runtime
|
||||
ls -lh dist/python-runtime
|
||||
|
||||
- name: Publish Python runtime wheels to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
with:
|
||||
packages-dir: dist/python-runtime
|
||||
skip-existing: true
|
||||
|
||||
winget:
|
||||
name: winget
|
||||
needs: release
|
||||
|
||||
4
.github/workflows/rusty-v8-release.yml
vendored
4
.github/workflows/rusty-v8-release.yml
vendored
@@ -18,6 +18,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
@@ -70,6 +72,8 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Bazel
|
||||
uses: ./.github/actions/setup-bazel-ci
|
||||
|
||||
36
.github/workflows/sdk.yml
vendored
36
.github/workflows/sdk.yml
vendored
@@ -6,6 +6,39 @@ on:
|
||||
pull_request: {}
|
||||
|
||||
jobs:
|
||||
python-sdk:
|
||||
runs-on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Test Python SDK
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Run inside Alpine so dependency resolution exercises the pinned
|
||||
# runtime wheel on the same Linux wheel family that CI installs.
|
||||
docker run --rm \
|
||||
--user "$(id -u):$(id -g)" \
|
||||
-e HOME=/tmp/codex-python-sdk-home \
|
||||
-e UV_LINK_MODE=copy \
|
||||
-v "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}" \
|
||||
-w "${GITHUB_WORKSPACE}/sdk/python" \
|
||||
python:3.12-alpine \
|
||||
sh -euxc '
|
||||
python -m venv /tmp/uv
|
||||
/tmp/uv/bin/python -m pip install uv==0.11.3
|
||||
/tmp/uv/bin/uv sync --extra dev --frozen
|
||||
/tmp/uv/bin/uv run --extra dev pytest
|
||||
'
|
||||
|
||||
sdks:
|
||||
runs-on:
|
||||
group: codex-runners
|
||||
@@ -14,6 +47,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install Linux bwrap build dependencies
|
||||
shell: bash
|
||||
|
||||
6
.github/workflows/v8-canary.yml
vendored
6
.github/workflows/v8-canary.yml
vendored
@@ -41,6 +41,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
@@ -75,6 +78,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Bazel
|
||||
uses: ./.github/actions/setup-bazel-ci
|
||||
|
||||
@@ -26,7 +26,7 @@ In the codex-rs folder where the rust code lives:
|
||||
- Implementations may still use `async fn foo(&self, ...) -> T` when they satisfy that contract.
|
||||
- Do not use `#[allow(async_fn_in_trait)]` as a shortcut around spelling the future contract explicitly.
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- Do not add general product or user-facing documentation to the `docs/` folder. The official Codex documentation lives elsewhere. The exception is app-server API documentation, which is covered by the app-server guidance below.
|
||||
- Prefer private modules and explicitly exported public crate API.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
|
||||
- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls.
|
||||
@@ -210,7 +210,7 @@ These guidelines apply to app-server protocol work in `codex-rs`, especially:
|
||||
|
||||
### Development Workflow
|
||||
|
||||
- Update docs/examples when API behavior changes (at minimum `app-server/README.md`).
|
||||
- Update app-server docs/examples when API behavior changes (at minimum `app-server/README.md`).
|
||||
- Regenerate schema fixtures when API shapes change:
|
||||
`just write-app-server-schema`
|
||||
(and `just write-app-server-schema --experimental` when experimental API fixtures are affected).
|
||||
|
||||
40
codex-rs/Cargo.lock
generated
40
codex-rs/Cargo.lock
generated
@@ -1900,6 +1900,7 @@ dependencies = [
|
||||
"codex-features",
|
||||
"codex-feedback",
|
||||
"codex-file-search",
|
||||
"codex-file-watcher",
|
||||
"codex-git-utils",
|
||||
"codex-hooks",
|
||||
"codex-login",
|
||||
@@ -1979,6 +1980,26 @@ dependencies = [
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-app-server-daemon"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-app-server-protocol",
|
||||
"codex-app-server-transport",
|
||||
"codex-core",
|
||||
"codex-uds",
|
||||
"futures",
|
||||
"libc",
|
||||
"pretty_assertions",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tokio-tungstenite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-app-server-protocol"
|
||||
version = "0.0.0"
|
||||
@@ -2206,6 +2227,7 @@ dependencies = [
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"codex-app-server",
|
||||
"codex-app-server-daemon",
|
||||
"codex-app-server-protocol",
|
||||
"codex-app-server-test-client",
|
||||
"codex-arg0",
|
||||
@@ -2441,7 +2463,11 @@ dependencies = [
|
||||
"codex-app-server-protocol",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha1",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"urlencoding",
|
||||
]
|
||||
|
||||
@@ -2526,7 +2552,6 @@ dependencies = [
|
||||
"insta",
|
||||
"libc",
|
||||
"maplit",
|
||||
"notify",
|
||||
"once_cell",
|
||||
"openssl-sys",
|
||||
"opentelemetry",
|
||||
@@ -2733,7 +2758,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.18",
|
||||
@@ -2870,6 +2894,17 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-file-watcher"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"notify",
|
||||
"pretty_assertions",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-git-utils"
|
||||
version = "0.0.0"
|
||||
@@ -3331,7 +3366,6 @@ dependencies = [
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-image",
|
||||
"codex-utils-string",
|
||||
"codex-utils-template",
|
||||
"encoding_rs",
|
||||
"globset",
|
||||
"http 1.4.0",
|
||||
|
||||
@@ -11,6 +11,7 @@ members = [
|
||||
"async-utils",
|
||||
"app-server",
|
||||
"app-server-transport",
|
||||
"app-server-daemon",
|
||||
"app-server-client",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
@@ -48,6 +49,7 @@ members = [
|
||||
"external-agent-sessions",
|
||||
"keyring-store",
|
||||
"file-search",
|
||||
"file-watcher",
|
||||
"linux-sandbox",
|
||||
"lmstudio",
|
||||
"login",
|
||||
@@ -131,6 +133,7 @@ codex-api = { path = "codex-api" }
|
||||
codex-aws-auth = { path = "aws-auth" }
|
||||
codex-app-server = { path = "app-server" }
|
||||
codex-app-server-transport = { path = "app-server-transport" }
|
||||
codex-app-server-daemon = { path = "app-server-daemon" }
|
||||
codex-app-server-client = { path = "app-server-client" }
|
||||
codex-app-server-protocol = { path = "app-server-protocol" }
|
||||
codex-app-server-test-client = { path = "app-server-test-client" }
|
||||
@@ -164,6 +167,7 @@ codex-features = { path = "features" }
|
||||
codex-feedback = { path = "feedback" }
|
||||
codex-install-context = { path = "install-context" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-file-watcher = { path = "file-watcher" }
|
||||
codex-git-utils = { path = "git-utils" }
|
||||
codex-hooks = { path = "hooks" }
|
||||
codex-keyring-store = { path = "keyring-store" }
|
||||
@@ -464,11 +468,8 @@ unwrap_used = "deny"
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = [
|
||||
"codex-agent-graph-store",
|
||||
"codex-memories-mcp",
|
||||
"icu_provider",
|
||||
"openssl-sys",
|
||||
"codex-utils-readiness",
|
||||
"codex-utils-template",
|
||||
"codex-v8-poc",
|
||||
]
|
||||
|
||||
|
||||
298
codex-rs/analytics/src/accepted_lines.rs
Normal file
298
codex-rs/analytics/src/accepted_lines.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::TrackEventRequest;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use codex_git_utils::canonicalize_git_remote_url;
|
||||
use codex_git_utils::get_git_remote_urls_assume_git_repo;
|
||||
use sha1::Digest;
|
||||
use std::path::Path;
|
||||
|
||||
const ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES: usize = 2 * 1024 * 1024;
|
||||
const ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES: usize = 1024;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AcceptedLineFingerprintSummary {
|
||||
pub accepted_added_lines: u64,
|
||||
pub accepted_deleted_lines: u64,
|
||||
pub line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
pub(crate) struct AcceptedLineFingerprintEventInput {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) turn_id: String,
|
||||
pub(crate) thread_id: String,
|
||||
pub(crate) product_surface: Option<String>,
|
||||
pub(crate) model_slug: Option<String>,
|
||||
pub(crate) completed_at: u64,
|
||||
pub(crate) repo_hash: Option<String>,
|
||||
pub(crate) accepted_added_lines: u64,
|
||||
pub(crate) accepted_deleted_lines: u64,
|
||||
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
pub fn accepted_line_fingerprints_from_unified_diff(
|
||||
unified_diff: &str,
|
||||
) -> AcceptedLineFingerprintSummary {
|
||||
let mut current_path: Option<String> = None;
|
||||
let mut in_hunk = false;
|
||||
let mut accepted_added_lines = 0;
|
||||
let mut accepted_deleted_lines = 0;
|
||||
let mut line_fingerprints = Vec::new();
|
||||
|
||||
for line in unified_diff.lines() {
|
||||
if line.starts_with("diff --git ") {
|
||||
current_path = None;
|
||||
in_hunk = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("@@ ") {
|
||||
in_hunk = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if !in_hunk && let Some(path) = line.strip_prefix("+++ ") {
|
||||
current_path = normalize_diff_path(path);
|
||||
continue;
|
||||
}
|
||||
|
||||
if !in_hunk && line.starts_with("--- ") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(added_line) = line.strip_prefix('+') {
|
||||
accepted_added_lines += 1;
|
||||
if let Some(path) = current_path.as_deref()
|
||||
&& let Some(normalized_line) = normalize_effective_line(added_line)
|
||||
{
|
||||
line_fingerprints.push(AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", path),
|
||||
line_hash: fingerprint_hash("line", &normalized_line),
|
||||
});
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with('-') {
|
||||
accepted_deleted_lines += 1;
|
||||
}
|
||||
}
|
||||
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines,
|
||||
accepted_deleted_lines,
|
||||
line_fingerprints,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fingerprint_hash(domain: &str, value: &str) -> String {
|
||||
let mut hasher = sha1::Sha1::new();
|
||||
hasher.update(b"file-line-v1\0");
|
||||
hasher.update(domain.as_bytes());
|
||||
hasher.update(b"\0");
|
||||
hasher.update(value.as_bytes());
|
||||
format!("{:x}", hasher.finalize())
|
||||
}
|
||||
|
||||
pub(crate) fn accepted_line_fingerprint_event_requests(
|
||||
input: AcceptedLineFingerprintEventInput,
|
||||
) -> Vec<TrackEventRequest> {
|
||||
let chunks = accepted_line_fingerprint_chunks(input.line_fingerprints);
|
||||
chunks
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, line_fingerprints)| {
|
||||
let is_first_chunk = index == 0;
|
||||
TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: input.event_type,
|
||||
turn_id: input.turn_id.clone(),
|
||||
thread_id: input.thread_id.clone(),
|
||||
product_surface: input.product_surface.clone(),
|
||||
model_slug: input.model_slug.clone(),
|
||||
completed_at: input.completed_at,
|
||||
repo_hash: input.repo_hash.clone(),
|
||||
accepted_added_lines: if is_first_chunk {
|
||||
input.accepted_added_lines
|
||||
} else {
|
||||
0
|
||||
},
|
||||
accepted_deleted_lines: if is_first_chunk {
|
||||
input.accepted_deleted_lines
|
||||
} else {
|
||||
0
|
||||
},
|
||||
line_fingerprints,
|
||||
},
|
||||
},
|
||||
))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub async fn accepted_line_repo_hash_for_cwd(cwd: &Path) -> Option<String> {
|
||||
let remotes = get_git_remote_urls_assume_git_repo(cwd).await?;
|
||||
remotes
|
||||
.get("origin")
|
||||
.or_else(|| remotes.values().next())
|
||||
.map(|remote_url| {
|
||||
let canonical_remote_url =
|
||||
canonicalize_git_remote_url(remote_url).unwrap_or_else(|| remote_url.to_string());
|
||||
fingerprint_hash("repo", &canonical_remote_url)
|
||||
})
|
||||
}
|
||||
|
||||
fn normalize_diff_path(path: &str) -> Option<String> {
|
||||
let path = path.trim();
|
||||
if path == "/dev/null" {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
path.strip_prefix("b/")
|
||||
.or_else(|| path.strip_prefix("a/"))
|
||||
.unwrap_or(path)
|
||||
.to_string(),
|
||||
)
|
||||
}
|
||||
|
||||
fn normalize_effective_line(line: &str) -> Option<String> {
|
||||
let normalized = line.split_whitespace().collect::<Vec<_>>().join(" ");
|
||||
if normalized.len() <= 3 {
|
||||
return None;
|
||||
}
|
||||
if !normalized
|
||||
.chars()
|
||||
.any(|ch| ch.is_alphanumeric() || ch == '_')
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(normalized)
|
||||
}
|
||||
|
||||
fn accepted_line_fingerprint_chunks(
|
||||
line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
) -> Vec<Vec<AcceptedLineFingerprint>> {
|
||||
if line_fingerprints.is_empty() {
|
||||
return vec![Vec::new()];
|
||||
}
|
||||
|
||||
let mut chunks = Vec::new();
|
||||
let mut current = Vec::new();
|
||||
let mut current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
|
||||
|
||||
for fingerprint in line_fingerprints {
|
||||
let item_bytes = accepted_line_fingerprint_json_bytes(&fingerprint);
|
||||
let separator_bytes = usize::from(!current.is_empty());
|
||||
if !current.is_empty()
|
||||
&& current_bytes + separator_bytes + item_bytes
|
||||
> ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES
|
||||
{
|
||||
chunks.push(current);
|
||||
current = Vec::new();
|
||||
current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
|
||||
}
|
||||
current_bytes += usize::from(!current.is_empty()) + item_bytes;
|
||||
current.push(fingerprint);
|
||||
}
|
||||
|
||||
if !current.is_empty() {
|
||||
chunks.push(current);
|
||||
}
|
||||
chunks
|
||||
}
|
||||
|
||||
fn accepted_line_fingerprint_json_bytes(fingerprint: &AcceptedLineFingerprint) -> usize {
|
||||
// {"path_hash":"...","line_hash":"..."} plus one byte of array comma
|
||||
// accounted for by the caller when needed.
|
||||
32 + fingerprint.path_hash.len() + fingerprint.line_hash.len()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parses_counts_and_effective_added_fingerprints() {
|
||||
let diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -1,3 +1,5 @@
|
||||
-old line
|
||||
+fn useful() {
|
||||
+}
|
||||
+ return user.id;
|
||||
context
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(
|
||||
summary,
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines: 3,
|
||||
accepted_deleted_lines: 1,
|
||||
line_fingerprints: vec![
|
||||
AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "fn useful() {"),
|
||||
},
|
||||
AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "return user.id;"),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_added_file_metadata_headers() {
|
||||
let diff = "\
|
||||
diff --git a/new.py b/new.py
|
||||
new file mode 100644
|
||||
index 0000000..1111111
|
||||
--- /dev/null
|
||||
+++ b/new.py
|
||||
@@ -0,0 +1 @@
|
||||
+print('hello')
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(summary.accepted_added_lines, 1);
|
||||
assert_eq!(summary.accepted_deleted_lines, 0);
|
||||
assert_eq!(summary.line_fingerprints.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_hunk_lines_that_look_like_file_headers() {
|
||||
let diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -1,2 +1,2 @@
|
||||
--- old value
|
||||
+++ new value
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(
|
||||
summary,
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines: 1,
|
||||
accepted_deleted_lines: 1,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "++ new value"),
|
||||
}],
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
use crate::client::AnalyticsEventsQueue;
|
||||
use crate::events::AppServerRpcTransport;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::CodexAppMentionedEventRequest;
|
||||
use crate::events::CodexAppServerClientMetadata;
|
||||
use crate::events::CodexAppUsedEventRequest;
|
||||
@@ -28,6 +30,7 @@ use crate::events::codex_hook_run_metadata;
|
||||
use crate::events::codex_plugin_metadata;
|
||||
use crate::events::codex_plugin_used_metadata;
|
||||
use crate::events::subagent_thread_started_event_request;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::AnalyticsJsonRpcError;
|
||||
use crate::facts::AppInvocation;
|
||||
@@ -89,6 +92,7 @@ use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::ThreadStatus as AppServerThreadStatus;
|
||||
use codex_app_server_protocol::Turn;
|
||||
use codex_app_server_protocol::TurnCompletedNotification;
|
||||
use codex_app_server_protocol::TurnDiffUpdatedNotification;
|
||||
use codex_app_server_protocol::TurnError as AppServerTurnError;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
@@ -636,6 +640,7 @@ fn sample_initialize_fact(connection_id: u64) -> AnalyticsFact {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -827,6 +832,206 @@ fn app_used_event_serializes_expected_shape() {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accepted_line_fingerprints_event_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: "turn-1".to_string(),
|
||||
thread_id: "thread-1".to_string(),
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
completed_at: 1710000000,
|
||||
repo_hash: Some("repo-hash-1".to_string()),
|
||||
accepted_added_lines: 42,
|
||||
accepted_deleted_lines: 40,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: "path-hash-1".to_string(),
|
||||
line_hash: "line-hash-1".to_string(),
|
||||
}],
|
||||
},
|
||||
},
|
||||
));
|
||||
|
||||
let payload = serde_json::to_value(&event).expect("serialize accepted line fingerprints event");
|
||||
|
||||
assert_eq!(
|
||||
payload,
|
||||
json!({
|
||||
"event_type": "codex_accepted_line_fingerprints",
|
||||
"event_params": {
|
||||
"event_type": "codex.accepted_line_fingerprints",
|
||||
"turn_id": "turn-1",
|
||||
"thread_id": "thread-1",
|
||||
"product_surface": "codex",
|
||||
"model_slug": "gpt-5.1-codex",
|
||||
"completed_at": 1710000000,
|
||||
"repo_hash": "repo-hash-1",
|
||||
"accepted_added_lines": 42,
|
||||
"accepted_deleted_lines": 40,
|
||||
"line_fingerprints": [
|
||||
{
|
||||
"path_hash": "path-hash-1",
|
||||
"line_hash": "line-hash-1"
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn reducer_chunks_large_accepted_line_fingerprint_events_without_repeating_counts() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut events = Vec::new();
|
||||
|
||||
ingest_turn_prerequisites(
|
||||
&mut reducer,
|
||||
&mut events,
|
||||
/*include_initialize*/ true,
|
||||
/*include_resolved_config*/ true,
|
||||
/*include_started*/ true,
|
||||
/*include_token_usage*/ true,
|
||||
)
|
||||
.await;
|
||||
events.clear();
|
||||
|
||||
let mut diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -0,0 +1,20000 @@
|
||||
"
|
||||
.to_string();
|
||||
for index in 0..20_000 {
|
||||
diff.push_str(&format!("+let value_{index} = {index};\n"));
|
||||
}
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
|
||||
TurnDiffUpdatedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
diff,
|
||||
},
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
assert!(events.is_empty());
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
let accepted_line_events = events
|
||||
.iter()
|
||||
.filter_map(|event| match event {
|
||||
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert!(accepted_line_events.len() > 1);
|
||||
let mut total_fingerprints = 0;
|
||||
for (index, event) in accepted_line_events.iter().enumerate() {
|
||||
assert_eq!(event.event_params.turn_id, "turn-2");
|
||||
assert_eq!(event.event_params.thread_id, "thread-2");
|
||||
total_fingerprints += event.event_params.line_fingerprints.len();
|
||||
if index == 0 {
|
||||
assert_eq!(event.event_params.accepted_added_lines, 20_000);
|
||||
assert_eq!(event.event_params.accepted_deleted_lines, 0);
|
||||
} else {
|
||||
assert_eq!(event.event_params.accepted_added_lines, 0);
|
||||
assert_eq!(event.event_params.accepted_deleted_lines, 0);
|
||||
}
|
||||
assert!(serde_json::to_vec(event).expect("serialize chunk").len() < 2_100_000);
|
||||
}
|
||||
assert_eq!(total_fingerprints, 20_000);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn reducer_emits_accepted_line_fingerprints_once_from_latest_turn_diff_on_completion() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut events = Vec::new();
|
||||
|
||||
ingest_turn_prerequisites(
|
||||
&mut reducer,
|
||||
&mut events,
|
||||
/*include_initialize*/ true,
|
||||
/*include_resolved_config*/ true,
|
||||
/*include_started*/ true,
|
||||
/*include_token_usage*/ true,
|
||||
)
|
||||
.await;
|
||||
events.clear();
|
||||
|
||||
for line in ["let old_value = 1;", "let latest_value = 2;"] {
|
||||
let diff = format!(
|
||||
"\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -0,0 +1 @@
|
||||
+{line}
|
||||
"
|
||||
);
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
|
||||
TurnDiffUpdatedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
diff,
|
||||
},
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
assert!(events.is_empty());
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
let accepted_line_events = events
|
||||
.iter()
|
||||
.filter_map(|event| match event {
|
||||
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(accepted_line_events.len(), 1);
|
||||
let event = accepted_line_events[0];
|
||||
assert_eq!(event.event_params.accepted_added_lines, 1);
|
||||
assert_eq!(event.event_params.line_fingerprints.len(), 1);
|
||||
assert_eq!(
|
||||
event.event_params.line_fingerprints[0].line_hash,
|
||||
crate::fingerprint_hash("line", "let latest_value = 2;")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compaction_event_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::Compaction(Box::new(CodexCompactionEventRequest {
|
||||
@@ -1122,6 +1327,7 @@ async fn initialize_caches_client_and_thread_lifecycle_publishes_once_initialize
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -1269,6 +1475,7 @@ async fn compaction_event_ingests_custom_fact() {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -1382,6 +1589,7 @@ async fn guardian_review_event_ingests_custom_fact_with_optional_target_item() {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
|
||||
@@ -30,6 +30,7 @@ use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ServerResponse;
|
||||
use codex_login::AuthManager;
|
||||
use codex_login::CodexAuth;
|
||||
use codex_login::default_client::create_client;
|
||||
use codex_plugin::PluginTelemetryMetadata;
|
||||
use std::collections::HashSet;
|
||||
@@ -352,6 +353,7 @@ impl AnalyticsEventsClient {
|
||||
notification,
|
||||
ServerNotification::TurnStarted(_)
|
||||
| ServerNotification::TurnCompleted(_)
|
||||
| ServerNotification::TurnDiffUpdated(_)
|
||||
| ServerNotification::ItemStarted(_)
|
||||
| ServerNotification::ItemCompleted(_)
|
||||
| ServerNotification::ItemGuardianApprovalReviewStarted(_)
|
||||
@@ -371,6 +373,7 @@ async fn send_track_events(
|
||||
if events.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(auth) = auth_manager.auth().await else {
|
||||
return;
|
||||
};
|
||||
@@ -380,12 +383,45 @@ async fn send_track_events(
|
||||
|
||||
let base_url = base_url.trim_end_matches('/');
|
||||
let url = format!("{base_url}/codex/analytics-events/events");
|
||||
for events in track_event_request_batches(events) {
|
||||
send_track_events_request(&auth, &url, events).await;
|
||||
}
|
||||
}
|
||||
|
||||
fn track_event_request_batches(events: Vec<TrackEventRequest>) -> Vec<Vec<TrackEventRequest>> {
|
||||
let mut batches = Vec::new();
|
||||
let mut current_batch = Vec::new();
|
||||
|
||||
for event in events {
|
||||
if event.should_send_in_isolated_request() {
|
||||
if !current_batch.is_empty() {
|
||||
batches.push(current_batch);
|
||||
current_batch = Vec::new();
|
||||
}
|
||||
batches.push(vec![event]);
|
||||
} else {
|
||||
current_batch.push(event);
|
||||
}
|
||||
}
|
||||
|
||||
if !current_batch.is_empty() {
|
||||
batches.push(current_batch);
|
||||
}
|
||||
|
||||
batches
|
||||
}
|
||||
|
||||
async fn send_track_events_request(auth: &CodexAuth, url: &str, events: Vec<TrackEventRequest>) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let payload = TrackEventsRequest { events };
|
||||
|
||||
let response = create_client()
|
||||
.post(&url)
|
||||
.post(url)
|
||||
.timeout(ANALYTICS_EVENTS_TIMEOUT)
|
||||
.headers(codex_model_provider::auth_provider_from_auth(&auth).to_auth_headers())
|
||||
.headers(codex_model_provider::auth_provider_from_auth(auth).to_auth_headers())
|
||||
.header("Content-Type", "application/json")
|
||||
.json(&payload)
|
||||
.send()
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
use super::AnalyticsEventsClient;
|
||||
use super::AnalyticsEventsQueue;
|
||||
use super::track_event_request_batches;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::SkillInvocationEventParams;
|
||||
use crate::events::SkillInvocationEventRequest;
|
||||
use crate::events::TrackEventRequest;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::InvocationType;
|
||||
use codex_app_server_protocol::ApprovalsReviewer as AppServerApprovalsReviewer;
|
||||
use codex_app_server_protocol::AskForApproval as AppServerAskForApproval;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
@@ -31,6 +39,47 @@ use std::sync::Mutex;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::mpsc::error::TryRecvError;
|
||||
|
||||
fn sample_accepted_line_fingerprint_event(thread_id: &str) -> TrackEventRequest {
|
||||
TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: "turn-1".to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
completed_at: 1,
|
||||
repo_hash: None,
|
||||
accepted_added_lines: 1,
|
||||
accepted_deleted_lines: 0,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: "path-hash".to_string(),
|
||||
line_hash: "line-hash".to_string(),
|
||||
}],
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn sample_regular_track_event(thread_id: &str) -> TrackEventRequest {
|
||||
TrackEventRequest::SkillInvocation(SkillInvocationEventRequest {
|
||||
event_type: "skill_invocation",
|
||||
skill_id: format!("skill-{thread_id}"),
|
||||
skill_name: "doc".to_string(),
|
||||
event_params: SkillInvocationEventParams {
|
||||
product_client_id: None,
|
||||
skill_scope: None,
|
||||
plugin_id: None,
|
||||
repo_url: None,
|
||||
thread_id: Some(thread_id.to_string()),
|
||||
turn_id: Some("turn-1".to_string()),
|
||||
invoke_type: Some(InvocationType::Explicit),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn client_with_receiver() -> (AnalyticsEventsClient, mpsc::Receiver<AnalyticsFact>) {
|
||||
let (sender, receiver) = mpsc::channel(8);
|
||||
let queue = AnalyticsEventsQueue {
|
||||
@@ -222,3 +271,23 @@ fn track_response_only_enqueues_analytics_relevant_responses() {
|
||||
);
|
||||
assert!(matches!(receiver.try_recv(), Err(TryRecvError::Empty)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn track_event_request_batches_only_isolates_accepted_line_fingerprint_events() {
|
||||
let batches = track_event_request_batches(vec![
|
||||
sample_regular_track_event("thread-1"),
|
||||
sample_regular_track_event("thread-2"),
|
||||
sample_accepted_line_fingerprint_event("thread-3"),
|
||||
sample_accepted_line_fingerprint_event("thread-4"),
|
||||
sample_regular_track_event("thread-5"),
|
||||
sample_regular_track_event("thread-6"),
|
||||
]);
|
||||
|
||||
assert_eq!(batches.len(), 4);
|
||||
assert_eq!(batches[0].len(), 2);
|
||||
assert_eq!(batches[1].len(), 1);
|
||||
assert_eq!(batches[2].len(), 1);
|
||||
assert_eq!(batches[3].len(), 2);
|
||||
assert!(batches[1][0].should_send_in_isolated_request());
|
||||
assert!(batches[2][0].should_send_in_isolated_request());
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AppInvocation;
|
||||
use crate::facts::CodexCompactionEvent;
|
||||
use crate::facts::CompactionImplementation;
|
||||
@@ -71,6 +72,7 @@ pub(crate) enum TrackEventRequest {
|
||||
CollabAgentToolCall(CodexCollabAgentToolCallEventRequest),
|
||||
WebSearch(CodexWebSearchEventRequest),
|
||||
ImageGeneration(CodexImageGenerationEventRequest),
|
||||
AcceptedLineFingerprints(Box<CodexAcceptedLineFingerprintsEventRequest>),
|
||||
#[allow(dead_code)]
|
||||
ReviewEvent(CodexReviewEventRequest),
|
||||
PluginUsed(CodexPluginUsedEventRequest),
|
||||
@@ -80,6 +82,32 @@ pub(crate) enum TrackEventRequest {
|
||||
PluginDisabled(CodexPluginEventRequest),
|
||||
}
|
||||
|
||||
impl TrackEventRequest {
|
||||
pub(crate) fn should_send_in_isolated_request(&self) -> bool {
|
||||
matches!(self, Self::AcceptedLineFingerprints(_))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct CodexAcceptedLineFingerprintsEventParams {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) turn_id: String,
|
||||
pub(crate) thread_id: String,
|
||||
pub(crate) product_surface: Option<String>,
|
||||
pub(crate) model_slug: Option<String>,
|
||||
pub(crate) completed_at: u64,
|
||||
pub(crate) repo_hash: Option<String>,
|
||||
pub(crate) accepted_added_lines: u64,
|
||||
pub(crate) accepted_deleted_lines: u64,
|
||||
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct CodexAcceptedLineFingerprintsEventRequest {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) event_params: CodexAcceptedLineFingerprintsEventParams,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct SkillInvocationEventRequest {
|
||||
pub(crate) event_type: &'static str,
|
||||
|
||||
@@ -28,6 +28,12 @@ use codex_protocol::protocol::TokenUsage;
|
||||
use serde::Serialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
|
||||
pub struct AcceptedLineFingerprint {
|
||||
pub path_hash: String,
|
||||
pub line_hash: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TrackEventsContext {
|
||||
pub model_slug: String,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
mod accepted_lines;
|
||||
mod client;
|
||||
mod events;
|
||||
mod facts;
|
||||
@@ -6,6 +7,8 @@ mod reducer;
|
||||
use std::time::SystemTime;
|
||||
use std::time::UNIX_EPOCH;
|
||||
|
||||
pub use accepted_lines::accepted_line_fingerprints_from_unified_diff;
|
||||
pub use accepted_lines::fingerprint_hash;
|
||||
pub use client::AnalyticsEventsClient;
|
||||
pub use events::AppServerRpcTransport;
|
||||
pub use events::GuardianApprovalRequestSource;
|
||||
@@ -17,6 +20,7 @@ pub use events::GuardianReviewSessionKind;
|
||||
pub use events::GuardianReviewTerminalStatus;
|
||||
pub use events::GuardianReviewTrackContext;
|
||||
pub use events::GuardianReviewedAction;
|
||||
pub use facts::AcceptedLineFingerprint;
|
||||
pub use facts::AnalyticsJsonRpcError;
|
||||
pub use facts::AppInvocation;
|
||||
pub use facts::CodexCompactionEvent;
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
use crate::accepted_lines::AcceptedLineFingerprintEventInput;
|
||||
use crate::accepted_lines::accepted_line_fingerprint_event_requests;
|
||||
use crate::accepted_lines::accepted_line_fingerprints_from_unified_diff;
|
||||
use crate::accepted_lines::accepted_line_repo_hash_for_cwd;
|
||||
use crate::events::AppServerRpcTransport;
|
||||
use crate::events::CodexAppMentionedEventRequest;
|
||||
use crate::events::CodexAppServerClientMetadata;
|
||||
@@ -104,6 +108,7 @@ use codex_protocol::protocol::TokenUsage;
|
||||
use sha1::Digest;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct AnalyticsReducer {
|
||||
@@ -264,6 +269,7 @@ struct TurnState {
|
||||
started_at: Option<u64>,
|
||||
token_usage: Option<TokenUsage>,
|
||||
completed: Option<CompletedTurnState>,
|
||||
latest_diff: Option<String>,
|
||||
steer_count: usize,
|
||||
}
|
||||
|
||||
@@ -305,7 +311,7 @@ impl AnalyticsReducer {
|
||||
response,
|
||||
} => {
|
||||
if let Some(response) = response.into_client_response(request_id) {
|
||||
self.ingest_response(connection_id, response, out);
|
||||
self.ingest_response(connection_id, response, out).await;
|
||||
}
|
||||
}
|
||||
AnalyticsFact::ErrorResponse {
|
||||
@@ -317,7 +323,7 @@ impl AnalyticsReducer {
|
||||
self.ingest_error_response(connection_id, request_id, error_type, out);
|
||||
}
|
||||
AnalyticsFact::Notification(notification) => {
|
||||
self.ingest_notification(*notification, out);
|
||||
self.ingest_notification(*notification, out).await;
|
||||
}
|
||||
AnalyticsFact::ServerRequest {
|
||||
connection_id: _connection_id,
|
||||
@@ -338,10 +344,10 @@ impl AnalyticsReducer {
|
||||
self.ingest_guardian_review(*input, out);
|
||||
}
|
||||
CustomAnalyticsFact::TurnResolvedConfig(input) => {
|
||||
self.ingest_turn_resolved_config(*input, out);
|
||||
self.ingest_turn_resolved_config(*input, out).await;
|
||||
}
|
||||
CustomAnalyticsFact::TurnTokenUsage(input) => {
|
||||
self.ingest_turn_token_usage(*input, out);
|
||||
self.ingest_turn_token_usage(*input, out).await;
|
||||
}
|
||||
CustomAnalyticsFact::SkillInvoked(input) => {
|
||||
self.ingest_skill_invoked(input, out).await;
|
||||
@@ -473,7 +479,7 @@ impl AnalyticsReducer {
|
||||
}
|
||||
}
|
||||
|
||||
fn ingest_turn_resolved_config(
|
||||
async fn ingest_turn_resolved_config(
|
||||
&mut self,
|
||||
input: TurnResolvedConfigFact,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -489,15 +495,16 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.thread_id = Some(thread_id);
|
||||
turn_state.num_input_images = Some(num_input_images);
|
||||
turn_state.resolved_config = Some(input);
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
}
|
||||
|
||||
fn ingest_turn_token_usage(
|
||||
async fn ingest_turn_token_usage(
|
||||
&mut self,
|
||||
input: TurnTokenUsageFact,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -511,11 +518,12 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.thread_id = Some(input.thread_id);
|
||||
turn_state.token_usage = Some(input.token_usage);
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
}
|
||||
|
||||
async fn ingest_skill_invoked(
|
||||
@@ -622,7 +630,7 @@ impl AnalyticsReducer {
|
||||
});
|
||||
}
|
||||
|
||||
fn ingest_response(
|
||||
async fn ingest_response(
|
||||
&mut self,
|
||||
connection_id: u64,
|
||||
response: ClientResponse,
|
||||
@@ -674,12 +682,13 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.connection_id = Some(connection_id);
|
||||
turn_state.thread_id = Some(pending_request.thread_id);
|
||||
turn_state.num_input_images = Some(pending_request.num_input_images);
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
}
|
||||
ClientResponse::TurnSteer {
|
||||
request_id,
|
||||
@@ -741,7 +750,7 @@ impl AnalyticsReducer {
|
||||
);
|
||||
}
|
||||
|
||||
fn ingest_notification(
|
||||
async fn ingest_notification(
|
||||
&mut self,
|
||||
notification: ServerNotification,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -812,6 +821,7 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.started_at = notification
|
||||
@@ -819,6 +829,24 @@ impl AnalyticsReducer {
|
||||
.started_at
|
||||
.and_then(|started_at| u64::try_from(started_at).ok());
|
||||
}
|
||||
ServerNotification::TurnDiffUpdated(notification) => {
|
||||
let turn_state =
|
||||
self.turns
|
||||
.entry(notification.turn_id.clone())
|
||||
.or_insert(TurnState {
|
||||
connection_id: None,
|
||||
thread_id: None,
|
||||
num_input_images: None,
|
||||
resolved_config: None,
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.thread_id = Some(notification.thread_id);
|
||||
turn_state.latest_diff = Some(notification.diff);
|
||||
}
|
||||
ServerNotification::TurnCompleted(notification) => {
|
||||
let turn_state =
|
||||
self.turns
|
||||
@@ -831,6 +859,7 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
});
|
||||
turn_state.completed = Some(CompletedTurnState {
|
||||
@@ -850,7 +879,7 @@ impl AnalyticsReducer {
|
||||
.and_then(|duration_ms| u64::try_from(duration_ms).ok()),
|
||||
});
|
||||
let turn_id = notification.turn.id;
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -986,7 +1015,7 @@ impl AnalyticsReducer {
|
||||
}));
|
||||
}
|
||||
|
||||
fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
|
||||
async fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
|
||||
let Some(turn_state) = self.turns.get(turn_id) else {
|
||||
return;
|
||||
};
|
||||
@@ -1019,18 +1048,23 @@ impl AnalyticsReducer {
|
||||
warn_missing_analytics_context(&drop_site, MissingAnalyticsContext::ThreadMetadata);
|
||||
return;
|
||||
};
|
||||
out.push(TrackEventRequest::TurnEvent(Box::new(
|
||||
CodexTurnEventRequest {
|
||||
event_type: "codex_turn_event",
|
||||
event_params: codex_turn_event_params(
|
||||
connection_state.app_server_client.clone(),
|
||||
connection_state.runtime.clone(),
|
||||
turn_id.to_string(),
|
||||
turn_state,
|
||||
thread_metadata,
|
||||
),
|
||||
},
|
||||
)));
|
||||
let turn_event = TrackEventRequest::TurnEvent(Box::new(CodexTurnEventRequest {
|
||||
event_type: "codex_turn_event",
|
||||
event_params: codex_turn_event_params(
|
||||
connection_state.app_server_client.clone(),
|
||||
connection_state.runtime.clone(),
|
||||
turn_id.to_string(),
|
||||
turn_state,
|
||||
thread_metadata,
|
||||
),
|
||||
}));
|
||||
let accepted_line_event = accepted_line_event_input(turn_id, turn_state);
|
||||
|
||||
out.push(turn_event);
|
||||
if let Some((mut input, cwd)) = accepted_line_event {
|
||||
input.repo_hash = accepted_line_repo_hash_for_cwd(cwd.as_path()).await;
|
||||
out.extend(accepted_line_fingerprint_event_requests(input));
|
||||
}
|
||||
self.turns.remove(turn_id);
|
||||
}
|
||||
|
||||
@@ -1642,6 +1676,36 @@ fn web_search_query_count(query: &str, action: Option<&WebSearchAction>) -> Opti
|
||||
}
|
||||
}
|
||||
|
||||
fn accepted_line_event_input(
|
||||
turn_id: &str,
|
||||
turn_state: &TurnState,
|
||||
) -> Option<(AcceptedLineFingerprintEventInput, PathBuf)> {
|
||||
let latest_diff = turn_state.latest_diff.as_deref()?;
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(latest_diff);
|
||||
if summary.accepted_added_lines == 0 && summary.accepted_deleted_lines == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let thread_id = turn_state.thread_id.clone()?;
|
||||
let resolved_config = turn_state.resolved_config.clone()?;
|
||||
|
||||
Some((
|
||||
AcceptedLineFingerprintEventInput {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: turn_id.to_string(),
|
||||
thread_id,
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some(resolved_config.model.clone()),
|
||||
completed_at: now_unix_seconds(),
|
||||
repo_hash: None,
|
||||
accepted_added_lines: summary.accepted_added_lines,
|
||||
accepted_deleted_lines: summary.accepted_deleted_lines,
|
||||
line_fingerprints: summary.line_fingerprints,
|
||||
},
|
||||
resolved_config.permission_profile_cwd,
|
||||
))
|
||||
}
|
||||
|
||||
fn codex_turn_event_params(
|
||||
app_server_client: CodexAppServerClientMetadata,
|
||||
runtime: CodexRuntimeMetadata,
|
||||
|
||||
@@ -49,7 +49,6 @@ use codex_config::RemoteThreadConfigLoader;
|
||||
use codex_config::ThreadConfigLoader;
|
||||
use codex_core::config::Config;
|
||||
pub use codex_exec_server::EnvironmentManager;
|
||||
pub use codex_exec_server::EnvironmentManagerArgs;
|
||||
pub use codex_exec_server::ExecServerRuntimePaths;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
@@ -375,6 +374,7 @@ impl InProcessClientStartArgs {
|
||||
pub fn initialize_params(&self) -> InitializeParams {
|
||||
let capabilities = InitializeCapabilities {
|
||||
experimental_api: self.experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
||||
@@ -73,6 +73,7 @@ impl RemoteAppServerConnectArgs {
|
||||
fn initialize_params(&self) -> InitializeParams {
|
||||
let capabilities = InitializeCapabilities {
|
||||
experimental_api: self.experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
||||
6
codex-rs/app-server-daemon/BUILD.bazel
Normal file
6
codex-rs/app-server-daemon/BUILD.bazel
Normal file
@@ -0,0 +1,6 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "app-server-daemon",
|
||||
crate_name = "codex_app_server_daemon",
|
||||
)
|
||||
39
codex-rs/app-server-daemon/Cargo.toml
Normal file
39
codex-rs/app-server-daemon/Cargo.toml
Normal file
@@ -0,0 +1,39 @@
|
||||
[package]
|
||||
name = "codex-app-server-daemon"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "codex_app_server_daemon"
|
||||
path = "src/lib.rs"
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-app-server-transport = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-uds = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["rustls-tls"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"fs",
|
||||
"io-util",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
"time",
|
||||
] }
|
||||
tokio-tungstenite = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
104
codex-rs/app-server-daemon/README.md
Normal file
104
codex-rs/app-server-daemon/README.md
Normal file
@@ -0,0 +1,104 @@
|
||||
# codex-app-server-daemon
|
||||
|
||||
> `codex-app-server-daemon` is experimental and its lifecycle contract may
|
||||
> change while the remote-management flow is still being developed.
|
||||
|
||||
`codex-app-server-daemon` backs the machine-readable `codex app-server`
|
||||
lifecycle commands used by remote clients such as the desktop and mobile apps.
|
||||
It is intended for Codex instances launched over SSH, including fresh developer
|
||||
machines that should expose app-server with `remote_control` enabled.
|
||||
|
||||
## Platform support
|
||||
|
||||
The current daemon implementation is Unix-only. It uses pidfile-backed
|
||||
daemonization plus Unix process and file-locking primitives, and does not yet
|
||||
support Windows lifecycle management.
|
||||
|
||||
## Commands
|
||||
|
||||
```sh
|
||||
codex app-server daemon start
|
||||
codex app-server daemon restart
|
||||
codex app-server daemon enable-remote-control
|
||||
codex app-server daemon disable-remote-control
|
||||
codex app-server daemon stop
|
||||
codex app-server daemon version
|
||||
codex app-server daemon bootstrap --remote-control
|
||||
```
|
||||
|
||||
On success, every command writes exactly one JSON object to stdout. Consumers
|
||||
should parse that JSON rather than relying on human-readable text. Lifecycle
|
||||
responses report the resolved backend, socket path, local CLI version, and
|
||||
running app-server version when applicable.
|
||||
|
||||
## Bootstrap flow
|
||||
|
||||
For a new remote machine:
|
||||
|
||||
```sh
|
||||
curl -fsSL https://chatgpt.com/codex/install.sh | sh
|
||||
$HOME/.codex/packages/standalone/current/codex app-server daemon bootstrap --remote-control
|
||||
```
|
||||
|
||||
`bootstrap` requires the standalone managed install. It records the daemon
|
||||
settings under `CODEX_HOME/app-server-daemon/`, starts app-server as a
|
||||
pidfile-backed detached process, and launches a detached updater loop.
|
||||
|
||||
## Installation and update cases
|
||||
|
||||
The daemon assumes Codex is installed through `install.sh` and always launches
|
||||
the standalone managed binary under `CODEX_HOME`.
|
||||
|
||||
| Situation | What starts | Does this daemon fetch new binaries? | Does a running app-server eventually move to a newer binary on its own? |
|
||||
| --- | --- | --- | --- |
|
||||
| `install.sh` has run, but only `start` is used | `start` uses `CODEX_HOME/packages/standalone/current/codex` | No | No. The managed path is used when starting or restarting, but no updater is installed. |
|
||||
| `install.sh` has run, then `bootstrap` is used | The pidfile backend uses `CODEX_HOME/packages/standalone/current/codex` | Yes. Bootstrap launches a detached updater loop that runs `install.sh` hourly. | Yes, while that updater process is alive. After a successful fetch, it restarts a currently running app-server only when the managed binary reports a different version. |
|
||||
| Some other tool updates the managed binary path | The next fresh start or restart uses the updated file at that path | No | Not automatically. The existing process keeps the old executable image until an explicit `restart`. |
|
||||
|
||||
### Standalone installs
|
||||
|
||||
For installs created by `install.sh`:
|
||||
|
||||
- lifecycle commands always use the standalone managed binary path
|
||||
- `bootstrap` is supported
|
||||
- `bootstrap` starts a detached pid-backed updater loop that fetches via
|
||||
`install.sh`, then restarts app-server if it is running on a different version
|
||||
- the updater loop is not reboot-persistent; it must be started again by
|
||||
rerunning `bootstrap` after a reboot
|
||||
|
||||
### Out-of-band updates
|
||||
|
||||
This daemon does not watch arbitrary executable files for replacement. If some
|
||||
other tool updates a binary that the daemon would use on its next launch:
|
||||
|
||||
- a currently running app-server remains on the old executable image
|
||||
- `restart` will launch the updated binary
|
||||
- for bootstrapped daemons, the detached updater loop only reacts to updates it
|
||||
fetched itself; it does not watch arbitrary file replacement
|
||||
|
||||
## Lifecycle semantics
|
||||
|
||||
`start` is idempotent and returns after app-server is ready to answer the normal
|
||||
JSON-RPC initialize handshake on the Unix control socket.
|
||||
|
||||
`restart` stops any managed daemon and starts it again.
|
||||
|
||||
`enable-remote-control` and `disable-remote-control` persist the launch setting
|
||||
for future starts. If a managed app-server is already running, they restart it
|
||||
so the new setting takes effect immediately.
|
||||
|
||||
`stop` sends a graceful termination request first, then sends a second
|
||||
termination signal after the grace window if the process is still alive.
|
||||
|
||||
All mutating lifecycle commands are serialized per `CODEX_HOME`, so a concurrent
|
||||
`start`, `restart`, `enable-remote-control`, `disable-remote-control`, `stop`,
|
||||
or `bootstrap` does not race another in-flight lifecycle operation.
|
||||
|
||||
## State
|
||||
|
||||
The daemon stores its local state under `CODEX_HOME/app-server-daemon/`:
|
||||
|
||||
- `settings.json` for persisted launch settings
|
||||
- `app-server.pid` for the app-server process record
|
||||
- `app-server-updater.pid` for the pid-backed standalone updater loop
|
||||
- `daemon.lock` for daemon-wide lifecycle serialization
|
||||
33
codex-rs/app-server-daemon/src/backend/mod.rs
Normal file
33
codex-rs/app-server-daemon/src/backend/mod.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
mod pid;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::Serialize;
|
||||
|
||||
pub(crate) use pid::PidBackend;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum BackendKind {
|
||||
Pid,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct BackendPaths {
|
||||
pub(crate) codex_bin: PathBuf,
|
||||
pub(crate) pid_file: PathBuf,
|
||||
pub(crate) update_pid_file: PathBuf,
|
||||
pub(crate) remote_control_enabled: bool,
|
||||
}
|
||||
|
||||
pub(crate) fn pid_backend(paths: BackendPaths) -> PidBackend {
|
||||
PidBackend::new(
|
||||
paths.codex_bin,
|
||||
paths.pid_file,
|
||||
paths.remote_control_enabled,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn pid_update_loop_backend(paths: BackendPaths) -> PidBackend {
|
||||
PidBackend::new_update_loop(paths.codex_bin, paths.update_pid_file)
|
||||
}
|
||||
600
codex-rs/app-server-daemon/src/backend/pid.rs
Normal file
600
codex-rs/app-server-daemon/src/backend/pid.rs
Normal file
@@ -0,0 +1,600 @@
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
#[cfg(unix)]
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::fs;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
use tokio::time::sleep;
|
||||
|
||||
const STOP_POLL_INTERVAL: Duration = Duration::from_millis(50);
|
||||
const STOP_GRACE_PERIOD: Duration = Duration::from_secs(60);
|
||||
const STOP_TIMEOUT: Duration = Duration::from_secs(70);
|
||||
const START_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
|
||||
#[derive(Debug)]
|
||||
#[cfg_attr(not(unix), allow(dead_code))]
|
||||
pub(crate) struct PidBackend {
|
||||
codex_bin: PathBuf,
|
||||
pid_file: PathBuf,
|
||||
lock_file: PathBuf,
|
||||
command_kind: PidCommandKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct PidRecord {
|
||||
pid: u32,
|
||||
process_start_time: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
enum PidFileState {
|
||||
Missing,
|
||||
Starting,
|
||||
Running(PidRecord),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[cfg_attr(not(unix), allow(dead_code))]
|
||||
enum PidCommandKind {
|
||||
AppServer { remote_control_enabled: bool },
|
||||
UpdateLoop,
|
||||
}
|
||||
|
||||
impl PidBackend {
|
||||
pub(crate) fn new(codex_bin: PathBuf, pid_file: PathBuf, remote_control_enabled: bool) -> Self {
|
||||
let lock_file = pid_file.with_extension("pid.lock");
|
||||
Self {
|
||||
codex_bin,
|
||||
pid_file,
|
||||
lock_file,
|
||||
command_kind: PidCommandKind::AppServer {
|
||||
remote_control_enabled,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_update_loop(codex_bin: PathBuf, pid_file: PathBuf) -> Self {
|
||||
let lock_file = pid_file.with_extension("pid.lock");
|
||||
Self {
|
||||
codex_bin,
|
||||
pid_file,
|
||||
lock_file,
|
||||
command_kind: PidCommandKind::UpdateLoop,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn is_starting_or_running(&self) -> Result<bool> {
|
||||
loop {
|
||||
match self.read_pid_file_state().await? {
|
||||
PidFileState::Missing => return Ok(false),
|
||||
PidFileState::Starting => return Ok(true),
|
||||
PidFileState::Running(record) => {
|
||||
if self.record_is_active(&record).await? {
|
||||
return Ok(true);
|
||||
}
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(false),
|
||||
PidFileState::Starting | PidFileState::Running(_) => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn start(&self) -> Result<Option<u32>> {
|
||||
if let Some(parent) = self.pid_file.parent() {
|
||||
fs::create_dir_all(parent)
|
||||
.await
|
||||
.with_context(|| format!("failed to create pid directory {}", parent.display()))?;
|
||||
}
|
||||
let reservation_lock = self.acquire_reservation_lock().await?;
|
||||
let _pid_file = loop {
|
||||
match fs::OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(&self.pid_file)
|
||||
.await
|
||||
{
|
||||
Ok(pid_file) => break pid_file,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
match self.read_pid_file_state_with_lock_held().await? {
|
||||
PidFileState::Missing => continue,
|
||||
PidFileState::Running(record) => {
|
||||
if self.record_is_active(&record).await? {
|
||||
return Ok(None);
|
||||
}
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
continue;
|
||||
}
|
||||
PidFileState::Starting => {
|
||||
unreachable!("lock holder cannot observe starting")
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to reserve pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
let mut command = Command::new(&self.codex_bin);
|
||||
command
|
||||
.args(self.command_args())
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null());
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
unsafe {
|
||||
command.pre_exec(|| {
|
||||
if libc::setsid() == -1 {
|
||||
return Err(std::io::Error::last_os_error());
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let child = match command.spawn() {
|
||||
Ok(child) => child,
|
||||
Err(err) => {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!(
|
||||
"failed to spawn detached app-server process using {}",
|
||||
self.codex_bin.display()
|
||||
)
|
||||
});
|
||||
}
|
||||
};
|
||||
let pid = child
|
||||
.id()
|
||||
.context("spawned app-server process has no pid")?;
|
||||
let record = match read_process_start_time(pid).await {
|
||||
Ok(process_start_time) => PidRecord {
|
||||
pid,
|
||||
process_start_time,
|
||||
},
|
||||
Err(err) => {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
let contents = serde_json::to_vec(&record).context("failed to serialize pid record")?;
|
||||
let temp_pid_file = self.pid_file.with_extension("pid.tmp");
|
||||
if let Err(err) = fs::write(&temp_pid_file, &contents).await {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to write pid temp file {}", temp_pid_file.display())
|
||||
});
|
||||
}
|
||||
if let Err(err) = fs::rename(&temp_pid_file, &self.pid_file).await {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&temp_pid_file).await;
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to publish pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
drop(reservation_lock);
|
||||
Ok(Some(pid))
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
pub(crate) async fn start(&self) -> Result<Option<u32>> {
|
||||
bail!("pid-managed app-server startup is unsupported on this platform")
|
||||
}
|
||||
|
||||
pub(crate) async fn stop(&self) -> Result<()> {
|
||||
loop {
|
||||
let Some(record) = self.wait_for_pid_start().await? else {
|
||||
return Ok(());
|
||||
};
|
||||
if !self.record_is_active(&record).await? {
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(()),
|
||||
PidFileState::Starting | PidFileState::Running(_) => continue,
|
||||
}
|
||||
}
|
||||
|
||||
let pid = record.pid;
|
||||
self.terminate_process(pid)?;
|
||||
let started_at = tokio::time::Instant::now();
|
||||
let deadline = tokio::time::Instant::now() + STOP_TIMEOUT;
|
||||
let mut forced = false;
|
||||
while tokio::time::Instant::now() < deadline {
|
||||
if !self.record_is_active(&record).await? {
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(()),
|
||||
PidFileState::Starting | PidFileState::Running(_) => break,
|
||||
}
|
||||
}
|
||||
if !forced && started_at.elapsed() >= STOP_GRACE_PERIOD {
|
||||
self.force_terminate_process(pid)?;
|
||||
forced = true;
|
||||
}
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
|
||||
if self.record_is_active(&record).await? {
|
||||
bail!("timed out waiting for pid-managed app server {pid} to stop");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn wait_for_pid_start(&self) -> Result<Option<PidRecord>> {
|
||||
let deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
loop {
|
||||
match self.read_pid_file_state().await? {
|
||||
PidFileState::Missing => return Ok(None),
|
||||
PidFileState::Running(record) => return Ok(Some(record)),
|
||||
PidFileState::Starting if tokio::time::Instant::now() < deadline => {
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
PidFileState::Starting => {
|
||||
bail!(
|
||||
"timed out waiting for pid reservation in {} to finish initializing",
|
||||
self.pid_file.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn read_pid_file_state(&self) -> Result<PidFileState> {
|
||||
let contents = match fs::read_to_string(&self.pid_file).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return if reservation_lock_is_active(&self.lock_file).await? {
|
||||
Ok(PidFileState::Starting)
|
||||
} else {
|
||||
Ok(PidFileState::Missing)
|
||||
};
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to read pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
};
|
||||
if contents.trim().is_empty() {
|
||||
match inspect_empty_pid_reservation(&self.pid_file, &self.lock_file).await? {
|
||||
EmptyPidReservation::Active => {
|
||||
return Ok(PidFileState::Starting);
|
||||
}
|
||||
EmptyPidReservation::Stale => {
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
EmptyPidReservation::Record(record) => return Ok(PidFileState::Running(record)),
|
||||
}
|
||||
}
|
||||
let record = serde_json::from_str(&contents)
|
||||
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
|
||||
Ok(PidFileState::Running(record))
|
||||
}
|
||||
|
||||
async fn read_pid_file_state_with_lock_held(&self) -> Result<PidFileState> {
|
||||
let contents = match fs::read_to_string(&self.pid_file).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to read pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
};
|
||||
if contents.trim().is_empty() {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
let record = serde_json::from_str(&contents)
|
||||
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
|
||||
Ok(PidFileState::Running(record))
|
||||
}
|
||||
|
||||
async fn refresh_after_stale_record(&self, expected: &PidRecord) -> Result<PidFileState> {
|
||||
let reservation_lock = self.acquire_reservation_lock().await?;
|
||||
let state = match self.read_pid_file_state_with_lock_held().await? {
|
||||
PidFileState::Running(record) if record == *expected => {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
PidFileState::Missing
|
||||
}
|
||||
state => state,
|
||||
};
|
||||
drop(reservation_lock);
|
||||
Ok(state)
|
||||
}
|
||||
|
||||
async fn acquire_reservation_lock(&self) -> Result<fs::File> {
|
||||
let reservation_lock = fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.write(true)
|
||||
.open(&self.lock_file)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!("failed to open pid lock file {}", self.lock_file.display())
|
||||
})?;
|
||||
let lock_deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
while !try_lock_file(&reservation_lock)? {
|
||||
if tokio::time::Instant::now() >= lock_deadline {
|
||||
bail!(
|
||||
"timed out waiting for pid lock {}",
|
||||
self.lock_file.display()
|
||||
);
|
||||
}
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
Ok(reservation_lock)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn command_args(&self) -> Vec<&'static str> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer {
|
||||
remote_control_enabled: true,
|
||||
} => vec![
|
||||
"--enable",
|
||||
"remote_control",
|
||||
"app-server",
|
||||
"--listen",
|
||||
"unix://",
|
||||
],
|
||||
PidCommandKind::AppServer {
|
||||
remote_control_enabled: false,
|
||||
} => vec!["app-server", "--listen", "unix://"],
|
||||
PidCommandKind::UpdateLoop => vec!["app-server", "daemon", "pid-update-loop"],
|
||||
}
|
||||
}
|
||||
|
||||
fn terminate_process(&self, pid: u32) -> Result<()> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer { .. } => terminate_process(pid),
|
||||
PidCommandKind::UpdateLoop => terminate_process(pid),
|
||||
}
|
||||
}
|
||||
|
||||
fn force_terminate_process(&self, pid: u32) -> Result<()> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer { .. } => force_terminate_process(pid),
|
||||
PidCommandKind::UpdateLoop => force_terminate_process_group(pid),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether `record` still names a live OS process whose start time
/// matches the recorded one (pid-reuse guard).
async fn record_is_active(&self, record: &PidRecord) -> Result<bool> {
    process_matches_record(record).await
}
|
||||
}
|
||||
|
||||
/// Best-effort liveness probe via `kill(pid, 0)`.
#[cfg(unix)]
fn process_exists(pid: u32) -> bool {
    // A pid that does not fit in pid_t cannot name a real process.
    let Ok(pid) = libc::pid_t::try_from(pid) else {
        return false;
    };
    // SAFETY: kill with signal 0 performs existence/permission checks only;
    // no signal is delivered and no memory is shared.
    let result = unsafe { libc::kill(pid, 0) };
    // EPERM means the process exists but we lack permission to signal it.
    result == 0 || std::io::Error::last_os_error().raw_os_error() == Some(libc::EPERM)
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn terminate_process(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(raw_pid, libc::SIGTERM) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to terminate pid-managed app server {pid}"))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn force_terminate_process(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(raw_pid, libc::SIGKILL) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to force terminate pid-managed app server {pid}"))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn force_terminate_process_group(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed updater pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(-raw_pid, libc::SIGKILL) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to force terminate pid-managed updater group {pid}"))
|
||||
}
|
||||
|
||||
/// Non-Unix fallback: process signalling is unavailable, so shutdown fails.
#[cfg(not(unix))]
fn terminate_process(_pid: u32) -> Result<()> {
    bail!("pid-managed app-server shutdown is unsupported on this platform")
}
|
||||
|
||||
/// Non-Unix fallback: forced shutdown is unsupported without signals.
#[cfg(not(unix))]
fn force_terminate_process(_pid: u32) -> Result<()> {
    bail!("pid-managed app-server shutdown is unsupported on this platform")
}
|
||||
|
||||
/// Non-Unix fallback: process groups cannot be signalled here.
#[cfg(not(unix))]
fn force_terminate_process_group(_pid: u32) -> Result<()> {
    bail!("pid-managed updater shutdown is unsupported on this platform")
}
|
||||
|
||||
/// Checks that `record.pid` is alive *and* was started at the recorded time,
/// guarding against pid reuse by an unrelated process.
#[cfg(unix)]
async fn process_matches_record(record: &PidRecord) -> Result<bool> {
    if !process_exists(record.pid) {
        return Ok(false);
    }

    match read_process_start_time(record.pid).await {
        Ok(start_time) => Ok(start_time == record.process_start_time),
        // The process may exit between the existence check and the start-time
        // read; treat that race as "no longer running" rather than an error.
        Err(_err) if !process_exists(record.pid) => Ok(false),
        Err(err) => Err(err),
    }
}
|
||||
|
||||
/// Non-Unix fallback: process identity cannot be verified, so records are
/// never considered live.
#[cfg(not(unix))]
async fn process_matches_record(_record: &PidRecord) -> Result<bool> {
    Ok(false)
}
|
||||
|
||||
/// Outcome of inspecting a pid file that was observed empty on disk.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(not(unix), allow(dead_code))]
enum EmptyPidReservation {
    /// A starter currently holds the reservation lock: startup is in flight.
    Active,
    /// Nobody holds the lock; the empty file is leftover and safe to remove.
    Stale,
    /// The file was re-read and now contains a full record.
    Record(PidRecord),
}
|
||||
|
||||
/// Attempts a non-blocking exclusive `flock` on `file`.
/// Returns `Ok(true)` on success and `Ok(false)` when another process holds
/// the lock; any other errno is surfaced as an error.
#[cfg(unix)]
fn try_lock_file(file: &fs::File) -> Result<bool> {
    use std::os::fd::AsRawFd;

    // SAFETY: flock only operates on the valid, open file descriptor.
    let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
    if result == 0 {
        return Ok(true);
    }

    let err = std::io::Error::last_os_error();
    // EWOULDBLOCK is the defined "lock is held elsewhere" outcome of LOCK_NB.
    if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
        return Ok(false);
    }
    Err(err).context("failed to lock pid reservation")
}
|
||||
|
||||
/// Non-Unix fallback: advisory locking underpins startup coordination, so
/// refuse outright.
#[cfg(not(unix))]
fn try_lock_file(_file: &fs::File) -> Result<bool> {
    bail!("pid-managed app-server startup is unsupported on this platform")
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn reservation_lock_is_active(path: &Path) -> Result<bool> {
|
||||
let file = match fs::OpenOptions::new()
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.open(path)
|
||||
.await
|
||||
{
|
||||
Ok(file) => file,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Ok(false);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err)
|
||||
.with_context(|| format!("failed to inspect pid lock file {}", path.display()));
|
||||
}
|
||||
};
|
||||
Ok(!try_lock_file(&file)?)
|
||||
}
|
||||
|
||||
/// Non-Unix fallback: advisory locks are unavailable, so no reservation can
/// ever be active.
#[cfg(not(unix))]
async fn reservation_lock_is_active(_path: &Path) -> Result<bool> {
    Ok(false)
}
|
||||
|
||||
/// Classifies a pid file that was observed empty: a starter may still be
/// holding the reservation lock (`Active`), the file may be abandoned
/// (`Stale`), or a full record may have been written since (`Record`).
#[cfg(unix)]
async fn inspect_empty_pid_reservation(
    pid_path: &Path,
    lock_path: &Path,
) -> Result<EmptyPidReservation> {
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(lock_path)
        .await
    {
        Ok(file) => file,
        Err(err) => {
            return Err(err).with_context(|| {
                format!("failed to inspect pid lock file {}", lock_path.display())
            });
        }
    };
    // Failing to take the lock means a starter is mid-flight.
    if !try_lock_file(&file)? {
        return Ok(EmptyPidReservation::Active);
    }

    // We hold the lock now, so re-read the pid file to see what is actually
    // there; a starter that follows the locking protocol cannot be writing it
    // concurrently.
    let contents = match fs::read_to_string(pid_path).await {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Ok(EmptyPidReservation::Stale);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to reread pid file {}", pid_path.display()));
        }
    };
    // Still empty while unlocked: leftover file, delete it (best effort).
    if contents.trim().is_empty() {
        let _ = fs::remove_file(pid_path).await;
        return Ok(EmptyPidReservation::Stale);
    }

    let record = serde_json::from_str(&contents)
        .with_context(|| format!("invalid pid file contents in {}", pid_path.display()))?;
    Ok(EmptyPidReservation::Record(record))
}
|
||||
|
||||
/// Non-Unix fallback: without flock-based reservations every empty pid file
/// is treated as stale.
#[cfg(not(unix))]
async fn inspect_empty_pid_reservation(
    _pid_path: &Path,
    _lock_path: &Path,
) -> Result<EmptyPidReservation> {
    Ok(EmptyPidReservation::Stale)
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn read_process_start_time(pid: u32) -> Result<String> {
|
||||
let output = Command::new("ps")
|
||||
.args(["-p", &pid.to_string(), "-o", "lstart="])
|
||||
.output()
|
||||
.await
|
||||
.context("failed to invoke ps for pid-managed app server")?;
|
||||
if !output.status.success() {
|
||||
bail!("failed to read start time for pid-managed app server {pid}");
|
||||
}
|
||||
|
||||
let start_time = String::from_utf8(output.stdout)
|
||||
.context("pid-managed app server start time was not utf-8")?;
|
||||
let start_time = start_time.trim();
|
||||
if start_time.is_empty() {
|
||||
bail!("pid-managed app server {pid} has no recorded start time");
|
||||
}
|
||||
Ok(start_time.to_string())
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
#[path = "pid_tests.rs"]
|
||||
mod tests;
|
||||
158
codex-rs/app-server-daemon/src/backend/pid_tests.rs
Normal file
158
codex-rs/app-server-daemon/src/backend/pid_tests.rs
Normal file
@@ -0,0 +1,158 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use super::PidBackend;
|
||||
use super::PidCommandKind;
|
||||
use super::PidFileState;
|
||||
use super::PidRecord;
|
||||
use super::try_lock_file;
|
||||
|
||||
#[tokio::test]
async fn locked_empty_pid_file_is_treated_as_active_reservation() {
    // An empty pid file whose companion lock is held means a starter is
    // mid-flight: the backend must report `Starting` and leave the file alone.
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    // Hold the reservation lock ourselves to simulate the concurrent starter.
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));

    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Starting
    );
    // The pid file must not be cleaned up while the reservation is active.
    assert!(pid_file.exists());
}
|
||||
|
||||
#[tokio::test]
async fn unlocked_empty_pid_file_is_treated_as_stale_reservation() {
    // An empty pid file with nobody holding the lock is leftover state: the
    // backend must report `Missing` and delete the file.
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );

    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Missing
    );
    assert!(!pid_file.exists());
}
|
||||
|
||||
#[tokio::test]
async fn stop_waits_for_live_reservation_to_resolve() {
    // While a starter holds the reservation lock, `stop` must block until the
    // reservation is released rather than failing or racing the starter.
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    // Release the reservation and clean up the pid file after a short delay,
    // simulating the starter finishing while `stop` is waiting.
    let cleanup = tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(50)).await;
        drop(reservation);
        tokio::fs::remove_file(pid_file)
            .await
            .expect("remove pid file");
    });

    backend.stop().await.expect("stop");
    cleanup.await.expect("cleanup task");
}
|
||||
|
||||
#[tokio::test]
async fn start_retries_stale_empty_pid_file_under_its_own_lock() {
    // A stale empty pid file must not block `start`: the backend should claim
    // the reservation itself and proceed to spawning — which here fails only
    // because the binary path is deliberately missing.
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("missing-codex"),
        pid_file,
        /*remote_control_enabled*/ false,
    );

    let err = backend.start().await.expect_err("start");
    // The failure must come from the spawn step, proving the stale file was
    // handled rather than reported as an error itself.
    assert!(
        err.to_string()
            .starts_with("failed to spawn detached app-server process using ")
    );
}
|
||||
|
||||
#[tokio::test]
async fn stale_record_cleanup_preserves_replacement_record() {
    // If another process has already replaced the stale record on disk,
    // `refresh_after_stale_record` must keep the replacement instead of
    // deleting the file.
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let stale = PidRecord {
        pid: 1,
        process_start_time: "old".to_string(),
    };
    let replacement = PidRecord {
        pid: 2,
        process_start_time: "new".to_string(),
    };
    tokio::fs::write(
        &pid_file,
        serde_json::to_vec(&replacement).expect("serialize replacement"),
    )
    .await
    .expect("write replacement pid file");

    assert_eq!(
        backend
            .refresh_after_stale_record(&stale)
            .await
            .expect("cleanup"),
        PidFileState::Running(replacement)
    );
}
|
||||
|
||||
#[test]
fn update_loop_uses_hidden_app_server_subcommand() {
    // The update-loop backend must invoke the internal
    // `app-server daemon pid-update-loop` subcommand.
    let backend = PidBackend {
        codex_bin: "codex".into(),
        pid_file: "updater.pid".into(),
        lock_file: "updater.pid.lock".into(),
        command_kind: PidCommandKind::UpdateLoop,
    };

    assert_eq!(
        backend.command_args(),
        vec!["app-server", "daemon", "pid-update-loop"]
    );
}
|
||||
131
codex-rs/app-server-daemon/src/client.rs
Normal file
131
codex-rs/app-server-daemon/src/client.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_uds::UnixStream;
|
||||
use futures::SinkExt;
|
||||
use futures::StreamExt;
|
||||
use tokio::time::timeout;
|
||||
use tokio_tungstenite::client_async;
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
|
||||
const PROBE_TIMEOUT: Duration = Duration::from_secs(2);
|
||||
const CLIENT_NAME: &str = "codex_app_server_daemon";
|
||||
|
||||
/// Result of a successful initialize handshake against the control socket.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ProbeInfo {
    /// Version parsed from the server's reported user agent.
    pub(crate) app_server_version: String,
}
|
||||
|
||||
/// Probes the app-server control socket, bounding the whole handshake by
/// `PROBE_TIMEOUT`.
///
/// # Errors
/// Fails when the handshake errors or does not finish within the timeout.
pub(crate) async fn probe(socket_path: &Path) -> Result<ProbeInfo> {
    // The outer `?` unwraps the timeout layer; the inner Result from
    // `probe_inner` is the function's return value.
    timeout(PROBE_TIMEOUT, probe_inner(socket_path))
        .await
        .with_context(|| {
            format!(
                "timed out probing app-server control socket {}",
                socket_path.display()
            )
        })?
}
|
||||
|
||||
/// Performs one `initialize`/`initialized` JSON-RPC handshake over a
/// WebSocket layered on a Unix socket and extracts the server version from
/// the reported user agent.
async fn probe_inner(socket_path: &Path) -> Result<ProbeInfo> {
    let stream = UnixStream::connect(socket_path)
        .await
        .with_context(|| format!("failed to connect to {}", socket_path.display()))?;
    // Upgrade the raw stream to a WebSocket; the URL host is a placeholder
    // since the transport is a Unix socket.
    let (mut websocket, _response) = client_async("ws://localhost/", stream)
        .await
        .with_context(|| format!("failed to upgrade {}", socket_path.display()))?;

    // JSON-RPC `initialize` request identifying this probe client (id 1).
    let initialize = JSONRPCMessage::Request(JSONRPCRequest {
        id: RequestId::Integer(1),
        method: "initialize".to_string(),
        params: Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: CLIENT_NAME.to_string(),
                title: Some("Codex App Server Daemon".to_string()),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            capabilities: None,
        })?),
        trace: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialize)?.into()))
        .await
        .context("failed to send initialize request")?;

    // Skip non-text frames and unrelated messages until the response carrying
    // our request id (1) arrives.
    let response = loop {
        let frame = websocket
            .next()
            .await
            .ok_or_else(|| anyhow!("app-server closed before initialize response"))??;
        let Message::Text(payload) = frame else {
            continue;
        };
        let message = serde_json::from_str::<JSONRPCMessage>(&payload)?;
        if let JSONRPCMessage::Response(response) = message
            && response.id == RequestId::Integer(1)
        {
            break response;
        }
    };
    let initialize_response = serde_json::from_value::<InitializeResponse>(response.result)?;

    // Complete the handshake so the server considers the session established.
    let initialized = JSONRPCMessage::Notification(JSONRPCNotification {
        method: "initialized".to_string(),
        params: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialized)?.into()))
        .await
        .context("failed to send initialized notification")?;
    // Best-effort close; probe success does not depend on a clean shutdown.
    websocket.close(None).await.ok();

    Ok(ProbeInfo {
        app_server_version: parse_version_from_user_agent(&initialize_response.user_agent)?,
    })
}
|
||||
|
||||
fn parse_version_from_user_agent(user_agent: &str) -> Result<String> {
|
||||
let (_originator, rest) = user_agent
|
||||
.split_once('/')
|
||||
.ok_or_else(|| anyhow!("app-server user-agent omitted version separator"))?;
|
||||
let version = rest
|
||||
.split_whitespace()
|
||||
.next()
|
||||
.filter(|version| !version.is_empty())
|
||||
.ok_or_else(|| anyhow!("app-server user-agent omitted version"))?;
|
||||
Ok(version.to_string())
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;

    use super::parse_version_from_user_agent;

    // Happy path: the version is the token immediately after the first '/'.
    #[test]
    fn parses_version_from_codex_user_agent() {
        assert_eq!(
            parse_version_from_user_agent(
                "codex_app_server_daemon/1.2.3 (Linux 6.8.0; x86_64) codex_cli_rs/1.2.3",
            )
            .expect("version"),
            "1.2.3"
        );
    }

    // A user agent without a '/' separator must be rejected.
    #[test]
    fn rejects_user_agent_without_version() {
        assert!(parse_version_from_user_agent("codex_app_server_daemon").is_err());
    }
}
|
||||
630
codex-rs/app-server-daemon/src/lib.rs
Normal file
630
codex-rs/app-server-daemon/src/lib.rs
Normal file
@@ -0,0 +1,630 @@
|
||||
mod backend;
|
||||
mod client;
|
||||
mod managed_install;
|
||||
mod settings;
|
||||
mod update_loop;
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
pub use backend::BackendKind;
|
||||
use backend::BackendPaths;
|
||||
use codex_app_server_transport::app_server_control_socket_path;
|
||||
use codex_core::config::find_codex_home;
|
||||
use managed_install::managed_codex_bin;
|
||||
#[cfg(unix)]
|
||||
use managed_install::managed_codex_version;
|
||||
use serde::Serialize;
|
||||
use settings::DaemonSettings;
|
||||
use tokio::time::sleep;
|
||||
|
||||
const START_POLL_INTERVAL: Duration = Duration::from_millis(50);
|
||||
const START_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const OPERATION_LOCK_TIMEOUT: Duration = Duration::from_secs(75);
|
||||
const PID_FILE_NAME: &str = "app-server.pid";
|
||||
const UPDATE_PID_FILE_NAME: &str = "app-server-updater.pid";
|
||||
const OPERATION_LOCK_FILE_NAME: &str = "daemon.lock";
|
||||
const SETTINGS_FILE_NAME: &str = "settings.json";
|
||||
const STATE_DIR_NAME: &str = "app-server-daemon";
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum LifecycleCommand {
|
||||
Start,
|
||||
Restart,
|
||||
Stop,
|
||||
Version,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum LifecycleStatus {
|
||||
AlreadyRunning,
|
||||
Started,
|
||||
Restarted,
|
||||
Stopped,
|
||||
NotRunning,
|
||||
Running,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LifecycleOutput {
|
||||
pub status: LifecycleStatus,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub backend: Option<BackendKind>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pid: Option<u32>,
|
||||
pub socket_path: PathBuf,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cli_version: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub app_server_version: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct BootstrapOptions {
|
||||
pub remote_control_enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum BootstrapStatus {
|
||||
Bootstrapped,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct BootstrapOutput {
|
||||
pub status: BootstrapStatus,
|
||||
pub backend: BackendKind,
|
||||
pub auto_update_enabled: bool,
|
||||
pub remote_control_enabled: bool,
|
||||
pub managed_codex_path: PathBuf,
|
||||
pub socket_path: PathBuf,
|
||||
pub cli_version: String,
|
||||
pub app_server_version: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum RemoteControlMode {
|
||||
Enabled,
|
||||
Disabled,
|
||||
}
|
||||
|
||||
impl RemoteControlMode {
|
||||
fn is_enabled(self) -> bool {
|
||||
matches!(self, Self::Enabled)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum RemoteControlStatus {
|
||||
Enabled,
|
||||
Disabled,
|
||||
AlreadyEnabled,
|
||||
AlreadyDisabled,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoteControlOutput {
|
||||
pub status: RemoteControlStatus,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub backend: Option<BackendKind>,
|
||||
pub remote_control_enabled: bool,
|
||||
pub socket_path: PathBuf,
|
||||
pub cli_version: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub app_server_version: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) enum RestartIfRunningOutcome {
|
||||
Completed,
|
||||
Busy,
|
||||
}
|
||||
|
||||
pub async fn run(command: LifecycleCommand) -> Result<LifecycleOutput> {
|
||||
ensure_supported_platform()?;
|
||||
Daemon::from_environment()?.run(command).await
|
||||
}
|
||||
|
||||
pub async fn bootstrap(options: BootstrapOptions) -> Result<BootstrapOutput> {
|
||||
ensure_supported_platform()?;
|
||||
Daemon::from_environment()?.bootstrap(options).await
|
||||
}
|
||||
|
||||
pub async fn set_remote_control(mode: RemoteControlMode) -> Result<RemoteControlOutput> {
|
||||
ensure_supported_platform()?;
|
||||
Daemon::from_environment()?.set_remote_control(mode).await
|
||||
}
|
||||
|
||||
pub async fn run_pid_update_loop() -> Result<()> {
|
||||
ensure_supported_platform()?;
|
||||
update_loop::run().await
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn ensure_supported_platform() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn ensure_supported_platform() -> Result<()> {
|
||||
Err(anyhow!(
|
||||
"codex app-server daemon lifecycle is only supported on Unix platforms"
|
||||
))
|
||||
}
|
||||
|
||||
/// Filesystem layout and socket location for one daemon-managed app server.
struct Daemon {
    /// Control socket the app server is probed on.
    socket_path: PathBuf,
    /// Pid record of the managed app-server process.
    pid_file: PathBuf,
    /// Pid record of the background update loop.
    update_pid_file: PathBuf,
    /// Lock file serializing lifecycle operations (start/stop/bootstrap).
    operation_lock_file: PathBuf,
    /// Persisted daemon settings (e.g. the remote-control flag).
    settings_file: PathBuf,
    /// Managed standalone Codex binary used to spawn the server.
    managed_codex_bin: PathBuf,
}
|
||||
|
||||
impl Daemon {
|
||||
fn from_environment() -> Result<Self> {
|
||||
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
|
||||
let socket_path = app_server_control_socket_path(codex_home.as_path())?
|
||||
.as_path()
|
||||
.to_path_buf();
|
||||
let state_dir = codex_home.as_path().join(STATE_DIR_NAME);
|
||||
Ok(Self {
|
||||
socket_path,
|
||||
pid_file: state_dir.join(PID_FILE_NAME),
|
||||
update_pid_file: state_dir.join(UPDATE_PID_FILE_NAME),
|
||||
operation_lock_file: state_dir.join(OPERATION_LOCK_FILE_NAME),
|
||||
settings_file: state_dir.join(SETTINGS_FILE_NAME),
|
||||
managed_codex_bin: managed_codex_bin(codex_home.as_path()),
|
||||
})
|
||||
}
|
||||
|
||||
async fn run(&self, command: LifecycleCommand) -> Result<LifecycleOutput> {
|
||||
match command {
|
||||
LifecycleCommand::Start => {
|
||||
let _operation_lock = self.acquire_operation_lock().await?;
|
||||
self.start().await
|
||||
}
|
||||
LifecycleCommand::Restart => {
|
||||
let _operation_lock = self.acquire_operation_lock().await?;
|
||||
self.restart().await
|
||||
}
|
||||
LifecycleCommand::Stop => {
|
||||
let _operation_lock = self.acquire_operation_lock().await?;
|
||||
self.stop().await
|
||||
}
|
||||
LifecycleCommand::Version => self.version().await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn start(&self) -> Result<LifecycleOutput> {
|
||||
let settings = self.load_settings().await?;
|
||||
if let Ok(info) = client::probe(&self.socket_path).await {
|
||||
return Ok(self.output(
|
||||
LifecycleStatus::AlreadyRunning,
|
||||
self.running_backend(&settings).await?,
|
||||
/*pid*/ None,
|
||||
Some(info.app_server_version),
|
||||
));
|
||||
}
|
||||
|
||||
if self.running_backend_instance(&settings).await?.is_some() {
|
||||
let info = self.wait_until_ready().await?;
|
||||
return Ok(self.output(
|
||||
LifecycleStatus::AlreadyRunning,
|
||||
Some(BackendKind::Pid),
|
||||
/*pid*/ None,
|
||||
Some(info.app_server_version),
|
||||
));
|
||||
}
|
||||
|
||||
self.ensure_managed_codex_bin()?;
|
||||
let pid = self.start_managed_backend(&settings).await?;
|
||||
let info = self.wait_until_ready().await?;
|
||||
Ok(self.output(
|
||||
LifecycleStatus::Started,
|
||||
Some(BackendKind::Pid),
|
||||
pid,
|
||||
Some(info.app_server_version),
|
||||
))
|
||||
}
|
||||
|
||||
async fn restart(&self) -> Result<LifecycleOutput> {
|
||||
let settings = self.load_settings().await?;
|
||||
if client::probe(&self.socket_path).await.is_ok()
|
||||
&& self.running_backend(&settings).await?.is_none()
|
||||
{
|
||||
return Err(anyhow!(
|
||||
"app server is running but is not managed by codex app-server daemon"
|
||||
));
|
||||
}
|
||||
|
||||
self.ensure_managed_codex_bin()?;
|
||||
if let Some(backend) = self.running_backend_instance(&settings).await? {
|
||||
backend.stop().await?;
|
||||
}
|
||||
|
||||
let pid = self.start_managed_backend(&settings).await?;
|
||||
let info = self.wait_until_ready().await?;
|
||||
Ok(self.output(
|
||||
LifecycleStatus::Restarted,
|
||||
Some(BackendKind::Pid),
|
||||
pid,
|
||||
Some(info.app_server_version),
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn try_restart_if_running(&self) -> Result<RestartIfRunningOutcome> {
|
||||
let operation_lock = self.open_operation_lock_file().await?;
|
||||
if !try_lock_file(&operation_lock)? {
|
||||
return Ok(RestartIfRunningOutcome::Busy);
|
||||
}
|
||||
let settings = self.load_settings().await?;
|
||||
if let Some(backend) = self.running_backend_instance(&settings).await? {
|
||||
let Ok(info) = client::probe(&self.socket_path).await else {
|
||||
return Ok(RestartIfRunningOutcome::Completed);
|
||||
};
|
||||
let managed_version = managed_codex_version(&self.managed_codex_bin).await?;
|
||||
if info.app_server_version == managed_version {
|
||||
return Ok(RestartIfRunningOutcome::Completed);
|
||||
}
|
||||
backend.stop().await?;
|
||||
let _ = self.start_managed_backend(&settings).await?;
|
||||
self.wait_until_ready().await?;
|
||||
return Ok(RestartIfRunningOutcome::Completed);
|
||||
}
|
||||
|
||||
if client::probe(&self.socket_path).await.is_ok() {
|
||||
return Err(anyhow!(
|
||||
"app server is running but is not managed by codex app-server daemon"
|
||||
));
|
||||
}
|
||||
|
||||
Ok(RestartIfRunningOutcome::Completed)
|
||||
}
|
||||
|
||||
async fn stop(&self) -> Result<LifecycleOutput> {
|
||||
let settings = self.load_settings().await?;
|
||||
if let Some(backend) = self.running_backend_instance(&settings).await? {
|
||||
backend.stop().await?;
|
||||
return Ok(self.output(
|
||||
LifecycleStatus::Stopped,
|
||||
Some(BackendKind::Pid),
|
||||
/*pid*/ None,
|
||||
/*app_server_version*/ None,
|
||||
));
|
||||
}
|
||||
|
||||
if client::probe(&self.socket_path).await.is_ok() {
|
||||
return Err(anyhow!(
|
||||
"app server is running but is not managed by codex app-server daemon"
|
||||
));
|
||||
}
|
||||
|
||||
Ok(self.output(
|
||||
LifecycleStatus::NotRunning,
|
||||
/*backend*/ None,
|
||||
/*pid*/ None,
|
||||
/*app_server_version*/ None,
|
||||
))
|
||||
}
|
||||
|
||||
async fn version(&self) -> Result<LifecycleOutput> {
|
||||
let settings = self.load_settings().await?;
|
||||
let info = client::probe(&self.socket_path).await?;
|
||||
Ok(self.output(
|
||||
LifecycleStatus::Running,
|
||||
self.running_backend(&settings).await?,
|
||||
/*pid*/ None,
|
||||
Some(info.app_server_version),
|
||||
))
|
||||
}
|
||||
|
||||
async fn wait_until_ready(&self) -> Result<client::ProbeInfo> {
|
||||
let deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
loop {
|
||||
match client::probe(&self.socket_path).await {
|
||||
Ok(info) => return Ok(info),
|
||||
Err(err) if tokio::time::Instant::now() < deadline => {
|
||||
let _ = err;
|
||||
sleep(START_POLL_INTERVAL).await;
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!(
|
||||
"app server did not become ready on {}",
|
||||
self.socket_path.display()
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn bootstrap(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
|
||||
let _operation_lock = self.acquire_operation_lock().await?;
|
||||
self.bootstrap_locked(options).await
|
||||
}
|
||||
|
||||
async fn set_remote_control(&self, mode: RemoteControlMode) -> Result<RemoteControlOutput> {
|
||||
let _operation_lock = self.acquire_operation_lock().await?;
|
||||
let previous_settings = self.load_settings().await?;
|
||||
let mut settings = previous_settings.clone();
|
||||
let remote_control_enabled = mode.is_enabled();
|
||||
let backend = self.running_backend_instance(&previous_settings).await?;
|
||||
|
||||
if backend.is_none() && client::probe(&self.socket_path).await.is_ok() {
|
||||
return Err(anyhow!(
|
||||
"app server is running but is not managed by codex app-server daemon"
|
||||
));
|
||||
}
|
||||
|
||||
if settings.remote_control_enabled == remote_control_enabled {
|
||||
let info = if backend.is_some() {
|
||||
Some(self.wait_until_ready().await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
return Ok(self.remote_control_output(
|
||||
already_remote_control_status(mode),
|
||||
backend.map(|_| BackendKind::Pid),
|
||||
remote_control_enabled,
|
||||
info.map(|info| info.app_server_version),
|
||||
));
|
||||
}
|
||||
|
||||
settings.remote_control_enabled = remote_control_enabled;
|
||||
settings.save(&self.settings_file).await?;
|
||||
|
||||
let app_server_version = if let Some(backend) = backend {
|
||||
self.ensure_managed_codex_bin()?;
|
||||
backend.stop().await?;
|
||||
let _ = self.start_managed_backend(&settings).await?;
|
||||
Some(self.wait_until_ready().await?.app_server_version)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(self.remote_control_output(
|
||||
remote_control_status(mode),
|
||||
app_server_version.as_ref().map(|_| BackendKind::Pid),
|
||||
remote_control_enabled,
|
||||
app_server_version,
|
||||
))
|
||||
}
|
||||
|
||||
async fn bootstrap_locked(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
|
||||
self.ensure_managed_codex_bin()?;
|
||||
|
||||
let settings = DaemonSettings {
|
||||
remote_control_enabled: options.remote_control_enabled,
|
||||
};
|
||||
if client::probe(&self.socket_path).await.is_ok()
|
||||
&& self.running_backend(&settings).await?.is_none()
|
||||
{
|
||||
return Err(anyhow!(
|
||||
"app server is running but is not managed by codex app-server daemon"
|
||||
));
|
||||
}
|
||||
settings.save(&self.settings_file).await?;
|
||||
|
||||
if let Some(backend) = self.running_backend_instance(&settings).await? {
|
||||
backend.stop().await?;
|
||||
}
|
||||
|
||||
let backend = backend::pid_backend(self.backend_paths(&settings));
|
||||
backend.start().await?;
|
||||
let updater = backend::pid_update_loop_backend(self.backend_paths(&settings));
|
||||
if updater.is_starting_or_running().await? {
|
||||
updater.stop().await?;
|
||||
}
|
||||
updater.start().await?;
|
||||
|
||||
let info = self.wait_until_ready().await?;
|
||||
Ok(BootstrapOutput {
|
||||
status: BootstrapStatus::Bootstrapped,
|
||||
backend: BackendKind::Pid,
|
||||
auto_update_enabled: true,
|
||||
remote_control_enabled: settings.remote_control_enabled,
|
||||
managed_codex_path: self.managed_codex_bin.clone(),
|
||||
socket_path: self.socket_path.clone(),
|
||||
cli_version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
app_server_version: info.app_server_version,
|
||||
})
|
||||
}
|
||||
|
||||
async fn running_backend(&self, settings: &DaemonSettings) -> Result<Option<BackendKind>> {
|
||||
Ok(self
|
||||
.running_backend_instance(settings)
|
||||
.await?
|
||||
.map(|_| BackendKind::Pid))
|
||||
}
|
||||
|
||||
async fn running_backend_instance(
|
||||
&self,
|
||||
settings: &DaemonSettings,
|
||||
) -> Result<Option<backend::PidBackend>> {
|
||||
let backend = backend::pid_backend(self.backend_paths(settings));
|
||||
if backend.is_starting_or_running().await? {
|
||||
return Ok(Some(backend));
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
async fn start_managed_backend(&self, settings: &DaemonSettings) -> Result<Option<u32>> {
|
||||
let backend = backend::pid_backend(self.backend_paths(settings));
|
||||
backend.start().await
|
||||
}
|
||||
|
||||
fn ensure_managed_codex_bin(&self) -> Result<()> {
|
||||
if self.managed_codex_bin.is_file() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"managed standalone Codex install not found at {}; install Codex first",
|
||||
self.managed_codex_bin.display()
|
||||
))
|
||||
}
|
||||
|
||||
fn backend_paths(&self, settings: &DaemonSettings) -> BackendPaths {
|
||||
BackendPaths {
|
||||
codex_bin: self.managed_codex_bin.clone(),
|
||||
pid_file: self.pid_file.clone(),
|
||||
update_pid_file: self.update_pid_file.clone(),
|
||||
remote_control_enabled: settings.remote_control_enabled,
|
||||
}
|
||||
}
|
||||
|
||||
async fn load_settings(&self) -> Result<DaemonSettings> {
|
||||
DaemonSettings::load(&self.settings_file).await
|
||||
}
|
||||
|
||||
async fn acquire_operation_lock(&self) -> Result<tokio::fs::File> {
|
||||
let operation_lock = self.open_operation_lock_file().await?;
|
||||
let deadline = tokio::time::Instant::now() + OPERATION_LOCK_TIMEOUT;
|
||||
while !try_lock_file(&operation_lock)? {
|
||||
if tokio::time::Instant::now() >= deadline {
|
||||
return Err(anyhow!(
|
||||
"timed out waiting for daemon operation lock {}",
|
||||
self.operation_lock_file.display()
|
||||
));
|
||||
}
|
||||
sleep(START_POLL_INTERVAL).await;
|
||||
}
|
||||
Ok(operation_lock)
|
||||
}
|
||||
|
||||
async fn open_operation_lock_file(&self) -> Result<tokio::fs::File> {
|
||||
if let Some(parent) = self.operation_lock_file.parent() {
|
||||
tokio::fs::create_dir_all(parent).await.with_context(|| {
|
||||
format!(
|
||||
"failed to create daemon state directory {}",
|
||||
parent.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
tokio::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.write(true)
|
||||
.open(&self.operation_lock_file)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to open daemon operation lock {}",
|
||||
self.operation_lock_file.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn output(
|
||||
&self,
|
||||
status: LifecycleStatus,
|
||||
backend: Option<BackendKind>,
|
||||
pid: Option<u32>,
|
||||
app_server_version: Option<String>,
|
||||
) -> LifecycleOutput {
|
||||
LifecycleOutput {
|
||||
status,
|
||||
backend,
|
||||
pid,
|
||||
socket_path: self.socket_path.clone(),
|
||||
cli_version: Some(env!("CARGO_PKG_VERSION").to_string()),
|
||||
app_server_version,
|
||||
}
|
||||
}
|
||||
|
||||
fn remote_control_output(
|
||||
&self,
|
||||
status: RemoteControlStatus,
|
||||
backend: Option<BackendKind>,
|
||||
remote_control_enabled: bool,
|
||||
app_server_version: Option<String>,
|
||||
) -> RemoteControlOutput {
|
||||
RemoteControlOutput {
|
||||
status,
|
||||
backend,
|
||||
remote_control_enabled,
|
||||
socket_path: self.socket_path.clone(),
|
||||
cli_version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
app_server_version,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
|
||||
match mode {
|
||||
RemoteControlMode::Enabled => RemoteControlStatus::Enabled,
|
||||
RemoteControlMode::Disabled => RemoteControlStatus::Disabled,
|
||||
}
|
||||
}
|
||||
|
||||
fn already_remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
|
||||
match mode {
|
||||
RemoteControlMode::Enabled => RemoteControlStatus::AlreadyEnabled,
|
||||
RemoteControlMode::Disabled => RemoteControlStatus::AlreadyDisabled,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn try_lock_file(file: &tokio::fs::File) -> Result<bool> {
|
||||
use std::os::fd::AsRawFd;
|
||||
|
||||
let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
|
||||
if result == 0 {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
|
||||
return Ok(false);
|
||||
}
|
||||
Err(err).context("failed to lock daemon operation")
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn try_lock_file(_file: &tokio::fs::File) -> Result<bool> {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::BootstrapStatus;
|
||||
use super::LifecycleStatus;
|
||||
use super::RemoteControlStatus;
|
||||
|
||||
#[test]
|
||||
fn lifecycle_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&LifecycleStatus::AlreadyRunning).expect("serialize"),
|
||||
"\"alreadyRunning\""
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bootstrap_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&BootstrapStatus::Bootstrapped).expect("serialize"),
|
||||
"\"bootstrapped\""
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remote_control_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&RemoteControlStatus::AlreadyEnabled).expect("serialize"),
|
||||
"\"alreadyEnabled\""
|
||||
);
|
||||
}
|
||||
}
|
||||
66
codex-rs/app-server-daemon/src/managed_install.rs
Normal file
66
codex-rs/app-server-daemon/src/managed_install.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(unix)]
|
||||
use anyhow::Context;
|
||||
#[cfg(unix)]
|
||||
use anyhow::Result;
|
||||
#[cfg(unix)]
|
||||
use anyhow::anyhow;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
|
||||
pub(crate) fn managed_codex_bin(codex_home: &Path) -> PathBuf {
|
||||
codex_home
|
||||
.join("packages")
|
||||
.join("standalone")
|
||||
.join("current")
|
||||
.join(managed_codex_file_name())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn managed_codex_version(codex_bin: &Path) -> Result<String> {
|
||||
let output = Command::new(codex_bin)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to invoke managed Codex binary {}",
|
||||
codex_bin.display()
|
||||
)
|
||||
})?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"managed Codex binary {} exited with status {}",
|
||||
codex_bin.display(),
|
||||
output.status
|
||||
));
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout).with_context(|| {
|
||||
format!(
|
||||
"managed Codex version was not utf-8: {}",
|
||||
codex_bin.display()
|
||||
)
|
||||
})?;
|
||||
parse_codex_version(&stdout)
|
||||
}
|
||||
|
||||
fn managed_codex_file_name() -> &'static str {
|
||||
if cfg!(windows) { "codex.exe" } else { "codex" }
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn parse_codex_version(output: &str) -> Result<String> {
|
||||
let version = output
|
||||
.split_whitespace()
|
||||
.nth(1)
|
||||
.filter(|version| !version.is_empty())
|
||||
.ok_or_else(|| anyhow!("managed Codex version output was malformed"))?;
|
||||
Ok(version.to_string())
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
#[path = "managed_install_tests.rs"]
|
||||
mod tests;
|
||||
16
codex-rs/app-server-daemon/src/managed_install_tests.rs
Normal file
16
codex-rs/app-server-daemon/src/managed_install_tests.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::parse_codex_version;
|
||||
|
||||
#[test]
|
||||
fn parses_codex_cli_version_output() {
|
||||
assert_eq!(
|
||||
parse_codex_version("codex 1.2.3\n").expect("version"),
|
||||
"1.2.3"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rejects_malformed_codex_cli_version_output() {
|
||||
assert!(parse_codex_version("codex\n").is_err());
|
||||
}
|
||||
63
codex-rs/app-server-daemon/src/settings.rs
Normal file
63
codex-rs/app-server-daemon/src/settings.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::fs;
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub(crate) struct DaemonSettings {
|
||||
pub(crate) remote_control_enabled: bool,
|
||||
}
|
||||
|
||||
impl DaemonSettings {
|
||||
pub(crate) async fn load(path: &Path) -> Result<Self> {
|
||||
let contents = match fs::read_to_string(path).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Self::default()),
|
||||
Err(err) => {
|
||||
return Err(err)
|
||||
.with_context(|| format!("failed to read daemon settings {}", path.display()));
|
||||
}
|
||||
};
|
||||
|
||||
serde_json::from_str(&contents)
|
||||
.with_context(|| format!("failed to parse daemon settings {}", path.display()))
|
||||
}
|
||||
|
||||
pub(crate) async fn save(&self, path: &Path) -> Result<()> {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await.with_context(|| {
|
||||
format!(
|
||||
"failed to create daemon settings directory {}",
|
||||
parent.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
let contents = serde_json::to_vec_pretty(self).context("failed to serialize settings")?;
|
||||
fs::write(path, contents)
|
||||
.await
|
||||
.with_context(|| format!("failed to write daemon settings {}", path.display()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::DaemonSettings;
|
||||
|
||||
#[test]
|
||||
fn daemon_settings_use_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&DaemonSettings {
|
||||
remote_control_enabled: true,
|
||||
})
|
||||
.expect("serialize"),
|
||||
r#"{"remoteControlEnabled":true}"#
|
||||
);
|
||||
}
|
||||
}
|
||||
132
codex-rs/app-server-daemon/src/update_loop.rs
Normal file
132
codex-rs/app-server-daemon/src/update_loop.rs
Normal file
@@ -0,0 +1,132 @@
|
||||
#[cfg(unix)]
|
||||
use std::process::Stdio;
|
||||
#[cfg(unix)]
|
||||
use std::time::Duration;
|
||||
|
||||
#[cfg(unix)]
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
#[cfg(not(unix))]
|
||||
use anyhow::bail;
|
||||
#[cfg(unix)]
|
||||
use futures::FutureExt;
|
||||
#[cfg(unix)]
|
||||
use tokio::io::AsyncWriteExt;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::Signal;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::SignalKind;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::signal;
|
||||
#[cfg(unix)]
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::Daemon;
|
||||
#[cfg(unix)]
|
||||
use crate::RestartIfRunningOutcome;
|
||||
|
||||
#[cfg(unix)]
|
||||
const INITIAL_UPDATE_DELAY: Duration = Duration::from_secs(5 * 60);
|
||||
#[cfg(unix)]
|
||||
const RESTART_RETRY_INTERVAL: Duration = Duration::from_millis(50);
|
||||
#[cfg(unix)]
|
||||
const UPDATE_INTERVAL: Duration = Duration::from_secs(60 * 60);
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn run() -> Result<()> {
|
||||
let mut terminate =
|
||||
signal(SignalKind::terminate()).context("failed to install updater shutdown handler")?;
|
||||
if sleep_or_terminate(INITIAL_UPDATE_DELAY, &mut terminate).await {
|
||||
return Ok(());
|
||||
}
|
||||
loop {
|
||||
match update_once(&mut terminate).await {
|
||||
Ok(UpdateLoopControl::Continue) | Err(_) => {}
|
||||
Ok(UpdateLoopControl::Stop) => return Ok(()),
|
||||
}
|
||||
if sleep_or_terminate(UPDATE_INTERVAL, &mut terminate).await {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
pub(crate) async fn run() -> Result<()> {
|
||||
bail!("pid-managed updater loop is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn sleep_or_terminate(duration: Duration, terminate: &mut Signal) -> bool {
|
||||
tokio::select! {
|
||||
_ = sleep(duration) => false,
|
||||
_ = terminate.recv() => true,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
enum UpdateLoopControl {
|
||||
Continue,
|
||||
Stop,
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn update_once(terminate: &mut Signal) -> Result<UpdateLoopControl> {
|
||||
install_latest_standalone().await?;
|
||||
|
||||
let daemon = Daemon::from_environment()?;
|
||||
loop {
|
||||
if terminate.recv().now_or_never().flatten().is_some() {
|
||||
return Ok(UpdateLoopControl::Stop);
|
||||
}
|
||||
match daemon.try_restart_if_running().await? {
|
||||
RestartIfRunningOutcome::Completed => return Ok(UpdateLoopControl::Continue),
|
||||
RestartIfRunningOutcome::Busy => {
|
||||
if sleep_or_terminate(RESTART_RETRY_INTERVAL, terminate).await {
|
||||
return Ok(UpdateLoopControl::Stop);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn install_latest_standalone() -> Result<()> {
|
||||
let script = reqwest::get("https://chatgpt.com/codex/install.sh")
|
||||
.await
|
||||
.context("failed to fetch standalone Codex updater")?
|
||||
.error_for_status()
|
||||
.context("standalone Codex updater request failed")?
|
||||
.bytes()
|
||||
.await
|
||||
.context("failed to read standalone Codex updater")?;
|
||||
|
||||
let mut child = Command::new("/bin/sh")
|
||||
.arg("-s")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.spawn()
|
||||
.context("failed to invoke standalone Codex updater")?;
|
||||
let mut stdin = child
|
||||
.stdin
|
||||
.take()
|
||||
.context("standalone Codex updater stdin was unavailable")?;
|
||||
stdin
|
||||
.write_all(&script)
|
||||
.await
|
||||
.context("failed to pass standalone Codex updater to shell")?;
|
||||
drop(stdin);
|
||||
let status = child
|
||||
.wait()
|
||||
.await
|
||||
.context("failed to wait for standalone Codex updater")?;
|
||||
|
||||
if status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
anyhow::bail!("standalone Codex updater exited with status {status}")
|
||||
}
|
||||
}
|
||||
5
codex-rs/app-server-protocol/schema/json/AttestationGenerateParams.json
generated
Normal file
5
codex-rs/app-server-protocol/schema/json/AttestationGenerateParams.json
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "AttestationGenerateParams",
|
||||
"type": "object"
|
||||
}
|
||||
14
codex-rs/app-server-protocol/schema/json/AttestationGenerateResponse.json
generated
Normal file
14
codex-rs/app-server-protocol/schema/json/AttestationGenerateResponse.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"token": {
|
||||
"description": "Opaque client attestation token.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"token"
|
||||
],
|
||||
"title": "AttestationGenerateResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1259,6 +1259,11 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -2083,16 +2088,37 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateTargetsParams": {
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2104,6 +2130,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"remotePluginId",
|
||||
"shareTargets"
|
||||
],
|
||||
@@ -6177,4 +6204,4 @@
|
||||
}
|
||||
],
|
||||
"title": "ClientRequest"
|
||||
}
|
||||
}
|
||||
@@ -2737,7 +2737,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -2745,11 +2745,15 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"type": "object"
|
||||
|
||||
@@ -121,6 +121,9 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateParams": {
|
||||
"type": "object"
|
||||
},
|
||||
"ChatgptAuthTokensRefreshParams": {
|
||||
"properties": {
|
||||
"previousAccountId": {
|
||||
@@ -1918,6 +1921,31 @@
|
||||
"title": "Account/chatgptAuthTokens/refreshRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Generate a fresh upstream attestation result on demand.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"attestation/generate"
|
||||
],
|
||||
"title": "Attestation/generateRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/AttestationGenerateParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Attestation/generateRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
|
||||
"properties": {
|
||||
|
||||
@@ -83,6 +83,25 @@
|
||||
"title": "ApplyPatchApprovalResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "AttestationGenerateParams",
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"token": {
|
||||
"description": "Opaque client attestation token.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"token"
|
||||
],
|
||||
"title": "AttestationGenerateResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"ChatgptAuthTokensRefreshParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -2620,6 +2639,11 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -5207,6 +5231,31 @@
|
||||
"title": "Account/chatgptAuthTokens/refreshRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Generate a fresh upstream attestation result on demand.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/v2/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"attestation/generate"
|
||||
],
|
||||
"title": "Attestation/generateRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/AttestationGenerateParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Attestation/generateRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
|
||||
"properties": {
|
||||
@@ -12221,10 +12270,20 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"shareTargets": {
|
||||
"sharePrincipals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipal"
|
||||
},
|
||||
@@ -12285,14 +12344,10 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/v2/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
"plugin"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -12327,15 +12382,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
@@ -12406,17 +12473,38 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/v2/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateTargetsParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/v2/PluginShareUpdateDiscoverability"
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -12428,6 +12516,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"remotePluginId",
|
||||
"shareTargets"
|
||||
],
|
||||
@@ -12437,6 +12526,9 @@
|
||||
"PluginShareUpdateTargetsResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/v2/PluginShareDiscoverability"
|
||||
},
|
||||
"principals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipal"
|
||||
@@ -12445,6 +12537,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"principals"
|
||||
],
|
||||
"title": "PluginShareUpdateTargetsResponse",
|
||||
@@ -13291,7 +13384,7 @@
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -13299,11 +13392,15 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/v2/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
@@ -18396,4 +18493,4 @@
|
||||
},
|
||||
"title": "CodexAppServerProtocol",
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
@@ -6409,6 +6409,11 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -8814,10 +8819,20 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"shareTargets": {
|
||||
"sharePrincipals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -8878,14 +8893,10 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
"plugin"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -8920,15 +8931,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
@@ -8999,17 +9022,38 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateTargetsParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -9021,6 +9065,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"remotePluginId",
|
||||
"shareTargets"
|
||||
],
|
||||
@@ -9030,6 +9075,9 @@
|
||||
"PluginShareUpdateTargetsResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
"principals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
@@ -9038,6 +9086,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"principals"
|
||||
],
|
||||
"title": "PluginShareUpdateTargetsResponse",
|
||||
@@ -9884,7 +9933,7 @@
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -9892,11 +9941,15 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
@@ -16263,4 +16316,4 @@
|
||||
},
|
||||
"title": "CodexAppServerProtocolV2",
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
@@ -39,6 +39,11 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
|
||||
@@ -246,10 +246,20 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"shareTargets": {
|
||||
"sharePrincipals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -270,6 +280,14 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipal": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -280,15 +298,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -300,10 +300,20 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"shareTargets": {
|
||||
"sharePrincipals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -324,6 +334,14 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipal": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -334,15 +352,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -181,10 +181,20 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"shareTargets": {
|
||||
"sharePrincipals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -205,6 +215,14 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareListItem": {
|
||||
"properties": {
|
||||
"localPluginPath": {
|
||||
@@ -219,14 +237,10 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
"plugin"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -240,15 +254,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -28,13 +28,24 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
|
||||
@@ -16,16 +16,37 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -37,6 +58,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"remotePluginId",
|
||||
"shareTargets"
|
||||
],
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipal": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -11,15 +19,27 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType"
|
||||
"principalType",
|
||||
"role"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
@@ -30,6 +50,9 @@
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"discoverability": {
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
"principals": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
@@ -38,6 +61,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"discoverability",
|
||||
"principals"
|
||||
],
|
||||
"title": "PluginShareUpdateTargetsResponse",
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -19,11 +19,15 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
|
||||
@@ -10,6 +10,10 @@ export type InitializeCapabilities = {
|
||||
* Opt into receiving experimental API methods and fields.
|
||||
*/
|
||||
experimentalApi: boolean,
|
||||
/**
|
||||
* Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
|
||||
*/
|
||||
requestAttestation: boolean,
|
||||
/**
|
||||
* Exact notification method names that should be suppressed for this
|
||||
* connection (for example `thread/started`).
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
import type { ApplyPatchApprovalParams } from "./ApplyPatchApprovalParams";
|
||||
import type { ExecCommandApprovalParams } from "./ExecCommandApprovalParams";
|
||||
import type { RequestId } from "./RequestId";
|
||||
import type { AttestationGenerateParams } from "./v2/AttestationGenerateParams";
|
||||
import type { ChatgptAuthTokensRefreshParams } from "./v2/ChatgptAuthTokensRefreshParams";
|
||||
import type { CommandExecutionRequestApprovalParams } from "./v2/CommandExecutionRequestApprovalParams";
|
||||
import type { DynamicToolCallParams } from "./v2/DynamicToolCallParams";
|
||||
@@ -15,4 +16,4 @@ import type { ToolRequestUserInputParams } from "./v2/ToolRequestUserInputParams
|
||||
/**
|
||||
* Request initiated from the server and sent to the client.
|
||||
*/
|
||||
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
|
||||
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "attestation/generate", id: RequestId, params: AttestationGenerateParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
|
||||
|
||||
5
codex-rs/app-server-protocol/schema/typescript/v2/AttestationGenerateParams.ts
generated
Normal file
5
codex-rs/app-server-protocol/schema/typescript/v2/AttestationGenerateParams.ts
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type AttestationGenerateParams = Record<string, never>;
|
||||
9
codex-rs/app-server-protocol/schema/typescript/v2/AttestationGenerateResponse.ts
generated
Normal file
9
codex-rs/app-server-protocol/schema/typescript/v2/AttestationGenerateResponse.ts
generated
Normal file
@@ -0,0 +1,9 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type AttestationGenerateResponse = {
|
||||
/**
|
||||
* Opaque client attestation token.
|
||||
*/
|
||||
token: string, };
|
||||
@@ -1,6 +1,7 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
|
||||
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
|
||||
|
||||
export type PluginShareContext = { remotePluginId: string, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, shareTargets: Array<PluginSharePrincipal> | null, };
|
||||
export type PluginShareContext = { remotePluginId: string, discoverability: PluginShareDiscoverability | null, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, sharePrincipals: Array<PluginSharePrincipal> | null, };
|
||||
|
||||
@@ -4,4 +4,4 @@
|
||||
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
import type { PluginSummary } from "./PluginSummary";
|
||||
|
||||
export type PluginShareListItem = { plugin: PluginSummary, shareUrl: string, localPluginPath: AbsolutePathBuf | null, };
|
||||
export type PluginShareListItem = { plugin: PluginSummary, localPluginPath: AbsolutePathBuf | null, };
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
|
||||
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
|
||||
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, name: string, };
|
||||
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, role: PluginSharePrincipalRole, name: string, };
|
||||
|
||||
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginSharePrincipalRole.ts
generated
Normal file
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginSharePrincipalRole.ts
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type PluginSharePrincipalRole = "reader" | "editor" | "owner";
|
||||
@@ -2,5 +2,6 @@
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
import type { PluginShareTargetRole } from "./PluginShareTargetRole";
|
||||
|
||||
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, };
|
||||
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, role: PluginShareTargetRole, };
|
||||
|
||||
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginShareTargetRole.ts
generated
Normal file
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginShareTargetRole.ts
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type PluginShareTargetRole = "reader" | "editor";
|
||||
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginShareUpdateDiscoverability.ts
generated
Normal file
5
codex-rs/app-server-protocol/schema/typescript/v2/PluginShareUpdateDiscoverability.ts
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type PluginShareUpdateDiscoverability = "UNLISTED" | "PRIVATE";
|
||||
@@ -2,5 +2,6 @@
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginShareTarget } from "./PluginShareTarget";
|
||||
import type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
|
||||
|
||||
export type PluginShareUpdateTargetsParams = { remotePluginId: string, shareTargets: Array<PluginShareTarget>, };
|
||||
export type PluginShareUpdateTargetsParams = { remotePluginId: string, discoverability: PluginShareUpdateDiscoverability, shareTargets: Array<PluginShareTarget>, };
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
|
||||
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
|
||||
|
||||
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, };
|
||||
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, discoverability: PluginShareDiscoverability, };
|
||||
|
||||
@@ -4,6 +4,6 @@
|
||||
import type { RemoteControlConnectionStatus } from "./RemoteControlConnectionStatus";
|
||||
|
||||
/**
|
||||
* Current remote-control connection status and environment id exposed to clients.
|
||||
* Current remote-control connection status and remote identity exposed to clients.
|
||||
*/
|
||||
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, environmentId: string | null, };
|
||||
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, installationId: string, environmentId: string | null, };
|
||||
|
||||
@@ -28,6 +28,8 @@ export type { AppsDefaultConfig } from "./AppsDefaultConfig";
|
||||
export type { AppsListParams } from "./AppsListParams";
|
||||
export type { AppsListResponse } from "./AppsListResponse";
|
||||
export type { AskForApproval } from "./AskForApproval";
|
||||
export type { AttestationGenerateParams } from "./AttestationGenerateParams";
|
||||
export type { AttestationGenerateResponse } from "./AttestationGenerateResponse";
|
||||
export type { AutoReviewDecisionSource } from "./AutoReviewDecisionSource";
|
||||
export type { ByteRange } from "./ByteRange";
|
||||
export type { CancelLoginAccountParams } from "./CancelLoginAccountParams";
|
||||
@@ -283,10 +285,13 @@ export type { PluginShareListItem } from "./PluginShareListItem";
|
||||
export type { PluginShareListParams } from "./PluginShareListParams";
|
||||
export type { PluginShareListResponse } from "./PluginShareListResponse";
|
||||
export type { PluginSharePrincipal } from "./PluginSharePrincipal";
|
||||
export type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
|
||||
export type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
export type { PluginShareSaveParams } from "./PluginShareSaveParams";
|
||||
export type { PluginShareSaveResponse } from "./PluginShareSaveResponse";
|
||||
export type { PluginShareTarget } from "./PluginShareTarget";
|
||||
export type { PluginShareTargetRole } from "./PluginShareTargetRole";
|
||||
export type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
|
||||
export type { PluginShareUpdateTargetsParams } from "./PluginShareUpdateTargetsParams";
|
||||
export type { PluginShareUpdateTargetsResponse } from "./PluginShareUpdateTargetsResponse";
|
||||
export type { PluginSkillReadParams } from "./PluginSkillReadParams";
|
||||
|
||||
@@ -808,6 +808,13 @@ client_request_definitions! {
|
||||
serialization: None,
|
||||
response: v2::MockExperimentalMethodResponse,
|
||||
},
|
||||
#[experimental("environment/add")]
|
||||
/// Adds or replaces a remote environment by id for later selection.
|
||||
EnvironmentAdd => "environment/add" {
|
||||
params: v2::EnvironmentAddParams,
|
||||
serialization: global("environment"),
|
||||
response: v2::EnvironmentAddResponse,
|
||||
},
|
||||
|
||||
McpServerOauthLogin => "mcpServer/oauth/login" {
|
||||
params: v2::McpServerOauthLoginParams,
|
||||
@@ -1312,6 +1319,12 @@ server_request_definitions! {
|
||||
response: v2::ChatgptAuthTokensRefreshResponse,
|
||||
},
|
||||
|
||||
/// Generate a fresh upstream attestation result on demand.
|
||||
AttestationGenerate => "attestation/generate" {
|
||||
params: v2::AttestationGenerateParams,
|
||||
response: v2::AttestationGenerateResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
/// Request to approve a patch.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
@@ -1790,6 +1803,18 @@ mod tests {
|
||||
add_credits_nudge.serialization_scope(),
|
||||
Some(ClientRequestSerializationScope::Global("account-auth"))
|
||||
);
|
||||
|
||||
let environment_add = ClientRequest::EnvironmentAdd {
|
||||
request_id: request_id(),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
environment_add.serialization_scope(),
|
||||
Some(ClientRequestSerializationScope::Global("environment"))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1910,6 +1935,7 @@ mod tests {
|
||||
},
|
||||
capabilities: Some(v1::InitializeCapabilities {
|
||||
experimental_api: true,
|
||||
request_attestation: true,
|
||||
opt_out_notification_methods: Some(vec![
|
||||
"thread/started".to_string(),
|
||||
"item/agentMessage/delta".to_string(),
|
||||
@@ -1930,6 +1956,7 @@ mod tests {
|
||||
},
|
||||
"capabilities": {
|
||||
"experimentalApi": true,
|
||||
"requestAttestation": true,
|
||||
"optOutNotificationMethods": [
|
||||
"thread/started",
|
||||
"item/agentMessage/delta"
|
||||
@@ -1955,6 +1982,7 @@ mod tests {
|
||||
},
|
||||
"capabilities": {
|
||||
"experimentalApi": true,
|
||||
"requestAttestation": true,
|
||||
"optOutNotificationMethods": [
|
||||
"thread/started",
|
||||
"item/agentMessage/delta"
|
||||
@@ -1975,6 +2003,7 @@ mod tests {
|
||||
},
|
||||
capabilities: Some(v1::InitializeCapabilities {
|
||||
experimental_api: true,
|
||||
request_attestation: true,
|
||||
opt_out_notification_methods: Some(vec![
|
||||
"thread/started".to_string(),
|
||||
"item/agentMessage/delta".to_string(),
|
||||
@@ -2091,6 +2120,28 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_attestation_generate_request() -> Result<()> {
|
||||
let params = v2::AttestationGenerateParams {};
|
||||
let request = ServerRequest::AttestationGenerate {
|
||||
request_id: RequestId::Integer(9),
|
||||
params: params.clone(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "attestation/generate",
|
||||
"id": 9,
|
||||
"params": {}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::AttestationGenerate(params);
|
||||
assert_eq!(request.id(), &RequestId::Integer(9));
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(9)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_response() -> Result<()> {
|
||||
let response = ServerResponse::CommandExecutionRequestApproval {
|
||||
@@ -2546,10 +2597,33 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_environment_add() -> Result<()> {
|
||||
let request = ClientRequest::EnvironmentAdd {
|
||||
request_id: RequestId::Integer(9),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "environment/add",
|
||||
"id": 9,
|
||||
"params": {
|
||||
"environmentId": "remote-a",
|
||||
"execServerUrl": "ws://127.0.0.1:8765"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_fs_get_metadata() -> Result<()> {
|
||||
let request = ClientRequest::FsGetMetadata {
|
||||
request_id: RequestId::Integer(9),
|
||||
request_id: RequestId::Integer(10),
|
||||
params: v2::FsGetMetadataParams {
|
||||
path: absolute_path("tmp/example"),
|
||||
},
|
||||
@@ -2557,7 +2631,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "fs/getMetadata",
|
||||
"id": 9,
|
||||
"id": 10,
|
||||
"params": {
|
||||
"path": absolute_path_string("tmp/example")
|
||||
}
|
||||
@@ -2818,6 +2892,19 @@ mod tests {
|
||||
assert_eq!(reason, Some("mock/experimentalMethod"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_add_is_marked_experimental() {
|
||||
let request = ClientRequest::EnvironmentAdd {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
|
||||
assert_eq!(reason, Some("environment/add"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn command_exec_permission_profile_is_marked_experimental() {
|
||||
let request = ClientRequest::OneOffCommandExec {
|
||||
|
||||
@@ -46,6 +46,9 @@ pub struct InitializeCapabilities {
|
||||
/// Opt into receiving experimental API methods and fields.
|
||||
#[serde(default)]
|
||||
pub experimental_api: bool,
|
||||
/// Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
|
||||
#[serde(default)]
|
||||
pub request_attestation: bool,
|
||||
/// Exact notification method names that should be suppressed for this
|
||||
/// connection (for example `thread/started`).
|
||||
#[ts(optional = nullable)]
|
||||
|
||||
17
codex-rs/app-server-protocol/src/protocol/v2/attestation.rs
Normal file
17
codex-rs/app-server-protocol/src/protocol/v2/attestation.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AttestationGenerateParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AttestationGenerateResponse {
|
||||
/// Opaque client attestation token.
|
||||
pub token: String,
|
||||
}
|
||||
17
codex-rs/app-server-protocol/src/protocol/v2/environment.rs
Normal file
17
codex-rs/app-server-protocol/src/protocol/v2/environment.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct EnvironmentAddParams {
|
||||
pub environment_id: String,
|
||||
pub exec_server_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct EnvironmentAddResponse {}
|
||||
@@ -2,9 +2,11 @@ mod shared;
|
||||
|
||||
mod account;
|
||||
mod apps;
|
||||
mod attestation;
|
||||
mod collaboration_mode;
|
||||
mod command_exec;
|
||||
mod config;
|
||||
mod environment;
|
||||
mod experimental_feature;
|
||||
mod feedback;
|
||||
mod fs;
|
||||
@@ -26,9 +28,11 @@ mod windows_sandbox;
|
||||
|
||||
pub use account::*;
|
||||
pub use apps::*;
|
||||
pub use attestation::*;
|
||||
pub use collaboration_mode::*;
|
||||
pub use command_exec::*;
|
||||
pub use config::*;
|
||||
pub use environment::*;
|
||||
pub use experimental_feature::*;
|
||||
pub use feedback::*;
|
||||
pub use fs::*;
|
||||
|
||||
@@ -218,6 +218,7 @@ pub struct PluginShareSaveResponse {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareUpdateTargetsParams {
|
||||
pub remote_plugin_id: String,
|
||||
pub discoverability: PluginShareUpdateDiscoverability,
|
||||
pub share_targets: Vec<PluginShareTarget>,
|
||||
}
|
||||
|
||||
@@ -226,6 +227,7 @@ pub struct PluginShareUpdateTargetsParams {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareUpdateTargetsResponse {
|
||||
pub principals: Vec<PluginSharePrincipal>,
|
||||
pub discoverability: PluginShareDiscoverability,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
@@ -257,7 +259,6 @@ pub struct PluginShareDeleteResponse {}
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareListItem {
|
||||
pub plugin: PluginSummary,
|
||||
pub share_url: String,
|
||||
pub local_plugin_path: Option<AbsolutePathBuf>,
|
||||
}
|
||||
|
||||
@@ -275,6 +276,17 @@ pub enum PluginShareDiscoverability {
|
||||
Private,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginShareUpdateDiscoverability {
|
||||
#[serde(rename = "UNLISTED")]
|
||||
#[ts(rename = "UNLISTED")]
|
||||
Unlisted,
|
||||
#[serde(rename = "PRIVATE")]
|
||||
#[ts(rename = "PRIVATE")]
|
||||
Private,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginSharePrincipalType {
|
||||
@@ -295,6 +307,7 @@ pub enum PluginSharePrincipalType {
|
||||
pub struct PluginShareTarget {
|
||||
pub principal_type: PluginSharePrincipalType,
|
||||
pub principal_id: String,
|
||||
pub role: PluginShareTargetRole,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
@@ -303,9 +316,29 @@ pub struct PluginShareTarget {
|
||||
pub struct PluginSharePrincipal {
|
||||
pub principal_type: PluginSharePrincipalType,
|
||||
pub principal_id: String,
|
||||
pub role: PluginSharePrincipalRole,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[ts(rename_all = "lowercase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginShareTargetRole {
|
||||
Reader,
|
||||
Editor,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[ts(rename_all = "lowercase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginSharePrincipalRole {
|
||||
Reader,
|
||||
Editor,
|
||||
Owner,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(rename_all = "snake_case")]
|
||||
@@ -526,10 +559,11 @@ pub struct PluginSummary {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareContext {
|
||||
pub remote_plugin_id: String,
|
||||
pub discoverability: Option<PluginShareDiscoverability>,
|
||||
pub share_url: Option<String>,
|
||||
pub creator_account_user_id: Option<String>,
|
||||
pub creator_name: Option<String>,
|
||||
pub share_targets: Option<Vec<PluginSharePrincipal>>,
|
||||
pub share_principals: Option<Vec<PluginSharePrincipal>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
|
||||
@@ -3,12 +3,13 @@ use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
/// Current remote-control connection status and environment id exposed to clients.
|
||||
/// Current remote-control connection status and remote identity exposed to clients.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RemoteControlStatusChangedNotification {
|
||||
pub status: RemoteControlConnectionStatus,
|
||||
pub installation_id: String,
|
||||
pub environment_id: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
@@ -2896,10 +2896,12 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::User,
|
||||
principal_id: "user-1".to_string(),
|
||||
role: PluginShareTargetRole::Reader,
|
||||
},
|
||||
PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::Workspace,
|
||||
principal_id: "workspace-1".to_string(),
|
||||
principal_type: PluginSharePrincipalType::Group,
|
||||
principal_id: "group-1".to_string(),
|
||||
role: PluginShareTargetRole::Reader,
|
||||
},
|
||||
]),
|
||||
})
|
||||
@@ -2912,10 +2914,12 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
{
|
||||
"principalType": "user",
|
||||
"principalId": "user-1",
|
||||
"role": "reader",
|
||||
},
|
||||
{
|
||||
"principalType": "workspace",
|
||||
"principalId": "workspace-1",
|
||||
"principalType": "group",
|
||||
"principalId": "group-1",
|
||||
"role": "reader",
|
||||
},
|
||||
],
|
||||
}),
|
||||
@@ -2936,17 +2940,21 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
assert_eq!(
|
||||
serde_json::to_value(PluginShareUpdateTargetsParams {
|
||||
remote_plugin_id: "plugins~Plugin_00000000000000000000000000000000".to_string(),
|
||||
discoverability: PluginShareUpdateDiscoverability::Unlisted,
|
||||
share_targets: vec![PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::Group,
|
||||
principal_id: "group-1".to_string(),
|
||||
role: PluginShareTargetRole::Editor,
|
||||
}],
|
||||
})
|
||||
.unwrap(),
|
||||
json!({
|
||||
"remotePluginId": "plugins~Plugin_00000000000000000000000000000000",
|
||||
"discoverability": "UNLISTED",
|
||||
"shareTargets": [{
|
||||
"principalType": "group",
|
||||
"principalId": "group-1",
|
||||
"role": "editor",
|
||||
}],
|
||||
}),
|
||||
);
|
||||
@@ -2956,16 +2964,20 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
principals: vec![PluginSharePrincipal {
|
||||
principal_type: PluginSharePrincipalType::User,
|
||||
principal_id: "user-1".to_string(),
|
||||
role: PluginSharePrincipalRole::Owner,
|
||||
name: "Gavin".to_string(),
|
||||
}],
|
||||
discoverability: PluginShareDiscoverability::Unlisted,
|
||||
})
|
||||
.unwrap(),
|
||||
json!({
|
||||
"principals": [{
|
||||
"principalType": "user",
|
||||
"principalId": "user-1",
|
||||
"role": "owner",
|
||||
"name": "Gavin",
|
||||
}],
|
||||
"discoverability": "UNLISTED",
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -3003,7 +3015,6 @@ fn plugin_share_list_response_serializes_share_items() {
|
||||
interface: None,
|
||||
keywords: Vec::new(),
|
||||
},
|
||||
share_url: "https://chatgpt.example/plugins/share/share-key-1".to_string(),
|
||||
local_plugin_path: None,
|
||||
}],
|
||||
})
|
||||
@@ -3023,7 +3034,6 @@ fn plugin_share_list_response_serializes_share_items() {
|
||||
"interface": null,
|
||||
"keywords": [],
|
||||
},
|
||||
"shareUrl": "https://chatgpt.example/plugins/share/share-key-1",
|
||||
"localPluginPath": null,
|
||||
}],
|
||||
}),
|
||||
|
||||
@@ -1551,6 +1551,7 @@ impl CodexClient {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: Some(
|
||||
NOTIFICATIONS_TO_OPT_OUT
|
||||
.iter()
|
||||
|
||||
@@ -11,6 +11,7 @@ pub use transport::AppServerTransportParseError;
|
||||
pub use transport::CHANNEL_CAPACITY;
|
||||
pub use transport::ConnectionOrigin;
|
||||
pub use transport::RemoteControlHandle;
|
||||
pub use transport::RemoteControlStartConfig;
|
||||
pub use transport::TransportEvent;
|
||||
pub use transport::app_server_control_socket_path;
|
||||
pub use transport::auth;
|
||||
|
||||
@@ -31,6 +31,7 @@ mod unix_socket_tests;
|
||||
mod websocket;
|
||||
|
||||
pub use remote_control::RemoteControlHandle;
|
||||
pub use remote_control::RemoteControlStartConfig;
|
||||
pub use remote_control::start_remote_control;
|
||||
pub use stdio::start_stdio_connection;
|
||||
pub use unix_socket::start_control_socket_acceptor;
|
||||
|
||||
@@ -19,6 +19,7 @@ const REQUEST_ID_HEADER: &str = "x-request-id";
|
||||
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
|
||||
const CF_RAY_HEADER: &str = "cf-ray";
|
||||
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
|
||||
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) struct RemoteControlEnrollment {
|
||||
@@ -193,6 +194,7 @@ pub(crate) fn format_headers(headers: &HeaderMap) -> String {
|
||||
pub(super) async fn enroll_remote_control_server(
|
||||
remote_control_target: &RemoteControlTarget,
|
||||
auth: &RemoteControlConnectionAuth,
|
||||
installation_id: &str,
|
||||
) -> io::Result<RemoteControlEnrollment> {
|
||||
let enroll_url = &remote_control_target.enroll_url;
|
||||
let server_name = gethostname().to_string_lossy().trim().to_string();
|
||||
@@ -201,6 +203,7 @@ pub(super) async fn enroll_remote_control_server(
|
||||
os: std::env::consts::OS,
|
||||
arch: std::env::consts::ARCH,
|
||||
app_server_version: env!("CARGO_PKG_VERSION"),
|
||||
installation_id: installation_id.to_string(),
|
||||
};
|
||||
let client = build_reqwest_client();
|
||||
let mut auth_headers = HeaderMap::new();
|
||||
@@ -210,6 +213,7 @@ pub(super) async fn enroll_remote_control_server(
|
||||
.timeout(REMOTE_CONTROL_ENROLL_TIMEOUT)
|
||||
.headers(auth_headers)
|
||||
.header(REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)
|
||||
.header(REMOTE_CONTROL_INSTALLATION_ID_HEADER, installation_id)
|
||||
.json(&request);
|
||||
|
||||
let response = http_request.send().await.map_err(|err| {
|
||||
@@ -459,6 +463,7 @@ mod tests {
|
||||
auth_provider: codex_model_provider::unauthenticated_auth_provider(),
|
||||
account_id: "account_id".to_string(),
|
||||
},
|
||||
"11111111-1111-4111-8111-111111111111",
|
||||
)
|
||||
.await
|
||||
.expect_err("invalid response should fail to parse");
|
||||
|
||||
@@ -28,6 +28,11 @@ use tokio::task::JoinHandle;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::warn;
|
||||
|
||||
pub struct RemoteControlStartConfig {
|
||||
pub remote_control_url: String,
|
||||
pub installation_id: String,
|
||||
}
|
||||
|
||||
pub(super) struct QueuedServerEnvelope {
|
||||
pub(super) event: ServerEvent,
|
||||
pub(super) client_id: ClientId,
|
||||
@@ -62,7 +67,7 @@ impl RemoteControlHandle {
|
||||
}
|
||||
|
||||
pub async fn start_remote_control(
|
||||
remote_control_url: String,
|
||||
config: RemoteControlStartConfig,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
transport_event_tx: mpsc::Sender<TransportEvent>,
|
||||
@@ -77,7 +82,7 @@ pub async fn start_remote_control(
|
||||
warn!("remote control disabled because sqlite state db is unavailable");
|
||||
}
|
||||
let remote_control_target = if initial_enabled {
|
||||
Some(normalize_remote_control_url(&remote_control_url)?)
|
||||
Some(normalize_remote_control_url(&config.remote_control_url)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -89,14 +94,18 @@ pub async fn start_remote_control(
|
||||
} else {
|
||||
RemoteControlConnectionStatus::Disabled
|
||||
},
|
||||
installation_id: config.installation_id.clone(),
|
||||
environment_id: None,
|
||||
};
|
||||
let (status_tx, _status_rx) = watch::channel(initial_status);
|
||||
let status_publisher = RemoteControlStatusPublisher::new(status_tx.clone());
|
||||
let join_handle = tokio::spawn(async move {
|
||||
RemoteControlWebsocket::new(
|
||||
remote_control_url,
|
||||
remote_control_target,
|
||||
websocket::RemoteControlWebsocketConfig {
|
||||
remote_control_url: config.remote_control_url,
|
||||
installation_id: config.installation_id,
|
||||
remote_control_target,
|
||||
},
|
||||
state_db,
|
||||
auth_manager,
|
||||
RemoteControlChannels {
|
||||
|
||||
@@ -19,6 +19,7 @@ pub(super) struct EnrollRemoteServerRequest {
|
||||
pub(super) os: &'static str,
|
||||
pub(super) arch: &'static str,
|
||||
pub(super) app_server_version: &'static str,
|
||||
pub(super) installation_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use super::enroll::REMOTE_CONTROL_ACCOUNT_ID_HEADER;
|
||||
use super::enroll::REMOTE_CONTROL_INSTALLATION_ID_HEADER;
|
||||
use super::enroll::RemoteControlEnrollment;
|
||||
use super::enroll::load_persisted_remote_control_enrollment;
|
||||
use super::enroll::update_persisted_remote_control_enrollment;
|
||||
@@ -56,6 +57,8 @@ use tokio_tungstenite::accept_hdr_async;
|
||||
use tokio_tungstenite::tungstenite;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
|
||||
|
||||
fn remote_control_auth_manager() -> Arc<AuthManager> {
|
||||
auth_manager_from_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
|
||||
}
|
||||
@@ -131,6 +134,7 @@ async fn expect_remote_control_status(
|
||||
if let Some(expected_status) = expected_status {
|
||||
assert_eq!(status.status, expected_status);
|
||||
}
|
||||
assert_eq!(status.installation_id, TEST_INSTALLATION_ID);
|
||||
assert_eq!(status.environment_id.as_deref(), expected_environment_id);
|
||||
}
|
||||
|
||||
@@ -173,7 +177,10 @@ async fn remote_control_transport_manages_virtual_clients_and_routes_messages()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -449,7 +456,10 @@ async fn remote_control_transport_reconnects_after_disconnect() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -528,7 +538,10 @@ async fn remote_control_start_allows_remote_control_invalid_url_when_disabled()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
"https://internal.example.com/backend-api/".to_string(),
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url: "https://internal.example.com/backend-api/".to_string(),
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -564,7 +577,10 @@ async fn remote_control_start_allows_missing_auth_when_enabled() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
auth_manager,
|
||||
transport_event_tx,
|
||||
@@ -596,7 +612,10 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -611,6 +630,7 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -645,7 +665,10 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -672,6 +695,7 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
},
|
||||
)
|
||||
@@ -682,6 +706,7 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
},
|
||||
)
|
||||
@@ -698,6 +723,7 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
},
|
||||
)
|
||||
@@ -729,7 +755,10 @@ async fn remote_control_transport_clears_outgoing_buffer_when_backend_acks() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -904,7 +933,10 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
let expected_server_name = gethostname().to_string_lossy().trim().to_string();
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -929,6 +961,12 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
enroll_request.headers.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
|
||||
Some(&"account_id".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
enroll_request
|
||||
.headers
|
||||
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
|
||||
Some(&TEST_INSTALLATION_ID.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
serde_json::from_str::<serde_json::Value>(&enroll_request.body)
|
||||
.expect("enroll body should deserialize"),
|
||||
@@ -937,6 +975,7 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
"os": std::env::consts::OS,
|
||||
"arch": std::env::consts::ARCH,
|
||||
"app_server_version": env!("CARGO_PKG_VERSION"),
|
||||
"installation_id": TEST_INSTALLATION_ID,
|
||||
})
|
||||
);
|
||||
respond_with_json(
|
||||
@@ -967,6 +1006,12 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
|
||||
Some(&"account_id".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
handshake_request
|
||||
.headers
|
||||
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
|
||||
Some(&TEST_INSTALLATION_ID.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
handshake_request.headers.get("x-codex-server-id"),
|
||||
Some(&"srv_e_test".to_string())
|
||||
@@ -1128,7 +1173,10 @@ async fn remote_control_http_mode_reuses_persisted_enrollment_before_reenrolling
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
@@ -1196,7 +1244,10 @@ async fn remote_control_stdio_mode_waits_for_client_name_before_connecting() {
|
||||
let (app_server_client_name_tx, app_server_client_name_rx) = oneshot::channel::<String>();
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
@@ -1255,7 +1306,10 @@ async fn remote_control_waits_for_account_id_before_enrolling() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(state_db.clone()),
|
||||
auth_manager,
|
||||
transport_event_tx,
|
||||
@@ -1338,7 +1392,10 @@ async fn remote_control_http_mode_clears_stale_persisted_enrollment_after_404()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
remote_control_url,
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
|
||||
@@ -55,6 +55,7 @@ use tracing::warn;
|
||||
|
||||
pub(super) const REMOTE_CONTROL_PROTOCOL_VERSION: &str = "3";
|
||||
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
|
||||
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
|
||||
const REMOTE_CONTROL_SUBSCRIBE_CURSOR_HEADER: &str = "x-codex-subscribe-cursor";
|
||||
const REMOTE_CONTROL_WEBSOCKET_PING_INTERVAL: std::time::Duration =
|
||||
std::time::Duration::from_secs(10);
|
||||
@@ -214,6 +215,7 @@ impl WebsocketState {
|
||||
|
||||
pub(crate) struct RemoteControlWebsocket {
|
||||
remote_control_url: String,
|
||||
installation_id: String,
|
||||
remote_control_target: Option<RemoteControlTarget>,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
@@ -229,6 +231,12 @@ pub(crate) struct RemoteControlWebsocket {
|
||||
enabled_rx: watch::Receiver<bool>,
|
||||
}
|
||||
|
||||
pub(crate) struct RemoteControlWebsocketConfig {
|
||||
pub(crate) remote_control_url: String,
|
||||
pub(crate) installation_id: String,
|
||||
pub(crate) remote_control_target: Option<RemoteControlTarget>,
|
||||
}
|
||||
|
||||
enum ConnectOutcome {
|
||||
Connected(Box<WebSocketStream<MaybeTlsStream<TcpStream>>>),
|
||||
Disabled,
|
||||
@@ -254,6 +262,7 @@ impl RemoteControlStatusPublisher {
|
||||
self.tx.send_if_modified(|status| {
|
||||
let next_status = RemoteControlStatusChangedNotification {
|
||||
status: connection_status,
|
||||
installation_id: status.installation_id.clone(),
|
||||
environment_id: if connection_status == RemoteControlConnectionStatus::Disabled {
|
||||
None
|
||||
} else {
|
||||
@@ -276,6 +285,7 @@ impl RemoteControlStatusPublisher {
|
||||
}
|
||||
let next_status = RemoteControlStatusChangedNotification {
|
||||
status: status.status,
|
||||
installation_id: status.installation_id.clone(),
|
||||
environment_id,
|
||||
};
|
||||
if *status == next_status {
|
||||
@@ -290,14 +300,14 @@ impl RemoteControlStatusPublisher {
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(super) struct RemoteControlConnectOptions<'a> {
|
||||
installation_id: &'a str,
|
||||
subscribe_cursor: Option<&'a str>,
|
||||
app_server_client_name: Option<&'a str>,
|
||||
}
|
||||
|
||||
impl RemoteControlWebsocket {
|
||||
pub(crate) fn new(
|
||||
remote_control_url: String,
|
||||
remote_control_target: Option<RemoteControlTarget>,
|
||||
config: RemoteControlWebsocketConfig,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
channels: RemoteControlChannels,
|
||||
@@ -315,8 +325,9 @@ impl RemoteControlWebsocket {
|
||||
let auth_recovery = auth_manager.unauthorized_recovery();
|
||||
|
||||
Self {
|
||||
remote_control_url,
|
||||
remote_control_target,
|
||||
remote_control_url: config.remote_control_url,
|
||||
installation_id: config.installation_id,
|
||||
remote_control_target: config.remote_control_target,
|
||||
state_db,
|
||||
auth_manager,
|
||||
status_publisher: channels.status_publisher,
|
||||
@@ -442,6 +453,7 @@ impl RemoteControlWebsocket {
|
||||
loop {
|
||||
let subscribe_cursor = self.state.lock().await.subscribe_cursor.clone();
|
||||
let connect_options = RemoteControlConnectOptions {
|
||||
installation_id: &self.installation_id,
|
||||
subscribe_cursor: subscribe_cursor.as_deref(),
|
||||
app_server_client_name,
|
||||
};
|
||||
@@ -918,6 +930,7 @@ fn build_remote_control_websocket_request(
|
||||
websocket_url: &str,
|
||||
enrollment: &RemoteControlEnrollment,
|
||||
auth: &RemoteControlConnectionAuth,
|
||||
installation_id: &str,
|
||||
subscribe_cursor: Option<&str>,
|
||||
) -> io::Result<tungstenite::http::Request<()>> {
|
||||
let mut request = websocket_url.into_client_request().map_err(|err| {
|
||||
@@ -942,6 +955,11 @@ fn build_remote_control_websocket_request(
|
||||
auth.auth_provider.add_auth_headers(&mut auth_headers);
|
||||
headers.extend(auth_headers);
|
||||
set_remote_control_header(headers, REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)?;
|
||||
set_remote_control_header(
|
||||
headers,
|
||||
REMOTE_CONTROL_INSTALLATION_ID_HEADER,
|
||||
installation_id,
|
||||
)?;
|
||||
if let Some(subscribe_cursor) = subscribe_cursor {
|
||||
set_remote_control_header(
|
||||
headers,
|
||||
@@ -1066,7 +1084,12 @@ pub(super) async fn connect_remote_control_websocket(
|
||||
"creating new remote control enrollment: websocket_url={}, enroll_url={}, account_id={}",
|
||||
remote_control_target.websocket_url, remote_control_target.enroll_url, auth.account_id
|
||||
);
|
||||
let new_enrollment = match enroll_remote_control_server(remote_control_target, &auth).await
|
||||
let new_enrollment = match enroll_remote_control_server(
|
||||
remote_control_target,
|
||||
&auth,
|
||||
connect_options.installation_id,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(new_enrollment) => new_enrollment,
|
||||
Err(err)
|
||||
@@ -1110,6 +1133,7 @@ pub(super) async fn connect_remote_control_websocket(
|
||||
&remote_control_target.websocket_url,
|
||||
enrollment_ref,
|
||||
&auth,
|
||||
connect_options.installation_id,
|
||||
connect_options.subscribe_cursor,
|
||||
)?;
|
||||
|
||||
@@ -1247,6 +1271,7 @@ mod tests {
|
||||
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
#[cfg(not(windows))]
|
||||
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
|
||||
|
||||
fn remote_control_status_channel() -> (
|
||||
RemoteControlStatusPublisher,
|
||||
@@ -1254,6 +1279,7 @@ mod tests {
|
||||
) {
|
||||
let (status_tx, status_rx) = watch::channel(RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
});
|
||||
(RemoteControlStatusPublisher::new(status_tx), status_rx)
|
||||
@@ -1359,6 +1385,7 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1376,6 +1403,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1435,6 +1463,7 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1448,6 +1477,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1515,6 +1545,7 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1567,6 +1598,7 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1614,6 +1646,7 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1632,6 +1665,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -1656,8 +1690,11 @@ mod tests {
|
||||
let shutdown_token = shutdown_token.clone();
|
||||
async move {
|
||||
RemoteControlWebsocket::new(
|
||||
remote_control_url,
|
||||
Some(remote_control_target),
|
||||
RemoteControlWebsocketConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
remote_control_target: Some(remote_control_target),
|
||||
},
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
RemoteControlChannels {
|
||||
@@ -1701,6 +1738,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_first".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1721,6 +1759,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_first".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1734,6 +1773,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -1748,6 +1788,7 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -41,6 +41,7 @@ codex-external-agent-migration = { workspace = true }
|
||||
codex-external-agent-sessions = { workspace = true }
|
||||
codex-features = { workspace = true }
|
||||
codex-git-utils = { workspace = true }
|
||||
codex-file-watcher = { workspace = true }
|
||||
codex-hooks = { workspace = true }
|
||||
codex-otel = { workspace = true }
|
||||
codex-plugin = { workspace = true }
|
||||
|
||||
@@ -202,6 +202,7 @@ Example with notification opt-out:
|
||||
- `modelProvider/capabilities/read` — read provider-level capabilities for the currently configured model provider.
|
||||
- `experimentalFeature/list` — list feature flags with stage metadata (`beta`, `underDevelopment`, `stable`, etc.), enabled/default-enabled state, and cursor pagination. For non-beta flags, `displayName`/`description`/`announcement` are `null`.
|
||||
- `experimentalFeature/enablement/set` — patch the in-memory process-wide runtime feature enablement for the currently supported feature keys (`apps`, `memories`, `plugins`, `remote_control`, `tool_search`, `tool_suggest`, `tool_call_mcp_elicitation`). For each feature, precedence is: cloud requirements > --enable <feature_name> > config.toml > experimentalFeature/enablement/set (new) > code default.
|
||||
- `environment/add` — experimental; add or replace a named remote environment by `environmentId` and `execServerUrl` for later selection by `thread/start` or `turn/start`; returns `{}` and does not change the default environment.
|
||||
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination). Built-in presets do not select a model; the Plan preset selects medium reasoning effort. This response omits built-in developer instructions; clients should either pass `settings.developer_instructions: null` when setting a mode to use Codex's built-in instructions, or provide their own instructions explicitly.
|
||||
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
|
||||
- `hooks/list` — list discovered hooks for one or more `cwd` values.
|
||||
@@ -1337,6 +1338,10 @@ UI guidance for IDEs: surface an approval dialog as soon as the request arrives.
|
||||
|
||||
When the client responds to `item/tool/requestUserInput`, the server emits `serverRequest/resolved` with `{ threadId, requestId }`. If the pending request is cleared by turn start, turn completion, or turn interruption before the client answers, the server emits the same notification for that cleanup.
|
||||
|
||||
### Attestation generation
|
||||
|
||||
Desktop hosts that provide upstream attestation should set `capabilities.requestAttestation` during `initialize` and handle the server-initiated `attestation/generate` request. App-server issues it just in time before ChatGPT Codex requests that forward `x-oai-attestation`; the client responds with `{ "token": "v1.<opaque>" }`, where `token` is an opaque client-owned value. When app-server receives a client response, it forwards a consistent outer envelope such as `{ "v": 1, "s": 0, "t": "v1.<opaque>" }`, where `t` contains the client token unchanged. If app-server attempts attestation but fails within its own boundary, it sends the same envelope shape with an app-server status code and without `t` (`1 = timeout`, `2 = request failed`, `3 = request canceled`, `4 = malformed response`). If no initialized client opted into attestation, app-server omits `x-oai-attestation` for that upstream request.
|
||||
|
||||
### MCP server elicitations
|
||||
|
||||
MCP servers can interrupt a turn and ask the client for structured input via `mcpServer/elicitation/request`.
|
||||
|
||||
217
codex-rs/app-server/src/attestation.rs
Normal file
217
codex-rs/app-server/src/attestation.rs
Normal file
@@ -0,0 +1,217 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::http::HeaderValue;
|
||||
use codex_app_server_protocol::AttestationGenerateParams;
|
||||
use codex_app_server_protocol::AttestationGenerateResponse;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use codex_core::AttestationContext;
|
||||
use codex_core::AttestationProvider;
|
||||
use codex_core::GenerateAttestationFuture;
|
||||
use serde::Serialize;
|
||||
use tokio::time::Duration;
|
||||
use tokio::time::timeout;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::thread_state::ThreadStateManager;
|
||||
|
||||
const ATTESTATION_GENERATE_TIMEOUT: Duration = Duration::from_millis(100);
|
||||
|
||||
pub(crate) fn app_server_attestation_provider(
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
thread_state_manager: ThreadStateManager,
|
||||
) -> Arc<dyn AttestationProvider> {
|
||||
Arc::new(AppServerAttestationProvider {
|
||||
outgoing,
|
||||
thread_state_manager,
|
||||
})
|
||||
}
|
||||
|
||||
struct AppServerAttestationProvider {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
thread_state_manager: ThreadStateManager,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AppServerAttestationProvider {
|
||||
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
formatter
|
||||
.debug_struct("AppServerAttestationProvider")
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl AttestationProvider for AppServerAttestationProvider {
|
||||
fn header_for_request(&self, context: AttestationContext) -> GenerateAttestationFuture<'_> {
|
||||
let outgoing = self.outgoing.clone();
|
||||
let thread_state_manager = self.thread_state_manager.clone();
|
||||
Box::pin(async move {
|
||||
request_attestation_header_value_with_timeout(
|
||||
outgoing,
|
||||
thread_state_manager,
|
||||
context.thread_id,
|
||||
ATTESTATION_GENERATE_TIMEOUT,
|
||||
)
|
||||
.await
|
||||
.and_then(|value| HeaderValue::from_bytes(value.as_bytes()).ok())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn request_attestation_header_value_with_timeout(
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
thread_state_manager: ThreadStateManager,
|
||||
thread_id: codex_protocol::ThreadId,
|
||||
timeout_duration: Duration,
|
||||
) -> Option<String> {
|
||||
let connection_id = thread_state_manager
|
||||
.first_attestation_capable_connection_for_thread(thread_id)
|
||||
.await?;
|
||||
|
||||
let connection_ids = [connection_id];
|
||||
let (request_id, rx) = outgoing
|
||||
.send_request_to_connections(
|
||||
Some(&connection_ids),
|
||||
ServerRequestPayload::AttestationGenerate(AttestationGenerateParams {}),
|
||||
/*thread_id*/ None,
|
||||
)
|
||||
.await;
|
||||
|
||||
let result = match timeout(timeout_duration, rx).await {
|
||||
Ok(Ok(Ok(result))) => result,
|
||||
Ok(Ok(Err(err))) => {
|
||||
warn!(
|
||||
code = err.code,
|
||||
message = %err.message,
|
||||
"attestation generation request failed"
|
||||
);
|
||||
return app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::RequestFailed,
|
||||
/*token*/ None,
|
||||
);
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
warn!("attestation generation request canceled: {err}");
|
||||
return app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::RequestCanceled,
|
||||
/*token*/ None,
|
||||
);
|
||||
}
|
||||
Err(_) => {
|
||||
let _canceled = outgoing.cancel_request(&request_id).await;
|
||||
warn!(
|
||||
timeout_seconds = timeout_duration.as_secs(),
|
||||
"attestation generation request timed out"
|
||||
);
|
||||
return app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::Timeout,
|
||||
/*token*/ None,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
match serde_json::from_value::<AttestationGenerateResponse>(result) {
|
||||
Ok(response) => app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::Ok,
|
||||
Some(&response.token),
|
||||
),
|
||||
Err(err) => {
|
||||
warn!("failed to deserialize attestation generation response: {err}");
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::MalformedResponse,
|
||||
/*token*/ None,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
enum AppServerAttestationStatus {
|
||||
Ok,
|
||||
Timeout,
|
||||
RequestFailed,
|
||||
RequestCanceled,
|
||||
MalformedResponse,
|
||||
}
|
||||
|
||||
impl AppServerAttestationStatus {
|
||||
const fn code(self) -> u8 {
|
||||
match self {
|
||||
Self::Ok => 0,
|
||||
Self::Timeout => 1,
|
||||
Self::RequestFailed => 2,
|
||||
Self::RequestCanceled => 3,
|
||||
Self::MalformedResponse => 4,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct AppServerAttestationEnvelope<'a> {
|
||||
v: u8,
|
||||
s: u8,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
t: Option<&'a str>,
|
||||
}
|
||||
|
||||
fn app_server_attestation_header_value(
|
||||
status: AppServerAttestationStatus,
|
||||
token: Option<&str>,
|
||||
) -> Option<String> {
|
||||
serde_json::to_string(&AppServerAttestationEnvelope {
|
||||
v: 1,
|
||||
s: status.code(),
|
||||
t: token,
|
||||
})
|
||||
.map_err(|err| warn!("failed to serialize app-server attestation envelope: {err}"))
|
||||
.ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::AppServerAttestationStatus;
|
||||
use super::app_server_attestation_header_value;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn app_server_attestation_header_value_wraps_opaque_client_payloads() {
|
||||
assert_eq!(
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::Ok,
|
||||
Some("v1.opaque-client-payload"),
|
||||
),
|
||||
Some(r#"{"v":1,"s":0,"t":"v1.opaque-client-payload"}"#.to_string())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn app_server_attestation_header_value_reports_app_server_failures() {
|
||||
assert_eq!(
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::Timeout,
|
||||
/*token*/ None,
|
||||
),
|
||||
Some(r#"{"v":1,"s":1}"#.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::RequestFailed,
|
||||
/*token*/ None,
|
||||
),
|
||||
Some(r#"{"v":1,"s":2}"#.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::RequestCanceled,
|
||||
/*token*/ None,
|
||||
),
|
||||
Some(r#"{"v":1,"s":3}"#.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
app_server_attestation_header_value(
|
||||
AppServerAttestationStatus::MalformedResponse,
|
||||
/*token*/ None
|
||||
),
|
||||
Some(r#"{"v":1,"s":4}"#.to_string())
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -49,7 +49,6 @@ use codex_app_server_protocol::RawResponseItemCompletedNotification;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use codex_app_server_protocol::SkillsChangedNotification;
|
||||
use codex_app_server_protocol::ThreadGoalUpdatedNotification;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadRealtimeClosedNotification;
|
||||
@@ -194,13 +193,6 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
)
|
||||
.await;
|
||||
}
|
||||
EventMsg::SkillsUpdateAvailable => {
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::SkillsChanged(
|
||||
SkillsChangedNotification {},
|
||||
))
|
||||
.await;
|
||||
}
|
||||
EventMsg::McpStartupUpdate(update) => {
|
||||
let (status, error) = match update.status {
|
||||
codex_protocol::protocol::McpStartupStatus::Starting => {
|
||||
|
||||
@@ -8,12 +8,12 @@ use codex_app_server_protocol::FsWatchParams;
|
||||
use codex_app_server_protocol::FsWatchResponse;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::file_watcher::FileWatcher;
|
||||
use codex_core::file_watcher::FileWatcherEvent;
|
||||
use codex_core::file_watcher::FileWatcherSubscriber;
|
||||
use codex_core::file_watcher::Receiver;
|
||||
use codex_core::file_watcher::WatchPath;
|
||||
use codex_core::file_watcher::WatchRegistration;
|
||||
use codex_file_watcher::FileWatcher;
|
||||
use codex_file_watcher::FileWatcherEvent;
|
||||
use codex_file_watcher::FileWatcherSubscriber;
|
||||
use codex_file_watcher::Receiver;
|
||||
use codex_file_watcher::WatchPath;
|
||||
use codex_file_watcher::WatchRegistration;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::collections::hash_map::Entry;
|
||||
|
||||
@@ -8,7 +8,6 @@ use codex_config::RemoteThreadConfigLoader;
|
||||
use codex_config::ThreadConfigLoader;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::resolve_installation_id;
|
||||
use codex_exec_server::EnvironmentManagerArgs;
|
||||
use codex_features::Feature;
|
||||
use codex_login::AuthManager;
|
||||
use codex_utils_cli::CliConfigOverrides;
|
||||
@@ -31,6 +30,7 @@ use crate::outgoing_message::QueuedOutgoingMessage;
|
||||
use crate::transport::CHANNEL_CAPACITY;
|
||||
use crate::transport::ConnectionState;
|
||||
use crate::transport::OutboundConnectionState;
|
||||
use crate::transport::RemoteControlStartConfig;
|
||||
use crate::transport::TransportEvent;
|
||||
use crate::transport::auth::policy_from_settings;
|
||||
use crate::transport::route_outgoing_envelope;
|
||||
@@ -74,6 +74,7 @@ use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
mod analytics_utils;
|
||||
mod app_server_tracing;
|
||||
mod attestation;
|
||||
mod bespoke_event_handling;
|
||||
mod command_exec;
|
||||
mod config;
|
||||
@@ -93,6 +94,7 @@ mod outgoing_message;
|
||||
mod request_processors;
|
||||
mod request_serialization;
|
||||
mod server_request_error;
|
||||
mod skills_watcher;
|
||||
mod thread_state;
|
||||
mod thread_status;
|
||||
mod transport;
|
||||
@@ -159,22 +161,33 @@ enum ShutdownAction {
|
||||
Finish,
|
||||
}
|
||||
|
||||
async fn shutdown_signal() -> IoResult<()> {
|
||||
#[derive(Clone, Copy)]
|
||||
enum ShutdownSignal {
|
||||
Forceable,
|
||||
#[cfg(unix)]
|
||||
GracefulOnly,
|
||||
}
|
||||
|
||||
async fn shutdown_signal() -> IoResult<ShutdownSignal> {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use tokio::signal::unix::SignalKind;
|
||||
use tokio::signal::unix::signal;
|
||||
|
||||
let mut term = signal(SignalKind::terminate())?;
|
||||
let mut hangup = signal(SignalKind::hangup())?;
|
||||
tokio::select! {
|
||||
ctrl_c_result = tokio::signal::ctrl_c() => ctrl_c_result,
|
||||
_ = term.recv() => Ok(()),
|
||||
ctrl_c_result = tokio::signal::ctrl_c() => ctrl_c_result.map(|_| ShutdownSignal::Forceable),
|
||||
_ = term.recv() => Ok(ShutdownSignal::Forceable),
|
||||
_ = hangup.recv() => Ok(ShutdownSignal::GracefulOnly),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
{
|
||||
tokio::signal::ctrl_c().await
|
||||
tokio::signal::ctrl_c()
|
||||
.await
|
||||
.map(|_| ShutdownSignal::Forceable)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -187,9 +200,16 @@ impl ShutdownState {
|
||||
self.forced
|
||||
}
|
||||
|
||||
fn on_signal(&mut self, connection_count: usize, running_turn_count: usize) {
|
||||
fn on_signal(
|
||||
&mut self,
|
||||
signal: ShutdownSignal,
|
||||
connection_count: usize,
|
||||
running_turn_count: usize,
|
||||
) {
|
||||
if self.requested {
|
||||
self.forced = true;
|
||||
if matches!(signal, ShutdownSignal::Forceable) {
|
||||
self.forced = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -419,15 +439,6 @@ pub async fn run_main_with_transport_options(
|
||||
auth: AppServerWebsocketAuthSettings,
|
||||
runtime_options: AppServerRuntimeOptions,
|
||||
) -> IoResult<()> {
|
||||
let environment_manager = Arc::new(
|
||||
EnvironmentManager::new(EnvironmentManagerArgs::new(
|
||||
ExecServerRuntimePaths::from_optional_paths(
|
||||
arg0_paths.codex_self_exe.clone(),
|
||||
arg0_paths.codex_linux_sandbox_exe.clone(),
|
||||
)?,
|
||||
))
|
||||
.await,
|
||||
);
|
||||
let (transport_event_tx, mut transport_event_rx) =
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingEnvelope>(CHANNEL_CAPACITY);
|
||||
@@ -443,6 +454,17 @@ pub async fn run_main_with_transport_options(
|
||||
)
|
||||
})?;
|
||||
let codex_home = find_codex_home()?;
|
||||
let local_runtime_paths = ExecServerRuntimePaths::from_optional_paths(
|
||||
arg0_paths.codex_self_exe.clone(),
|
||||
arg0_paths.codex_linux_sandbox_exe.clone(),
|
||||
)?;
|
||||
let environment_manager = if loader_overrides.ignore_user_config {
|
||||
EnvironmentManager::from_env(local_runtime_paths).await
|
||||
} else {
|
||||
EnvironmentManager::from_codex_home(codex_home.clone(), local_runtime_paths).await
|
||||
}
|
||||
.map(Arc::new)
|
||||
.map_err(std::io::Error::other)?;
|
||||
let config_manager = ConfigManager::new(
|
||||
codex_home.to_path_buf(),
|
||||
cli_kv_overrides.clone(),
|
||||
@@ -686,7 +708,10 @@ pub async fn run_main_with_transport_options(
|
||||
}
|
||||
|
||||
let (remote_control_accept_handle, remote_control_handle) = start_remote_control(
|
||||
config.chatgpt_base_url.clone(),
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url: config.chatgpt_base_url.clone(),
|
||||
installation_id: installation_id.clone(),
|
||||
},
|
||||
state_db.clone(),
|
||||
auth_manager.clone(),
|
||||
transport_event_tx.clone(),
|
||||
@@ -808,11 +833,15 @@ pub async fn run_main_with_transport_options(
|
||||
|
||||
tokio::select! {
|
||||
shutdown_signal_result = shutdown_signal(), if graceful_signal_restart_enabled && !shutdown_state.forced() => {
|
||||
if let Err(err) = shutdown_signal_result {
|
||||
warn!("failed to listen for shutdown signal during graceful restart drain: {err}");
|
||||
}
|
||||
let signal = match shutdown_signal_result {
|
||||
Ok(signal) => signal,
|
||||
Err(err) => {
|
||||
warn!("failed to listen for shutdown signal during graceful restart drain: {err}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let running_turn_count = *running_turn_count_rx.borrow();
|
||||
shutdown_state.on_signal(connections.len(), running_turn_count);
|
||||
shutdown_state.on_signal(signal, connections.len(), running_turn_count);
|
||||
}
|
||||
changed = running_turn_count_rx.changed(), if graceful_signal_restart_enabled && shutdown_state.requested() => {
|
||||
if changed.is_err() {
|
||||
@@ -933,7 +962,14 @@ pub async fn run_main_with_transport_options(
|
||||
),
|
||||
)
|
||||
.await;
|
||||
processor.connection_initialized(connection_id).await;
|
||||
processor
|
||||
.connection_initialized(
|
||||
connection_id,
|
||||
connection_state
|
||||
.session
|
||||
.request_attestation(),
|
||||
)
|
||||
.await;
|
||||
connection_state
|
||||
.outbound_initialized
|
||||
.store(true, std::sync::atomic::Ordering::Release);
|
||||
@@ -977,6 +1013,7 @@ pub async fn run_main_with_transport_options(
|
||||
.send_server_notification(ServerNotification::RemoteControlStatusChanged(
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: status.status,
|
||||
installation_id: status.installation_id,
|
||||
environment_id: status.environment_id,
|
||||
},
|
||||
))
|
||||
|
||||
@@ -187,6 +187,7 @@ mod tests {
|
||||
thread_store,
|
||||
Some(state_db.clone()),
|
||||
"11111111-1111-4111-8111-111111111111".to_string(),
|
||||
/*attestation_provider*/ None,
|
||||
));
|
||||
thread_manager.start_thread(good_config).await?;
|
||||
thread_manager.start_thread(bad_config).await?;
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use crate::attestation::app_server_attestation_provider;
|
||||
use crate::config_manager::ConfigManager;
|
||||
use crate::connection_rpc_gate::ConnectionRpcGate;
|
||||
use crate::error_code::invalid_request;
|
||||
@@ -17,6 +18,7 @@ use crate::request_processors::AppsRequestProcessor;
|
||||
use crate::request_processors::CatalogRequestProcessor;
|
||||
use crate::request_processors::CommandExecRequestProcessor;
|
||||
use crate::request_processors::ConfigRequestProcessor;
|
||||
use crate::request_processors::EnvironmentRequestProcessor;
|
||||
use crate::request_processors::ExternalAgentConfigRequestProcessor;
|
||||
use crate::request_processors::FeedbackRequestProcessor;
|
||||
use crate::request_processors::FsRequestProcessor;
|
||||
@@ -34,6 +36,8 @@ use crate::request_processors::WindowsSandboxRequestProcessor;
|
||||
use crate::request_serialization::QueuedInitializedRequest;
|
||||
use crate::request_serialization::RequestSerializationQueueKey;
|
||||
use crate::request_serialization::RequestSerializationQueues;
|
||||
use crate::skills_watcher::SkillsWatcher;
|
||||
use crate::thread_state::ConnectionCapabilities;
|
||||
use crate::thread_state::ThreadStateManager;
|
||||
use crate::transport::AppServerTransport;
|
||||
use crate::transport::RemoteControlHandle;
|
||||
@@ -82,6 +86,7 @@ use tokio::time::timeout;
|
||||
use tracing::Instrument;
|
||||
|
||||
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ExternalAuthRefreshBridge {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
@@ -158,6 +163,7 @@ pub(crate) struct MessageProcessor {
|
||||
command_exec_processor: CommandExecRequestProcessor,
|
||||
process_exec_processor: ProcessExecRequestProcessor,
|
||||
config_processor: ConfigRequestProcessor,
|
||||
environment_processor: EnvironmentRequestProcessor,
|
||||
external_agent_config_processor: ExternalAgentConfigRequestProcessor,
|
||||
feedback_processor: FeedbackRequestProcessor,
|
||||
fs_processor: FsRequestProcessor,
|
||||
@@ -186,6 +192,7 @@ pub(crate) struct InitializedConnectionSessionState {
|
||||
pub(crate) opted_out_notification_methods: HashSet<String>,
|
||||
pub(crate) app_server_client_name: String,
|
||||
pub(crate) client_version: String,
|
||||
pub(crate) request_attestation: bool,
|
||||
}
|
||||
|
||||
impl Default for ConnectionSessionState {
|
||||
@@ -231,6 +238,12 @@ impl ConnectionSessionState {
|
||||
.map(|session| session.client_version.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn request_attestation(&self) -> bool {
|
||||
self.initialized
|
||||
.get()
|
||||
.is_some_and(|session| session.request_attestation)
|
||||
}
|
||||
|
||||
pub(crate) fn initialize(&self, session: InitializedConnectionSessionState) -> Result<(), ()> {
|
||||
self.initialized.set(session).map_err(|_| ())
|
||||
}
|
||||
@@ -280,6 +293,7 @@ impl MessageProcessor {
|
||||
auth_manager.set_external_auth(Arc::new(ExternalAuthRefreshBridge {
|
||||
outgoing: outgoing.clone(),
|
||||
}));
|
||||
let thread_state_manager = ThreadStateManager::new();
|
||||
// The thread store is intentionally process-scoped. Config reloads can
|
||||
// affect per-thread behavior, but they must not move newly started,
|
||||
// resumed, or forked threads to a different persistence backend/root.
|
||||
@@ -293,13 +307,17 @@ impl MessageProcessor {
|
||||
Arc::clone(&thread_store),
|
||||
state_db.clone(),
|
||||
installation_id,
|
||||
Some(app_server_attestation_provider(
|
||||
outgoing.clone(),
|
||||
thread_state_manager.clone(),
|
||||
)),
|
||||
));
|
||||
thread_manager
|
||||
.plugins_manager()
|
||||
.set_analytics_events_client(analytics_events_client.clone());
|
||||
let skills_watcher = SkillsWatcher::new(thread_manager.skills_manager(), outgoing.clone());
|
||||
|
||||
let pending_thread_unloads = Arc::new(Mutex::new(HashSet::new()));
|
||||
let thread_state_manager = ThreadStateManager::new();
|
||||
let thread_watch_manager =
|
||||
crate::thread_status::ThreadWatchManager::new_with_outgoing(outgoing.clone());
|
||||
let thread_list_state_permit = Arc::new(Semaphore::new(/*permits*/ 1));
|
||||
@@ -389,6 +407,7 @@ impl MessageProcessor {
|
||||
Arc::clone(&thread_list_state_permit),
|
||||
thread_goal_processor.clone(),
|
||||
state_db,
|
||||
Arc::clone(&skills_watcher),
|
||||
);
|
||||
let turn_processor = TurnRequestProcessor::new(
|
||||
auth_manager.clone(),
|
||||
@@ -402,6 +421,7 @@ impl MessageProcessor {
|
||||
thread_state_manager,
|
||||
thread_watch_manager,
|
||||
thread_list_state_permit,
|
||||
Arc::clone(&skills_watcher),
|
||||
);
|
||||
if matches!(plugin_startup_tasks, crate::PluginStartupTasks::Start) {
|
||||
// Keep plugin startup warmups aligned at app-server startup.
|
||||
@@ -432,6 +452,8 @@ impl MessageProcessor {
|
||||
arg0_paths,
|
||||
config.codex_home.to_path_buf(),
|
||||
);
|
||||
let environment_processor =
|
||||
EnvironmentRequestProcessor::new(thread_manager.environment_manager());
|
||||
let fs_processor = FsRequestProcessor::new(
|
||||
thread_manager
|
||||
.environment_manager()
|
||||
@@ -453,6 +475,7 @@ impl MessageProcessor {
|
||||
command_exec_processor,
|
||||
process_exec_processor,
|
||||
config_processor,
|
||||
environment_processor,
|
||||
external_agent_config_processor,
|
||||
feedback_processor,
|
||||
fs_processor,
|
||||
@@ -620,9 +643,18 @@ impl MessageProcessor {
|
||||
.await;
|
||||
}
|
||||
|
||||
pub(crate) async fn connection_initialized(&self, connection_id: ConnectionId) {
|
||||
pub(crate) async fn connection_initialized(
|
||||
&self,
|
||||
connection_id: ConnectionId,
|
||||
request_attestation: bool,
|
||||
) {
|
||||
self.thread_processor
|
||||
.connection_initialized(connection_id)
|
||||
.connection_initialized(
|
||||
connection_id,
|
||||
ConnectionCapabilities {
|
||||
request_attestation,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -718,7 +750,12 @@ impl MessageProcessor {
|
||||
.await?;
|
||||
if connection_initialized {
|
||||
self.thread_processor
|
||||
.connection_initialized(connection_id)
|
||||
.connection_initialized(
|
||||
connection_id,
|
||||
ConnectionCapabilities {
|
||||
request_attestation: session.request_attestation(),
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
return Ok(());
|
||||
@@ -850,6 +887,9 @@ impl MessageProcessor {
|
||||
.config_requirements_read()
|
||||
.await
|
||||
.map(|response| Some(response.into())),
|
||||
ClientRequest::EnvironmentAdd { params, .. } => {
|
||||
self.environment_processor.environment_add(params).await
|
||||
}
|
||||
ClientRequest::FsReadFile { params, .. } => self
|
||||
.fs_processor
|
||||
.read_file(params)
|
||||
|
||||
@@ -267,7 +267,7 @@ impl OutgoingMessageSender {
|
||||
RequestId::Integer(self.next_server_request_id.fetch_add(1, Ordering::Relaxed))
|
||||
}
|
||||
|
||||
async fn send_request_to_connections(
|
||||
pub(crate) async fn send_request_to_connections(
|
||||
&self,
|
||||
connection_ids: Option<&[ConnectionId]>,
|
||||
request: ServerRequestPayload,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user