mirror of
https://github.com/openai/codex.git
synced 2026-05-11 23:02:39 +00:00
Compare commits
1 Commits
dh--app-se
...
codex/conf
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a780939ec7 |
@@ -53,7 +53,7 @@ Use `--window "past week"` or `--window-hours 168` when the user asks for a non-
|
||||
## Summary
|
||||
No major issues reported by users.
|
||||
|
||||
Source: collector v5, git `abc123def456`, window `2026-04-27T00:00:00Z` to `2026-04-28T00:00:00Z`.
|
||||
Source: collector v4, git `abc123def456`, window `2026-04-27T00:00:00Z` to `2026-04-28T00:00:00Z`.
|
||||
Want details? I can expand this into the issue table.
|
||||
```
|
||||
|
||||
@@ -65,7 +65,7 @@ Two issues are being surfaced by users:
|
||||
🔥🔥 Terminal launch hangs on startup [1](https://github.com/openai/codex/issues/123)
|
||||
🔥 Resume switches model providers unexpectedly [2](https://github.com/openai/codex/issues/456)
|
||||
|
||||
Source: collector v5, git `abc123def456`, window `2026-04-27T00:00:00Z` to `2026-04-28T00:00:00Z`.
|
||||
Source: collector v4, git `abc123def456`, window `2026-04-27T00:00:00Z` to `2026-04-28T00:00:00Z`.
|
||||
Want details? I can expand this into the issue table.
|
||||
```
|
||||
5. In `## Details`, when details are requested, include a compact table only when useful:
|
||||
@@ -76,7 +76,7 @@ Want details? I can expand this into the issue table.
|
||||
- A clear quiet/no-concern sentence when there is no meaningful signal.
|
||||
6. Use the JSON `attention_marker` exactly. It is empty for normal rows, `🔥` for elevated rows, and `🔥🔥` for very high-attention rows. The actual cutoffs are in `attention_thresholds`.
|
||||
7. Use inline numbered references where a row or bullet points to issues, for example `Compaction bugs [1](https://github.com/openai/codex/issues/123), [2](https://github.com/openai/codex/issues/456)`. Do not add a separate footnotes section.
|
||||
8. Label `interactions` as `Interactions`; it counts unique human GitHub users who created a new issue, added a new comment, or reacted during the requested window. Multiple posts/reactions from the same user on the same issue count once.
|
||||
8. Label `interactions` as `Interactions`; it counts posts/comments/reactions during the requested window, not unique people.
|
||||
9. Mention the collector `script_version`, repo checkout `git_head`, and time window in one compact source line. In default mode, put this before the details prompt so the final line still asks whether the user wants details. In details-upfront mode, it can be the footer.
|
||||
|
||||
## Reaction Handling
|
||||
@@ -89,7 +89,7 @@ GitHub issue search is still seeded by issue `updated_at`, so a purely reaction-
|
||||
|
||||
## Attention Markers
|
||||
|
||||
The collector scales attention markers by the requested time window. The baseline is 5 unique human users for `🔥` and 10 unique human users for `🔥🔥` over 24 hours; longer or shorter windows scale those cutoffs linearly and round up. For example, a one-week report uses 35 and 70 interactions. Unique human users are users who authored a new issue, authored a new comment, or reacted during the window, including upvotes. Multiple actions from the same user on the same issue count once. Bot posts and bot reactions are excluded. In prose, explain this as high user interaction rather than naming the emoji.
|
||||
The collector scales attention markers by the requested time window. The baseline is 5 human user interactions for `🔥` and 10 for `🔥🔥` over 24 hours; longer or shorter windows scale those cutoffs linearly and round up. For example, a one-week report uses 35 and 70 interactions. Human user interactions are human-authored new issue posts, human-authored new comments, and human reactions created during the window, including upvotes. Bot posts and bot reactions are excluded. In prose, explain this as high user interaction rather than naming the emoji.
|
||||
|
||||
## Freshness
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from urllib.parse import quote
|
||||
|
||||
SCRIPT_VERSION = 5
|
||||
SCRIPT_VERSION = 4
|
||||
QUALIFYING_KIND_LABELS = ("bug", "enhancement")
|
||||
REACTION_KEYS = ("+1", "-1", "laugh", "hooray", "confused", "heart", "rocket", "eyes")
|
||||
BASE_ATTENTION_WINDOW_HOURS = 24.0
|
||||
@@ -393,15 +393,9 @@ def is_bot_login(login):
|
||||
return bool(login) and login.lower().endswith("[bot]")
|
||||
|
||||
|
||||
def human_login_key(user_obj):
|
||||
login = extract_login(user_obj)
|
||||
if not login or is_bot_login(login):
|
||||
return ""
|
||||
return login.casefold()
|
||||
|
||||
|
||||
def is_human_user(user_obj):
|
||||
return bool(human_login_key(user_obj))
|
||||
login = extract_login(user_obj)
|
||||
return bool(login) and not is_bot_login(login)
|
||||
|
||||
|
||||
def label_names(issue):
|
||||
@@ -473,26 +467,22 @@ def reaction_summary(item):
|
||||
def reaction_event_summary(reactions, since, until):
|
||||
counts = {}
|
||||
total = 0
|
||||
users = set()
|
||||
for reaction in reactions or []:
|
||||
if not isinstance(reaction, dict):
|
||||
continue
|
||||
if not is_in_window(str(reaction.get("created_at") or ""), since, until):
|
||||
continue
|
||||
user_key = human_login_key(reaction.get("user"))
|
||||
if not user_key:
|
||||
if not is_human_user(reaction.get("user")):
|
||||
continue
|
||||
content = str(reaction.get("content") or "")
|
||||
if not content:
|
||||
continue
|
||||
counts[content] = counts.get(content, 0) + 1
|
||||
total += 1
|
||||
users.add(user_key)
|
||||
return {
|
||||
"total": total,
|
||||
"counts": counts,
|
||||
"upvotes": counts.get("+1", 0),
|
||||
"users": sorted(users, key=str.casefold),
|
||||
}
|
||||
|
||||
|
||||
@@ -628,21 +618,13 @@ def summarize_issue(
|
||||
new_comment_reaction_total = sum(
|
||||
comment["reaction_total"] for comment in new_comments
|
||||
)
|
||||
new_issue_user_key = human_login_key(issue.get("user")) if new_issue else ""
|
||||
new_issue_user_interaction = bool(new_issue_user_key)
|
||||
new_issue_user_interaction = new_issue and is_human_user(issue.get("user"))
|
||||
new_comment_user_interactions = sum(
|
||||
1 for comment in new_comments if comment["human_user_interaction"]
|
||||
)
|
||||
interaction_user_keys = set(issue_reaction_events_summary["users"])
|
||||
interaction_user_keys.update(comment_reaction_events_summary["users"])
|
||||
if new_issue_user_key:
|
||||
interaction_user_keys.add(new_issue_user_key)
|
||||
interaction_user_keys.update(
|
||||
comment["author"].casefold()
|
||||
for comment in new_comments
|
||||
if comment["human_user_interaction"]
|
||||
user_interactions = (
|
||||
int(new_issue_user_interaction) + new_comment_user_interactions + new_reactions
|
||||
)
|
||||
user_interactions = len(interaction_user_keys)
|
||||
attention_level = attention_level_for(user_interactions, attention_thresholds)
|
||||
attention_marker = attention_marker_for(user_interactions, attention_thresholds)
|
||||
updated_without_visible_new_post = (
|
||||
@@ -975,7 +957,6 @@ def collect_digest(args):
|
||||
"New issue comments are filtered by comment creation time within the window from the fetched comment set.",
|
||||
"Reaction events are counted by GitHub reaction created_at timestamps for hydrated issues and fetched comments.",
|
||||
"Current reaction totals are standing engagement signals; new_reactions and new_upvotes are windowed activity.",
|
||||
"user_interactions counts unique human users per issue across new issues, new comments, and new reactions; repeated actions by the same user count once.",
|
||||
"The collector does not assign semantic clusters; use summary_inputs as model-ready evidence for report-time clustering.",
|
||||
"Pure reaction-only issues may be missed if GitHub issue search does not surface them via updated_at.",
|
||||
"Issues updated during the window without a new issue body or new comment are retained because label/status edits can still be useful owner signals.",
|
||||
|
||||
@@ -494,70 +494,6 @@ def test_reactions_count_toward_attention_markers():
|
||||
assert summary["new_comments"][0]["new_upvotes"] == 0
|
||||
|
||||
|
||||
def test_user_interactions_are_deduped_by_human_login():
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
|
||||
until = collect_issue_digest.parse_timestamp("2026-04-26T00:00:00Z", "--until")
|
||||
|
||||
def comment(comment_id, login):
|
||||
return {
|
||||
"id": comment_id,
|
||||
"created_at": f"2026-04-25T0{comment_id + 1}:00:00Z",
|
||||
"updated_at": f"2026-04-25T0{comment_id + 1}:00:00Z",
|
||||
"user": {"login": login},
|
||||
"body": "same issue",
|
||||
}
|
||||
|
||||
def reaction(content, login, created_at="2026-04-25T10:00:00Z"):
|
||||
return {
|
||||
"content": content,
|
||||
"created_at": created_at,
|
||||
"user": {"login": login},
|
||||
}
|
||||
|
||||
issue = {
|
||||
"number": 790,
|
||||
"title": "Repeated pings should not boost attention",
|
||||
"html_url": "https://github.com/openai/codex/issues/790",
|
||||
"state": "open",
|
||||
"created_at": "2026-04-25T01:00:00Z",
|
||||
"updated_at": "2026-04-25T12:00:00Z",
|
||||
"user": {"login": "Alice"},
|
||||
"labels": [{"name": "bug"}, {"name": "tui"}],
|
||||
}
|
||||
comments = [comment(1, "alice"), comment(2, "ALICE"), comment(3, "bob")]
|
||||
comments.append(comment(4, "github-actions[bot]"))
|
||||
issue_reactions = [
|
||||
reaction("+1", "alice"),
|
||||
reaction("rocket", "Alice"),
|
||||
reaction("+1", "bob"),
|
||||
reaction("+1", "github-actions[bot]"),
|
||||
reaction("+1", "carol", created_at="2026-04-24T23:00:00Z"),
|
||||
]
|
||||
comment_reactions_by_id = {
|
||||
1: [reaction("heart", "alice")],
|
||||
2: [reaction("+1", "bob")],
|
||||
3: [reaction("eyes", "carol")],
|
||||
}
|
||||
|
||||
summary = collect_issue_digest.summarize_issue(
|
||||
issue,
|
||||
comments,
|
||||
["tui"],
|
||||
since,
|
||||
until,
|
||||
body_chars=100,
|
||||
comment_chars=100,
|
||||
issue_reaction_events=issue_reactions,
|
||||
comment_reactions_by_id=comment_reactions_by_id,
|
||||
)
|
||||
|
||||
assert summary["activity"]["new_human_comments"] == 3
|
||||
assert summary["new_reactions"] == 6
|
||||
assert summary["user_interactions"] == 3
|
||||
assert summary["attention"] is False
|
||||
assert summary["attention_marker"] == ""
|
||||
|
||||
|
||||
def test_digest_rows_are_table_ready_with_concise_descriptions():
|
||||
rows = collect_issue_digest.digest_rows(
|
||||
[
|
||||
|
||||
12
.github/workflows/bazel.yml
vendored
12
.github/workflows/bazel.yml
vendored
@@ -57,9 +57,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Check rusty_v8 MODULE.bazel checksums
|
||||
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
|
||||
@@ -152,9 +149,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
@@ -238,9 +232,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
@@ -328,9 +319,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Bazel CI
|
||||
id: prepare_bazel
|
||||
|
||||
6
.github/workflows/blob-size-policy.yml
vendored
6
.github/workflows/blob-size-policy.yml
vendored
@@ -10,17 +10,15 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Determine PR comparison range
|
||||
id: range
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
|
||||
echo "head=${{ github.event.pull_request.head.sha }}" >> "$GITHUB_OUTPUT"
|
||||
echo "base=$(git rev-parse HEAD^1)" >> "$GITHUB_OUTPUT"
|
||||
echo "head=$(git rev-parse HEAD^2)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Check changed blob sizes
|
||||
env:
|
||||
|
||||
3
.github/workflows/cargo-deny.yml
vendored
3
.github/workflows/cargo-deny.yml
vendored
@@ -15,9 +15,6 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
|
||||
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
@@ -13,9 +13,6 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Verify codex-rs Cargo manifests inherit workspace settings
|
||||
run: python3 .github/scripts/verify_cargo_workspace_manifests.py
|
||||
|
||||
3
.github/workflows/codespell.yml
vendored
3
.github/workflows/codespell.yml
vendored
@@ -19,9 +19,6 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- name: Annotate locations with typos
|
||||
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1.1.0
|
||||
- name: Codespell
|
||||
|
||||
4
.github/workflows/issue-deduplicator.yml
vendored
4
.github/workflows/issue-deduplicator.yml
vendored
@@ -20,8 +20,6 @@ jobs:
|
||||
has_matches: ${{ steps.normalize-all.outputs.has_matches }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Codex inputs
|
||||
env:
|
||||
@@ -158,8 +156,6 @@ jobs:
|
||||
has_matches: ${{ steps.normalize-open.outputs.has_matches }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Prepare Codex inputs
|
||||
env:
|
||||
|
||||
2
.github/workflows/issue-labeler.yml
vendored
2
.github/workflows/issue-labeler.yml
vendored
@@ -18,8 +18,6 @@ jobs:
|
||||
codex_output: ${{ steps.codex.outputs.final-message }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: codex
|
||||
uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7
|
||||
|
||||
28
.github/workflows/rust-ci-full.yml
vendored
28
.github/workflows/rust-ci-full.yml
vendored
@@ -7,11 +7,6 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
# CI builds in debug (dev) for faster signal.
|
||||
env:
|
||||
# Cargo's libgit2 transport has been flaky on macOS when fetching git
|
||||
# dependencies with nested submodules. Use the system git CLI, which has
|
||||
# better network/proxy behavior and matches Cargo's own suggested fallback.
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
|
||||
|
||||
jobs:
|
||||
# --- CI that doesn't need specific targets ---------------------------------
|
||||
@@ -23,8 +18,6 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
components: rustfmt
|
||||
@@ -39,14 +32,13 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
|
||||
with:
|
||||
tool: cargo-shear@1.11.2
|
||||
tool: cargo-shear
|
||||
version: 1.11.2
|
||||
- name: cargo shear
|
||||
run: cargo shear --deny-warnings
|
||||
run: cargo shear
|
||||
|
||||
argument_comment_lint_package:
|
||||
name: Argument comment lint package
|
||||
@@ -56,8 +48,6 @@ jobs:
|
||||
DYLINT_LINK_VERSION: 5.0.0
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
toolchain: nightly-2025-09-18
|
||||
@@ -108,8 +98,6 @@ jobs:
|
||||
labels: codex-windows-x64
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: ./.github/actions/setup-bazel-ci
|
||||
with:
|
||||
target: ${{ runner.os }}
|
||||
@@ -246,8 +234,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install Linux build dependencies
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -574,8 +560,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install Linux build dependencies
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -583,7 +567,7 @@ jobs:
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev bubblewrap
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev
|
||||
fi
|
||||
|
||||
# Some integration tests rely on DotSlash being installed.
|
||||
@@ -738,12 +722,10 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
set +e
|
||||
if [[ "${STEPS_TEST_OUTCOME}" != "success" ]]; then
|
||||
if [[ "${{ steps.test.outcome }}" != "success" ]]; then
|
||||
docker logs codex-remote-test-env || true
|
||||
fi
|
||||
docker rm -f codex-remote-test-env >/dev/null 2>&1 || true
|
||||
env:
|
||||
STEPS_TEST_OUTCOME: ${{ steps.test.outcome }}
|
||||
|
||||
- name: verify tests passed
|
||||
if: steps.test.outcome == 'failure'
|
||||
|
||||
32
.github/workflows/rust-ci.yml
vendored
32
.github/workflows/rust-ci.yml
vendored
@@ -16,9 +16,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
- name: Detect changed paths (no external action)
|
||||
id: detect
|
||||
shell: bash
|
||||
@@ -64,9 +62,6 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
components: rustfmt
|
||||
@@ -83,15 +78,13 @@ jobs:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
|
||||
with:
|
||||
tool: cargo-shear@1.11.2
|
||||
tool: cargo-shear
|
||||
version: 1.11.2
|
||||
- name: cargo shear
|
||||
run: cargo shear --deny-warnings
|
||||
run: cargo shear
|
||||
|
||||
argument_comment_lint_package:
|
||||
name: Argument comment lint package
|
||||
@@ -103,9 +96,6 @@ jobs:
|
||||
DYLINT_LINK_VERSION: 5.0.0
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- name: Install nightly argument-comment-lint toolchain
|
||||
shell: bash
|
||||
@@ -182,9 +172,6 @@ jobs:
|
||||
echo "run=false" >> "$GITHUB_OUTPUT"
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
- name: Run argument comment lint on codex-rs via Bazel
|
||||
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
|
||||
uses: ./.github/actions/run-argument-comment-lint
|
||||
@@ -216,25 +203,20 @@ jobs:
|
||||
|
||||
# If nothing relevant changed (PR touching only root README, etc.),
|
||||
# declare success regardless of other jobs.
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_CODEX}" != 'true' && "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" != 'true' ]]; then
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' != 'true' && '${{ needs.changed.outputs.codex }}' != 'true' && '${{ needs.changed.outputs.workflows }}' != 'true' ]]; then
|
||||
echo 'No relevant changes -> CI not required.'
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE}" == 'true' ]]; then
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint_package }}' == 'true' ]]; then
|
||||
[[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; }
|
||||
fi
|
||||
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
|
||||
if [[ '${{ needs.changed.outputs.argument_comment_lint }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
|
||||
[[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; }
|
||||
fi
|
||||
|
||||
if [[ "${NEEDS_CHANGED_OUTPUTS_CODEX}" == 'true' || "${NEEDS_CHANGED_OUTPUTS_WORKFLOWS}" == 'true' ]]; then
|
||||
if [[ '${{ needs.changed.outputs.codex }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then
|
||||
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
|
||||
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
|
||||
fi
|
||||
env:
|
||||
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT: ${{ needs.changed.outputs.argument_comment_lint }}
|
||||
NEEDS_CHANGED_OUTPUTS_CODEX: ${{ needs.changed.outputs.codex }}
|
||||
NEEDS_CHANGED_OUTPUTS_WORKFLOWS: ${{ needs.changed.outputs.workflows }}
|
||||
NEEDS_CHANGED_OUTPUTS_ARGUMENT_COMMENT_LINT_PACKAGE: ${{ needs.changed.outputs.argument_comment_lint_package }}
|
||||
|
||||
@@ -57,8 +57,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
with:
|
||||
|
||||
1
.github/workflows/rust-release-prepare.yml
vendored
1
.github/workflows/rust-release-prepare.yml
vendored
@@ -22,7 +22,6 @@ jobs:
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Update models.json
|
||||
env:
|
||||
|
||||
46
.github/workflows/rust-release-windows.yml
vendored
46
.github/workflows/rust-release-windows.yml
vendored
@@ -84,8 +84,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Print runner specs (Windows)
|
||||
shell: powershell
|
||||
run: |
|
||||
@@ -168,8 +166,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download prebuilt Windows primary binaries
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
@@ -220,48 +216,6 @@ jobs:
|
||||
"$dest/${binary}-${{ matrix.target }}.exe"
|
||||
done
|
||||
|
||||
- name: Build Python runtime wheel
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
case "${{ matrix.target }}" in
|
||||
aarch64-pc-windows-msvc)
|
||||
platform_tag="win_arm64"
|
||||
;;
|
||||
x86_64-pc-windows-msvc)
|
||||
platform_tag="win_amd64"
|
||||
;;
|
||||
*)
|
||||
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
python -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m pip install build
|
||||
|
||||
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
|
||||
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
|
||||
# Keep the helpers next to codex.exe in the runtime wheel so Windows
|
||||
# sandbox/elevation lookup matches the standalone release zip.
|
||||
python "${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py" \
|
||||
stage-runtime \
|
||||
"$stage_dir" \
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex.exe" \
|
||||
--codex-version "${GITHUB_REF_NAME}" \
|
||||
--platform-tag "$platform_tag" \
|
||||
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-command-runner.exe" \
|
||||
--resource-binary "${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/Scripts/python.exe" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
|
||||
|
||||
- name: Upload Python runtime wheel
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: python-runtime-wheel-${{ matrix.target }}
|
||||
path: python-runtime-dist/${{ matrix.target }}/*.whl
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Install DotSlash
|
||||
uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2
|
||||
|
||||
|
||||
4
.github/workflows/rust-release-zsh.yml
vendored
4
.github/workflows/rust-release-zsh.yml
vendored
@@ -46,8 +46,6 @@ jobs:
|
||||
libncursesw5-dev
|
||||
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Build, smoke-test, and stage zsh artifact
|
||||
shell: bash
|
||||
@@ -84,8 +82,6 @@ jobs:
|
||||
fi
|
||||
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Build, smoke-test, and stage zsh artifact
|
||||
shell: bash
|
||||
|
||||
125
.github/workflows/rust-release.yml
vendored
125
.github/workflows/rust-release.yml
vendored
@@ -20,8 +20,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
|
||||
- name: Validate tag matches Cargo.toml version
|
||||
shell: bash
|
||||
@@ -121,8 +119,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Print runner specs (Linux)
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
@@ -188,7 +184,6 @@ jobs:
|
||||
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
|
||||
with:
|
||||
version: 0.14.0
|
||||
use-cache: false
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
@@ -399,65 +394,6 @@ jobs:
|
||||
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
|
||||
fi
|
||||
|
||||
- name: Build Python runtime wheel
|
||||
if: ${{ matrix.bundle == 'primary' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
case "${{ matrix.target }}" in
|
||||
aarch64-apple-darwin)
|
||||
platform_tag="macosx_11_0_arm64"
|
||||
;;
|
||||
x86_64-apple-darwin)
|
||||
platform_tag="macosx_10_9_x86_64"
|
||||
;;
|
||||
aarch64-unknown-linux-musl)
|
||||
platform_tag="musllinux_1_1_aarch64"
|
||||
;;
|
||||
x86_64-unknown-linux-musl)
|
||||
platform_tag="musllinux_1_1_x86_64"
|
||||
;;
|
||||
*)
|
||||
echo "No Python runtime wheel platform tag for ${{ matrix.target }}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
python3 -m venv "${RUNNER_TEMP}/python-runtime-build-venv"
|
||||
# Do not install into the runner's system Python; macOS runners mark
|
||||
# the Homebrew Python as externally managed under PEP 668.
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m pip install build
|
||||
|
||||
stage_dir="${RUNNER_TEMP}/openai-codex-cli-bin-${{ matrix.target }}"
|
||||
wheel_dir="${GITHUB_WORKSPACE}/python-runtime-dist/${{ matrix.target }}"
|
||||
stage_runtime_args=(
|
||||
"${GITHUB_WORKSPACE}/sdk/python/scripts/update_sdk_artifacts.py"
|
||||
stage-runtime
|
||||
"$stage_dir"
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/codex"
|
||||
--codex-version "${GITHUB_REF_NAME}"
|
||||
--platform-tag "$platform_tag"
|
||||
)
|
||||
if [[ "${{ matrix.target }}" == *linux* ]]; then
|
||||
# Keep bwrap in the runtime wheel so Linux sandbox fallback behavior
|
||||
# matches the standalone release bundle on hosts without system bwrap.
|
||||
stage_runtime_args+=(
|
||||
--resource-binary
|
||||
"${GITHUB_WORKSPACE}/codex-rs/target/${{ matrix.target }}/release/bwrap"
|
||||
)
|
||||
fi
|
||||
python3 "${stage_runtime_args[@]}"
|
||||
"${RUNNER_TEMP}/python-runtime-build-venv/bin/python" -m build --wheel --outdir "$wheel_dir" "$stage_dir"
|
||||
|
||||
- name: Upload Python runtime wheel
|
||||
if: ${{ matrix.bundle == 'primary' }}
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: python-runtime-wheel-${{ matrix.target }}
|
||||
path: python-runtime-dist/${{ matrix.target }}/*.whl
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Compress artifacts
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -537,13 +473,10 @@ jobs:
|
||||
tag: ${{ github.ref_name }}
|
||||
should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
|
||||
npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
|
||||
should_publish_python_runtime: ${{ steps.python_runtime_publish_settings.outputs.should_publish }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Generate release notes from tag commit message
|
||||
id: release_notes
|
||||
@@ -614,22 +547,6 @@ jobs:
|
||||
echo "npm_tag=" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Determine Python runtime publish settings
|
||||
id: python_runtime_publish_settings
|
||||
env:
|
||||
VERSION: ${{ steps.release_name.outputs.name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version="${VERSION}"
|
||||
|
||||
if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5
|
||||
with:
|
||||
@@ -863,48 +780,6 @@ jobs:
|
||||
exit "${publish_status}"
|
||||
done
|
||||
|
||||
# Publish the platform-specific Python runtime wheels using PyPI trusted publishing.
|
||||
# PyPI project configuration must trust this workflow and job. Keep this
|
||||
# non-blocking while the Python runtime publishing path is new; failures still
|
||||
# need release follow-up, but should not invalidate the Rust release itself.
|
||||
publish-python-runtime:
|
||||
# Publish to PyPI for stable releases and alpha pre-releases with numeric suffixes.
|
||||
if: ${{ needs.release.outputs.should_publish_python_runtime == 'true' }}
|
||||
name: publish-python-runtime
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
environment: pypi
|
||||
permissions:
|
||||
id-token: write # Required for PyPI trusted publishing.
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Download Python runtime wheels from release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RELEASE_TAG: ${{ needs.release.outputs.tag }}
|
||||
RELEASE_VERSION: ${{ needs.release.outputs.version }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python_version="$RELEASE_VERSION"
|
||||
python_version="${python_version/-alpha./a}"
|
||||
python_version="${python_version/-beta./b}"
|
||||
python_version="${python_version/-rc./rc}"
|
||||
|
||||
mkdir -p dist/python-runtime
|
||||
gh release download "$RELEASE_TAG" \
|
||||
--repo "${GITHUB_REPOSITORY}" \
|
||||
--pattern "openai_codex_cli_bin-${python_version}-*.whl" \
|
||||
--dir dist/python-runtime
|
||||
ls -lh dist/python-runtime
|
||||
|
||||
- name: Publish Python runtime wheels to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
with:
|
||||
packages-dir: dist/python-runtime
|
||||
skip-existing: true
|
||||
|
||||
winget:
|
||||
name: winget
|
||||
needs: release
|
||||
|
||||
4
.github/workflows/rusty-v8-release.yml
vendored
4
.github/workflows/rusty-v8-release.yml
vendored
@@ -18,8 +18,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
@@ -72,8 +70,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Bazel
|
||||
uses: ./.github/actions/setup-bazel-ci
|
||||
|
||||
3
.github/workflows/sdk.yml
vendored
3
.github/workflows/sdk.yml
vendored
@@ -14,9 +14,6 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install Linux bwrap build dependencies
|
||||
shell: bash
|
||||
|
||||
6
.github/workflows/v8-canary.yml
vendored
6
.github/workflows/v8-canary.yml
vendored
@@ -41,9 +41,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
@@ -78,9 +75,6 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Bazel
|
||||
uses: ./.github/actions/setup-bazel-ci
|
||||
|
||||
@@ -26,7 +26,7 @@ In the codex-rs folder where the rust code lives:
|
||||
- Implementations may still use `async fn foo(&self, ...) -> T` when they satisfy that contract.
|
||||
- Do not use `#[allow(async_fn_in_trait)]` as a shortcut around spelling the future contract explicitly.
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- Do not add general product or user-facing documentation to the `docs/` folder. The official Codex documentation lives elsewhere. The exception is app-server API documentation, which is covered by the app-server guidance below.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- Prefer private modules and explicitly exported public crate API.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
|
||||
- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls.
|
||||
@@ -210,7 +210,7 @@ These guidelines apply to app-server protocol work in `codex-rs`, especially:
|
||||
|
||||
### Development Workflow
|
||||
|
||||
- Update app-server docs/examples when API behavior changes (at minimum `app-server/README.md`).
|
||||
- Update docs/examples when API behavior changes (at minimum `app-server/README.md`).
|
||||
- Regenerate schema fixtures when API shapes change:
|
||||
`just write-app-server-schema`
|
||||
(and `just write-app-server-schema --experimental` when experimental API fixtures are affected).
|
||||
|
||||
102
codex-rs/Cargo.lock
generated
102
codex-rs/Cargo.lock
generated
@@ -1145,6 +1145,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"form_urlencoded",
|
||||
"futures-util",
|
||||
@@ -1163,8 +1164,10 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_path_to_error",
|
||||
"serde_urlencoded",
|
||||
"sha1",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-tungstenite",
|
||||
"tower",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
@@ -1892,14 +1895,11 @@ dependencies = [
|
||||
"codex-core",
|
||||
"codex-core-plugins",
|
||||
"codex-exec-server",
|
||||
"codex-extension-api",
|
||||
"codex-external-agent-migration",
|
||||
"codex-external-agent-sessions",
|
||||
"codex-features",
|
||||
"codex-feedback",
|
||||
"codex-file-search",
|
||||
"codex-file-watcher",
|
||||
"codex-git-attribution",
|
||||
"codex-git-utils",
|
||||
"codex-hooks",
|
||||
"codex-login",
|
||||
@@ -1918,7 +1918,6 @@ dependencies = [
|
||||
"codex-state",
|
||||
"codex-thread-store",
|
||||
"codex-tools",
|
||||
"codex-uds",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
"codex-utils-cli",
|
||||
@@ -1927,6 +1926,7 @@ dependencies = [
|
||||
"core_test_support",
|
||||
"flate2",
|
||||
"futures",
|
||||
"hmac",
|
||||
"opentelemetry",
|
||||
"opentelemetry_sdk",
|
||||
"pretty_assertions",
|
||||
@@ -1952,7 +1952,6 @@ dependencies = [
|
||||
"url",
|
||||
"uuid",
|
||||
"wiremock",
|
||||
"zip 2.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1980,27 +1979,6 @@ dependencies = [
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-app-server-daemon"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-app-server-protocol",
|
||||
"codex-app-server-transport",
|
||||
"codex-uds",
|
||||
"codex-utils-home-dir",
|
||||
"futures",
|
||||
"libc",
|
||||
"pretty_assertions",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tokio-tungstenite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-app-server-protocol"
|
||||
version = "0.0.0"
|
||||
@@ -2053,8 +2031,11 @@ dependencies = [
|
||||
name = "codex-app-server-transport"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"axum",
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"clap",
|
||||
"codex-api",
|
||||
"codex-app-server-protocol",
|
||||
"codex-config",
|
||||
@@ -2065,12 +2046,18 @@ dependencies = [
|
||||
"codex-uds",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-rustls-provider",
|
||||
"constant_time_eq 0.3.1",
|
||||
"futures",
|
||||
"gethostname",
|
||||
"hmac",
|
||||
"jsonwebtoken",
|
||||
"owo-colors",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-tungstenite",
|
||||
"tokio-util",
|
||||
@@ -2166,6 +2153,17 @@ dependencies = [
|
||||
"serde_with",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-builtin-mcps"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-memories-mcp",
|
||||
"codex-utils-absolute-path",
|
||||
"pretty_assertions",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-bwrap"
|
||||
version = "0.0.0"
|
||||
@@ -2208,7 +2206,6 @@ dependencies = [
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"codex-app-server",
|
||||
"codex-app-server-daemon",
|
||||
"codex-app-server-protocol",
|
||||
"codex-app-server-test-client",
|
||||
"codex-arg0",
|
||||
@@ -2444,11 +2441,7 @@ dependencies = [
|
||||
"codex-app-server-protocol",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha1",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"urlencoding",
|
||||
]
|
||||
|
||||
@@ -2478,7 +2471,6 @@ dependencies = [
|
||||
"codex-core-skills",
|
||||
"codex-exec-server",
|
||||
"codex-execpolicy",
|
||||
"codex-extension-api",
|
||||
"codex-features",
|
||||
"codex-feedback",
|
||||
"codex-git-utils",
|
||||
@@ -2504,7 +2496,6 @@ dependencies = [
|
||||
"codex-terminal-detection",
|
||||
"codex-test-binary-support",
|
||||
"codex-thread-store",
|
||||
"codex-tool-api",
|
||||
"codex-tools",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cache",
|
||||
@@ -2535,6 +2526,7 @@ dependencies = [
|
||||
"insta",
|
||||
"libc",
|
||||
"maplit",
|
||||
"notify",
|
||||
"once_cell",
|
||||
"openssl-sys",
|
||||
"opentelemetry",
|
||||
@@ -2583,7 +2575,6 @@ dependencies = [
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-exec-server",
|
||||
"codex-extension-api",
|
||||
"codex-features",
|
||||
"codex-login",
|
||||
"codex-model-provider-info",
|
||||
@@ -2742,6 +2733,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.18",
|
||||
@@ -2800,14 +2792,6 @@ dependencies = [
|
||||
"syn 2.0.114",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-extension-api"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-protocol",
|
||||
"codex-tool-api",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-external-agent-migration"
|
||||
version = "0.0.0"
|
||||
@@ -2886,27 +2870,6 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-file-watcher"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"notify",
|
||||
"pretty_assertions",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-git-attribution"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-core",
|
||||
"codex-extension-api",
|
||||
"codex-features",
|
||||
"pretty_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-git-utils"
|
||||
version = "0.0.0"
|
||||
@@ -3058,6 +3021,7 @@ dependencies = [
|
||||
"async-channel",
|
||||
"codex-api",
|
||||
"codex-async-utils",
|
||||
"codex-builtin-mcps",
|
||||
"codex-config",
|
||||
"codex-exec-server",
|
||||
"codex-login",
|
||||
@@ -3091,7 +3055,6 @@ dependencies = [
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-exec-server",
|
||||
"codex-extension-api",
|
||||
"codex-login",
|
||||
"codex-protocol",
|
||||
"codex-shell-command",
|
||||
@@ -3368,6 +3331,7 @@ dependencies = [
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-image",
|
||||
"codex-utils-string",
|
||||
"codex-utils-template",
|
||||
"encoding_rs",
|
||||
"globset",
|
||||
"http 1.4.0",
|
||||
@@ -3683,15 +3647,6 @@ dependencies = [
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-tool-api"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"pretty_assertions",
|
||||
"serde_json",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-tools"
|
||||
version = "0.0.0"
|
||||
@@ -4308,7 +4263,6 @@ dependencies = [
|
||||
"codex-config",
|
||||
"codex-core",
|
||||
"codex-exec-server",
|
||||
"codex-extension-api",
|
||||
"codex-features",
|
||||
"codex-hooks",
|
||||
"codex-login",
|
||||
|
||||
@@ -5,12 +5,12 @@ members = [
|
||||
"agent-graph-store",
|
||||
"agent-identity",
|
||||
"backend-client",
|
||||
"builtin-mcps",
|
||||
"bwrap",
|
||||
"ansi-escape",
|
||||
"async-utils",
|
||||
"app-server",
|
||||
"app-server-transport",
|
||||
"app-server-daemon",
|
||||
"app-server-client",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
@@ -44,13 +44,10 @@ members = [
|
||||
"exec-server",
|
||||
"execpolicy",
|
||||
"execpolicy-legacy",
|
||||
"ext/extension-api",
|
||||
"ext/git-attribution",
|
||||
"external-agent-migration",
|
||||
"external-agent-sessions",
|
||||
"keyring-store",
|
||||
"file-search",
|
||||
"file-watcher",
|
||||
"linux-sandbox",
|
||||
"lmstudio",
|
||||
"login",
|
||||
@@ -107,7 +104,6 @@ members = [
|
||||
"test-binary-support",
|
||||
"thread-manager-sample",
|
||||
"thread-store",
|
||||
"tool-api",
|
||||
"uds",
|
||||
"codex-experimental-api-macros",
|
||||
"plugin",
|
||||
@@ -135,7 +131,6 @@ codex-api = { path = "codex-api" }
|
||||
codex-aws-auth = { path = "aws-auth" }
|
||||
codex-app-server = { path = "app-server" }
|
||||
codex-app-server-transport = { path = "app-server-transport" }
|
||||
codex-app-server-daemon = { path = "app-server-daemon" }
|
||||
codex-app-server-client = { path = "app-server-client" }
|
||||
codex-app-server-protocol = { path = "app-server-protocol" }
|
||||
codex-app-server-test-client = { path = "app-server-test-client" }
|
||||
@@ -143,6 +138,7 @@ codex-apply-patch = { path = "apply-patch" }
|
||||
codex-arg0 = { path = "arg0" }
|
||||
codex-async-utils = { path = "async-utils" }
|
||||
codex-backend-client = { path = "backend-client" }
|
||||
codex-builtin-mcps = { path = "builtin-mcps" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-cli = { path = "cli" }
|
||||
codex-client = { path = "codex-client" }
|
||||
@@ -161,8 +157,6 @@ codex-exec = { path = "exec" }
|
||||
codex-file-system = { path = "file-system" }
|
||||
codex-exec-server = { path = "exec-server" }
|
||||
codex-execpolicy = { path = "execpolicy" }
|
||||
codex-extension-api = { path = "ext/extension-api" }
|
||||
codex-git-attribution = { path = "ext/git-attribution" }
|
||||
codex-external-agent-migration = { path = "external-agent-migration" }
|
||||
codex-external-agent-sessions = { path = "external-agent-sessions" }
|
||||
codex-experimental-api-macros = { path = "codex-experimental-api-macros" }
|
||||
@@ -170,7 +164,6 @@ codex-features = { path = "features" }
|
||||
codex-feedback = { path = "feedback" }
|
||||
codex-install-context = { path = "install-context" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-file-watcher = { path = "file-watcher" }
|
||||
codex-git-utils = { path = "git-utils" }
|
||||
codex-hooks = { path = "hooks" }
|
||||
codex-keyring-store = { path = "keyring-store" }
|
||||
@@ -178,6 +171,7 @@ codex-linux-sandbox = { path = "linux-sandbox" }
|
||||
codex-lmstudio = { path = "lmstudio" }
|
||||
codex-login = { path = "login" }
|
||||
codex-message-history = { path = "message-history" }
|
||||
codex-memories-mcp = { path = "memories/mcp" }
|
||||
codex-memories-read = { path = "memories/read" }
|
||||
codex-memories-write = { path = "memories/write" }
|
||||
codex-mcp = { path = "codex-mcp" }
|
||||
@@ -207,7 +201,6 @@ codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-terminal-detection = { path = "terminal-detection" }
|
||||
codex-test-binary-support = { path = "test-binary-support" }
|
||||
codex-thread-store = { path = "thread-store" }
|
||||
codex-tool-api = { path = "tool-api" }
|
||||
codex-tools = { path = "tools" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-uds = { path = "uds" }
|
||||
@@ -264,6 +257,7 @@ chrono = "0.4.43"
|
||||
clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
constant_time_eq = "0.3.1"
|
||||
crossbeam-channel = "0.5.15"
|
||||
crypto_box = { version = "0.9.1", features = ["seal"] }
|
||||
crossterm = "0.28.1"
|
||||
@@ -470,8 +464,11 @@ unwrap_used = "deny"
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = [
|
||||
"codex-agent-graph-store",
|
||||
"codex-memories-mcp",
|
||||
"icu_provider",
|
||||
"openssl-sys",
|
||||
"codex-utils-readiness",
|
||||
"codex-utils-template",
|
||||
"codex-v8-poc",
|
||||
]
|
||||
|
||||
|
||||
@@ -1,298 +0,0 @@
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::TrackEventRequest;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use codex_git_utils::canonicalize_git_remote_url;
|
||||
use codex_git_utils::get_git_remote_urls_assume_git_repo;
|
||||
use sha1::Digest;
|
||||
use std::path::Path;
|
||||
|
||||
const ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES: usize = 2 * 1024 * 1024;
|
||||
const ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES: usize = 1024;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AcceptedLineFingerprintSummary {
|
||||
pub accepted_added_lines: u64,
|
||||
pub accepted_deleted_lines: u64,
|
||||
pub line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
pub(crate) struct AcceptedLineFingerprintEventInput {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) turn_id: String,
|
||||
pub(crate) thread_id: String,
|
||||
pub(crate) product_surface: Option<String>,
|
||||
pub(crate) model_slug: Option<String>,
|
||||
pub(crate) completed_at: u64,
|
||||
pub(crate) repo_hash: Option<String>,
|
||||
pub(crate) accepted_added_lines: u64,
|
||||
pub(crate) accepted_deleted_lines: u64,
|
||||
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
pub fn accepted_line_fingerprints_from_unified_diff(
|
||||
unified_diff: &str,
|
||||
) -> AcceptedLineFingerprintSummary {
|
||||
let mut current_path: Option<String> = None;
|
||||
let mut in_hunk = false;
|
||||
let mut accepted_added_lines = 0;
|
||||
let mut accepted_deleted_lines = 0;
|
||||
let mut line_fingerprints = Vec::new();
|
||||
|
||||
for line in unified_diff.lines() {
|
||||
if line.starts_with("diff --git ") {
|
||||
current_path = None;
|
||||
in_hunk = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("@@ ") {
|
||||
in_hunk = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if !in_hunk && let Some(path) = line.strip_prefix("+++ ") {
|
||||
current_path = normalize_diff_path(path);
|
||||
continue;
|
||||
}
|
||||
|
||||
if !in_hunk && line.starts_with("--- ") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(added_line) = line.strip_prefix('+') {
|
||||
accepted_added_lines += 1;
|
||||
if let Some(path) = current_path.as_deref()
|
||||
&& let Some(normalized_line) = normalize_effective_line(added_line)
|
||||
{
|
||||
line_fingerprints.push(AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", path),
|
||||
line_hash: fingerprint_hash("line", &normalized_line),
|
||||
});
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with('-') {
|
||||
accepted_deleted_lines += 1;
|
||||
}
|
||||
}
|
||||
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines,
|
||||
accepted_deleted_lines,
|
||||
line_fingerprints,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fingerprint_hash(domain: &str, value: &str) -> String {
|
||||
let mut hasher = sha1::Sha1::new();
|
||||
hasher.update(b"file-line-v1\0");
|
||||
hasher.update(domain.as_bytes());
|
||||
hasher.update(b"\0");
|
||||
hasher.update(value.as_bytes());
|
||||
format!("{:x}", hasher.finalize())
|
||||
}
|
||||
|
||||
pub(crate) fn accepted_line_fingerprint_event_requests(
|
||||
input: AcceptedLineFingerprintEventInput,
|
||||
) -> Vec<TrackEventRequest> {
|
||||
let chunks = accepted_line_fingerprint_chunks(input.line_fingerprints);
|
||||
chunks
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, line_fingerprints)| {
|
||||
let is_first_chunk = index == 0;
|
||||
TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: input.event_type,
|
||||
turn_id: input.turn_id.clone(),
|
||||
thread_id: input.thread_id.clone(),
|
||||
product_surface: input.product_surface.clone(),
|
||||
model_slug: input.model_slug.clone(),
|
||||
completed_at: input.completed_at,
|
||||
repo_hash: input.repo_hash.clone(),
|
||||
accepted_added_lines: if is_first_chunk {
|
||||
input.accepted_added_lines
|
||||
} else {
|
||||
0
|
||||
},
|
||||
accepted_deleted_lines: if is_first_chunk {
|
||||
input.accepted_deleted_lines
|
||||
} else {
|
||||
0
|
||||
},
|
||||
line_fingerprints,
|
||||
},
|
||||
},
|
||||
))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub async fn accepted_line_repo_hash_for_cwd(cwd: &Path) -> Option<String> {
|
||||
let remotes = get_git_remote_urls_assume_git_repo(cwd).await?;
|
||||
remotes
|
||||
.get("origin")
|
||||
.or_else(|| remotes.values().next())
|
||||
.map(|remote_url| {
|
||||
let canonical_remote_url =
|
||||
canonicalize_git_remote_url(remote_url).unwrap_or_else(|| remote_url.to_string());
|
||||
fingerprint_hash("repo", &canonical_remote_url)
|
||||
})
|
||||
}
|
||||
|
||||
fn normalize_diff_path(path: &str) -> Option<String> {
|
||||
let path = path.trim();
|
||||
if path == "/dev/null" {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
path.strip_prefix("b/")
|
||||
.or_else(|| path.strip_prefix("a/"))
|
||||
.unwrap_or(path)
|
||||
.to_string(),
|
||||
)
|
||||
}
|
||||
|
||||
fn normalize_effective_line(line: &str) -> Option<String> {
|
||||
let normalized = line.split_whitespace().collect::<Vec<_>>().join(" ");
|
||||
if normalized.len() <= 3 {
|
||||
return None;
|
||||
}
|
||||
if !normalized
|
||||
.chars()
|
||||
.any(|ch| ch.is_alphanumeric() || ch == '_')
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(normalized)
|
||||
}
|
||||
|
||||
fn accepted_line_fingerprint_chunks(
|
||||
line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
) -> Vec<Vec<AcceptedLineFingerprint>> {
|
||||
if line_fingerprints.is_empty() {
|
||||
return vec![Vec::new()];
|
||||
}
|
||||
|
||||
let mut chunks = Vec::new();
|
||||
let mut current = Vec::new();
|
||||
let mut current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
|
||||
|
||||
for fingerprint in line_fingerprints {
|
||||
let item_bytes = accepted_line_fingerprint_json_bytes(&fingerprint);
|
||||
let separator_bytes = usize::from(!current.is_empty());
|
||||
if !current.is_empty()
|
||||
&& current_bytes + separator_bytes + item_bytes
|
||||
> ACCEPTED_LINE_FINGERPRINT_EVENT_TARGET_BYTES
|
||||
{
|
||||
chunks.push(current);
|
||||
current = Vec::new();
|
||||
current_bytes = ACCEPTED_LINE_FINGERPRINT_EVENT_FIXED_BYTES;
|
||||
}
|
||||
current_bytes += usize::from(!current.is_empty()) + item_bytes;
|
||||
current.push(fingerprint);
|
||||
}
|
||||
|
||||
if !current.is_empty() {
|
||||
chunks.push(current);
|
||||
}
|
||||
chunks
|
||||
}
|
||||
|
||||
fn accepted_line_fingerprint_json_bytes(fingerprint: &AcceptedLineFingerprint) -> usize {
|
||||
// {"path_hash":"...","line_hash":"..."} plus one byte of array comma
|
||||
// accounted for by the caller when needed.
|
||||
32 + fingerprint.path_hash.len() + fingerprint.line_hash.len()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parses_counts_and_effective_added_fingerprints() {
|
||||
let diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -1,3 +1,5 @@
|
||||
-old line
|
||||
+fn useful() {
|
||||
+}
|
||||
+ return user.id;
|
||||
context
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(
|
||||
summary,
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines: 3,
|
||||
accepted_deleted_lines: 1,
|
||||
line_fingerprints: vec![
|
||||
AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "fn useful() {"),
|
||||
},
|
||||
AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "return user.id;"),
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_added_file_metadata_headers() {
|
||||
let diff = "\
|
||||
diff --git a/new.py b/new.py
|
||||
new file mode 100644
|
||||
index 0000000..1111111
|
||||
--- /dev/null
|
||||
+++ b/new.py
|
||||
@@ -0,0 +1 @@
|
||||
+print('hello')
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(summary.accepted_added_lines, 1);
|
||||
assert_eq!(summary.accepted_deleted_lines, 0);
|
||||
assert_eq!(summary.line_fingerprints.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_hunk_lines_that_look_like_file_headers() {
|
||||
let diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -1,2 +1,2 @@
|
||||
--- old value
|
||||
+++ new value
|
||||
";
|
||||
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(diff);
|
||||
|
||||
assert_eq!(
|
||||
summary,
|
||||
AcceptedLineFingerprintSummary {
|
||||
accepted_added_lines: 1,
|
||||
accepted_deleted_lines: 1,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: fingerprint_hash("path", "src/lib.rs"),
|
||||
line_hash: fingerprint_hash("line", "++ new value"),
|
||||
}],
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
use crate::client::AnalyticsEventsQueue;
|
||||
use crate::events::AppServerRpcTransport;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::CodexAppMentionedEventRequest;
|
||||
use crate::events::CodexAppServerClientMetadata;
|
||||
use crate::events::CodexAppUsedEventRequest;
|
||||
@@ -30,7 +28,6 @@ use crate::events::codex_hook_run_metadata;
|
||||
use crate::events::codex_plugin_metadata;
|
||||
use crate::events::codex_plugin_used_metadata;
|
||||
use crate::events::subagent_thread_started_event_request;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::AnalyticsJsonRpcError;
|
||||
use crate::facts::AppInvocation;
|
||||
@@ -69,20 +66,15 @@ use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::ClientResponsePayload;
|
||||
use codex_app_server_protocol::CodexErrorInfo;
|
||||
use codex_app_server_protocol::CollabAgentTool;
|
||||
use codex_app_server_protocol::CollabAgentToolCallStatus;
|
||||
use codex_app_server_protocol::CommandAction;
|
||||
use codex_app_server_protocol::CommandExecutionSource;
|
||||
use codex_app_server_protocol::CommandExecutionStatus;
|
||||
use codex_app_server_protocol::DynamicToolCallStatus;
|
||||
use codex_app_server_protocol::InitializeCapabilities;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::McpToolCallStatus;
|
||||
use codex_app_server_protocol::NonSteerableTurnKind;
|
||||
use codex_app_server_protocol::PatchApplyStatus;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SandboxPolicy as AppServerSandboxPolicy;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
@@ -97,7 +89,6 @@ use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::ThreadStatus as AppServerThreadStatus;
|
||||
use codex_app_server_protocol::Turn;
|
||||
use codex_app_server_protocol::TurnCompletedNotification;
|
||||
use codex_app_server_protocol::TurnDiffUpdatedNotification;
|
||||
use codex_app_server_protocol::TurnError as AppServerTurnError;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
@@ -645,7 +636,6 @@ fn sample_initialize_fact(connection_id: u64) -> AnalyticsFact {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -837,206 +827,6 @@ fn app_used_event_serializes_expected_shape() {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accepted_line_fingerprints_event_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: "turn-1".to_string(),
|
||||
thread_id: "thread-1".to_string(),
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
completed_at: 1710000000,
|
||||
repo_hash: Some("repo-hash-1".to_string()),
|
||||
accepted_added_lines: 42,
|
||||
accepted_deleted_lines: 40,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: "path-hash-1".to_string(),
|
||||
line_hash: "line-hash-1".to_string(),
|
||||
}],
|
||||
},
|
||||
},
|
||||
));
|
||||
|
||||
let payload = serde_json::to_value(&event).expect("serialize accepted line fingerprints event");
|
||||
|
||||
assert_eq!(
|
||||
payload,
|
||||
json!({
|
||||
"event_type": "codex_accepted_line_fingerprints",
|
||||
"event_params": {
|
||||
"event_type": "codex.accepted_line_fingerprints",
|
||||
"turn_id": "turn-1",
|
||||
"thread_id": "thread-1",
|
||||
"product_surface": "codex",
|
||||
"model_slug": "gpt-5.1-codex",
|
||||
"completed_at": 1710000000,
|
||||
"repo_hash": "repo-hash-1",
|
||||
"accepted_added_lines": 42,
|
||||
"accepted_deleted_lines": 40,
|
||||
"line_fingerprints": [
|
||||
{
|
||||
"path_hash": "path-hash-1",
|
||||
"line_hash": "line-hash-1"
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn reducer_chunks_large_accepted_line_fingerprint_events_without_repeating_counts() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut events = Vec::new();
|
||||
|
||||
ingest_turn_prerequisites(
|
||||
&mut reducer,
|
||||
&mut events,
|
||||
/*include_initialize*/ true,
|
||||
/*include_resolved_config*/ true,
|
||||
/*include_started*/ true,
|
||||
/*include_token_usage*/ true,
|
||||
)
|
||||
.await;
|
||||
events.clear();
|
||||
|
||||
let mut diff = "\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -0,0 +1,20000 @@
|
||||
"
|
||||
.to_string();
|
||||
for index in 0..20_000 {
|
||||
diff.push_str(&format!("+let value_{index} = {index};\n"));
|
||||
}
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
|
||||
TurnDiffUpdatedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
diff,
|
||||
},
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
assert!(events.is_empty());
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
let accepted_line_events = events
|
||||
.iter()
|
||||
.filter_map(|event| match event {
|
||||
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert!(accepted_line_events.len() > 1);
|
||||
let mut total_fingerprints = 0;
|
||||
for (index, event) in accepted_line_events.iter().enumerate() {
|
||||
assert_eq!(event.event_params.turn_id, "turn-2");
|
||||
assert_eq!(event.event_params.thread_id, "thread-2");
|
||||
total_fingerprints += event.event_params.line_fingerprints.len();
|
||||
if index == 0 {
|
||||
assert_eq!(event.event_params.accepted_added_lines, 20_000);
|
||||
assert_eq!(event.event_params.accepted_deleted_lines, 0);
|
||||
} else {
|
||||
assert_eq!(event.event_params.accepted_added_lines, 0);
|
||||
assert_eq!(event.event_params.accepted_deleted_lines, 0);
|
||||
}
|
||||
assert!(serde_json::to_vec(event).expect("serialize chunk").len() < 2_100_000);
|
||||
}
|
||||
assert_eq!(total_fingerprints, 20_000);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn reducer_emits_accepted_line_fingerprints_once_from_latest_turn_diff_on_completion() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut events = Vec::new();
|
||||
|
||||
ingest_turn_prerequisites(
|
||||
&mut reducer,
|
||||
&mut events,
|
||||
/*include_initialize*/ true,
|
||||
/*include_resolved_config*/ true,
|
||||
/*include_started*/ true,
|
||||
/*include_token_usage*/ true,
|
||||
)
|
||||
.await;
|
||||
events.clear();
|
||||
|
||||
for line in ["let old_value = 1;", "let latest_value = 2;"] {
|
||||
let diff = format!(
|
||||
"\
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 1111111..2222222
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -0,0 +1 @@
|
||||
+{line}
|
||||
"
|
||||
);
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::TurnDiffUpdated(
|
||||
TurnDiffUpdatedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
diff,
|
||||
},
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
assert!(events.is_empty());
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
let accepted_line_events = events
|
||||
.iter()
|
||||
.filter_map(|event| match event {
|
||||
TrackEventRequest::AcceptedLineFingerprints(event) => Some(event),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(accepted_line_events.len(), 1);
|
||||
let event = accepted_line_events[0];
|
||||
assert_eq!(event.event_params.accepted_added_lines, 1);
|
||||
assert_eq!(event.event_params.line_fingerprints.len(), 1);
|
||||
assert_eq!(
|
||||
event.event_params.line_fingerprints[0].line_hash,
|
||||
crate::fingerprint_hash("line", "let latest_value = 2;")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compaction_event_serializes_expected_shape() {
|
||||
let event = TrackEventRequest::Compaction(Box::new(CodexCompactionEventRequest {
|
||||
@@ -1332,7 +1122,6 @@ async fn initialize_caches_client_and_thread_lifecycle_publishes_once_initialize
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -1480,7 +1269,6 @@ async fn compaction_event_ingests_custom_fact() {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -1594,7 +1382,6 @@ async fn guardian_review_event_ingests_custom_fact_with_optional_target_item() {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api: false,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: None,
|
||||
}),
|
||||
},
|
||||
@@ -1714,14 +1501,6 @@ async fn item_lifecycle_notifications_publish_command_execution_event() {
|
||||
let mut events = Vec::new();
|
||||
|
||||
ingest_tool_review_prerequisites(&mut reducer, &mut events).await;
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_started_notification(
|
||||
"thread-1", "turn-1",
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::ItemStarted(
|
||||
@@ -2132,15 +1911,6 @@ async fn subagent_tool_items_inherit_parent_connection_metadata() {
|
||||
)
|
||||
.await;
|
||||
events.clear();
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_started_notification(
|
||||
"thread-subagent",
|
||||
"turn-subagent",
|
||||
))),
|
||||
&mut events,
|
||||
)
|
||||
.await;
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
@@ -3015,17 +2785,6 @@ async fn turn_lifecycle_emits_turn_event() {
|
||||
assert_eq!(payload["event_params"]["num_input_images"], json!(1));
|
||||
assert_eq!(payload["event_params"]["status"], json!("completed"));
|
||||
assert_eq!(payload["event_params"]["steer_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["total_tool_call_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["shell_command_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["file_change_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["mcp_tool_call_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["dynamic_tool_call_count"], json!(0));
|
||||
assert_eq!(
|
||||
payload["event_params"]["subagent_tool_call_count"],
|
||||
json!(0)
|
||||
);
|
||||
assert_eq!(payload["event_params"]["web_search_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["image_generation_count"], json!(0));
|
||||
assert_eq!(payload["event_params"]["started_at"], json!(455));
|
||||
assert_eq!(payload["event_params"]["completed_at"], json!(456));
|
||||
assert_eq!(payload["event_params"]["duration_ms"], json!(1234));
|
||||
@@ -3039,158 +2798,6 @@ async fn turn_lifecycle_emits_turn_event() {
|
||||
assert_eq!(payload["event_params"]["total_tokens"], json!(321));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_event_counts_completed_tool_items() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut out = Vec::new();
|
||||
|
||||
ingest_turn_prerequisites(
|
||||
&mut reducer,
|
||||
&mut out,
|
||||
/*include_initialize*/ true,
|
||||
/*include_resolved_config*/ true,
|
||||
/*include_started*/ true,
|
||||
/*include_token_usage*/ false,
|
||||
)
|
||||
.await;
|
||||
|
||||
let completed_tool_items = vec![
|
||||
sample_command_execution_item(CommandExecutionStatus::Completed, Some(0), Some(1)),
|
||||
ThreadItem::FileChange {
|
||||
id: "file-change-1".to_string(),
|
||||
changes: Vec::new(),
|
||||
status: PatchApplyStatus::Completed,
|
||||
},
|
||||
ThreadItem::McpToolCall {
|
||||
id: "mcp-1".to_string(),
|
||||
server: "server".to_string(),
|
||||
tool: "search".to_string(),
|
||||
status: McpToolCallStatus::Completed,
|
||||
arguments: json!({}),
|
||||
mcp_app_resource_uri: None,
|
||||
result: None,
|
||||
error: None,
|
||||
duration_ms: Some(2),
|
||||
},
|
||||
ThreadItem::DynamicToolCall {
|
||||
id: "dynamic-1".to_string(),
|
||||
namespace: None,
|
||||
tool: "render".to_string(),
|
||||
arguments: json!({}),
|
||||
status: DynamicToolCallStatus::Completed,
|
||||
content_items: None,
|
||||
success: Some(true),
|
||||
duration_ms: Some(3),
|
||||
},
|
||||
ThreadItem::CollabAgentToolCall {
|
||||
id: "collab-1".to_string(),
|
||||
tool: CollabAgentTool::SpawnAgent,
|
||||
status: CollabAgentToolCallStatus::Completed,
|
||||
sender_thread_id: "thread-2".to_string(),
|
||||
receiver_thread_ids: vec!["thread-child".to_string()],
|
||||
prompt: Some("help".to_string()),
|
||||
model: Some("gpt-5".to_string()),
|
||||
reasoning_effort: None,
|
||||
agents_states: Default::default(),
|
||||
},
|
||||
ThreadItem::WebSearch {
|
||||
id: "web-1".to_string(),
|
||||
query: "codex".to_string(),
|
||||
action: None,
|
||||
},
|
||||
ThreadItem::ImageGeneration {
|
||||
id: "image-1".to_string(),
|
||||
status: "completed".to_string(),
|
||||
revised_prompt: None,
|
||||
result: "ok".to_string(),
|
||||
saved_path: None,
|
||||
},
|
||||
];
|
||||
|
||||
for item in completed_tool_items {
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::ItemCompleted(
|
||||
ItemCompletedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
completed_at_ms: 1_000,
|
||||
item,
|
||||
},
|
||||
))),
|
||||
&mut out,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut out,
|
||||
)
|
||||
.await;
|
||||
|
||||
let turn_event = out
|
||||
.iter()
|
||||
.find(|event| matches!(event, TrackEventRequest::TurnEvent(_)))
|
||||
.expect("turn event should be emitted");
|
||||
let payload = serde_json::to_value(turn_event).expect("serialize turn event");
|
||||
assert_eq!(payload["event_params"]["total_tool_call_count"], json!(7));
|
||||
assert_eq!(payload["event_params"]["shell_command_count"], json!(1));
|
||||
assert_eq!(payload["event_params"]["file_change_count"], json!(1));
|
||||
assert_eq!(payload["event_params"]["mcp_tool_call_count"], json!(1));
|
||||
assert_eq!(payload["event_params"]["dynamic_tool_call_count"], json!(1));
|
||||
assert_eq!(
|
||||
payload["event_params"]["subagent_tool_call_count"],
|
||||
json!(1)
|
||||
);
|
||||
assert_eq!(payload["event_params"]["web_search_count"], json!(1));
|
||||
assert_eq!(payload["event_params"]["image_generation_count"], json!(1));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn item_completed_without_turn_state_does_not_create_turn_state() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
let mut out = Vec::new();
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(ServerNotification::ItemCompleted(
|
||||
ItemCompletedNotification {
|
||||
thread_id: "thread-2".to_string(),
|
||||
turn_id: "turn-2".to_string(),
|
||||
completed_at_ms: 1_000,
|
||||
item: sample_command_execution_item(
|
||||
CommandExecutionStatus::Completed,
|
||||
Some(0),
|
||||
Some(1),
|
||||
),
|
||||
},
|
||||
))),
|
||||
&mut out,
|
||||
)
|
||||
.await;
|
||||
|
||||
reducer
|
||||
.ingest(
|
||||
AnalyticsFact::Notification(Box::new(sample_turn_completed_notification(
|
||||
"thread-2",
|
||||
"turn-2",
|
||||
AppServerTurnStatus::Completed,
|
||||
/*codex_error_info*/ None,
|
||||
))),
|
||||
&mut out,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(out.is_empty());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn accepted_steers_increment_turn_steer_count() {
|
||||
let mut reducer = AnalyticsReducer::default();
|
||||
|
||||
@@ -30,7 +30,6 @@ use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ServerResponse;
|
||||
use codex_login::AuthManager;
|
||||
use codex_login::CodexAuth;
|
||||
use codex_login::default_client::create_client;
|
||||
use codex_plugin::PluginTelemetryMetadata;
|
||||
use std::collections::HashSet;
|
||||
@@ -353,7 +352,6 @@ impl AnalyticsEventsClient {
|
||||
notification,
|
||||
ServerNotification::TurnStarted(_)
|
||||
| ServerNotification::TurnCompleted(_)
|
||||
| ServerNotification::TurnDiffUpdated(_)
|
||||
| ServerNotification::ItemStarted(_)
|
||||
| ServerNotification::ItemCompleted(_)
|
||||
| ServerNotification::ItemGuardianApprovalReviewStarted(_)
|
||||
@@ -373,7 +371,6 @@ async fn send_track_events(
|
||||
if events.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(auth) = auth_manager.auth().await else {
|
||||
return;
|
||||
};
|
||||
@@ -383,45 +380,12 @@ async fn send_track_events(
|
||||
|
||||
let base_url = base_url.trim_end_matches('/');
|
||||
let url = format!("{base_url}/codex/analytics-events/events");
|
||||
for events in track_event_request_batches(events) {
|
||||
send_track_events_request(&auth, &url, events).await;
|
||||
}
|
||||
}
|
||||
|
||||
fn track_event_request_batches(events: Vec<TrackEventRequest>) -> Vec<Vec<TrackEventRequest>> {
|
||||
let mut batches = Vec::new();
|
||||
let mut current_batch = Vec::new();
|
||||
|
||||
for event in events {
|
||||
if event.should_send_in_isolated_request() {
|
||||
if !current_batch.is_empty() {
|
||||
batches.push(current_batch);
|
||||
current_batch = Vec::new();
|
||||
}
|
||||
batches.push(vec![event]);
|
||||
} else {
|
||||
current_batch.push(event);
|
||||
}
|
||||
}
|
||||
|
||||
if !current_batch.is_empty() {
|
||||
batches.push(current_batch);
|
||||
}
|
||||
|
||||
batches
|
||||
}
|
||||
|
||||
async fn send_track_events_request(auth: &CodexAuth, url: &str, events: Vec<TrackEventRequest>) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let payload = TrackEventsRequest { events };
|
||||
|
||||
let response = create_client()
|
||||
.post(url)
|
||||
.post(&url)
|
||||
.timeout(ANALYTICS_EVENTS_TIMEOUT)
|
||||
.headers(codex_model_provider::auth_provider_from_auth(auth).to_auth_headers())
|
||||
.headers(codex_model_provider::auth_provider_from_auth(&auth).to_auth_headers())
|
||||
.header("Content-Type", "application/json")
|
||||
.json(&payload)
|
||||
.send()
|
||||
|
||||
@@ -1,14 +1,6 @@
|
||||
use super::AnalyticsEventsClient;
|
||||
use super::AnalyticsEventsQueue;
|
||||
use super::track_event_request_batches;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventParams;
|
||||
use crate::events::CodexAcceptedLineFingerprintsEventRequest;
|
||||
use crate::events::SkillInvocationEventParams;
|
||||
use crate::events::SkillInvocationEventRequest;
|
||||
use crate::events::TrackEventRequest;
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AnalyticsFact;
|
||||
use crate::facts::InvocationType;
|
||||
use codex_app_server_protocol::ApprovalsReviewer as AppServerApprovalsReviewer;
|
||||
use codex_app_server_protocol::AskForApproval as AppServerAskForApproval;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
@@ -39,47 +31,6 @@ use std::sync::Mutex;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::mpsc::error::TryRecvError;
|
||||
|
||||
fn sample_accepted_line_fingerprint_event(thread_id: &str) -> TrackEventRequest {
|
||||
TrackEventRequest::AcceptedLineFingerprints(Box::new(
|
||||
CodexAcceptedLineFingerprintsEventRequest {
|
||||
event_type: "codex_accepted_line_fingerprints",
|
||||
event_params: CodexAcceptedLineFingerprintsEventParams {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: "turn-1".to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
completed_at: 1,
|
||||
repo_hash: None,
|
||||
accepted_added_lines: 1,
|
||||
accepted_deleted_lines: 0,
|
||||
line_fingerprints: vec![AcceptedLineFingerprint {
|
||||
path_hash: "path-hash".to_string(),
|
||||
line_hash: "line-hash".to_string(),
|
||||
}],
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn sample_regular_track_event(thread_id: &str) -> TrackEventRequest {
|
||||
TrackEventRequest::SkillInvocation(SkillInvocationEventRequest {
|
||||
event_type: "skill_invocation",
|
||||
skill_id: format!("skill-{thread_id}"),
|
||||
skill_name: "doc".to_string(),
|
||||
event_params: SkillInvocationEventParams {
|
||||
product_client_id: None,
|
||||
skill_scope: None,
|
||||
plugin_id: None,
|
||||
repo_url: None,
|
||||
thread_id: Some(thread_id.to_string()),
|
||||
turn_id: Some("turn-1".to_string()),
|
||||
invoke_type: Some(InvocationType::Explicit),
|
||||
model_slug: Some("gpt-5.1-codex".to_string()),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn client_with_receiver() -> (AnalyticsEventsClient, mpsc::Receiver<AnalyticsFact>) {
|
||||
let (sender, receiver) = mpsc::channel(8);
|
||||
let queue = AnalyticsEventsQueue {
|
||||
@@ -271,23 +222,3 @@ fn track_response_only_enqueues_analytics_relevant_responses() {
|
||||
);
|
||||
assert!(matches!(receiver.try_recv(), Err(TryRecvError::Empty)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn track_event_request_batches_only_isolates_accepted_line_fingerprint_events() {
|
||||
let batches = track_event_request_batches(vec![
|
||||
sample_regular_track_event("thread-1"),
|
||||
sample_regular_track_event("thread-2"),
|
||||
sample_accepted_line_fingerprint_event("thread-3"),
|
||||
sample_accepted_line_fingerprint_event("thread-4"),
|
||||
sample_regular_track_event("thread-5"),
|
||||
sample_regular_track_event("thread-6"),
|
||||
]);
|
||||
|
||||
assert_eq!(batches.len(), 4);
|
||||
assert_eq!(batches[0].len(), 2);
|
||||
assert_eq!(batches[1].len(), 1);
|
||||
assert_eq!(batches[2].len(), 1);
|
||||
assert_eq!(batches[3].len(), 2);
|
||||
assert!(batches[1][0].should_send_in_isolated_request());
|
||||
assert!(batches[2][0].should_send_in_isolated_request());
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use crate::facts::AcceptedLineFingerprint;
|
||||
use crate::facts::AppInvocation;
|
||||
use crate::facts::CodexCompactionEvent;
|
||||
use crate::facts::CompactionImplementation;
|
||||
@@ -72,7 +71,6 @@ pub(crate) enum TrackEventRequest {
|
||||
CollabAgentToolCall(CodexCollabAgentToolCallEventRequest),
|
||||
WebSearch(CodexWebSearchEventRequest),
|
||||
ImageGeneration(CodexImageGenerationEventRequest),
|
||||
AcceptedLineFingerprints(Box<CodexAcceptedLineFingerprintsEventRequest>),
|
||||
#[allow(dead_code)]
|
||||
ReviewEvent(CodexReviewEventRequest),
|
||||
PluginUsed(CodexPluginUsedEventRequest),
|
||||
@@ -82,32 +80,6 @@ pub(crate) enum TrackEventRequest {
|
||||
PluginDisabled(CodexPluginEventRequest),
|
||||
}
|
||||
|
||||
impl TrackEventRequest {
|
||||
pub(crate) fn should_send_in_isolated_request(&self) -> bool {
|
||||
matches!(self, Self::AcceptedLineFingerprints(_))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct CodexAcceptedLineFingerprintsEventParams {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) turn_id: String,
|
||||
pub(crate) thread_id: String,
|
||||
pub(crate) product_surface: Option<String>,
|
||||
pub(crate) model_slug: Option<String>,
|
||||
pub(crate) completed_at: u64,
|
||||
pub(crate) repo_hash: Option<String>,
|
||||
pub(crate) accepted_added_lines: u64,
|
||||
pub(crate) accepted_deleted_lines: u64,
|
||||
pub(crate) line_fingerprints: Vec<AcceptedLineFingerprint>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct CodexAcceptedLineFingerprintsEventRequest {
|
||||
pub(crate) event_type: &'static str,
|
||||
pub(crate) event_params: CodexAcceptedLineFingerprintsEventParams,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub(crate) struct SkillInvocationEventRequest {
|
||||
pub(crate) event_type: &'static str,
|
||||
@@ -794,6 +766,8 @@ pub(crate) struct CodexTurnEventParams {
|
||||
pub(crate) status: Option<TurnStatus>,
|
||||
pub(crate) turn_error: Option<CodexErrorInfo>,
|
||||
pub(crate) steer_count: Option<usize>,
|
||||
// TODO(rhan-oai): Populate these once tool-call accounting is emitted from
|
||||
// core; the schema is reserved but these fields are currently always None.
|
||||
pub(crate) total_tool_call_count: Option<usize>,
|
||||
pub(crate) shell_command_count: Option<usize>,
|
||||
pub(crate) file_change_count: Option<usize>,
|
||||
|
||||
@@ -28,12 +28,6 @@ use codex_protocol::protocol::TokenUsage;
|
||||
use serde::Serialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
|
||||
pub struct AcceptedLineFingerprint {
|
||||
pub path_hash: String,
|
||||
pub line_hash: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TrackEventsContext {
|
||||
pub model_slug: String,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
mod accepted_lines;
|
||||
mod client;
|
||||
mod events;
|
||||
mod facts;
|
||||
@@ -7,8 +6,6 @@ mod reducer;
|
||||
use std::time::SystemTime;
|
||||
use std::time::UNIX_EPOCH;
|
||||
|
||||
pub use accepted_lines::accepted_line_fingerprints_from_unified_diff;
|
||||
pub use accepted_lines::fingerprint_hash;
|
||||
pub use client::AnalyticsEventsClient;
|
||||
pub use events::AppServerRpcTransport;
|
||||
pub use events::GuardianApprovalRequestSource;
|
||||
@@ -20,7 +17,6 @@ pub use events::GuardianReviewSessionKind;
|
||||
pub use events::GuardianReviewTerminalStatus;
|
||||
pub use events::GuardianReviewTrackContext;
|
||||
pub use events::GuardianReviewedAction;
|
||||
pub use facts::AcceptedLineFingerprint;
|
||||
pub use facts::AnalyticsJsonRpcError;
|
||||
pub use facts::AppInvocation;
|
||||
pub use facts::CodexCompactionEvent;
|
||||
|
||||
@@ -1,7 +1,3 @@
|
||||
use crate::accepted_lines::AcceptedLineFingerprintEventInput;
|
||||
use crate::accepted_lines::accepted_line_fingerprint_event_requests;
|
||||
use crate::accepted_lines::accepted_line_fingerprints_from_unified_diff;
|
||||
use crate::accepted_lines::accepted_line_repo_hash_for_cwd;
|
||||
use crate::events::AppServerRpcTransport;
|
||||
use crate::events::CodexAppMentionedEventRequest;
|
||||
use crate::events::CodexAppServerClientMetadata;
|
||||
@@ -108,7 +104,6 @@ use codex_protocol::protocol::TokenUsage;
|
||||
use sha1::Digest;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct AnalyticsReducer {
|
||||
@@ -269,9 +264,7 @@ struct TurnState {
|
||||
started_at: Option<u64>,
|
||||
token_usage: Option<TokenUsage>,
|
||||
completed: Option<CompletedTurnState>,
|
||||
latest_diff: Option<String>,
|
||||
steer_count: usize,
|
||||
tool_counts: TurnToolCounts,
|
||||
}
|
||||
|
||||
#[derive(Hash, Eq, PartialEq)]
|
||||
@@ -281,42 +274,6 @@ struct ToolItemKey {
|
||||
item_id: String,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct TurnToolCounts {
|
||||
total: usize,
|
||||
shell_command: usize,
|
||||
file_change: usize,
|
||||
mcp_tool_call: usize,
|
||||
dynamic_tool_call: usize,
|
||||
subagent_tool_call: usize,
|
||||
web_search: usize,
|
||||
image_generation: usize,
|
||||
}
|
||||
|
||||
impl TurnToolCounts {
|
||||
fn record(&mut self, item: &ThreadItem) {
|
||||
match item {
|
||||
ThreadItem::CommandExecution { .. } => self.shell_command += 1,
|
||||
ThreadItem::FileChange { .. } => self.file_change += 1,
|
||||
ThreadItem::McpToolCall { .. } => self.mcp_tool_call += 1,
|
||||
ThreadItem::DynamicToolCall { .. } => self.dynamic_tool_call += 1,
|
||||
ThreadItem::CollabAgentToolCall { .. } => self.subagent_tool_call += 1,
|
||||
ThreadItem::WebSearch { .. } => self.web_search += 1,
|
||||
ThreadItem::ImageGeneration { .. } => self.image_generation += 1,
|
||||
ThreadItem::UserMessage { .. }
|
||||
| ThreadItem::HookPrompt { .. }
|
||||
| ThreadItem::AgentMessage { .. }
|
||||
| ThreadItem::Plan { .. }
|
||||
| ThreadItem::Reasoning { .. }
|
||||
| ThreadItem::ImageView { .. }
|
||||
| ThreadItem::EnteredReviewMode { .. }
|
||||
| ThreadItem::ExitedReviewMode { .. }
|
||||
| ThreadItem::ContextCompaction { .. } => return,
|
||||
}
|
||||
self.total += 1;
|
||||
}
|
||||
}
|
||||
|
||||
impl AnalyticsReducer {
|
||||
pub(crate) async fn ingest(&mut self, input: AnalyticsFact, out: &mut Vec<TrackEventRequest>) {
|
||||
match input {
|
||||
@@ -348,7 +305,7 @@ impl AnalyticsReducer {
|
||||
response,
|
||||
} => {
|
||||
if let Some(response) = response.into_client_response(request_id) {
|
||||
self.ingest_response(connection_id, response, out).await;
|
||||
self.ingest_response(connection_id, response, out);
|
||||
}
|
||||
}
|
||||
AnalyticsFact::ErrorResponse {
|
||||
@@ -360,7 +317,7 @@ impl AnalyticsReducer {
|
||||
self.ingest_error_response(connection_id, request_id, error_type, out);
|
||||
}
|
||||
AnalyticsFact::Notification(notification) => {
|
||||
self.ingest_notification(*notification, out).await;
|
||||
self.ingest_notification(*notification, out);
|
||||
}
|
||||
AnalyticsFact::ServerRequest {
|
||||
connection_id: _connection_id,
|
||||
@@ -381,10 +338,10 @@ impl AnalyticsReducer {
|
||||
self.ingest_guardian_review(*input, out);
|
||||
}
|
||||
CustomAnalyticsFact::TurnResolvedConfig(input) => {
|
||||
self.ingest_turn_resolved_config(*input, out).await;
|
||||
self.ingest_turn_resolved_config(*input, out);
|
||||
}
|
||||
CustomAnalyticsFact::TurnTokenUsage(input) => {
|
||||
self.ingest_turn_token_usage(*input, out).await;
|
||||
self.ingest_turn_token_usage(*input, out);
|
||||
}
|
||||
CustomAnalyticsFact::SkillInvoked(input) => {
|
||||
self.ingest_skill_invoked(input, out).await;
|
||||
@@ -516,7 +473,7 @@ impl AnalyticsReducer {
|
||||
}
|
||||
}
|
||||
|
||||
async fn ingest_turn_resolved_config(
|
||||
fn ingest_turn_resolved_config(
|
||||
&mut self,
|
||||
input: TurnResolvedConfigFact,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -532,17 +489,15 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.thread_id = Some(thread_id);
|
||||
turn_state.num_input_images = Some(num_input_images);
|
||||
turn_state.resolved_config = Some(input);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
}
|
||||
|
||||
async fn ingest_turn_token_usage(
|
||||
fn ingest_turn_token_usage(
|
||||
&mut self,
|
||||
input: TurnTokenUsageFact,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -556,13 +511,11 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.thread_id = Some(input.thread_id);
|
||||
turn_state.token_usage = Some(input.token_usage);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
}
|
||||
|
||||
async fn ingest_skill_invoked(
|
||||
@@ -669,7 +622,7 @@ impl AnalyticsReducer {
|
||||
});
|
||||
}
|
||||
|
||||
async fn ingest_response(
|
||||
fn ingest_response(
|
||||
&mut self,
|
||||
connection_id: u64,
|
||||
response: ClientResponse,
|
||||
@@ -721,14 +674,12 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.connection_id = Some(connection_id);
|
||||
turn_state.thread_id = Some(pending_request.thread_id);
|
||||
turn_state.num_input_images = Some(pending_request.num_input_images);
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
}
|
||||
ClientResponse::TurnSteer {
|
||||
request_id,
|
||||
@@ -790,7 +741,7 @@ impl AnalyticsReducer {
|
||||
);
|
||||
}
|
||||
|
||||
async fn ingest_notification(
|
||||
fn ingest_notification(
|
||||
&mut self,
|
||||
notification: ServerNotification,
|
||||
out: &mut Vec<TrackEventRequest>,
|
||||
@@ -817,16 +768,6 @@ impl AnalyticsReducer {
|
||||
let Some(item_id) = tracked_tool_item_id(¬ification.item) else {
|
||||
return;
|
||||
};
|
||||
let Some(turn_state) = self.turns.get_mut(¬ification.turn_id) else {
|
||||
tracing::warn!(
|
||||
thread_id = %notification.thread_id,
|
||||
turn_id = %notification.turn_id,
|
||||
item_id,
|
||||
"dropping turn tool count update: missing turn state"
|
||||
);
|
||||
return;
|
||||
};
|
||||
turn_state.tool_counts.record(¬ification.item);
|
||||
let key = ToolItemKey {
|
||||
thread_id: notification.thread_id.clone(),
|
||||
turn_id: notification.turn_id.clone(),
|
||||
@@ -871,34 +812,13 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.started_at = notification
|
||||
.turn
|
||||
.started_at
|
||||
.and_then(|started_at| u64::try_from(started_at).ok());
|
||||
}
|
||||
ServerNotification::TurnDiffUpdated(notification) => {
|
||||
let turn_state =
|
||||
self.turns
|
||||
.entry(notification.turn_id.clone())
|
||||
.or_insert(TurnState {
|
||||
connection_id: None,
|
||||
thread_id: None,
|
||||
num_input_images: None,
|
||||
resolved_config: None,
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.thread_id = Some(notification.thread_id);
|
||||
turn_state.latest_diff = Some(notification.diff);
|
||||
}
|
||||
ServerNotification::TurnCompleted(notification) => {
|
||||
let turn_state =
|
||||
self.turns
|
||||
@@ -911,9 +831,7 @@ impl AnalyticsReducer {
|
||||
started_at: None,
|
||||
token_usage: None,
|
||||
completed: None,
|
||||
latest_diff: None,
|
||||
steer_count: 0,
|
||||
tool_counts: TurnToolCounts::default(),
|
||||
});
|
||||
turn_state.completed = Some(CompletedTurnState {
|
||||
status: analytics_turn_status(notification.turn.status),
|
||||
@@ -932,7 +850,7 @@ impl AnalyticsReducer {
|
||||
.and_then(|duration_ms| u64::try_from(duration_ms).ok()),
|
||||
});
|
||||
let turn_id = notification.turn.id;
|
||||
self.maybe_emit_turn_event(&turn_id, out).await;
|
||||
self.maybe_emit_turn_event(&turn_id, out);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -1068,7 +986,7 @@ impl AnalyticsReducer {
|
||||
}));
|
||||
}
|
||||
|
||||
async fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
|
||||
fn maybe_emit_turn_event(&mut self, turn_id: &str, out: &mut Vec<TrackEventRequest>) {
|
||||
let Some(turn_state) = self.turns.get(turn_id) else {
|
||||
return;
|
||||
};
|
||||
@@ -1101,23 +1019,18 @@ impl AnalyticsReducer {
|
||||
warn_missing_analytics_context(&drop_site, MissingAnalyticsContext::ThreadMetadata);
|
||||
return;
|
||||
};
|
||||
let turn_event = TrackEventRequest::TurnEvent(Box::new(CodexTurnEventRequest {
|
||||
event_type: "codex_turn_event",
|
||||
event_params: codex_turn_event_params(
|
||||
connection_state.app_server_client.clone(),
|
||||
connection_state.runtime.clone(),
|
||||
turn_id.to_string(),
|
||||
turn_state,
|
||||
thread_metadata,
|
||||
),
|
||||
}));
|
||||
let accepted_line_event = accepted_line_event_input(turn_id, turn_state);
|
||||
|
||||
out.push(turn_event);
|
||||
if let Some((mut input, cwd)) = accepted_line_event {
|
||||
input.repo_hash = accepted_line_repo_hash_for_cwd(cwd.as_path()).await;
|
||||
out.extend(accepted_line_fingerprint_event_requests(input));
|
||||
}
|
||||
out.push(TrackEventRequest::TurnEvent(Box::new(
|
||||
CodexTurnEventRequest {
|
||||
event_type: "codex_turn_event",
|
||||
event_params: codex_turn_event_params(
|
||||
connection_state.app_server_client.clone(),
|
||||
connection_state.runtime.clone(),
|
||||
turn_id.to_string(),
|
||||
turn_state,
|
||||
thread_metadata,
|
||||
),
|
||||
},
|
||||
)));
|
||||
self.turns.remove(turn_id);
|
||||
}
|
||||
|
||||
@@ -1729,36 +1642,6 @@ fn web_search_query_count(query: &str, action: Option<&WebSearchAction>) -> Opti
|
||||
}
|
||||
}
|
||||
|
||||
fn accepted_line_event_input(
|
||||
turn_id: &str,
|
||||
turn_state: &TurnState,
|
||||
) -> Option<(AcceptedLineFingerprintEventInput, PathBuf)> {
|
||||
let latest_diff = turn_state.latest_diff.as_deref()?;
|
||||
let summary = accepted_line_fingerprints_from_unified_diff(latest_diff);
|
||||
if summary.accepted_added_lines == 0 && summary.accepted_deleted_lines == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let thread_id = turn_state.thread_id.clone()?;
|
||||
let resolved_config = turn_state.resolved_config.clone()?;
|
||||
|
||||
Some((
|
||||
AcceptedLineFingerprintEventInput {
|
||||
event_type: "codex.accepted_line_fingerprints",
|
||||
turn_id: turn_id.to_string(),
|
||||
thread_id,
|
||||
product_surface: Some("codex".to_string()),
|
||||
model_slug: Some(resolved_config.model.clone()),
|
||||
completed_at: now_unix_seconds(),
|
||||
repo_hash: None,
|
||||
accepted_added_lines: summary.accepted_added_lines,
|
||||
accepted_deleted_lines: summary.accepted_deleted_lines,
|
||||
line_fingerprints: summary.line_fingerprints,
|
||||
},
|
||||
resolved_config.permission_profile_cwd,
|
||||
))
|
||||
}
|
||||
|
||||
fn codex_turn_event_params(
|
||||
app_server_client: CodexAppServerClientMetadata,
|
||||
runtime: CodexRuntimeMetadata,
|
||||
@@ -1829,14 +1712,14 @@ fn codex_turn_event_params(
|
||||
status: completed.status,
|
||||
turn_error: completed.turn_error,
|
||||
steer_count: Some(turn_state.steer_count),
|
||||
total_tool_call_count: Some(turn_state.tool_counts.total),
|
||||
shell_command_count: Some(turn_state.tool_counts.shell_command),
|
||||
file_change_count: Some(turn_state.tool_counts.file_change),
|
||||
mcp_tool_call_count: Some(turn_state.tool_counts.mcp_tool_call),
|
||||
dynamic_tool_call_count: Some(turn_state.tool_counts.dynamic_tool_call),
|
||||
subagent_tool_call_count: Some(turn_state.tool_counts.subagent_tool_call),
|
||||
web_search_count: Some(turn_state.tool_counts.web_search),
|
||||
image_generation_count: Some(turn_state.tool_counts.image_generation),
|
||||
total_tool_call_count: None,
|
||||
shell_command_count: None,
|
||||
file_change_count: None,
|
||||
mcp_tool_call_count: None,
|
||||
dynamic_tool_call_count: None,
|
||||
subagent_tool_call_count: None,
|
||||
web_search_count: None,
|
||||
image_generation_count: None,
|
||||
input_tokens: token_usage
|
||||
.as_ref()
|
||||
.map(|token_usage| token_usage.input_tokens),
|
||||
|
||||
@@ -49,6 +49,7 @@ use codex_config::RemoteThreadConfigLoader;
|
||||
use codex_config::ThreadConfigLoader;
|
||||
use codex_core::config::Config;
|
||||
pub use codex_exec_server::EnvironmentManager;
|
||||
pub use codex_exec_server::EnvironmentManagerArgs;
|
||||
pub use codex_exec_server::ExecServerRuntimePaths;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
@@ -374,7 +375,6 @@ impl InProcessClientStartArgs {
|
||||
pub fn initialize_params(&self) -> InitializeParams {
|
||||
let capabilities = InitializeCapabilities {
|
||||
experimental_api: self.experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
||||
@@ -73,7 +73,6 @@ impl RemoteAppServerConnectArgs {
|
||||
fn initialize_params(&self) -> InitializeParams {
|
||||
let capabilities = InitializeCapabilities {
|
||||
experimental_api: self.experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: if self.opt_out_notification_methods.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "app-server-daemon",
|
||||
crate_name = "codex_app_server_daemon",
|
||||
)
|
||||
@@ -1,40 +0,0 @@
|
||||
[package]
|
||||
name = "codex-app-server-daemon"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "codex_app_server_daemon"
|
||||
path = "src/lib.rs"
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-app-server-transport = { workspace = true }
|
||||
codex-utils-home-dir = { workspace = true }
|
||||
codex-uds = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["rustls-tls"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"fs",
|
||||
"io-util",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
"time",
|
||||
] }
|
||||
tokio-tungstenite = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
@@ -1,109 +0,0 @@
|
||||
# codex-app-server-daemon
|
||||
|
||||
> `codex-app-server-daemon` is experimental and its lifecycle contract may
|
||||
> change while the remote-management flow is still being developed.
|
||||
|
||||
`codex-app-server-daemon` backs the machine-readable `codex app-server`
|
||||
lifecycle commands used by remote clients such as the desktop and mobile apps.
|
||||
It is intended for Codex instances launched over SSH, including fresh developer
|
||||
machines that should expose app-server with `remote_control` enabled.
|
||||
|
||||
## Platform support
|
||||
|
||||
The current daemon implementation is Unix-only. It uses pidfile-backed
|
||||
daemonization plus Unix process and file-locking primitives, and does not yet
|
||||
support Windows lifecycle management.
|
||||
|
||||
## Commands
|
||||
|
||||
```sh
|
||||
codex app-server daemon start
|
||||
codex app-server daemon restart
|
||||
codex app-server daemon enable-remote-control
|
||||
codex app-server daemon disable-remote-control
|
||||
codex app-server daemon stop
|
||||
codex app-server daemon version
|
||||
codex app-server daemon bootstrap --remote-control
|
||||
```
|
||||
|
||||
On success, every command writes exactly one JSON object to stdout. Consumers
|
||||
should parse that JSON rather than relying on human-readable text. Lifecycle
|
||||
responses report the resolved backend, socket path, local CLI version, and
|
||||
running app-server version when applicable.
|
||||
|
||||
## Bootstrap flow
|
||||
|
||||
For a new remote machine:
|
||||
|
||||
```sh
|
||||
curl -fsSL https://chatgpt.com/codex/install.sh | sh
|
||||
$HOME/.codex/packages/standalone/current/codex app-server daemon bootstrap --remote-control
|
||||
```
|
||||
|
||||
`bootstrap` requires the standalone managed install. It records the daemon
|
||||
settings under `CODEX_HOME/app-server-daemon/`, starts app-server as a
|
||||
pidfile-backed detached process, and launches a detached updater loop.
|
||||
|
||||
## Installation and update cases
|
||||
|
||||
The daemon assumes Codex is installed through `install.sh` and always launches
|
||||
the standalone managed binary under `CODEX_HOME`.
|
||||
|
||||
| Situation | What starts | Does this daemon fetch new binaries? | Does a running app-server eventually move to a newer binary on its own? |
|
||||
| --- | --- | --- | --- |
|
||||
| `install.sh` has run, but only `start` is used | `start` uses `CODEX_HOME/packages/standalone/current/codex` | No | No. The managed path is used when starting or restarting, but no updater is installed. |
|
||||
| `install.sh` has run, then `bootstrap` is used | The pidfile backend uses `CODEX_HOME/packages/standalone/current/codex` | Yes. Bootstrap launches a detached updater loop that runs `install.sh` hourly. | Yes, while that updater process is alive and app-server is already running. After a successful fetch, the updater restarts app-server with the refreshed binary and only then replaces its own process image. |
|
||||
| Some other tool updates the managed binary path | The next fresh start or restart uses the updated file at that path | Only if `bootstrap` is active, because the updater still runs `install.sh` on its normal cadence. | Without `bootstrap`, no. With `bootstrap`, the next successful updater pass compares the managed binary contents after `install.sh` runs; if app-server is running and they differ from the updater's current image, it refreshes app-server first and then itself. |
|
||||
|
||||
### Standalone installs
|
||||
|
||||
For installs created by `install.sh`:
|
||||
|
||||
- lifecycle commands always use the standalone managed binary path
|
||||
- `bootstrap` is supported
|
||||
- `bootstrap` starts a detached pid-backed updater loop that fetches via
|
||||
`install.sh`
|
||||
- after a successful refresh, if app-server is running and the managed binary
|
||||
contents changed, the updater restarts app-server with that binary first and
|
||||
only then replaces its own process image
|
||||
- the updater loop is not reboot-persistent; it must be started again by
|
||||
rerunning `bootstrap` after a reboot
|
||||
|
||||
### Out-of-band updates
|
||||
|
||||
This daemon does not watch arbitrary executable files for replacement. If some
|
||||
other tool updates the managed binary path:
|
||||
|
||||
- without `bootstrap`, a currently running app-server remains on the old
|
||||
executable image until an explicit `restart`
|
||||
- with `bootstrap`, the detached updater loop notices the changed managed
|
||||
binary on its next successful scheduled pass after running `install.sh`; if
|
||||
app-server is running, it refreshes app-server first and then refreshes itself
|
||||
once that replacement starts successfully
|
||||
|
||||
## Lifecycle semantics
|
||||
|
||||
`start` is idempotent and returns after app-server is ready to answer the normal
|
||||
JSON-RPC initialize handshake on the Unix control socket.
|
||||
|
||||
`restart` stops any managed daemon and starts it again.
|
||||
|
||||
`enable-remote-control` and `disable-remote-control` persist the launch setting
|
||||
for future starts. If a managed app-server is already running, they restart it
|
||||
so the new setting takes effect immediately.
|
||||
|
||||
`stop` sends a graceful termination request first, then sends a second
|
||||
termination signal after the grace window if the process is still alive.
|
||||
|
||||
All mutating lifecycle commands are serialized per `CODEX_HOME`, so a concurrent
|
||||
`start`, `restart`, `enable-remote-control`, `disable-remote-control`, `stop`,
|
||||
or `bootstrap` does not race another in-flight lifecycle operation.
|
||||
|
||||
## State
|
||||
|
||||
The daemon stores its local state under `CODEX_HOME/app-server-daemon/`:
|
||||
|
||||
- `settings.json` for persisted launch settings
|
||||
- `app-server.pid` for the app-server process record
|
||||
- `app-server-updater.pid` for the pid-backed standalone updater loop
|
||||
- `daemon.lock` for daemon-wide lifecycle serialization
|
||||
@@ -1,33 +0,0 @@
|
||||
mod pid;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::Serialize;
|
||||
|
||||
pub(crate) use pid::PidBackend;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum BackendKind {
|
||||
Pid,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct BackendPaths {
|
||||
pub(crate) codex_bin: PathBuf,
|
||||
pub(crate) pid_file: PathBuf,
|
||||
pub(crate) update_pid_file: PathBuf,
|
||||
pub(crate) remote_control_enabled: bool,
|
||||
}
|
||||
|
||||
pub(crate) fn pid_backend(paths: BackendPaths) -> PidBackend {
|
||||
PidBackend::new(
|
||||
paths.codex_bin,
|
||||
paths.pid_file,
|
||||
paths.remote_control_enabled,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn pid_update_loop_backend(paths: BackendPaths) -> PidBackend {
|
||||
PidBackend::new_update_loop(paths.codex_bin, paths.update_pid_file)
|
||||
}
|
||||
@@ -1,600 +0,0 @@
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
#[cfg(unix)]
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::fs;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
use tokio::time::sleep;
|
||||
|
||||
const STOP_POLL_INTERVAL: Duration = Duration::from_millis(50);
|
||||
const STOP_GRACE_PERIOD: Duration = Duration::from_secs(60);
|
||||
const STOP_TIMEOUT: Duration = Duration::from_secs(70);
|
||||
const START_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
|
||||
#[derive(Debug)]
|
||||
#[cfg_attr(not(unix), allow(dead_code))]
|
||||
pub(crate) struct PidBackend {
|
||||
codex_bin: PathBuf,
|
||||
pid_file: PathBuf,
|
||||
lock_file: PathBuf,
|
||||
command_kind: PidCommandKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct PidRecord {
|
||||
pid: u32,
|
||||
process_start_time: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
enum PidFileState {
|
||||
Missing,
|
||||
Starting,
|
||||
Running(PidRecord),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[cfg_attr(not(unix), allow(dead_code))]
|
||||
enum PidCommandKind {
|
||||
AppServer { remote_control_enabled: bool },
|
||||
UpdateLoop,
|
||||
}
|
||||
|
||||
impl PidBackend {
|
||||
pub(crate) fn new(codex_bin: PathBuf, pid_file: PathBuf, remote_control_enabled: bool) -> Self {
|
||||
let lock_file = pid_file.with_extension("pid.lock");
|
||||
Self {
|
||||
codex_bin,
|
||||
pid_file,
|
||||
lock_file,
|
||||
command_kind: PidCommandKind::AppServer {
|
||||
remote_control_enabled,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_update_loop(codex_bin: PathBuf, pid_file: PathBuf) -> Self {
|
||||
let lock_file = pid_file.with_extension("pid.lock");
|
||||
Self {
|
||||
codex_bin,
|
||||
pid_file,
|
||||
lock_file,
|
||||
command_kind: PidCommandKind::UpdateLoop,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn is_starting_or_running(&self) -> Result<bool> {
|
||||
loop {
|
||||
match self.read_pid_file_state().await? {
|
||||
PidFileState::Missing => return Ok(false),
|
||||
PidFileState::Starting => return Ok(true),
|
||||
PidFileState::Running(record) => {
|
||||
if self.record_is_active(&record).await? {
|
||||
return Ok(true);
|
||||
}
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(false),
|
||||
PidFileState::Starting | PidFileState::Running(_) => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn start(&self) -> Result<Option<u32>> {
|
||||
if let Some(parent) = self.pid_file.parent() {
|
||||
fs::create_dir_all(parent)
|
||||
.await
|
||||
.with_context(|| format!("failed to create pid directory {}", parent.display()))?;
|
||||
}
|
||||
let reservation_lock = self.acquire_reservation_lock().await?;
|
||||
let _pid_file = loop {
|
||||
match fs::OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(&self.pid_file)
|
||||
.await
|
||||
{
|
||||
Ok(pid_file) => break pid_file,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
match self.read_pid_file_state_with_lock_held().await? {
|
||||
PidFileState::Missing => continue,
|
||||
PidFileState::Running(record) => {
|
||||
if self.record_is_active(&record).await? {
|
||||
return Ok(None);
|
||||
}
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
continue;
|
||||
}
|
||||
PidFileState::Starting => {
|
||||
unreachable!("lock holder cannot observe starting")
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to reserve pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
let mut command = Command::new(&self.codex_bin);
|
||||
command
|
||||
.args(self.command_args())
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null());
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
unsafe {
|
||||
command.pre_exec(|| {
|
||||
if libc::setsid() == -1 {
|
||||
return Err(std::io::Error::last_os_error());
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let child = match command.spawn() {
|
||||
Ok(child) => child,
|
||||
Err(err) => {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!(
|
||||
"failed to spawn detached app-server process using {}",
|
||||
self.codex_bin.display()
|
||||
)
|
||||
});
|
||||
}
|
||||
};
|
||||
let pid = child
|
||||
.id()
|
||||
.context("spawned app-server process has no pid")?;
|
||||
let record = match read_process_start_time(pid).await {
|
||||
Ok(process_start_time) => PidRecord {
|
||||
pid,
|
||||
process_start_time,
|
||||
},
|
||||
Err(err) => {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
let contents = serde_json::to_vec(&record).context("failed to serialize pid record")?;
|
||||
let temp_pid_file = self.pid_file.with_extension("pid.tmp");
|
||||
if let Err(err) = fs::write(&temp_pid_file, &contents).await {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to write pid temp file {}", temp_pid_file.display())
|
||||
});
|
||||
}
|
||||
if let Err(err) = fs::rename(&temp_pid_file, &self.pid_file).await {
|
||||
let _ = self.terminate_process(pid);
|
||||
let _ = fs::remove_file(&temp_pid_file).await;
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to publish pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
drop(reservation_lock);
|
||||
Ok(Some(pid))
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
pub(crate) async fn start(&self) -> Result<Option<u32>> {
|
||||
bail!("pid-managed app-server startup is unsupported on this platform")
|
||||
}
|
||||
|
||||
pub(crate) async fn stop(&self) -> Result<()> {
|
||||
loop {
|
||||
let Some(record) = self.wait_for_pid_start().await? else {
|
||||
return Ok(());
|
||||
};
|
||||
if !self.record_is_active(&record).await? {
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(()),
|
||||
PidFileState::Starting | PidFileState::Running(_) => continue,
|
||||
}
|
||||
}
|
||||
|
||||
let pid = record.pid;
|
||||
self.terminate_process(pid)?;
|
||||
let started_at = tokio::time::Instant::now();
|
||||
let deadline = tokio::time::Instant::now() + STOP_TIMEOUT;
|
||||
let mut forced = false;
|
||||
while tokio::time::Instant::now() < deadline {
|
||||
if !self.record_is_active(&record).await? {
|
||||
match self.refresh_after_stale_record(&record).await? {
|
||||
PidFileState::Missing => return Ok(()),
|
||||
PidFileState::Starting | PidFileState::Running(_) => break,
|
||||
}
|
||||
}
|
||||
if !forced && started_at.elapsed() >= STOP_GRACE_PERIOD {
|
||||
self.force_terminate_process(pid)?;
|
||||
forced = true;
|
||||
}
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
|
||||
if self.record_is_active(&record).await? {
|
||||
bail!("timed out waiting for pid-managed app server {pid} to stop");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn wait_for_pid_start(&self) -> Result<Option<PidRecord>> {
|
||||
let deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
loop {
|
||||
match self.read_pid_file_state().await? {
|
||||
PidFileState::Missing => return Ok(None),
|
||||
PidFileState::Running(record) => return Ok(Some(record)),
|
||||
PidFileState::Starting if tokio::time::Instant::now() < deadline => {
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
PidFileState::Starting => {
|
||||
bail!(
|
||||
"timed out waiting for pid reservation in {} to finish initializing",
|
||||
self.pid_file.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn read_pid_file_state(&self) -> Result<PidFileState> {
|
||||
let contents = match fs::read_to_string(&self.pid_file).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return if reservation_lock_is_active(&self.lock_file).await? {
|
||||
Ok(PidFileState::Starting)
|
||||
} else {
|
||||
Ok(PidFileState::Missing)
|
||||
};
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to read pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
};
|
||||
if contents.trim().is_empty() {
|
||||
match inspect_empty_pid_reservation(&self.pid_file, &self.lock_file).await? {
|
||||
EmptyPidReservation::Active => {
|
||||
return Ok(PidFileState::Starting);
|
||||
}
|
||||
EmptyPidReservation::Stale => {
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
EmptyPidReservation::Record(record) => return Ok(PidFileState::Running(record)),
|
||||
}
|
||||
}
|
||||
let record = serde_json::from_str(&contents)
|
||||
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
|
||||
Ok(PidFileState::Running(record))
|
||||
}
|
||||
|
||||
async fn read_pid_file_state_with_lock_held(&self) -> Result<PidFileState> {
|
||||
let contents = match fs::read_to_string(&self.pid_file).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!("failed to read pid file {}", self.pid_file.display())
|
||||
});
|
||||
}
|
||||
};
|
||||
if contents.trim().is_empty() {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
return Ok(PidFileState::Missing);
|
||||
}
|
||||
let record = serde_json::from_str(&contents)
|
||||
.with_context(|| format!("invalid pid file contents in {}", self.pid_file.display()))?;
|
||||
Ok(PidFileState::Running(record))
|
||||
}
|
||||
|
||||
async fn refresh_after_stale_record(&self, expected: &PidRecord) -> Result<PidFileState> {
|
||||
let reservation_lock = self.acquire_reservation_lock().await?;
|
||||
let state = match self.read_pid_file_state_with_lock_held().await? {
|
||||
PidFileState::Running(record) if record == *expected => {
|
||||
let _ = fs::remove_file(&self.pid_file).await;
|
||||
PidFileState::Missing
|
||||
}
|
||||
state => state,
|
||||
};
|
||||
drop(reservation_lock);
|
||||
Ok(state)
|
||||
}
|
||||
|
||||
async fn acquire_reservation_lock(&self) -> Result<fs::File> {
|
||||
let reservation_lock = fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.write(true)
|
||||
.open(&self.lock_file)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!("failed to open pid lock file {}", self.lock_file.display())
|
||||
})?;
|
||||
let lock_deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
while !try_lock_file(&reservation_lock)? {
|
||||
if tokio::time::Instant::now() >= lock_deadline {
|
||||
bail!(
|
||||
"timed out waiting for pid lock {}",
|
||||
self.lock_file.display()
|
||||
);
|
||||
}
|
||||
sleep(STOP_POLL_INTERVAL).await;
|
||||
}
|
||||
Ok(reservation_lock)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn command_args(&self) -> Vec<&'static str> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer {
|
||||
remote_control_enabled: true,
|
||||
} => vec![
|
||||
"--enable",
|
||||
"remote_control",
|
||||
"app-server",
|
||||
"--listen",
|
||||
"unix://",
|
||||
],
|
||||
PidCommandKind::AppServer {
|
||||
remote_control_enabled: false,
|
||||
} => vec!["app-server", "--listen", "unix://"],
|
||||
PidCommandKind::UpdateLoop => vec!["app-server", "daemon", "pid-update-loop"],
|
||||
}
|
||||
}
|
||||
|
||||
fn terminate_process(&self, pid: u32) -> Result<()> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer { .. } => terminate_process(pid),
|
||||
PidCommandKind::UpdateLoop => terminate_process(pid),
|
||||
}
|
||||
}
|
||||
|
||||
fn force_terminate_process(&self, pid: u32) -> Result<()> {
|
||||
match self.command_kind {
|
||||
PidCommandKind::AppServer { .. } => force_terminate_process(pid),
|
||||
PidCommandKind::UpdateLoop => force_terminate_process_group(pid),
|
||||
}
|
||||
}
|
||||
|
||||
async fn record_is_active(&self, record: &PidRecord) -> Result<bool> {
|
||||
process_matches_record(record).await
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn process_exists(pid: u32) -> bool {
|
||||
let Ok(pid) = libc::pid_t::try_from(pid) else {
|
||||
return false;
|
||||
};
|
||||
let result = unsafe { libc::kill(pid, 0) };
|
||||
result == 0 || std::io::Error::last_os_error().raw_os_error() == Some(libc::EPERM)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn terminate_process(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(raw_pid, libc::SIGTERM) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to terminate pid-managed app server {pid}"))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn force_terminate_process(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed app server pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(raw_pid, libc::SIGKILL) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to force terminate pid-managed app server {pid}"))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn force_terminate_process_group(pid: u32) -> Result<()> {
|
||||
let raw_pid = libc::pid_t::try_from(pid)
|
||||
.with_context(|| format!("pid-managed updater pid {pid} is out of range"))?;
|
||||
let result = unsafe { libc::kill(-raw_pid, libc::SIGKILL) };
|
||||
if result == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
return Ok(());
|
||||
}
|
||||
Err(err).with_context(|| format!("failed to force terminate pid-managed updater group {pid}"))
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn terminate_process(_pid: u32) -> Result<()> {
|
||||
bail!("pid-managed app-server shutdown is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn force_terminate_process(_pid: u32) -> Result<()> {
|
||||
bail!("pid-managed app-server shutdown is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn force_terminate_process_group(_pid: u32) -> Result<()> {
|
||||
bail!("pid-managed updater shutdown is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn process_matches_record(record: &PidRecord) -> Result<bool> {
|
||||
if !process_exists(record.pid) {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
match read_process_start_time(record.pid).await {
|
||||
Ok(start_time) => Ok(start_time == record.process_start_time),
|
||||
Err(_err) if !process_exists(record.pid) => Ok(false),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
async fn process_matches_record(_record: &PidRecord) -> Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(not(unix), allow(dead_code))]
|
||||
enum EmptyPidReservation {
|
||||
Active,
|
||||
Stale,
|
||||
Record(PidRecord),
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn try_lock_file(file: &fs::File) -> Result<bool> {
|
||||
use std::os::fd::AsRawFd;
|
||||
|
||||
let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
|
||||
if result == 0 {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
|
||||
return Ok(false);
|
||||
}
|
||||
Err(err).context("failed to lock pid reservation")
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn try_lock_file(_file: &fs::File) -> Result<bool> {
|
||||
bail!("pid-managed app-server startup is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn reservation_lock_is_active(path: &Path) -> Result<bool> {
|
||||
let file = match fs::OpenOptions::new()
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.open(path)
|
||||
.await
|
||||
{
|
||||
Ok(file) => file,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Ok(false);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err)
|
||||
.with_context(|| format!("failed to inspect pid lock file {}", path.display()));
|
||||
}
|
||||
};
|
||||
Ok(!try_lock_file(&file)?)
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
async fn reservation_lock_is_active(_path: &Path) -> Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
/// Classify an empty pid file by consulting the companion lock file.
///
/// Returns `Active` when the lock is still held (some process is mid-startup),
/// `Stale` when the lock is free and the pid file is missing or still empty
/// (the empty file is removed as cleanup), or `Record` when the pid file has
/// since been filled in with a parseable record.
#[cfg(unix)]
async fn inspect_empty_pid_reservation(
    pid_path: &Path,
    lock_path: &Path,
) -> Result<EmptyPidReservation> {
    // Open (creating if needed) the lock file so the flock state can be probed.
    let file = match fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(false)
        .open(lock_path)
        .await
    {
        Ok(file) => file,
        Err(err) => {
            return Err(err).with_context(|| {
                format!("failed to inspect pid lock file {}", lock_path.display())
            });
        }
    };
    // Lock still held elsewhere: the reservation is live.
    if !try_lock_file(&file)? {
        return Ok(EmptyPidReservation::Active);
    }

    // We now hold the lock; re-read the pid file in case a record was written
    // between the caller's first read and our lock acquisition.
    let contents = match fs::read_to_string(pid_path).await {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Ok(EmptyPidReservation::Stale);
        }
        Err(err) => {
            return Err(err)
                .with_context(|| format!("failed to reread pid file {}", pid_path.display()));
        }
    };
    if contents.trim().is_empty() {
        // Still empty with nobody holding the lock: an abandoned reservation.
        // Best-effort cleanup; removal failure is intentionally ignored.
        let _ = fs::remove_file(pid_path).await;
        return Ok(EmptyPidReservation::Stale);
    }

    let record = serde_json::from_str(&contents)
        .with_context(|| format!("invalid pid file contents in {}", pid_path.display()))?;
    Ok(EmptyPidReservation::Record(record))
}
|
||||
|
||||
/// Non-Unix fallback: without flock, an empty pid file can only be treated
/// as stale.
#[cfg(not(unix))]
async fn inspect_empty_pid_reservation(
    _pid_path: &Path,
    _lock_path: &Path,
) -> Result<EmptyPidReservation> {
    Ok(EmptyPidReservation::Stale)
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn read_process_start_time(pid: u32) -> Result<String> {
|
||||
let output = Command::new("ps")
|
||||
.args(["-p", &pid.to_string(), "-o", "lstart="])
|
||||
.output()
|
||||
.await
|
||||
.context("failed to invoke ps for pid-managed app server")?;
|
||||
if !output.status.success() {
|
||||
bail!("failed to read start time for pid-managed app server {pid}");
|
||||
}
|
||||
|
||||
let start_time = String::from_utf8(output.stdout)
|
||||
.context("pid-managed app server start time was not utf-8")?;
|
||||
let start_time = start_time.trim();
|
||||
if start_time.is_empty() {
|
||||
bail!("pid-managed app server {pid} has no recorded start time");
|
||||
}
|
||||
Ok(start_time.to_string())
|
||||
}
|
||||
|
||||
// Unit tests live in a sibling file; they exercise Unix-only flock behavior,
// so they are compiled only for Unix test builds.
#[cfg(all(test, unix))]
#[path = "pid_tests.rs"]
mod tests;
|
||||
@@ -1,158 +0,0 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use super::PidBackend;
|
||||
use super::PidCommandKind;
|
||||
use super::PidFileState;
|
||||
use super::PidRecord;
|
||||
use super::try_lock_file;
|
||||
|
||||
/// An empty pid file whose companion lock is held must read as `Starting`
/// and must not be deleted.
#[tokio::test]
async fn locked_empty_pid_file_is_treated_as_active_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    // Hold the reservation lock ourselves, simulating a starter that is
    // still alive.
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));

    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Starting
    );
    // The live reservation's pid file must be left in place.
    assert!(pid_file.exists());
}
|
||||
|
||||
/// An empty pid file with no lock holder must read as `Missing` and the
/// stale file must be cleaned up.
#[tokio::test]
async fn unlocked_empty_pid_file_is_treated_as_stale_reservation() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );

    assert_eq!(
        backend.read_pid_file_state().await.expect("read pid"),
        PidFileState::Missing
    );
    // Stale cleanup removes the abandoned empty pid file.
    assert!(!pid_file.exists());
}
|
||||
|
||||
/// `stop` must block while a starter holds the reservation lock and only
/// complete once the lock is released and the pid file removed.
#[tokio::test]
async fn stop_waits_for_live_reservation_to_resolve() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    let reservation = tokio::fs::OpenOptions::new()
        .create(true)
        .truncate(false)
        .write(true)
        .open(&backend.lock_file)
        .await
        .expect("open pid lock file");
    assert!(try_lock_file(&reservation).expect("lock reservation"));
    // Release the lock and remove the pid file shortly after, while `stop`
    // is already waiting.
    let cleanup = tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(50)).await;
        drop(reservation);
        tokio::fs::remove_file(pid_file)
            .await
            .expect("remove pid file");
    });

    backend.stop().await.expect("stop");
    cleanup.await.expect("cleanup task");
}
|
||||
|
||||
/// `start` must treat an unlocked empty pid file as stale and proceed to the
/// spawn attempt (which fails here because the codex binary is missing).
#[tokio::test]
async fn start_retries_stale_empty_pid_file_under_its_own_lock() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    tokio::fs::write(&pid_file, "")
        .await
        .expect("write pid file");
    let backend = PidBackend::new(
        temp_dir.path().join("missing-codex"),
        pid_file,
        /*remote_control_enabled*/ false,
    );

    // Reaching the spawn error proves the stale reservation did not block start.
    let err = backend.start().await.expect_err("start");
    assert!(
        err.to_string()
            .starts_with("failed to spawn detached app-server process using ")
    );
}
|
||||
|
||||
/// Cleaning up after a stale record must not clobber a newer record that
/// another process wrote to the pid file in the meantime.
#[tokio::test]
async fn stale_record_cleanup_preserves_replacement_record() {
    let temp_dir = TempDir::new().expect("temp dir");
    let pid_file = temp_dir.path().join("app-server.pid");
    let backend = PidBackend::new(
        temp_dir.path().join("codex"),
        pid_file.clone(),
        /*remote_control_enabled*/ false,
    );
    // The record we believe is stale...
    let stale = PidRecord {
        pid: 1,
        process_start_time: "old".to_string(),
    };
    // ...and the newer record that has since replaced it on disk.
    let replacement = PidRecord {
        pid: 2,
        process_start_time: "new".to_string(),
    };
    tokio::fs::write(
        &pid_file,
        serde_json::to_vec(&replacement).expect("serialize replacement"),
    )
    .await
    .expect("write replacement pid file");

    assert_eq!(
        backend
            .refresh_after_stale_record(&stale)
            .await
            .expect("cleanup"),
        PidFileState::Running(replacement)
    );
}
|
||||
|
||||
/// The updater backend must spawn the hidden `app-server daemon
/// pid-update-loop` subcommand rather than a plain app server.
#[test]
fn update_loop_uses_hidden_app_server_subcommand() {
    let backend = PidBackend {
        codex_bin: "codex".into(),
        pid_file: "updater.pid".into(),
        lock_file: "updater.pid.lock".into(),
        command_kind: PidCommandKind::UpdateLoop,
    };

    assert_eq!(
        backend.command_args(),
        vec!["app-server", "daemon", "pid-update-loop"]
    );
}
|
||||
@@ -1,131 +0,0 @@
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_uds::UnixStream;
|
||||
use futures::SinkExt;
|
||||
use futures::StreamExt;
|
||||
use tokio::time::timeout;
|
||||
use tokio_tungstenite::client_async;
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
|
||||
/// Upper bound on the whole connect-and-handshake probe.
const PROBE_TIMEOUT: Duration = Duration::from_secs(2);
/// Client name reported to the app server during `initialize`.
const CLIENT_NAME: &str = "codex_app_server_daemon";
|
||||
|
||||
/// Result of a successful control-socket probe.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ProbeInfo {
    // Version parsed from the server's user-agent string.
    pub(crate) app_server_version: String,
}
|
||||
|
||||
pub(crate) async fn probe(socket_path: &Path) -> Result<ProbeInfo> {
|
||||
timeout(PROBE_TIMEOUT, probe_inner(socket_path))
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"timed out probing app-server control socket {}",
|
||||
socket_path.display()
|
||||
)
|
||||
})?
|
||||
}
|
||||
|
||||
/// Connect to the control socket, run the `initialize`/`initialized`
/// JSON-RPC handshake over WebSocket, and return the server's version.
async fn probe_inner(socket_path: &Path) -> Result<ProbeInfo> {
    let stream = UnixStream::connect(socket_path)
        .await
        .with_context(|| format!("failed to connect to {}", socket_path.display()))?;
    // The URL host only satisfies the WebSocket handshake; transport is the
    // unix stream.
    let (mut websocket, _response) = client_async("ws://localhost/", stream)
        .await
        .with_context(|| format!("failed to upgrade {}", socket_path.display()))?;

    let initialize = JSONRPCMessage::Request(JSONRPCRequest {
        id: RequestId::Integer(1),
        method: "initialize".to_string(),
        params: Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: CLIENT_NAME.to_string(),
                title: Some("Codex App Server Daemon".to_string()),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            capabilities: None,
        })?),
        trace: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialize)?.into()))
        .await
        .context("failed to send initialize request")?;

    // Skip non-text frames and unrelated messages until the response to
    // request id 1 arrives.
    let response = loop {
        let frame = websocket
            .next()
            .await
            .ok_or_else(|| anyhow!("app-server closed before initialize response"))??;
        let Message::Text(payload) = frame else {
            continue;
        };
        let message = serde_json::from_str::<JSONRPCMessage>(&payload)?;
        if let JSONRPCMessage::Response(response) = message
            && response.id == RequestId::Integer(1)
        {
            break response;
        }
    };
    let initialize_response = serde_json::from_value::<InitializeResponse>(response.result)?;

    // Complete the handshake per protocol before disconnecting.
    let initialized = JSONRPCMessage::Notification(JSONRPCNotification {
        method: "initialized".to_string(),
        params: None,
    });
    websocket
        .send(Message::Text(serde_json::to_string(&initialized)?.into()))
        .await
        .context("failed to send initialized notification")?;
    // Close failure is ignored: the probe already has what it needs.
    websocket.close(None).await.ok();

    Ok(ProbeInfo {
        app_server_version: parse_version_from_user_agent(&initialize_response.user_agent)?,
    })
}
|
||||
|
||||
fn parse_version_from_user_agent(user_agent: &str) -> Result<String> {
|
||||
let (_originator, rest) = user_agent
|
||||
.split_once('/')
|
||||
.ok_or_else(|| anyhow!("app-server user-agent omitted version separator"))?;
|
||||
let version = rest
|
||||
.split_whitespace()
|
||||
.next()
|
||||
.filter(|version| !version.is_empty())
|
||||
.ok_or_else(|| anyhow!("app-server user-agent omitted version"))?;
|
||||
Ok(version.to_string())
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
mod tests {
    use pretty_assertions::assert_eq;

    use super::parse_version_from_user_agent;

    /// A full user-agent with platform suffix parses to just the version.
    #[test]
    fn parses_version_from_codex_user_agent() {
        assert_eq!(
            parse_version_from_user_agent(
                "codex_app_server_daemon/1.2.3 (Linux 6.8.0; x86_64) codex_cli_rs/1.2.3",
            )
            .expect("version"),
            "1.2.3"
        );
    }

    /// A user-agent with no `/` separator must be rejected.
    #[test]
    fn rejects_user_agent_without_version() {
        assert!(parse_version_from_user_agent("codex_app_server_daemon").is_err());
    }
}
|
||||
@@ -1,790 +0,0 @@
|
||||
mod backend;
|
||||
mod client;
|
||||
mod managed_install;
|
||||
mod settings;
|
||||
mod update_loop;
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
pub use backend::BackendKind;
|
||||
use backend::BackendPaths;
|
||||
use codex_app_server_transport::app_server_control_socket_path;
|
||||
use codex_utils_home_dir::find_codex_home;
|
||||
use managed_install::managed_codex_bin;
|
||||
#[cfg(unix)]
|
||||
use managed_install::managed_codex_version;
|
||||
use serde::Serialize;
|
||||
use settings::DaemonSettings;
|
||||
use tokio::time::sleep;
|
||||
|
||||
/// Polling cadence while waiting for the app server socket to answer.
const START_POLL_INTERVAL: Duration = Duration::from_millis(50);
/// Overall deadline for a freshly started app server to become ready.
const START_TIMEOUT: Duration = Duration::from_secs(10);
/// How long lifecycle commands wait to acquire the daemon operation lock.
const OPERATION_LOCK_TIMEOUT: Duration = Duration::from_secs(75);
// File names below live inside STATE_DIR_NAME under CODEX_HOME.
const PID_FILE_NAME: &str = "app-server.pid";
const UPDATE_PID_FILE_NAME: &str = "app-server-updater.pid";
const OPERATION_LOCK_FILE_NAME: &str = "daemon.lock";
const SETTINGS_FILE_NAME: &str = "settings.json";
const STATE_DIR_NAME: &str = "app-server-daemon";
|
||||
|
||||
/// Lifecycle operation requested by the caller of [`run`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LifecycleCommand {
    Start,
    Restart,
    Stop,
    // Read-only: reports the running server's version without taking the
    // operation lock.
    Version,
}
|
||||
|
||||
/// Outcome of a lifecycle command, serialized in camelCase for JSON output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum LifecycleStatus {
    AlreadyRunning,
    Started,
    Restarted,
    Stopped,
    NotRunning,
    Running,
}
|
||||
|
||||
/// JSON-serializable result of a lifecycle command.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LifecycleOutput {
    pub status: LifecycleStatus,
    // Backend kind, when a daemon-managed backend was involved.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    // Pid of the spawned process, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<u32>,
    pub socket_path: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cli_version: Option<String>,
    // Version reported by the running app server, when it was probed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}
|
||||
|
||||
/// Caller-supplied options for [`bootstrap`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BootstrapOptions {
    pub remote_control_enabled: bool,
}
|
||||
|
||||
/// Outcome of [`bootstrap`]; currently always `Bootstrapped` on success.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BootstrapStatus {
    Bootstrapped,
}
|
||||
|
||||
/// JSON-serializable result of a successful bootstrap.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BootstrapOutput {
    pub status: BootstrapStatus,
    pub backend: BackendKind,
    pub auto_update_enabled: bool,
    pub remote_control_enabled: bool,
    // Path of the managed standalone Codex binary used by the daemon.
    pub managed_codex_path: PathBuf,
    pub socket_path: PathBuf,
    pub cli_version: String,
    pub app_server_version: String,
}
|
||||
|
||||
/// Requested remote-control toggle for [`set_remote_control`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteControlMode {
    Enabled,
    Disabled,
}
|
||||
|
||||
impl RemoteControlMode {
|
||||
fn is_enabled(self) -> bool {
|
||||
matches!(self, Self::Enabled)
|
||||
}
|
||||
}
|
||||
|
||||
/// Outcome of a remote-control toggle; `Already*` means the setting was a
/// no-op because it matched the stored value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum RemoteControlStatus {
    Enabled,
    Disabled,
    AlreadyEnabled,
    AlreadyDisabled,
}
|
||||
|
||||
/// JSON-serializable result of [`set_remote_control`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RemoteControlOutput {
    pub status: RemoteControlStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub backend: Option<BackendKind>,
    // The setting now persisted in the daemon settings file.
    pub remote_control_enabled: bool,
    pub socket_path: PathBuf,
    pub cli_version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_server_version: Option<String>,
}
|
||||
|
||||
/// Result of [`Daemon::try_restart_if_running`].
#[cfg(unix)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum RestartIfRunningOutcome {
    // Another lifecycle operation currently holds the operation lock.
    Busy,
    NotRunning,
    // Backend is up but the server has not answered the probe yet.
    NotReady,
    AlreadyCurrent,
    Restarted,
}
|
||||
|
||||
/// Whether a conditional restart compares versions first or always restarts.
#[cfg(unix)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum RestartMode {
    IfVersionChanged,
    Always,
}
|
||||
|
||||
/// Whether the updater process should re-exec itself after a restart that
/// picked up a new managed binary.
#[cfg(unix)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum UpdaterRefreshMode {
    None,
    ReexecIfManagedBinaryChanged,
}
|
||||
|
||||
/// Internal verdict computed by [`restart_decision`].
#[cfg(unix)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum RestartDecision {
    NotReady,
    AlreadyCurrent,
    Restart,
}
|
||||
|
||||
/// Execute a lifecycle command against the daemon resolved from the
/// environment (Unix only).
pub async fn run(command: LifecycleCommand) -> Result<LifecycleOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.run(command).await
}
|
||||
|
||||
/// Bootstrap the managed app server and updater with the given options
/// (Unix only).
pub async fn bootstrap(options: BootstrapOptions) -> Result<BootstrapOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.bootstrap(options).await
}
|
||||
|
||||
/// Toggle the remote-control setting, restarting the managed backend when
/// the value actually changes (Unix only).
pub async fn set_remote_control(mode: RemoteControlMode) -> Result<RemoteControlOutput> {
    ensure_supported_platform()?;
    Daemon::from_environment()?.set_remote_control(mode).await
}
|
||||
|
||||
/// Entry point for the hidden pid-update-loop subcommand (Unix only).
pub async fn run_pid_update_loop() -> Result<()> {
    ensure_supported_platform()?;
    update_loop::run().await
}
|
||||
|
||||
/// Unix: the daemon lifecycle is fully supported.
#[cfg(unix)]
fn ensure_supported_platform() -> Result<()> {
    Ok(())
}
|
||||
|
||||
/// Non-Unix: every public entry point bails before doing any work.
#[cfg(not(unix))]
fn ensure_supported_platform() -> Result<()> {
    Err(anyhow!(
        "codex app-server daemon lifecycle is only supported on Unix platforms"
    ))
}
|
||||
|
||||
/// Resolved paths for one daemon installation under CODEX_HOME.
struct Daemon {
    // Control socket the app server listens on.
    socket_path: PathBuf,
    // Pid file for the app server itself.
    pid_file: PathBuf,
    // Pid file for the auto-updater process.
    update_pid_file: PathBuf,
    // flock file serializing lifecycle operations across processes.
    operation_lock_file: PathBuf,
    // Persisted daemon settings (settings.json).
    settings_file: PathBuf,
    // Managed standalone Codex binary the daemon spawns.
    managed_codex_bin: PathBuf,
}
|
||||
|
||||
impl Daemon {
|
||||
    /// Build a `Daemon` by resolving CODEX_HOME and deriving all state paths
    /// under its `app-server-daemon` directory.
    fn from_environment() -> Result<Self> {
        let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
        let socket_path = app_server_control_socket_path(codex_home.as_path())?
            .as_path()
            .to_path_buf();
        let state_dir = codex_home.as_path().join(STATE_DIR_NAME);
        Ok(Self {
            socket_path,
            pid_file: state_dir.join(PID_FILE_NAME),
            update_pid_file: state_dir.join(UPDATE_PID_FILE_NAME),
            operation_lock_file: state_dir.join(OPERATION_LOCK_FILE_NAME),
            settings_file: state_dir.join(SETTINGS_FILE_NAME),
            managed_codex_bin: managed_codex_bin(codex_home.as_path()),
        })
    }
|
||||
|
||||
    /// Dispatch a lifecycle command. Mutating commands (start/restart/stop)
    /// serialize behind the operation lock; `Version` is read-only and does
    /// not take it.
    async fn run(&self, command: LifecycleCommand) -> Result<LifecycleOutput> {
        match command {
            LifecycleCommand::Start => {
                // Guard is held for the duration of the operation.
                let _operation_lock = self.acquire_operation_lock().await?;
                self.start().await
            }
            LifecycleCommand::Restart => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.restart().await
            }
            LifecycleCommand::Stop => {
                let _operation_lock = self.acquire_operation_lock().await?;
                self.stop().await
            }
            LifecycleCommand::Version => self.version().await,
        }
    }
|
||||
|
||||
    /// Start the managed app server if it is not already serving.
    ///
    /// Order matters: a successful probe means something is serving already;
    /// otherwise a starting/running pid-backend instance is awaited; only
    /// then is a fresh backend spawned.
    async fn start(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        // Something already answers on the socket: report it, managed or not.
        if let Ok(info) = client::probe(&self.socket_path).await {
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                self.running_backend(&settings).await?,
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }

        // A managed instance exists but has not answered yet: wait for it.
        if self.running_backend_instance(&settings).await?.is_some() {
            let info = self.wait_until_ready().await?;
            return Ok(self.output(
                LifecycleStatus::AlreadyRunning,
                Some(BackendKind::Pid),
                /*pid*/ None,
                Some(info.app_server_version),
            ));
        }

        self.ensure_managed_codex_bin()?;
        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Started,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }
|
||||
|
||||
    /// Stop the managed backend (if any) and start a fresh one.
    ///
    /// Refuses to touch an app server that answers on the socket but is not
    /// managed by this daemon.
    async fn restart(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }

        self.ensure_managed_codex_bin()?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }

        let pid = self.start_managed_backend(&settings).await?;
        let info = self.wait_until_ready().await?;
        Ok(self.output(
            LifecycleStatus::Restarted,
            Some(BackendKind::Pid),
            pid,
            Some(info.app_server_version),
        ))
    }
|
||||
|
||||
    /// Restart the managed backend from the update loop, without blocking on
    /// the operation lock: returns `Busy` immediately if another lifecycle
    /// operation is in flight.
    ///
    /// With `RestartMode::IfVersionChanged`, the running server's probed
    /// version is compared against the managed binary's version and the
    /// restart is skipped when they already match. After a restart, the
    /// updater may re-exec itself per `updater_refresh_mode`.
    #[cfg(unix)]
    pub(crate) async fn try_restart_if_running(
        &self,
        mode: RestartMode,
        updater_refresh_mode: UpdaterRefreshMode,
        managed_codex_bin: &Path,
    ) -> Result<RestartIfRunningOutcome> {
        // Non-blocking lock attempt: contention is reported, not waited out.
        let operation_lock = self.open_operation_lock_file().await?;
        if !try_lock_file(&operation_lock)? {
            return Ok(RestartIfRunningOutcome::Busy);
        }
        let settings = self.load_settings().await?;
        let outcome = if let Some(backend) = self.running_backend_instance(&settings).await? {
            let info = client::probe(&self.socket_path).await.ok();
            // Only resolve the managed binary's version when the server is
            // reachable; otherwise the decision is NotReady anyway.
            let managed_version = if info.is_some() {
                Some(managed_codex_version(managed_codex_bin).await?)
            } else {
                None
            };
            match restart_decision(mode, info.as_ref(), managed_version.as_deref()) {
                RestartDecision::NotReady => return Ok(RestartIfRunningOutcome::NotReady),
                RestartDecision::AlreadyCurrent => RestartIfRunningOutcome::AlreadyCurrent,
                RestartDecision::Restart => {
                    backend.stop().await?;
                    let _ = self
                        .start_managed_backend_with_bin(&settings, managed_codex_bin)
                        .await?;
                    self.wait_until_ready().await?;
                    RestartIfRunningOutcome::Restarted
                }
            }
        } else if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        } else {
            RestartIfRunningOutcome::NotRunning
        };

        if should_reexec_updater(updater_refresh_mode, outcome) {
            crate::update_loop::reexec_managed_updater(managed_codex_bin)?;
        }

        Ok(outcome)
    }
|
||||
|
||||
    /// Stop the managed backend if one is running; error on an unmanaged
    /// server; otherwise report `NotRunning`.
    async fn stop(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
            return Ok(self.output(
                LifecycleStatus::Stopped,
                Some(BackendKind::Pid),
                /*pid*/ None,
                /*app_server_version*/ None,
            ));
        }

        // Something answers on the socket but is not ours: refuse to stop it.
        if client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }

        Ok(self.output(
            LifecycleStatus::NotRunning,
            /*backend*/ None,
            /*pid*/ None,
            /*app_server_version*/ None,
        ))
    }
|
||||
|
||||
    /// Probe the running server and report its version; errors when nothing
    /// answers on the socket.
    async fn version(&self) -> Result<LifecycleOutput> {
        let settings = self.load_settings().await?;
        let info = client::probe(&self.socket_path).await?;
        Ok(self.output(
            LifecycleStatus::Running,
            self.running_backend(&settings).await?,
            /*pid*/ None,
            Some(info.app_server_version),
        ))
    }
|
||||
|
||||
async fn wait_until_ready(&self) -> Result<client::ProbeInfo> {
|
||||
let deadline = tokio::time::Instant::now() + START_TIMEOUT;
|
||||
loop {
|
||||
match client::probe(&self.socket_path).await {
|
||||
Ok(info) => return Ok(info),
|
||||
Err(err) if tokio::time::Instant::now() < deadline => {
|
||||
let _ = err;
|
||||
sleep(START_POLL_INTERVAL).await;
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).with_context(|| {
|
||||
format!(
|
||||
"app server did not become ready on {}",
|
||||
self.socket_path.display()
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    /// Take the operation lock, then run the bootstrap sequence.
    async fn bootstrap(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        let _operation_lock = self.acquire_operation_lock().await?;
        self.bootstrap_locked(options).await
    }
|
||||
|
||||
    /// Persist the remote-control setting and, when it actually changed and
    /// a managed backend is running, restart the backend so the new setting
    /// takes effect.
    async fn set_remote_control(&self, mode: RemoteControlMode) -> Result<RemoteControlOutput> {
        let _operation_lock = self.acquire_operation_lock().await?;
        let previous_settings = self.load_settings().await?;
        let mut settings = previous_settings.clone();
        let remote_control_enabled = mode.is_enabled();
        let backend = self.running_backend_instance(&previous_settings).await?;

        // A server answering on the socket without a managed backend behind
        // it is not ours to reconfigure.
        if backend.is_none() && client::probe(&self.socket_path).await.is_ok() {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }

        // Setting already matches: report Already* without restarting.
        if settings.remote_control_enabled == remote_control_enabled {
            let info = if backend.is_some() {
                Some(self.wait_until_ready().await?)
            } else {
                None
            };
            return Ok(self.remote_control_output(
                already_remote_control_status(mode),
                backend.map(|_| BackendKind::Pid),
                remote_control_enabled,
                info.map(|info| info.app_server_version),
            ));
        }

        // Persist the new setting before touching the backend so a crash
        // mid-restart still leaves the intended configuration on disk.
        settings.remote_control_enabled = remote_control_enabled;
        settings.save(&self.settings_file).await?;

        let app_server_version = if let Some(backend) = backend {
            self.ensure_managed_codex_bin()?;
            backend.stop().await?;
            let _ = self.start_managed_backend(&settings).await?;
            Some(self.wait_until_ready().await?.app_server_version)
        } else {
            None
        };

        Ok(self.remote_control_output(
            remote_control_status(mode),
            app_server_version.as_ref().map(|_| BackendKind::Pid),
            remote_control_enabled,
            app_server_version,
        ))
    }
|
||||
|
||||
    /// Bootstrap sequence (caller must hold the operation lock): save
    /// settings, (re)start the app-server backend, then (re)start the
    /// updater backend, and wait for the server to become ready.
    async fn bootstrap_locked(&self, options: BootstrapOptions) -> Result<BootstrapOutput> {
        self.ensure_managed_codex_bin()?;

        let settings = DaemonSettings {
            remote_control_enabled: options.remote_control_enabled,
        };
        // Refuse to bootstrap over a server this daemon does not manage.
        if client::probe(&self.socket_path).await.is_ok()
            && self.running_backend(&settings).await?.is_none()
        {
            return Err(anyhow!(
                "app server is running but is not managed by codex app-server daemon"
            ));
        }
        settings.save(&self.settings_file).await?;

        // Stop any existing managed instance before starting fresh.
        if let Some(backend) = self.running_backend_instance(&settings).await? {
            backend.stop().await?;
        }

        let backend = backend::pid_backend(self.backend_paths(&settings));
        backend.start().await?;
        // Restart the updater too so it picks up the new settings.
        let updater = backend::pid_update_loop_backend(self.backend_paths(&settings));
        if updater.is_starting_or_running().await? {
            updater.stop().await?;
        }
        updater.start().await?;

        let info = self.wait_until_ready().await?;
        Ok(BootstrapOutput {
            status: BootstrapStatus::Bootstrapped,
            backend: BackendKind::Pid,
            auto_update_enabled: true,
            remote_control_enabled: settings.remote_control_enabled,
            managed_codex_path: self.managed_codex_bin.clone(),
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version: info.app_server_version,
        })
    }
|
||||
|
||||
async fn running_backend(&self, settings: &DaemonSettings) -> Result<Option<BackendKind>> {
|
||||
Ok(self
|
||||
.running_backend_instance(settings)
|
||||
.await?
|
||||
.map(|_| BackendKind::Pid))
|
||||
}
|
||||
|
||||
async fn running_backend_instance(
|
||||
&self,
|
||||
settings: &DaemonSettings,
|
||||
) -> Result<Option<backend::PidBackend>> {
|
||||
let backend = backend::pid_backend(self.backend_paths(settings));
|
||||
if backend.is_starting_or_running().await? {
|
||||
return Ok(Some(backend));
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
    /// Start the managed backend using the daemon's own managed binary.
    async fn start_managed_backend(&self, settings: &DaemonSettings) -> Result<Option<u32>> {
        self.start_managed_backend_with_bin(settings, &self.managed_codex_bin)
            .await
    }
|
||||
|
||||
    /// Start the managed backend using an explicit binary path (the update
    /// loop passes a freshly installed binary). Returns the spawned pid when
    /// the backend reports one.
    async fn start_managed_backend_with_bin(
        &self,
        settings: &DaemonSettings,
        managed_codex_bin: &Path,
    ) -> Result<Option<u32>> {
        let backend =
            backend::pid_backend(self.backend_paths_with_bin(settings, managed_codex_bin));
        backend.start().await
    }
|
||||
|
||||
fn ensure_managed_codex_bin(&self) -> Result<()> {
|
||||
if self.managed_codex_bin.is_file() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"managed standalone Codex install not found at {}; install Codex first",
|
||||
self.managed_codex_bin.display()
|
||||
))
|
||||
}
|
||||
|
||||
    /// Backend paths using the daemon's own managed binary.
    fn backend_paths(&self, settings: &DaemonSettings) -> BackendPaths {
        self.backend_paths_with_bin(settings, &self.managed_codex_bin)
    }
|
||||
|
||||
    /// Assemble the path/flag bundle handed to backend constructors, with an
    /// explicit binary path.
    fn backend_paths_with_bin(
        &self,
        settings: &DaemonSettings,
        managed_codex_bin: &Path,
    ) -> BackendPaths {
        BackendPaths {
            codex_bin: managed_codex_bin.to_path_buf(),
            pid_file: self.pid_file.clone(),
            update_pid_file: self.update_pid_file.clone(),
            remote_control_enabled: settings.remote_control_enabled,
        }
    }
|
||||
|
||||
    /// Load persisted daemon settings from the settings file.
    async fn load_settings(&self) -> Result<DaemonSettings> {
        DaemonSettings::load(&self.settings_file).await
    }
|
||||
|
||||
    /// Acquire the cross-process operation lock, polling until acquired or
    /// `OPERATION_LOCK_TIMEOUT` elapses. The lock is released when the
    /// returned file handle drops.
    async fn acquire_operation_lock(&self) -> Result<tokio::fs::File> {
        let operation_lock = self.open_operation_lock_file().await?;
        let deadline = tokio::time::Instant::now() + OPERATION_LOCK_TIMEOUT;
        while !try_lock_file(&operation_lock)? {
            if tokio::time::Instant::now() >= deadline {
                return Err(anyhow!(
                    "timed out waiting for daemon operation lock {}",
                    self.operation_lock_file.display()
                ));
            }
            sleep(START_POLL_INTERVAL).await;
        }
        Ok(operation_lock)
    }
|
||||
|
||||
    /// Open (creating parents and the file as needed) the operation lock
    /// file; the caller decides how to lock it.
    async fn open_operation_lock_file(&self) -> Result<tokio::fs::File> {
        // The state directory may not exist on first use.
        if let Some(parent) = self.operation_lock_file.parent() {
            tokio::fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create daemon state directory {}",
                    parent.display()
                )
            })?;
        }
        tokio::fs::OpenOptions::new()
            .create(true)
            .truncate(false)
            .write(true)
            .open(&self.operation_lock_file)
            .await
            .with_context(|| {
                format!(
                    "failed to open daemon operation lock {}",
                    self.operation_lock_file.display()
                )
            })
    }
|
||||
|
||||
    /// Build a `LifecycleOutput`, filling in this daemon's socket path and
    /// the crate's CLI version.
    fn output(
        &self,
        status: LifecycleStatus,
        backend: Option<BackendKind>,
        pid: Option<u32>,
        app_server_version: Option<String>,
    ) -> LifecycleOutput {
        LifecycleOutput {
            status,
            backend,
            pid,
            socket_path: self.socket_path.clone(),
            cli_version: Some(env!("CARGO_PKG_VERSION").to_string()),
            app_server_version,
        }
    }
|
||||
|
||||
    /// Build a `RemoteControlOutput`, filling in this daemon's socket path
    /// and the crate's CLI version.
    fn remote_control_output(
        &self,
        status: RemoteControlStatus,
        backend: Option<BackendKind>,
        remote_control_enabled: bool,
        app_server_version: Option<String>,
    ) -> RemoteControlOutput {
        RemoteControlOutput {
            status,
            backend,
            remote_control_enabled,
            socket_path: self.socket_path.clone(),
            cli_version: env!("CARGO_PKG_VERSION").to_string(),
            app_server_version,
        }
    }
|
||||
}
|
||||
|
||||
/// Status reported when the remote-control setting actually changed.
fn remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
    match mode {
        RemoteControlMode::Enabled => RemoteControlStatus::Enabled,
        RemoteControlMode::Disabled => RemoteControlStatus::Disabled,
    }
}
|
||||
|
||||
fn already_remote_control_status(mode: RemoteControlMode) -> RemoteControlStatus {
|
||||
match mode {
|
||||
RemoteControlMode::Enabled => RemoteControlStatus::AlreadyEnabled,
|
||||
RemoteControlMode::Disabled => RemoteControlStatus::AlreadyDisabled,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn restart_decision(
|
||||
mode: RestartMode,
|
||||
info: Option<&client::ProbeInfo>,
|
||||
managed_version: Option<&str>,
|
||||
) -> RestartDecision {
|
||||
match (mode, info, managed_version) {
|
||||
(RestartMode::IfVersionChanged, None, _) => RestartDecision::NotReady,
|
||||
(RestartMode::IfVersionChanged, Some(info), Some(managed_version))
|
||||
if info.app_server_version == managed_version =>
|
||||
{
|
||||
RestartDecision::AlreadyCurrent
|
||||
}
|
||||
_ => RestartDecision::Restart,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn should_reexec_updater(
|
||||
updater_refresh_mode: UpdaterRefreshMode,
|
||||
outcome: RestartIfRunningOutcome,
|
||||
) -> bool {
|
||||
updater_refresh_mode == UpdaterRefreshMode::ReexecIfManagedBinaryChanged
|
||||
&& outcome == RestartIfRunningOutcome::Restarted
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn try_lock_file(file: &tokio::fs::File) -> Result<bool> {
|
||||
use std::os::fd::AsRawFd;
|
||||
|
||||
let result = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) };
|
||||
if result == 0 {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::EWOULDBLOCK) {
|
||||
return Ok(false);
|
||||
}
|
||||
Err(err).context("failed to lock daemon operation")
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn try_lock_file(_file: &tokio::fs::File) -> Result<bool> {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::BootstrapStatus;
|
||||
use super::LifecycleStatus;
|
||||
use super::RemoteControlStatus;
|
||||
use super::RestartDecision;
|
||||
use super::RestartIfRunningOutcome;
|
||||
use super::RestartMode;
|
||||
use super::UpdaterRefreshMode;
|
||||
use super::restart_decision;
|
||||
use super::should_reexec_updater;
|
||||
use crate::client::ProbeInfo;
|
||||
|
||||
#[test]
|
||||
fn lifecycle_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&LifecycleStatus::AlreadyRunning).expect("serialize"),
|
||||
"\"alreadyRunning\""
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bootstrap_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&BootstrapStatus::Bootstrapped).expect("serialize"),
|
||||
"\"bootstrapped\""
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remote_control_status_uses_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&RemoteControlStatus::AlreadyEnabled).expect("serialize"),
|
||||
"\"alreadyEnabled\""
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn updater_reexec_waits_for_validated_restart() {
|
||||
assert_eq!(
|
||||
[
|
||||
RestartIfRunningOutcome::Busy,
|
||||
RestartIfRunningOutcome::NotReady,
|
||||
RestartIfRunningOutcome::AlreadyCurrent,
|
||||
RestartIfRunningOutcome::NotRunning,
|
||||
RestartIfRunningOutcome::Restarted,
|
||||
]
|
||||
.map(|outcome| {
|
||||
should_reexec_updater(UpdaterRefreshMode::ReexecIfManagedBinaryChanged, outcome)
|
||||
}),
|
||||
[false, false, false, false, true]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unchanged_updater_never_reexecs() {
|
||||
assert_eq!(
|
||||
[
|
||||
RestartIfRunningOutcome::Busy,
|
||||
RestartIfRunningOutcome::NotReady,
|
||||
RestartIfRunningOutcome::AlreadyCurrent,
|
||||
RestartIfRunningOutcome::NotRunning,
|
||||
RestartIfRunningOutcome::Restarted,
|
||||
]
|
||||
.map(|outcome| should_reexec_updater(UpdaterRefreshMode::None, outcome)),
|
||||
[false, false, false, false, false]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn restart_decision_preserves_forced_refreshes() {
|
||||
let current_info = ProbeInfo {
|
||||
app_server_version: "0.1.0".to_string(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
[
|
||||
restart_decision(
|
||||
RestartMode::IfVersionChanged,
|
||||
Some(¤t_info),
|
||||
Some("0.1.0"),
|
||||
),
|
||||
restart_decision(
|
||||
RestartMode::IfVersionChanged,
|
||||
/*info*/ None,
|
||||
/*managed_version*/ None,
|
||||
),
|
||||
restart_decision(RestartMode::Always, Some(¤t_info), Some("0.1.0")),
|
||||
restart_decision(
|
||||
RestartMode::Always,
|
||||
/*info*/ None,
|
||||
/*managed_version*/ None
|
||||
),
|
||||
],
|
||||
[
|
||||
RestartDecision::AlreadyCurrent,
|
||||
RestartDecision::NotReady,
|
||||
RestartDecision::Restart,
|
||||
RestartDecision::Restart,
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,103 +0,0 @@
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(unix)]
|
||||
use anyhow::Context;
|
||||
#[cfg(unix)]
|
||||
use anyhow::Result;
|
||||
#[cfg(unix)]
|
||||
use anyhow::anyhow;
|
||||
#[cfg(unix)]
|
||||
use sha2::Digest;
|
||||
#[cfg(unix)]
|
||||
use sha2::Sha256;
|
||||
#[cfg(unix)]
|
||||
use tokio::fs;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
|
||||
pub(crate) fn managed_codex_bin(codex_home: &Path) -> PathBuf {
|
||||
codex_home
|
||||
.join("packages")
|
||||
.join("standalone")
|
||||
.join("current")
|
||||
.join(managed_codex_file_name())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn resolved_managed_codex_bin(codex_bin: &Path) -> Result<PathBuf> {
|
||||
fs::canonicalize(codex_bin).await.with_context(|| {
|
||||
format!(
|
||||
"failed to resolve managed Codex binary {}",
|
||||
codex_bin.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn managed_codex_version(codex_bin: &Path) -> Result<String> {
|
||||
let output = Command::new(codex_bin)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to invoke managed Codex binary {}",
|
||||
codex_bin.display()
|
||||
)
|
||||
})?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"managed Codex binary {} exited with status {}",
|
||||
codex_bin.display(),
|
||||
output.status
|
||||
));
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout).with_context(|| {
|
||||
format!(
|
||||
"managed Codex version was not utf-8: {}",
|
||||
codex_bin.display()
|
||||
)
|
||||
})?;
|
||||
parse_codex_version(&stdout)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct ExecutableIdentity {
|
||||
digest: [u8; 32],
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn executable_identity(executable: &Path) -> Result<ExecutableIdentity> {
|
||||
let bytes = fs::read(executable)
|
||||
.await
|
||||
.with_context(|| format!("failed to read executable {}", executable.display()))?;
|
||||
Ok(executable_identity_from_bytes(&bytes))
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) fn executable_identity_from_bytes(bytes: &[u8]) -> ExecutableIdentity {
|
||||
ExecutableIdentity {
|
||||
digest: Sha256::digest(bytes).into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn managed_codex_file_name() -> &'static str {
|
||||
if cfg!(windows) { "codex.exe" } else { "codex" }
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn parse_codex_version(output: &str) -> Result<String> {
|
||||
let version = output
|
||||
.split_whitespace()
|
||||
.nth(1)
|
||||
.filter(|version| !version.is_empty())
|
||||
.ok_or_else(|| anyhow!("managed Codex version output was malformed"))?;
|
||||
Ok(version.to_string())
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
#[path = "managed_install_tests.rs"]
|
||||
mod tests;
|
||||
@@ -1,27 +0,0 @@
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::executable_identity_from_bytes;
|
||||
use super::parse_codex_version;
|
||||
|
||||
#[test]
|
||||
fn parses_codex_cli_version_output() {
|
||||
assert_eq!(
|
||||
parse_codex_version("codex 1.2.3\n").expect("version"),
|
||||
"1.2.3"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rejects_malformed_codex_cli_version_output() {
|
||||
assert!(parse_codex_version("codex\n").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn executable_identity_uses_binary_contents() {
|
||||
let old = executable_identity_from_bytes(b"old");
|
||||
let same = executable_identity_from_bytes(b"old");
|
||||
let new = executable_identity_from_bytes(b"new");
|
||||
|
||||
assert_eq!(old, same);
|
||||
assert_ne!(old, new);
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::fs;
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub(crate) struct DaemonSettings {
|
||||
pub(crate) remote_control_enabled: bool,
|
||||
}
|
||||
|
||||
impl DaemonSettings {
|
||||
pub(crate) async fn load(path: &Path) -> Result<Self> {
|
||||
let contents = match fs::read_to_string(path).await {
|
||||
Ok(contents) => contents,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Self::default()),
|
||||
Err(err) => {
|
||||
return Err(err)
|
||||
.with_context(|| format!("failed to read daemon settings {}", path.display()));
|
||||
}
|
||||
};
|
||||
|
||||
serde_json::from_str(&contents)
|
||||
.with_context(|| format!("failed to parse daemon settings {}", path.display()))
|
||||
}
|
||||
|
||||
pub(crate) async fn save(&self, path: &Path) -> Result<()> {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await.with_context(|| {
|
||||
format!(
|
||||
"failed to create daemon settings directory {}",
|
||||
parent.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
let contents = serde_json::to_vec_pretty(self).context("failed to serialize settings")?;
|
||||
fs::write(path, contents)
|
||||
.await
|
||||
.with_context(|| format!("failed to write daemon settings {}", path.display()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::DaemonSettings;
|
||||
|
||||
#[test]
|
||||
fn daemon_settings_use_camel_case_json() {
|
||||
assert_eq!(
|
||||
serde_json::to_string(&DaemonSettings {
|
||||
remote_control_enabled: true,
|
||||
})
|
||||
.expect("serialize"),
|
||||
r#"{"remoteControlEnabled":true}"#
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,197 +0,0 @@
|
||||
#[cfg(unix)]
|
||||
use std::process::Command as StdCommand;
|
||||
#[cfg(unix)]
|
||||
use std::process::Stdio;
|
||||
#[cfg(unix)]
|
||||
use std::time::Duration;
|
||||
|
||||
#[cfg(unix)]
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
#[cfg(not(unix))]
|
||||
use anyhow::bail;
|
||||
#[cfg(unix)]
|
||||
use futures::FutureExt;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt;
|
||||
#[cfg(unix)]
|
||||
use tokio::io::AsyncWriteExt;
|
||||
#[cfg(unix)]
|
||||
use tokio::process::Command;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::Signal;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::SignalKind;
|
||||
#[cfg(unix)]
|
||||
use tokio::signal::unix::signal;
|
||||
#[cfg(unix)]
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::Daemon;
|
||||
#[cfg(unix)]
|
||||
use crate::RestartIfRunningOutcome;
|
||||
#[cfg(unix)]
|
||||
use crate::RestartMode;
|
||||
#[cfg(unix)]
|
||||
use crate::UpdaterRefreshMode;
|
||||
#[cfg(unix)]
|
||||
use crate::managed_install::ExecutableIdentity;
|
||||
#[cfg(unix)]
|
||||
use crate::managed_install::executable_identity;
|
||||
#[cfg(unix)]
|
||||
use crate::managed_install::resolved_managed_codex_bin;
|
||||
|
||||
#[cfg(unix)]
|
||||
const INITIAL_UPDATE_DELAY: Duration = Duration::from_secs(5 * 60);
|
||||
#[cfg(unix)]
|
||||
const RESTART_RETRY_INTERVAL: Duration = Duration::from_millis(50);
|
||||
#[cfg(unix)]
|
||||
const UPDATE_INTERVAL: Duration = Duration::from_secs(60 * 60);
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) async fn run() -> Result<()> {
|
||||
let mut terminate =
|
||||
signal(SignalKind::terminate()).context("failed to install updater shutdown handler")?;
|
||||
let running_updater_identity = current_updater_identity().await?;
|
||||
if sleep_or_terminate(INITIAL_UPDATE_DELAY, &mut terminate).await {
|
||||
return Ok(());
|
||||
}
|
||||
loop {
|
||||
match update_once(&running_updater_identity, &mut terminate).await {
|
||||
Ok(UpdateLoopControl::Continue) | Err(_) => {}
|
||||
Ok(UpdateLoopControl::Stop) => return Ok(()),
|
||||
}
|
||||
if sleep_or_terminate(UPDATE_INTERVAL, &mut terminate).await {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
pub(crate) async fn run() -> Result<()> {
|
||||
bail!("pid-managed updater loop is unsupported on this platform")
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn sleep_or_terminate(duration: Duration, terminate: &mut Signal) -> bool {
|
||||
tokio::select! {
|
||||
_ = sleep(duration) => false,
|
||||
_ = terminate.recv() => true,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
enum UpdateLoopControl {
|
||||
Continue,
|
||||
Stop,
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn update_once(
|
||||
running_updater_identity: &ExecutableIdentity,
|
||||
terminate: &mut Signal,
|
||||
) -> Result<UpdateLoopControl> {
|
||||
install_latest_standalone().await?;
|
||||
|
||||
let daemon = Daemon::from_environment()?;
|
||||
let managed_codex_bin = resolved_managed_codex_bin(&daemon.managed_codex_bin).await?;
|
||||
let managed_identity = executable_identity(&managed_codex_bin).await?;
|
||||
let (restart_mode, updater_refresh_mode) =
|
||||
update_modes_for_identities(running_updater_identity, &managed_identity);
|
||||
|
||||
loop {
|
||||
if terminate.recv().now_or_never().flatten().is_some() {
|
||||
return Ok(UpdateLoopControl::Stop);
|
||||
}
|
||||
match daemon
|
||||
.try_restart_if_running(restart_mode, updater_refresh_mode, &managed_codex_bin)
|
||||
.await?
|
||||
{
|
||||
RestartIfRunningOutcome::Busy => {
|
||||
if sleep_or_terminate(RESTART_RETRY_INTERVAL, terminate).await {
|
||||
return Ok(UpdateLoopControl::Stop);
|
||||
}
|
||||
}
|
||||
_ => return Ok(UpdateLoopControl::Continue),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn current_updater_identity() -> Result<ExecutableIdentity> {
|
||||
let current_exe =
|
||||
std::env::current_exe().context("failed to resolve current updater executable")?;
|
||||
executable_identity(¤t_exe).await
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn update_modes_for_identities(
|
||||
running_updater_identity: &ExecutableIdentity,
|
||||
managed_identity: &ExecutableIdentity,
|
||||
) -> (RestartMode, UpdaterRefreshMode) {
|
||||
if running_updater_identity == managed_identity {
|
||||
(RestartMode::IfVersionChanged, UpdaterRefreshMode::None)
|
||||
} else {
|
||||
(
|
||||
RestartMode::Always,
|
||||
UpdaterRefreshMode::ReexecIfManagedBinaryChanged,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub(crate) fn reexec_managed_updater(managed_codex_bin: &std::path::Path) -> Result<()> {
|
||||
let err = StdCommand::new(managed_codex_bin)
|
||||
.args(["app-server", "daemon", "pid-update-loop"])
|
||||
.exec();
|
||||
Err(err).with_context(|| {
|
||||
format!(
|
||||
"failed to replace updater with managed Codex binary {}",
|
||||
managed_codex_bin.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
async fn install_latest_standalone() -> Result<()> {
|
||||
let script = reqwest::get("https://chatgpt.com/codex/install.sh")
|
||||
.await
|
||||
.context("failed to fetch standalone Codex updater")?
|
||||
.error_for_status()
|
||||
.context("standalone Codex updater request failed")?
|
||||
.bytes()
|
||||
.await
|
||||
.context("failed to read standalone Codex updater")?;
|
||||
|
||||
let mut child = Command::new("/bin/sh")
|
||||
.arg("-s")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.spawn()
|
||||
.context("failed to invoke standalone Codex updater")?;
|
||||
let mut stdin = child
|
||||
.stdin
|
||||
.take()
|
||||
.context("standalone Codex updater stdin was unavailable")?;
|
||||
stdin
|
||||
.write_all(&script)
|
||||
.await
|
||||
.context("failed to pass standalone Codex updater to shell")?;
|
||||
drop(stdin);
|
||||
let status = child
|
||||
.wait()
|
||||
.await
|
||||
.context("failed to wait for standalone Codex updater")?;
|
||||
|
||||
if status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
anyhow::bail!("standalone Codex updater exited with status {status}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(test, unix))]
|
||||
#[path = "update_loop_tests.rs"]
|
||||
mod tests;
|
||||
@@ -1,31 +0,0 @@
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::update_modes_for_identities;
|
||||
use crate::RestartMode;
|
||||
use crate::UpdaterRefreshMode;
|
||||
use crate::managed_install::executable_identity_from_bytes;
|
||||
|
||||
#[test]
|
||||
fn unchanged_updater_uses_version_based_restart() {
|
||||
assert_eq!(
|
||||
update_modes_for_identities(
|
||||
&executable_identity_from_bytes(b"same"),
|
||||
&executable_identity_from_bytes(b"same"),
|
||||
),
|
||||
(RestartMode::IfVersionChanged, UpdaterRefreshMode::None)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn changed_updater_forces_refresh_even_when_version_may_match() {
|
||||
assert_eq!(
|
||||
update_modes_for_identities(
|
||||
&executable_identity_from_bytes(b"old"),
|
||||
&executable_identity_from_bytes(b"new"),
|
||||
),
|
||||
(
|
||||
RestartMode::Always,
|
||||
UpdaterRefreshMode::ReexecIfManagedBinaryChanged,
|
||||
)
|
||||
);
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "AttestationGenerateParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"token": {
|
||||
"description": "Opaque client attestation token.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"token"
|
||||
],
|
||||
"title": "AttestationGenerateResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1259,11 +1259,6 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -2088,25 +2083,14 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
@@ -3043,76 +3027,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"RuntimeInstallManifest": {
|
||||
"properties": {
|
||||
"archiveName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveSha256": {
|
||||
"type": "string"
|
||||
},
|
||||
"archiveSizeBytes": {
|
||||
"format": "uint64",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveUrl": {
|
||||
"type": "string"
|
||||
},
|
||||
"bundleFormatVersion": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"format": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"runtimeRootDirectoryName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"archiveSha256",
|
||||
"archiveUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallParams": {
|
||||
"properties": {
|
||||
"manifest": {
|
||||
"$ref": "#/definitions/RuntimeInstallManifest"
|
||||
},
|
||||
"release": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"manifest",
|
||||
"release"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SandboxMode": {
|
||||
"enum": [
|
||||
"read-only",
|
||||
@@ -5457,30 +5371,6 @@
|
||||
"title": "Plugin/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"runtime/install"
|
||||
],
|
||||
"title": "Runtime/installRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/RuntimeInstallParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Runtime/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
|
||||
@@ -2737,7 +2737,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -2745,15 +2745,11 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"type": "object"
|
||||
|
||||
@@ -121,9 +121,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateParams": {
|
||||
"type": "object"
|
||||
},
|
||||
"ChatgptAuthTokensRefreshParams": {
|
||||
"properties": {
|
||||
"previousAccountId": {
|
||||
@@ -1921,31 +1918,6 @@
|
||||
"title": "Account/chatgptAuthTokens/refreshRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Generate a fresh upstream attestation result on demand.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"attestation/generate"
|
||||
],
|
||||
"title": "Attestation/generateRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/AttestationGenerateParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Attestation/generateRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
|
||||
"properties": {
|
||||
|
||||
@@ -83,25 +83,6 @@
|
||||
"title": "ApplyPatchApprovalResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "AttestationGenerateParams",
|
||||
"type": "object"
|
||||
},
|
||||
"AttestationGenerateResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"token": {
|
||||
"description": "Opaque client attestation token.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"token"
|
||||
],
|
||||
"title": "AttestationGenerateResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"ChatgptAuthTokensRefreshParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -1189,30 +1170,6 @@
|
||||
"title": "Plugin/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/v2/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"runtime/install"
|
||||
],
|
||||
"title": "Runtime/installRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/v2/RuntimeInstallParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Runtime/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
@@ -2663,11 +2620,6 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -5255,31 +5207,6 @@
|
||||
"title": "Account/chatgptAuthTokens/refreshRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Generate a fresh upstream attestation result on demand.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/v2/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"attestation/generate"
|
||||
],
|
||||
"title": "Attestation/generateRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/AttestationGenerateParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Attestation/generateRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
|
||||
"properties": {
|
||||
@@ -12294,20 +12221,10 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"sharePrincipals": {
|
||||
"shareTargets": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipal"
|
||||
},
|
||||
@@ -12368,10 +12285,14 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/v2/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin"
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -12406,27 +12327,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
@@ -12497,25 +12406,14 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/v2/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/v2/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
@@ -13408,7 +13306,7 @@
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -13416,15 +13314,11 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/v2/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
@@ -14324,148 +14218,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"RuntimeInstallManifest": {
|
||||
"properties": {
|
||||
"archiveName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveSha256": {
|
||||
"type": "string"
|
||||
},
|
||||
"archiveSizeBytes": {
|
||||
"format": "uint64",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveUrl": {
|
||||
"type": "string"
|
||||
},
|
||||
"bundleFormatVersion": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"format": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"runtimeRootDirectoryName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"archiveSha256",
|
||||
"archiveUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"manifest": {
|
||||
"$ref": "#/definitions/v2/RuntimeInstallManifest"
|
||||
},
|
||||
"release": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"manifest",
|
||||
"release"
|
||||
],
|
||||
"title": "RuntimeInstallParams",
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallPaths": {
|
||||
"properties": {
|
||||
"bundledPluginMarketplacePaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"bundledSkillPaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"nodeModulesPath": {
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
},
|
||||
"nodePath": {
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
},
|
||||
"pythonPath": {
|
||||
"$ref": "#/definitions/v2/AbsolutePathBuf"
|
||||
},
|
||||
"skillsToRemove": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bundledPluginMarketplacePaths",
|
||||
"bundledSkillPaths",
|
||||
"nodeModulesPath",
|
||||
"nodePath",
|
||||
"pythonPath",
|
||||
"skillsToRemove"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"paths": {
|
||||
"$ref": "#/definitions/v2/RuntimeInstallPaths"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/v2/RuntimeInstallStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"paths",
|
||||
"status"
|
||||
],
|
||||
"title": "RuntimeInstallResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallStatus": {
|
||||
"enum": [
|
||||
"already-current",
|
||||
"installed"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"SandboxMode": {
|
||||
"enum": [
|
||||
"read-only",
|
||||
|
||||
@@ -1929,30 +1929,6 @@
|
||||
"title": "Plugin/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"enum": [
|
||||
"runtime/install"
|
||||
],
|
||||
"title": "Runtime/installRequestMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/RuntimeInstallParams"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "Runtime/installRequest",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"id": {
|
||||
@@ -6433,11 +6409,6 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -8843,20 +8814,10 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"sharePrincipals": {
|
||||
"shareTargets": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -8917,10 +8878,14 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin"
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -8955,27 +8920,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
@@ -9046,25 +8999,14 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
@@ -9957,7 +9899,7 @@
|
||||
},
|
||||
"RemoteControlStatusChangedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -9965,15 +9907,11 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
@@ -10873,148 +10811,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"RuntimeInstallManifest": {
|
||||
"properties": {
|
||||
"archiveName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveSha256": {
|
||||
"type": "string"
|
||||
},
|
||||
"archiveSizeBytes": {
|
||||
"format": "uint64",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveUrl": {
|
||||
"type": "string"
|
||||
},
|
||||
"bundleFormatVersion": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"format": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"runtimeRootDirectoryName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"archiveSha256",
|
||||
"archiveUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"manifest": {
|
||||
"$ref": "#/definitions/RuntimeInstallManifest"
|
||||
},
|
||||
"release": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"manifest",
|
||||
"release"
|
||||
],
|
||||
"title": "RuntimeInstallParams",
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallPaths": {
|
||||
"properties": {
|
||||
"bundledPluginMarketplacePaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"bundledSkillPaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"nodeModulesPath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"nodePath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"pythonPath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"skillsToRemove": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bundledPluginMarketplacePaths",
|
||||
"bundledSkillPaths",
|
||||
"nodeModulesPath",
|
||||
"nodePath",
|
||||
"pythonPath",
|
||||
"skillsToRemove"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallResponse": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"paths": {
|
||||
"$ref": "#/definitions/RuntimeInstallPaths"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RuntimeInstallStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"paths",
|
||||
"status"
|
||||
],
|
||||
"title": "RuntimeInstallResponse",
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallStatus": {
|
||||
"enum": [
|
||||
"already-current",
|
||||
"installed"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"SandboxMode": {
|
||||
"enum": [
|
||||
"read-only",
|
||||
|
||||
@@ -39,11 +39,6 @@
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"requestAttestation": {
|
||||
"default": false,
|
||||
"description": "Opt into `attestation/generate` requests for upstream `x-oai-attestation`.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
|
||||
@@ -246,20 +246,10 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"sharePrincipals": {
|
||||
"shareTargets": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -280,14 +270,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipal": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -298,27 +280,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -300,20 +300,10 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"sharePrincipals": {
|
||||
"shareTargets": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -334,14 +324,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipal": {
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -352,27 +334,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -181,20 +181,10 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"discoverability": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PluginShareDiscoverability"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"remotePluginId": {
|
||||
"type": "string"
|
||||
},
|
||||
"sharePrincipals": {
|
||||
"shareTargets": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/PluginSharePrincipal"
|
||||
},
|
||||
@@ -215,14 +205,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareDiscoverability": {
|
||||
"enum": [
|
||||
"LISTED",
|
||||
"UNLISTED",
|
||||
"PRIVATE"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareListItem": {
|
||||
"properties": {
|
||||
"localPluginPath": {
|
||||
@@ -237,10 +219,14 @@
|
||||
},
|
||||
"plugin": {
|
||||
"$ref": "#/definitions/PluginSummary"
|
||||
},
|
||||
"shareUrl": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"plugin"
|
||||
"plugin",
|
||||
"shareUrl"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
@@ -254,27 +240,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -28,24 +28,13 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
|
||||
@@ -16,25 +16,14 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginShareTargetRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginShareTargetRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginShareUpdateDiscoverability": {
|
||||
"enum": [
|
||||
"UNLISTED",
|
||||
|
||||
@@ -19,27 +19,15 @@
|
||||
},
|
||||
"principalType": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalType"
|
||||
},
|
||||
"role": {
|
||||
"$ref": "#/definitions/PluginSharePrincipalRole"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"principalId",
|
||||
"principalType",
|
||||
"role"
|
||||
"principalType"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PluginSharePrincipalRole": {
|
||||
"enum": [
|
||||
"reader",
|
||||
"editor",
|
||||
"owner"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PluginSharePrincipalType": {
|
||||
"enum": [
|
||||
"user",
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"description": "Current remote-control connection status and remote identity exposed to clients.",
|
||||
"description": "Current remote-control connection status and environment id exposed to clients.",
|
||||
"properties": {
|
||||
"environmentId": {
|
||||
"type": [
|
||||
@@ -19,15 +19,11 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"installationId": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RemoteControlConnectionStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"installationId",
|
||||
"status"
|
||||
],
|
||||
"title": "RemoteControlStatusChangedNotification",
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"RuntimeInstallManifest": {
|
||||
"properties": {
|
||||
"archiveName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveSha256": {
|
||||
"type": "string"
|
||||
},
|
||||
"archiveSizeBytes": {
|
||||
"format": "uint64",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"archiveUrl": {
|
||||
"type": "string"
|
||||
},
|
||||
"bundleFormatVersion": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"format": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"runtimeRootDirectoryName": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"archiveSha256",
|
||||
"archiveUrl"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"manifest": {
|
||||
"$ref": "#/definitions/RuntimeInstallManifest"
|
||||
},
|
||||
"release": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"manifest",
|
||||
"release"
|
||||
],
|
||||
"title": "RuntimeInstallParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"AbsolutePathBuf": {
|
||||
"description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
|
||||
"type": "string"
|
||||
},
|
||||
"RuntimeInstallPaths": {
|
||||
"properties": {
|
||||
"bundledPluginMarketplacePaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"bundledSkillPaths": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"nodeModulesPath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"nodePath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"pythonPath": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"skillsToRemove": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bundledPluginMarketplacePaths",
|
||||
"bundledSkillPaths",
|
||||
"nodeModulesPath",
|
||||
"nodePath",
|
||||
"pythonPath",
|
||||
"skillsToRemove"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuntimeInstallStatus": {
|
||||
"enum": [
|
||||
"already-current",
|
||||
"installed"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"bundleVersion": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"paths": {
|
||||
"$ref": "#/definitions/RuntimeInstallPaths"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "#/definitions/RuntimeInstallStatus"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"paths",
|
||||
"status"
|
||||
],
|
||||
"title": "RuntimeInstallResponse",
|
||||
"type": "object"
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -10,10 +10,6 @@ export type InitializeCapabilities = {
|
||||
* Opt into receiving experimental API methods and fields.
|
||||
*/
|
||||
experimentalApi: boolean,
|
||||
/**
|
||||
* Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
|
||||
*/
|
||||
requestAttestation: boolean,
|
||||
/**
|
||||
* Exact notification method names that should be suppressed for this
|
||||
* connection (for example `thread/started`).
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
import type { ApplyPatchApprovalParams } from "./ApplyPatchApprovalParams";
|
||||
import type { ExecCommandApprovalParams } from "./ExecCommandApprovalParams";
|
||||
import type { RequestId } from "./RequestId";
|
||||
import type { AttestationGenerateParams } from "./v2/AttestationGenerateParams";
|
||||
import type { ChatgptAuthTokensRefreshParams } from "./v2/ChatgptAuthTokensRefreshParams";
|
||||
import type { CommandExecutionRequestApprovalParams } from "./v2/CommandExecutionRequestApprovalParams";
|
||||
import type { DynamicToolCallParams } from "./v2/DynamicToolCallParams";
|
||||
@@ -16,4 +15,4 @@ import type { ToolRequestUserInputParams } from "./v2/ToolRequestUserInputParams
|
||||
/**
|
||||
* Request initiated from the server and sent to the client.
|
||||
*/
|
||||
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "attestation/generate", id: RequestId, params: AttestationGenerateParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
|
||||
export type ServerRequest = { "method": "item/commandExecution/requestApproval", id: RequestId, params: CommandExecutionRequestApprovalParams, } | { "method": "item/fileChange/requestApproval", id: RequestId, params: FileChangeRequestApprovalParams, } | { "method": "item/tool/requestUserInput", id: RequestId, params: ToolRequestUserInputParams, } | { "method": "mcpServer/elicitation/request", id: RequestId, params: McpServerElicitationRequestParams, } | { "method": "item/permissions/requestApproval", id: RequestId, params: PermissionsRequestApprovalParams, } | { "method": "item/tool/call", id: RequestId, params: DynamicToolCallParams, } | { "method": "account/chatgptAuthTokens/refresh", id: RequestId, params: ChatgptAuthTokensRefreshParams, } | { "method": "applyPatchApproval", id: RequestId, params: ApplyPatchApprovalParams, } | { "method": "execCommandApproval", id: RequestId, params: ExecCommandApprovalParams, };
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type AttestationGenerateParams = Record<string, never>;
|
||||
@@ -1,9 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type AttestationGenerateResponse = {
|
||||
/**
|
||||
* Opaque client attestation token.
|
||||
*/
|
||||
token: string, };
|
||||
@@ -1,7 +1,6 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
|
||||
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
|
||||
|
||||
export type PluginShareContext = { remotePluginId: string, discoverability: PluginShareDiscoverability | null, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, sharePrincipals: Array<PluginSharePrincipal> | null, };
|
||||
export type PluginShareContext = { remotePluginId: string, shareUrl: string | null, creatorAccountUserId: string | null, creatorName: string | null, shareTargets: Array<PluginSharePrincipal> | null, };
|
||||
|
||||
@@ -4,4 +4,4 @@
|
||||
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
import type { PluginSummary } from "./PluginSummary";
|
||||
|
||||
export type PluginShareListItem = { plugin: PluginSummary, localPluginPath: AbsolutePathBuf | null, };
|
||||
export type PluginShareListItem = { plugin: PluginSummary, shareUrl: string, localPluginPath: AbsolutePathBuf | null, };
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
|
||||
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
|
||||
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, role: PluginSharePrincipalRole, name: string, };
|
||||
export type PluginSharePrincipal = { principalType: PluginSharePrincipalType, principalId: string, name: string, };
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type PluginSharePrincipalRole = "reader" | "editor" | "owner";
|
||||
@@ -2,6 +2,5 @@
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
import type { PluginShareTargetRole } from "./PluginShareTargetRole";
|
||||
|
||||
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, role: PluginShareTargetRole, };
|
||||
export type PluginShareTarget = { principalType: PluginSharePrincipalType, principalId: string, };
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type PluginShareTargetRole = "reader" | "editor";
|
||||
@@ -4,6 +4,6 @@
|
||||
import type { RemoteControlConnectionStatus } from "./RemoteControlConnectionStatus";
|
||||
|
||||
/**
|
||||
* Current remote-control connection status and remote identity exposed to clients.
|
||||
* Current remote-control connection status and environment id exposed to clients.
|
||||
*/
|
||||
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, installationId: string, environmentId: string | null, };
|
||||
export type RemoteControlStatusChangedNotification = { status: RemoteControlConnectionStatus, environmentId: string | null, };
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type RuntimeInstallManifest = { archiveName: string | null, archiveSha256: string, archiveSizeBytes: bigint | null, archiveUrl: string, bundleFormatVersion: number | null, bundleVersion: string | null, format: string | null, runtimeRootDirectoryName: string | null, };
|
||||
@@ -1,6 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { RuntimeInstallManifest } from "./RuntimeInstallManifest";
|
||||
|
||||
export type RuntimeInstallParams = { manifest: RuntimeInstallManifest, release: string, };
|
||||
@@ -1,6 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
|
||||
|
||||
export type RuntimeInstallPaths = { bundledPluginMarketplacePaths: Array<AbsolutePathBuf>, bundledSkillPaths: Array<AbsolutePathBuf>, nodeModulesPath: AbsolutePathBuf, nodePath: AbsolutePathBuf, pythonPath: AbsolutePathBuf, skillsToRemove: Array<string>, };
|
||||
@@ -1,7 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { RuntimeInstallPaths } from "./RuntimeInstallPaths";
|
||||
import type { RuntimeInstallStatus } from "./RuntimeInstallStatus";
|
||||
|
||||
export type RuntimeInstallResponse = { bundleVersion: string | null, paths: RuntimeInstallPaths, status: RuntimeInstallStatus, };
|
||||
@@ -1,5 +0,0 @@
|
||||
// GENERATED CODE! DO NOT MODIFY BY HAND!
|
||||
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type RuntimeInstallStatus = "already-current" | "installed";
|
||||
@@ -28,8 +28,6 @@ export type { AppsDefaultConfig } from "./AppsDefaultConfig";
|
||||
export type { AppsListParams } from "./AppsListParams";
|
||||
export type { AppsListResponse } from "./AppsListResponse";
|
||||
export type { AskForApproval } from "./AskForApproval";
|
||||
export type { AttestationGenerateParams } from "./AttestationGenerateParams";
|
||||
export type { AttestationGenerateResponse } from "./AttestationGenerateResponse";
|
||||
export type { AutoReviewDecisionSource } from "./AutoReviewDecisionSource";
|
||||
export type { ByteRange } from "./ByteRange";
|
||||
export type { CancelLoginAccountParams } from "./CancelLoginAccountParams";
|
||||
@@ -285,12 +283,10 @@ export type { PluginShareListItem } from "./PluginShareListItem";
|
||||
export type { PluginShareListParams } from "./PluginShareListParams";
|
||||
export type { PluginShareListResponse } from "./PluginShareListResponse";
|
||||
export type { PluginSharePrincipal } from "./PluginSharePrincipal";
|
||||
export type { PluginSharePrincipalRole } from "./PluginSharePrincipalRole";
|
||||
export type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
|
||||
export type { PluginShareSaveParams } from "./PluginShareSaveParams";
|
||||
export type { PluginShareSaveResponse } from "./PluginShareSaveResponse";
|
||||
export type { PluginShareTarget } from "./PluginShareTarget";
|
||||
export type { PluginShareTargetRole } from "./PluginShareTargetRole";
|
||||
export type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
|
||||
export type { PluginShareUpdateTargetsParams } from "./PluginShareUpdateTargetsParams";
|
||||
export type { PluginShareUpdateTargetsResponse } from "./PluginShareUpdateTargetsResponse";
|
||||
@@ -322,11 +318,6 @@ export type { ReviewDelivery } from "./ReviewDelivery";
|
||||
export type { ReviewStartParams } from "./ReviewStartParams";
|
||||
export type { ReviewStartResponse } from "./ReviewStartResponse";
|
||||
export type { ReviewTarget } from "./ReviewTarget";
|
||||
export type { RuntimeInstallManifest } from "./RuntimeInstallManifest";
|
||||
export type { RuntimeInstallParams } from "./RuntimeInstallParams";
|
||||
export type { RuntimeInstallPaths } from "./RuntimeInstallPaths";
|
||||
export type { RuntimeInstallResponse } from "./RuntimeInstallResponse";
|
||||
export type { RuntimeInstallStatus } from "./RuntimeInstallStatus";
|
||||
export type { SandboxMode } from "./SandboxMode";
|
||||
export type { SandboxPolicy } from "./SandboxPolicy";
|
||||
export type { SandboxWorkspaceWrite } from "./SandboxWorkspaceWrite";
|
||||
|
||||
@@ -716,11 +716,6 @@ client_request_definitions! {
|
||||
serialization: global("config"),
|
||||
response: v2::PluginInstallResponse,
|
||||
},
|
||||
RuntimeInstall => "runtime/install" {
|
||||
params: v2::RuntimeInstallParams,
|
||||
serialization: global("runtime-install"),
|
||||
response: v2::RuntimeInstallResponse,
|
||||
},
|
||||
PluginUninstall => "plugin/uninstall" {
|
||||
params: v2::PluginUninstallParams,
|
||||
serialization: global("config"),
|
||||
@@ -813,13 +808,6 @@ client_request_definitions! {
|
||||
serialization: None,
|
||||
response: v2::MockExperimentalMethodResponse,
|
||||
},
|
||||
#[experimental("environment/add")]
|
||||
/// Adds or replaces a remote environment by id for later selection.
|
||||
EnvironmentAdd => "environment/add" {
|
||||
params: v2::EnvironmentAddParams,
|
||||
serialization: global("environment"),
|
||||
response: v2::EnvironmentAddResponse,
|
||||
},
|
||||
|
||||
McpServerOauthLogin => "mcpServer/oauth/login" {
|
||||
params: v2::McpServerOauthLoginParams,
|
||||
@@ -1324,12 +1312,6 @@ server_request_definitions! {
|
||||
response: v2::ChatgptAuthTokensRefreshResponse,
|
||||
},
|
||||
|
||||
/// Generate a fresh upstream attestation result on demand.
|
||||
AttestationGenerate => "attestation/generate" {
|
||||
params: v2::AttestationGenerateParams,
|
||||
response: v2::AttestationGenerateResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
/// Request to approve a patch.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
@@ -1808,18 +1790,6 @@ mod tests {
|
||||
add_credits_nudge.serialization_scope(),
|
||||
Some(ClientRequestSerializationScope::Global("account-auth"))
|
||||
);
|
||||
|
||||
let environment_add = ClientRequest::EnvironmentAdd {
|
||||
request_id: request_id(),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
environment_add.serialization_scope(),
|
||||
Some(ClientRequestSerializationScope::Global("environment"))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1940,7 +1910,6 @@ mod tests {
|
||||
},
|
||||
capabilities: Some(v1::InitializeCapabilities {
|
||||
experimental_api: true,
|
||||
request_attestation: true,
|
||||
opt_out_notification_methods: Some(vec![
|
||||
"thread/started".to_string(),
|
||||
"item/agentMessage/delta".to_string(),
|
||||
@@ -1961,7 +1930,6 @@ mod tests {
|
||||
},
|
||||
"capabilities": {
|
||||
"experimentalApi": true,
|
||||
"requestAttestation": true,
|
||||
"optOutNotificationMethods": [
|
||||
"thread/started",
|
||||
"item/agentMessage/delta"
|
||||
@@ -1987,7 +1955,6 @@ mod tests {
|
||||
},
|
||||
"capabilities": {
|
||||
"experimentalApi": true,
|
||||
"requestAttestation": true,
|
||||
"optOutNotificationMethods": [
|
||||
"thread/started",
|
||||
"item/agentMessage/delta"
|
||||
@@ -2008,7 +1975,6 @@ mod tests {
|
||||
},
|
||||
capabilities: Some(v1::InitializeCapabilities {
|
||||
experimental_api: true,
|
||||
request_attestation: true,
|
||||
opt_out_notification_methods: Some(vec![
|
||||
"thread/started".to_string(),
|
||||
"item/agentMessage/delta".to_string(),
|
||||
@@ -2125,28 +2091,6 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_attestation_generate_request() -> Result<()> {
|
||||
let params = v2::AttestationGenerateParams {};
|
||||
let request = ServerRequest::AttestationGenerate {
|
||||
request_id: RequestId::Integer(9),
|
||||
params: params.clone(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "attestation/generate",
|
||||
"id": 9,
|
||||
"params": {}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::AttestationGenerate(params);
|
||||
assert_eq!(request.id(), &RequestId::Integer(9));
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(9)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_response() -> Result<()> {
|
||||
let response = ServerResponse::CommandExecutionRequestApproval {
|
||||
@@ -2602,33 +2546,10 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_environment_add() -> Result<()> {
|
||||
let request = ClientRequest::EnvironmentAdd {
|
||||
request_id: RequestId::Integer(9),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "environment/add",
|
||||
"id": 9,
|
||||
"params": {
|
||||
"environmentId": "remote-a",
|
||||
"execServerUrl": "ws://127.0.0.1:8765"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_fs_get_metadata() -> Result<()> {
|
||||
let request = ClientRequest::FsGetMetadata {
|
||||
request_id: RequestId::Integer(10),
|
||||
request_id: RequestId::Integer(9),
|
||||
params: v2::FsGetMetadataParams {
|
||||
path: absolute_path("tmp/example"),
|
||||
},
|
||||
@@ -2636,7 +2557,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "fs/getMetadata",
|
||||
"id": 10,
|
||||
"id": 9,
|
||||
"params": {
|
||||
"path": absolute_path_string("tmp/example")
|
||||
}
|
||||
@@ -2897,19 +2818,6 @@ mod tests {
|
||||
assert_eq!(reason, Some("mock/experimentalMethod"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_add_is_marked_experimental() {
|
||||
let request = ClientRequest::EnvironmentAdd {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: v2::EnvironmentAddParams {
|
||||
environment_id: "remote-a".to_string(),
|
||||
exec_server_url: "ws://127.0.0.1:8765".to_string(),
|
||||
},
|
||||
};
|
||||
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
|
||||
assert_eq!(reason, Some("environment/add"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn command_exec_permission_profile_is_marked_experimental() {
|
||||
let request = ClientRequest::OneOffCommandExec {
|
||||
|
||||
@@ -46,9 +46,6 @@ pub struct InitializeCapabilities {
|
||||
/// Opt into receiving experimental API methods and fields.
|
||||
#[serde(default)]
|
||||
pub experimental_api: bool,
|
||||
/// Opt into `attestation/generate` requests for upstream `x-oai-attestation`.
|
||||
#[serde(default)]
|
||||
pub request_attestation: bool,
|
||||
/// Exact notification method names that should be suppressed for this
|
||||
/// connection (for example `thread/started`).
|
||||
#[ts(optional = nullable)]
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AttestationGenerateParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct AttestationGenerateResponse {
|
||||
/// Opaque client attestation token.
|
||||
pub token: String,
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct EnvironmentAddParams {
|
||||
pub environment_id: String,
|
||||
pub exec_server_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct EnvironmentAddResponse {}
|
||||
@@ -2,11 +2,9 @@ mod shared;
|
||||
|
||||
mod account;
|
||||
mod apps;
|
||||
mod attestation;
|
||||
mod collaboration_mode;
|
||||
mod command_exec;
|
||||
mod config;
|
||||
mod environment;
|
||||
mod experimental_feature;
|
||||
mod feedback;
|
||||
mod fs;
|
||||
@@ -21,7 +19,6 @@ mod process;
|
||||
mod realtime;
|
||||
mod remote_control;
|
||||
mod review;
|
||||
mod runtime;
|
||||
mod thread;
|
||||
mod thread_data;
|
||||
mod turn;
|
||||
@@ -29,11 +26,9 @@ mod windows_sandbox;
|
||||
|
||||
pub use account::*;
|
||||
pub use apps::*;
|
||||
pub use attestation::*;
|
||||
pub use collaboration_mode::*;
|
||||
pub use command_exec::*;
|
||||
pub use config::*;
|
||||
pub use environment::*;
|
||||
pub use experimental_feature::*;
|
||||
pub use feedback::*;
|
||||
pub use fs::*;
|
||||
@@ -48,7 +43,6 @@ pub use process::*;
|
||||
pub use realtime::*;
|
||||
pub use remote_control::*;
|
||||
pub use review::*;
|
||||
pub use runtime::*;
|
||||
pub use shared::*;
|
||||
pub use thread::*;
|
||||
pub use thread_data::*;
|
||||
|
||||
@@ -259,6 +259,7 @@ pub struct PluginShareDeleteResponse {}
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareListItem {
|
||||
pub plugin: PluginSummary,
|
||||
pub share_url: String,
|
||||
pub local_plugin_path: Option<AbsolutePathBuf>,
|
||||
}
|
||||
|
||||
@@ -307,7 +308,6 @@ pub enum PluginSharePrincipalType {
|
||||
pub struct PluginShareTarget {
|
||||
pub principal_type: PluginSharePrincipalType,
|
||||
pub principal_id: String,
|
||||
pub role: PluginShareTargetRole,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
@@ -316,29 +316,9 @@ pub struct PluginShareTarget {
|
||||
pub struct PluginSharePrincipal {
|
||||
pub principal_type: PluginSharePrincipalType,
|
||||
pub principal_id: String,
|
||||
pub role: PluginSharePrincipalRole,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[ts(rename_all = "lowercase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginShareTargetRole {
|
||||
Reader,
|
||||
Editor,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[ts(rename_all = "lowercase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum PluginSharePrincipalRole {
|
||||
Reader,
|
||||
Editor,
|
||||
Owner,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(rename_all = "snake_case")]
|
||||
@@ -559,11 +539,10 @@ pub struct PluginSummary {
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct PluginShareContext {
|
||||
pub remote_plugin_id: String,
|
||||
pub discoverability: Option<PluginShareDiscoverability>,
|
||||
pub share_url: Option<String>,
|
||||
pub creator_account_user_id: Option<String>,
|
||||
pub creator_name: Option<String>,
|
||||
pub share_principals: Option<Vec<PluginSharePrincipal>>,
|
||||
pub share_targets: Option<Vec<PluginSharePrincipal>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
|
||||
@@ -3,13 +3,12 @@ use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
/// Current remote-control connection status and remote identity exposed to clients.
|
||||
/// Current remote-control connection status and environment id exposed to clients.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RemoteControlStatusChangedNotification {
|
||||
pub status: RemoteControlConnectionStatus,
|
||||
pub installation_id: String,
|
||||
pub environment_id: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RuntimeInstallManifest {
|
||||
pub archive_name: Option<String>,
|
||||
pub archive_sha256: String,
|
||||
pub archive_size_bytes: Option<u64>,
|
||||
pub archive_url: String,
|
||||
pub bundle_format_version: Option<u32>,
|
||||
pub bundle_version: Option<String>,
|
||||
pub format: Option<String>,
|
||||
pub runtime_root_directory_name: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RuntimeInstallParams {
|
||||
pub manifest: Box<RuntimeInstallManifest>,
|
||||
pub release: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub enum RuntimeInstallStatus {
|
||||
AlreadyCurrent,
|
||||
Installed,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RuntimeInstallPaths {
|
||||
pub bundled_plugin_marketplace_paths: Vec<AbsolutePathBuf>,
|
||||
pub bundled_skill_paths: Vec<AbsolutePathBuf>,
|
||||
pub node_modules_path: AbsolutePathBuf,
|
||||
pub node_path: AbsolutePathBuf,
|
||||
pub python_path: AbsolutePathBuf,
|
||||
pub skills_to_remove: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export_to = "v2/")]
|
||||
pub struct RuntimeInstallResponse {
|
||||
pub bundle_version: Option<String>,
|
||||
pub paths: RuntimeInstallPaths,
|
||||
pub status: RuntimeInstallStatus,
|
||||
}
|
||||
@@ -2896,12 +2896,10 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::User,
|
||||
principal_id: "user-1".to_string(),
|
||||
role: PluginShareTargetRole::Reader,
|
||||
},
|
||||
PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::Group,
|
||||
principal_id: "group-1".to_string(),
|
||||
role: PluginShareTargetRole::Reader,
|
||||
principal_type: PluginSharePrincipalType::Workspace,
|
||||
principal_id: "workspace-1".to_string(),
|
||||
},
|
||||
]),
|
||||
})
|
||||
@@ -2914,12 +2912,10 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
{
|
||||
"principalType": "user",
|
||||
"principalId": "user-1",
|
||||
"role": "reader",
|
||||
},
|
||||
{
|
||||
"principalType": "group",
|
||||
"principalId": "group-1",
|
||||
"role": "reader",
|
||||
"principalType": "workspace",
|
||||
"principalId": "workspace-1",
|
||||
},
|
||||
],
|
||||
}),
|
||||
@@ -2944,7 +2940,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
share_targets: vec![PluginShareTarget {
|
||||
principal_type: PluginSharePrincipalType::Group,
|
||||
principal_id: "group-1".to_string(),
|
||||
role: PluginShareTargetRole::Editor,
|
||||
}],
|
||||
})
|
||||
.unwrap(),
|
||||
@@ -2954,7 +2949,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
"shareTargets": [{
|
||||
"principalType": "group",
|
||||
"principalId": "group-1",
|
||||
"role": "editor",
|
||||
}],
|
||||
}),
|
||||
);
|
||||
@@ -2964,7 +2958,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
principals: vec![PluginSharePrincipal {
|
||||
principal_type: PluginSharePrincipalType::User,
|
||||
principal_id: "user-1".to_string(),
|
||||
role: PluginSharePrincipalRole::Owner,
|
||||
name: "Gavin".to_string(),
|
||||
}],
|
||||
discoverability: PluginShareDiscoverability::Unlisted,
|
||||
@@ -2974,7 +2967,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
|
||||
"principals": [{
|
||||
"principalType": "user",
|
||||
"principalId": "user-1",
|
||||
"role": "owner",
|
||||
"name": "Gavin",
|
||||
}],
|
||||
"discoverability": "UNLISTED",
|
||||
@@ -3015,6 +3007,7 @@ fn plugin_share_list_response_serializes_share_items() {
|
||||
interface: None,
|
||||
keywords: Vec::new(),
|
||||
},
|
||||
share_url: "https://chatgpt.example/plugins/share/share-key-1".to_string(),
|
||||
local_plugin_path: None,
|
||||
}],
|
||||
})
|
||||
@@ -3034,6 +3027,7 @@ fn plugin_share_list_response_serializes_share_items() {
|
||||
"interface": null,
|
||||
"keywords": [],
|
||||
},
|
||||
"shareUrl": "https://chatgpt.example/plugins/share/share-key-1",
|
||||
"localPluginPath": null,
|
||||
}],
|
||||
}),
|
||||
|
||||
@@ -1551,7 +1551,6 @@ impl CodexClient {
|
||||
},
|
||||
capabilities: Some(InitializeCapabilities {
|
||||
experimental_api,
|
||||
request_attestation: false,
|
||||
opt_out_notification_methods: Some(
|
||||
NOTIFICATIONS_TO_OPT_OUT
|
||||
.iter()
|
||||
|
||||
@@ -13,7 +13,15 @@ doctest = false
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
axum = { workspace = true, default-features = false, features = [
|
||||
"http1",
|
||||
"json",
|
||||
"tokio",
|
||||
"ws",
|
||||
] }
|
||||
base64 = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-api = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
@@ -23,10 +31,16 @@ codex-state = { workspace = true }
|
||||
codex-uds = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
codex-utils-rustls-provider = { workspace = true }
|
||||
constant_time_eq = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
gethostname = { workspace = true }
|
||||
hmac = { workspace = true }
|
||||
jsonwebtoken = { workspace = true }
|
||||
owo-colors = { workspace = true, features = ["supports-colors"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
|
||||
@@ -11,9 +11,10 @@ pub use transport::AppServerTransportParseError;
|
||||
pub use transport::CHANNEL_CAPACITY;
|
||||
pub use transport::ConnectionOrigin;
|
||||
pub use transport::RemoteControlHandle;
|
||||
pub use transport::RemoteControlStartConfig;
|
||||
pub use transport::TransportEvent;
|
||||
pub use transport::app_server_control_socket_path;
|
||||
pub use transport::auth;
|
||||
pub use transport::start_control_socket_acceptor;
|
||||
pub use transport::start_remote_control;
|
||||
pub use transport::start_stdio_connection;
|
||||
pub use transport::start_websocket_acceptor;
|
||||
|
||||
751
codex-rs/app-server-transport/src/transport/auth.rs
Normal file
751
codex-rs/app-server-transport/src/transport/auth.rs
Normal file
@@ -0,0 +1,751 @@
|
||||
use anyhow::Context;
|
||||
use axum::http::HeaderMap;
|
||||
use axum::http::StatusCode;
|
||||
use axum::http::header::AUTHORIZATION;
|
||||
use clap::Args;
|
||||
use clap::ValueEnum;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use constant_time_eq::constant_time_eq_32;
|
||||
use jsonwebtoken::Algorithm;
|
||||
use jsonwebtoken::DecodingKey;
|
||||
use jsonwebtoken::Validation;
|
||||
use jsonwebtoken::decode;
|
||||
use serde::Deserialize;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
use std::io;
|
||||
use std::io::ErrorKind;
|
||||
use std::net::SocketAddr;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
const DEFAULT_MAX_CLOCK_SKEW_SECONDS: u64 = 30;
|
||||
const MIN_SIGNED_BEARER_SECRET_BYTES: usize = 32;
|
||||
const INVALID_AUTHORIZATION_HEADER_MESSAGE: &str = "invalid authorization header";
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Args)]
|
||||
pub struct AppServerWebsocketAuthArgs {
|
||||
/// Websocket auth mode for non-loopback listeners.
|
||||
#[arg(long = "ws-auth", value_name = "MODE", value_enum)]
|
||||
pub ws_auth: Option<WebsocketAuthCliMode>,
|
||||
|
||||
/// Absolute path to the capability-token file.
|
||||
#[arg(long = "ws-token-file", value_name = "PATH")]
|
||||
pub ws_token_file: Option<PathBuf>,
|
||||
|
||||
/// Hex-encoded SHA-256 digest of the capability token.
|
||||
#[arg(long = "ws-token-sha256", value_name = "HEX")]
|
||||
pub ws_token_sha256: Option<String>,
|
||||
|
||||
/// Absolute path to the shared secret file for signed JWT bearer tokens.
|
||||
#[arg(long = "ws-shared-secret-file", value_name = "PATH")]
|
||||
pub ws_shared_secret_file: Option<PathBuf>,
|
||||
|
||||
/// Expected issuer for signed JWT bearer tokens.
|
||||
#[arg(long = "ws-issuer", value_name = "ISSUER")]
|
||||
pub ws_issuer: Option<String>,
|
||||
|
||||
/// Expected audience for signed JWT bearer tokens.
|
||||
#[arg(long = "ws-audience", value_name = "AUDIENCE")]
|
||||
pub ws_audience: Option<String>,
|
||||
|
||||
/// Maximum clock skew when validating signed JWT bearer tokens.
|
||||
#[arg(long = "ws-max-clock-skew-seconds", value_name = "SECONDS")]
|
||||
pub ws_max_clock_skew_seconds: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
|
||||
pub enum WebsocketAuthCliMode {
|
||||
CapabilityToken,
|
||||
SignedBearerToken,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq)]
|
||||
pub struct AppServerWebsocketAuthSettings {
|
||||
pub config: Option<AppServerWebsocketAuthConfig>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum AppServerWebsocketAuthConfig {
|
||||
CapabilityToken {
|
||||
source: AppServerWebsocketCapabilityTokenSource,
|
||||
},
|
||||
SignedBearerToken {
|
||||
shared_secret_file: AbsolutePathBuf,
|
||||
issuer: Option<String>,
|
||||
audience: Option<String>,
|
||||
max_clock_skew_seconds: u64,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum AppServerWebsocketCapabilityTokenSource {
|
||||
TokenFile { token_file: AbsolutePathBuf },
|
||||
TokenSha256 { token_sha256: [u8; 32] },
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct WebsocketAuthPolicy {
|
||||
pub(crate) mode: Option<WebsocketAuthMode>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum WebsocketAuthMode {
|
||||
CapabilityToken {
|
||||
token_sha256: [u8; 32],
|
||||
},
|
||||
SignedBearerToken {
|
||||
shared_secret: Vec<u8>,
|
||||
issuer: Option<String>,
|
||||
audience: Option<String>,
|
||||
max_clock_skew_seconds: i64,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct WebsocketAuthError {
|
||||
status_code: StatusCode,
|
||||
message: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct JwtClaims {
|
||||
exp: i64,
|
||||
nbf: Option<i64>,
|
||||
iss: Option<String>,
|
||||
aud: Option<JwtAudienceClaim>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum JwtAudienceClaim {
|
||||
Single(String),
|
||||
Multiple(Vec<String>),
|
||||
}
|
||||
|
||||
impl WebsocketAuthError {
|
||||
pub(crate) fn status_code(&self) -> StatusCode {
|
||||
self.status_code
|
||||
}
|
||||
|
||||
pub(crate) fn message(&self) -> &'static str {
|
||||
self.message
|
||||
}
|
||||
}
|
||||
|
||||
impl AppServerWebsocketAuthArgs {
|
||||
pub fn try_into_settings(self) -> anyhow::Result<AppServerWebsocketAuthSettings> {
|
||||
let normalize = |value: Option<String>| {
|
||||
value.and_then(|value| {
|
||||
let trimmed = value.trim();
|
||||
(!trimmed.is_empty()).then(|| trimmed.to_string())
|
||||
})
|
||||
};
|
||||
|
||||
let config = match self.ws_auth {
|
||||
Some(WebsocketAuthCliMode::CapabilityToken) => {
|
||||
if self.ws_shared_secret_file.is_some()
|
||||
|| self.ws_issuer.is_some()
|
||||
|| self.ws_audience.is_some()
|
||||
|| self.ws_max_clock_skew_seconds.is_some()
|
||||
{
|
||||
anyhow::bail!(
|
||||
"`--ws-shared-secret-file`, `--ws-issuer`, `--ws-audience`, and `--ws-max-clock-skew-seconds` require `--ws-auth signed-bearer-token`"
|
||||
);
|
||||
}
|
||||
let source = match (self.ws_token_file, self.ws_token_sha256) {
|
||||
(Some(_), Some(_)) => {
|
||||
anyhow::bail!(
|
||||
"`--ws-token-file` and `--ws-token-sha256` are mutually exclusive"
|
||||
);
|
||||
}
|
||||
(Some(token_file), None) => {
|
||||
AppServerWebsocketCapabilityTokenSource::TokenFile {
|
||||
token_file: absolute_path_arg("--ws-token-file", token_file)?,
|
||||
}
|
||||
}
|
||||
(None, Some(token_sha256)) => {
|
||||
AppServerWebsocketCapabilityTokenSource::TokenSha256 {
|
||||
token_sha256: sha256_digest_arg("--ws-token-sha256", &token_sha256)?,
|
||||
}
|
||||
}
|
||||
(None, None) => {
|
||||
anyhow::bail!(
|
||||
"`--ws-token-file` or `--ws-token-sha256` is required when `--ws-auth capability-token` is set"
|
||||
);
|
||||
}
|
||||
};
|
||||
Some(AppServerWebsocketAuthConfig::CapabilityToken { source })
|
||||
}
|
||||
Some(WebsocketAuthCliMode::SignedBearerToken) => {
|
||||
if self.ws_token_file.is_some() || self.ws_token_sha256.is_some() {
|
||||
anyhow::bail!(
|
||||
"`--ws-token-file` and `--ws-token-sha256` require `--ws-auth capability-token`, not `signed-bearer-token`"
|
||||
);
|
||||
}
|
||||
let shared_secret_file = self.ws_shared_secret_file.context(
|
||||
"`--ws-shared-secret-file` is required when `--ws-auth signed-bearer-token` is set",
|
||||
)?;
|
||||
Some(AppServerWebsocketAuthConfig::SignedBearerToken {
|
||||
shared_secret_file: absolute_path_arg(
|
||||
"--ws-shared-secret-file",
|
||||
shared_secret_file,
|
||||
)?,
|
||||
issuer: normalize(self.ws_issuer),
|
||||
audience: normalize(self.ws_audience),
|
||||
max_clock_skew_seconds: self
|
||||
.ws_max_clock_skew_seconds
|
||||
.unwrap_or(DEFAULT_MAX_CLOCK_SKEW_SECONDS),
|
||||
})
|
||||
}
|
||||
None => {
|
||||
if self.ws_token_file.is_some()
|
||||
|| self.ws_token_sha256.is_some()
|
||||
|| self.ws_shared_secret_file.is_some()
|
||||
|| self.ws_issuer.is_some()
|
||||
|| self.ws_audience.is_some()
|
||||
|| self.ws_max_clock_skew_seconds.is_some()
|
||||
{
|
||||
anyhow::bail!(
|
||||
"websocket auth flags require `--ws-auth capability-token` or `--ws-auth signed-bearer-token`"
|
||||
);
|
||||
}
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
Ok(AppServerWebsocketAuthSettings { config })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn policy_from_settings(
|
||||
settings: &AppServerWebsocketAuthSettings,
|
||||
) -> io::Result<WebsocketAuthPolicy> {
|
||||
let mode = match settings.config.as_ref() {
|
||||
Some(AppServerWebsocketAuthConfig::CapabilityToken { source }) => match source {
|
||||
AppServerWebsocketCapabilityTokenSource::TokenFile { token_file } => {
|
||||
let token = read_trimmed_secret(token_file.as_ref())?;
|
||||
Some(WebsocketAuthMode::CapabilityToken {
|
||||
token_sha256: sha256_digest(token.as_bytes()),
|
||||
})
|
||||
}
|
||||
AppServerWebsocketCapabilityTokenSource::TokenSha256 { token_sha256 } => {
|
||||
Some(WebsocketAuthMode::CapabilityToken {
|
||||
token_sha256: *token_sha256,
|
||||
})
|
||||
}
|
||||
},
|
||||
Some(AppServerWebsocketAuthConfig::SignedBearerToken {
|
||||
shared_secret_file,
|
||||
issuer,
|
||||
audience,
|
||||
max_clock_skew_seconds,
|
||||
}) => {
|
||||
let shared_secret = read_trimmed_secret(shared_secret_file.as_ref())?.into_bytes();
|
||||
validate_signed_bearer_secret(shared_secret_file.as_ref(), &shared_secret)?;
|
||||
let max_clock_skew_seconds = i64::try_from(*max_clock_skew_seconds).map_err(|_| {
|
||||
io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
"websocket auth clock skew must fit in a signed 64-bit integer",
|
||||
)
|
||||
})?;
|
||||
Some(WebsocketAuthMode::SignedBearerToken {
|
||||
shared_secret,
|
||||
issuer: issuer.clone(),
|
||||
audience: audience.clone(),
|
||||
max_clock_skew_seconds,
|
||||
})
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(WebsocketAuthPolicy { mode })
|
||||
}
|
||||
|
||||
pub(crate) fn should_warn_about_unauthenticated_non_loopback_listener(
|
||||
bind_address: SocketAddr,
|
||||
policy: &WebsocketAuthPolicy,
|
||||
) -> bool {
|
||||
!bind_address.ip().is_loopback() && policy.mode.is_none()
|
||||
}
|
||||
|
||||
pub(crate) fn authorize_upgrade(
|
||||
headers: &HeaderMap,
|
||||
policy: &WebsocketAuthPolicy,
|
||||
) -> Result<(), WebsocketAuthError> {
|
||||
let Some(mode) = policy.mode.as_ref() else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let token = bearer_token_from_headers(headers)?;
|
||||
match mode {
|
||||
WebsocketAuthMode::CapabilityToken { token_sha256 } => {
|
||||
let actual_sha256 = sha256_digest(token.as_bytes());
|
||||
if constant_time_eq_32(token_sha256, &actual_sha256) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(unauthorized("invalid websocket bearer token"))
|
||||
}
|
||||
}
|
||||
WebsocketAuthMode::SignedBearerToken {
|
||||
shared_secret,
|
||||
issuer,
|
||||
audience,
|
||||
max_clock_skew_seconds,
|
||||
} => verify_signed_bearer_token(
|
||||
token,
|
||||
shared_secret,
|
||||
issuer.as_deref(),
|
||||
audience.as_deref(),
|
||||
*max_clock_skew_seconds,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn verify_signed_bearer_token(
|
||||
token: &str,
|
||||
shared_secret: &[u8],
|
||||
issuer: Option<&str>,
|
||||
audience: Option<&str>,
|
||||
max_clock_skew_seconds: i64,
|
||||
) -> Result<(), WebsocketAuthError> {
|
||||
let claims = decode_jwt_claims(token, shared_secret)?;
|
||||
validate_jwt_claims(&claims, issuer, audience, max_clock_skew_seconds)
|
||||
}
|
||||
|
||||
fn decode_jwt_claims(token: &str, shared_secret: &[u8]) -> Result<JwtClaims, WebsocketAuthError> {
|
||||
let mut validation = Validation::new(Algorithm::HS256);
|
||||
validation.required_spec_claims.clear();
|
||||
validation.validate_exp = false;
|
||||
validation.validate_nbf = false;
|
||||
validation.validate_aud = false;
|
||||
|
||||
decode::<JwtClaims>(token, &DecodingKey::from_secret(shared_secret), &validation)
|
||||
.map(|token_data| token_data.claims)
|
||||
.map_err(|_| unauthorized("invalid websocket jwt"))
|
||||
}
|
||||
|
||||
fn validate_jwt_claims(
|
||||
claims: &JwtClaims,
|
||||
issuer: Option<&str>,
|
||||
audience: Option<&str>,
|
||||
max_clock_skew_seconds: i64,
|
||||
) -> Result<(), WebsocketAuthError> {
|
||||
let now = OffsetDateTime::now_utc().unix_timestamp();
|
||||
if now > claims.exp.saturating_add(max_clock_skew_seconds) {
|
||||
return Err(unauthorized("expired websocket jwt"));
|
||||
}
|
||||
if let Some(nbf) = claims.nbf
|
||||
&& now < nbf.saturating_sub(max_clock_skew_seconds)
|
||||
{
|
||||
return Err(unauthorized("websocket jwt is not valid yet"));
|
||||
}
|
||||
if let Some(expected_issuer) = issuer
|
||||
&& claims.iss.as_deref() != Some(expected_issuer)
|
||||
{
|
||||
return Err(unauthorized("websocket jwt issuer mismatch"));
|
||||
}
|
||||
if let Some(expected_audience) = audience
|
||||
&& !audience_matches(claims.aud.as_ref(), expected_audience)
|
||||
{
|
||||
return Err(unauthorized("websocket jwt audience mismatch"));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn audience_matches(audience: Option<&JwtAudienceClaim>, expected_audience: &str) -> bool {
|
||||
match audience {
|
||||
Some(JwtAudienceClaim::Single(actual)) => actual == expected_audience,
|
||||
Some(JwtAudienceClaim::Multiple(actual)) => {
|
||||
actual.iter().any(|audience| audience == expected_audience)
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn bearer_token_from_headers(headers: &HeaderMap) -> Result<&str, WebsocketAuthError> {
|
||||
let raw_header = headers
|
||||
.get(AUTHORIZATION)
|
||||
.ok_or_else(|| unauthorized("missing websocket bearer token"))?;
|
||||
let header = raw_header
|
||||
.to_str()
|
||||
.map_err(|_| unauthorized(INVALID_AUTHORIZATION_HEADER_MESSAGE))?;
|
||||
let Some((scheme, token)) = header.split_once(' ') else {
|
||||
return Err(unauthorized(INVALID_AUTHORIZATION_HEADER_MESSAGE));
|
||||
};
|
||||
if !scheme.eq_ignore_ascii_case("Bearer") {
|
||||
return Err(unauthorized(INVALID_AUTHORIZATION_HEADER_MESSAGE));
|
||||
}
|
||||
let token = token.trim();
|
||||
if token.is_empty() {
|
||||
return Err(unauthorized(INVALID_AUTHORIZATION_HEADER_MESSAGE));
|
||||
}
|
||||
Ok(token)
|
||||
}
|
||||
|
||||
fn validate_signed_bearer_secret(path: &Path, shared_secret: &[u8]) -> io::Result<()> {
|
||||
if shared_secret.len() < MIN_SIGNED_BEARER_SECRET_BYTES {
|
||||
return Err(io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"signed websocket bearer secret {} must be at least {MIN_SIGNED_BEARER_SECRET_BYTES} bytes",
|
||||
path.display()
|
||||
),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_trimmed_secret(path: &std::path::Path) -> io::Result<String> {
|
||||
let raw = std::fs::read_to_string(path).map_err(|err| {
|
||||
io::Error::new(
|
||||
err.kind(),
|
||||
format!(
|
||||
"failed to read websocket auth secret {}: {err}",
|
||||
path.display()
|
||||
),
|
||||
)
|
||||
})?;
|
||||
let trimmed = raw.trim();
|
||||
if trimmed.is_empty() {
|
||||
return Err(io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
format!("websocket auth secret {} must not be empty", path.display()),
|
||||
));
|
||||
}
|
||||
Ok(trimmed.to_string())
|
||||
}
|
||||
|
||||
fn absolute_path_arg(flag_name: &str, path: PathBuf) -> anyhow::Result<AbsolutePathBuf> {
|
||||
AbsolutePathBuf::try_from(path).with_context(|| format!("{flag_name} must be an absolute path"))
|
||||
}
|
||||
|
||||
fn sha256_digest_arg(flag_name: &str, value: &str) -> anyhow::Result<[u8; 32]> {
|
||||
let trimmed = value.trim();
|
||||
if trimmed.len() != 64 {
|
||||
anyhow::bail!("{flag_name} must be a 64-character hex SHA-256 digest");
|
||||
}
|
||||
|
||||
let mut digest = [0u8; 32];
|
||||
for (index, pair) in trimmed.as_bytes().chunks_exact(2).enumerate() {
|
||||
let high = hex_nibble(flag_name, pair[0])?;
|
||||
let low = hex_nibble(flag_name, pair[1])?;
|
||||
digest[index] = (high << 4) | low;
|
||||
}
|
||||
Ok(digest)
|
||||
}
|
||||
|
||||
fn hex_nibble(flag_name: &str, byte: u8) -> anyhow::Result<u8> {
|
||||
match byte {
|
||||
b'0'..=b'9' => Ok(byte - b'0'),
|
||||
b'a'..=b'f' => Ok(byte - b'a' + 10),
|
||||
b'A'..=b'F' => Ok(byte - b'A' + 10),
|
||||
_ => anyhow::bail!("{flag_name} must be a 64-character hex SHA-256 digest"),
|
||||
}
|
||||
}
|
||||
|
||||
fn sha256_digest(input: &[u8]) -> [u8; 32] {
|
||||
let mut digest = [0u8; 32];
|
||||
digest.copy_from_slice(&Sha256::digest(input));
|
||||
digest
|
||||
}
|
||||
|
||||
fn unauthorized(message: &'static str) -> WebsocketAuthError {
|
||||
WebsocketAuthError {
|
||||
status_code: StatusCode::UNAUTHORIZED,
|
||||
message,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use axum::http::HeaderValue;
|
||||
use base64::Engine;
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use hmac::Hmac;
|
||||
use hmac::Mac;
|
||||
use serde_json::json;
|
||||
|
||||
type HmacSha256 = Hmac<Sha256>;
|
||||
|
||||
fn signed_token(shared_secret: &[u8], claims: serde_json::Value) -> String {
|
||||
let header = URL_SAFE_NO_PAD.encode(br#"{"alg":"HS256","typ":"JWT"}"#);
|
||||
let claims_segment = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&claims).unwrap());
|
||||
let payload = format!("{header}.{claims_segment}");
|
||||
let mut mac = HmacSha256::new_from_slice(shared_secret).unwrap();
|
||||
mac.update(payload.as_bytes());
|
||||
let signature = URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
|
||||
format!("{payload}.{signature}")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn warns_about_unauthenticated_non_loopback_listener() {
|
||||
let policy = WebsocketAuthPolicy::default();
|
||||
assert!(should_warn_about_unauthenticated_non_loopback_listener(
|
||||
"0.0.0.0:8765".parse().unwrap(),
|
||||
&policy,
|
||||
));
|
||||
assert!(!should_warn_about_unauthenticated_non_loopback_listener(
|
||||
"127.0.0.1:8765".parse().unwrap(),
|
||||
&policy,
|
||||
));
|
||||
assert!(!should_warn_about_unauthenticated_non_loopback_listener(
|
||||
"0.0.0.0:8765".parse().unwrap(),
|
||||
&WebsocketAuthPolicy {
|
||||
mode: Some(WebsocketAuthMode::CapabilityToken {
|
||||
token_sha256: [0u8; 32],
|
||||
}),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn capability_token_args_require_token_file_or_hash() {
|
||||
let err = AppServerWebsocketAuthArgs {
|
||||
ws_auth: Some(WebsocketAuthCliMode::CapabilityToken),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect_err("capability-token mode should require a token source");
|
||||
assert!(
|
||||
err.to_string().contains("--ws-token-file")
|
||||
&& err.to_string().contains("--ws-token-sha256"),
|
||||
"unexpected error: {err}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn capability_token_args_accept_token_hash() {
|
||||
let settings = AppServerWebsocketAuthArgs {
|
||||
ws_auth: Some(WebsocketAuthCliMode::CapabilityToken),
|
||||
ws_token_sha256: Some("ab".repeat(32)),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect("capability-token hash args should parse");
|
||||
|
||||
assert_eq!(
|
||||
settings,
|
||||
AppServerWebsocketAuthSettings {
|
||||
config: Some(AppServerWebsocketAuthConfig::CapabilityToken {
|
||||
source: AppServerWebsocketCapabilityTokenSource::TokenSha256 {
|
||||
token_sha256: [0xab; 32],
|
||||
},
|
||||
}),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn capability_token_args_reject_multiple_token_sources() {
|
||||
let err = AppServerWebsocketAuthArgs {
|
||||
ws_auth: Some(WebsocketAuthCliMode::CapabilityToken),
|
||||
ws_token_file: Some(PathBuf::from("/tmp/token")),
|
||||
ws_token_sha256: Some("ab".repeat(32)),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect_err("capability-token mode should reject multiple token sources");
|
||||
assert!(
|
||||
err.to_string().contains("mutually exclusive"),
|
||||
"unexpected error: {err}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn capability_token_args_reject_malformed_token_hash() {
|
||||
let err = AppServerWebsocketAuthArgs {
|
||||
ws_auth: Some(WebsocketAuthCliMode::CapabilityToken),
|
||||
ws_token_sha256: Some("not-a-sha256".to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect_err("capability-token mode should reject malformed token hashes");
|
||||
assert!(
|
||||
err.to_string().contains("64-character hex"),
|
||||
"unexpected error: {err}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn capability_token_hash_policy_authorizes_matching_bearer_token() {
|
||||
let settings = AppServerWebsocketAuthSettings {
|
||||
config: Some(AppServerWebsocketAuthConfig::CapabilityToken {
|
||||
source: AppServerWebsocketCapabilityTokenSource::TokenSha256 {
|
||||
token_sha256: sha256_digest(b"super-secret-token"),
|
||||
},
|
||||
}),
|
||||
};
|
||||
let policy = policy_from_settings(&settings).expect("hash policy should build");
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
AUTHORIZATION,
|
||||
HeaderValue::from_static("Bearer super-secret-token"),
|
||||
);
|
||||
authorize_upgrade(&headers, &policy).expect("matching token should authorize");
|
||||
|
||||
headers.insert(
|
||||
AUTHORIZATION,
|
||||
HeaderValue::from_static("Bearer wrong-token"),
|
||||
);
|
||||
let err = authorize_upgrade(&headers, &policy).expect_err("wrong token should fail");
|
||||
assert_eq!(err.status_code(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_args_require_mode_when_mode_specific_flags_are_set() {
|
||||
let err = AppServerWebsocketAuthArgs {
|
||||
ws_shared_secret_file: Some(PathBuf::from("/tmp/secret")),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect_err("mode-specific flags should require --ws-auth");
|
||||
assert!(
|
||||
err.to_string().contains("websocket auth flags require"),
|
||||
"unexpected error: {err}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_args_default_clock_skew_and_trim_optional_claims() {
|
||||
let settings = AppServerWebsocketAuthArgs {
|
||||
ws_auth: Some(WebsocketAuthCliMode::SignedBearerToken),
|
||||
ws_shared_secret_file: Some(PathBuf::from("/tmp/secret")),
|
||||
ws_issuer: Some(" issuer ".to_string()),
|
||||
ws_audience: Some(" ".to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
.try_into_settings()
|
||||
.expect("signed bearer args should parse");
|
||||
|
||||
assert_eq!(
|
||||
settings,
|
||||
AppServerWebsocketAuthSettings {
|
||||
config: Some(AppServerWebsocketAuthConfig::SignedBearerToken {
|
||||
shared_secret_file: AbsolutePathBuf::from_absolute_path("/tmp/secret")
|
||||
.expect("absolute path"),
|
||||
issuer: Some("issuer".to_string()),
|
||||
audience: None,
|
||||
max_clock_skew_seconds: DEFAULT_MAX_CLOCK_SKEW_SECONDS,
|
||||
}),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_token_verification_rejects_tampering() {
|
||||
let shared_secret = b"0123456789abcdef0123456789abcdef";
|
||||
let token = signed_token(
|
||||
shared_secret,
|
||||
json!({
|
||||
"exp": OffsetDateTime::now_utc().unix_timestamp() + 60,
|
||||
}),
|
||||
);
|
||||
let tampered = token.replace(".eyJleHAi", ".eyJleHBi");
|
||||
let err = verify_signed_bearer_token(
|
||||
&tampered,
|
||||
shared_secret,
|
||||
/*issuer*/ None,
|
||||
/*audience*/ None,
|
||||
/*max_clock_skew_seconds*/ 30,
|
||||
)
|
||||
.expect_err("tampered jwt should fail");
|
||||
assert_eq!(err.status_code(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_token_verification_accepts_valid_token() {
|
||||
let shared_secret = b"0123456789abcdef0123456789abcdef";
|
||||
let token = signed_token(
|
||||
shared_secret,
|
||||
json!({
|
||||
"exp": OffsetDateTime::now_utc().unix_timestamp() + 60,
|
||||
"iss": "issuer",
|
||||
"aud": "audience",
|
||||
}),
|
||||
);
|
||||
verify_signed_bearer_token(
|
||||
&token,
|
||||
shared_secret,
|
||||
Some("issuer"),
|
||||
Some("audience"),
|
||||
/*max_clock_skew_seconds*/ 30,
|
||||
)
|
||||
.expect("valid signed token should verify");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_token_verification_accepts_multiple_audiences() {
|
||||
let shared_secret = b"0123456789abcdef0123456789abcdef";
|
||||
let token = signed_token(
|
||||
shared_secret,
|
||||
json!({
|
||||
"exp": OffsetDateTime::now_utc().unix_timestamp() + 60,
|
||||
"aud": ["other-audience", "audience"],
|
||||
}),
|
||||
);
|
||||
verify_signed_bearer_token(
|
||||
&token,
|
||||
shared_secret,
|
||||
/*issuer*/ None,
|
||||
Some("audience"),
|
||||
/*max_clock_skew_seconds*/ 30,
|
||||
)
|
||||
.expect("jwt audience arrays should verify");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_token_verification_rejects_alg_none_tokens() {
|
||||
let claims_segment = URL_SAFE_NO_PAD.encode(
|
||||
serde_json::to_vec(&json!({
|
||||
"exp": OffsetDateTime::now_utc().unix_timestamp() + 60,
|
||||
}))
|
||||
.unwrap(),
|
||||
);
|
||||
let header_segment = URL_SAFE_NO_PAD.encode(br#"{"alg":"none","typ":"JWT"}"#);
|
||||
let token = format!("{header_segment}.{claims_segment}.");
|
||||
let err = verify_signed_bearer_token(
|
||||
&token,
|
||||
b"0123456789abcdef0123456789abcdef",
|
||||
/*issuer*/ None,
|
||||
/*audience*/ None,
|
||||
/*max_clock_skew_seconds*/ 30,
|
||||
)
|
||||
.expect_err("alg=none jwt should be rejected");
|
||||
assert_eq!(err.status_code(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signed_bearer_token_verification_rejects_missing_exp() {
|
||||
let shared_secret = b"0123456789abcdef0123456789abcdef";
|
||||
let token = signed_token(
|
||||
shared_secret,
|
||||
json!({
|
||||
"iss": "issuer",
|
||||
}),
|
||||
);
|
||||
let err = verify_signed_bearer_token(
|
||||
&token,
|
||||
shared_secret,
|
||||
/*issuer*/ None,
|
||||
/*audience*/ None,
|
||||
/*max_clock_skew_seconds*/ 30,
|
||||
)
|
||||
.expect_err("jwt without exp should be rejected");
|
||||
assert_eq!(err.status_code(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validate_signed_bearer_secret_rejects_short_secret() {
|
||||
let err = validate_signed_bearer_secret(Path::new("/tmp/secret"), b"too-short")
|
||||
.expect_err("short shared secret should be rejected");
|
||||
assert_eq!(err.kind(), ErrorKind::InvalidInput);
|
||||
assert!(
|
||||
err.to_string().contains("must be at least 32 bytes"),
|
||||
"unexpected error: {err}"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
pub mod auth;
|
||||
|
||||
use crate::outgoing_message::ConnectionId;
|
||||
use crate::outgoing_message::OutgoingError;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
@@ -6,6 +8,7 @@ use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use std::net::SocketAddr;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::sync::atomic::AtomicU64;
|
||||
@@ -28,10 +31,10 @@ mod unix_socket_tests;
|
||||
mod websocket;
|
||||
|
||||
pub use remote_control::RemoteControlHandle;
|
||||
pub use remote_control::RemoteControlStartConfig;
|
||||
pub use remote_control::start_remote_control;
|
||||
pub use stdio::start_stdio_connection;
|
||||
pub use unix_socket::start_control_socket_acceptor;
|
||||
pub use websocket::start_websocket_acceptor;
|
||||
|
||||
const OVERLOADED_ERROR_CODE: i64 = -32001;
|
||||
|
||||
@@ -50,6 +53,7 @@ pub fn app_server_control_socket_path(codex_home: &Path) -> std::io::Result<Abso
|
||||
pub enum AppServerTransport {
|
||||
Stdio,
|
||||
UnixSocket { socket_path: AbsolutePathBuf },
|
||||
WebSocket { bind_address: SocketAddr },
|
||||
Off,
|
||||
}
|
||||
|
||||
@@ -57,6 +61,7 @@ pub enum AppServerTransport {
|
||||
pub enum AppServerTransportParseError {
|
||||
UnsupportedListenUrl(String),
|
||||
InvalidUnixSocketPath { listen_url: String, message: String },
|
||||
InvalidWebSocketListenUrl(String),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for AppServerTransportParseError {
|
||||
@@ -64,7 +69,7 @@ impl std::fmt::Display for AppServerTransportParseError {
|
||||
match self {
|
||||
AppServerTransportParseError::UnsupportedListenUrl(listen_url) => write!(
|
||||
f,
|
||||
"unsupported --listen URL `{listen_url}`; expected `stdio://`, `unix://`, `unix://PATH`, or `off`"
|
||||
"unsupported --listen URL `{listen_url}`; expected `stdio://`, `unix://`, `unix://PATH`, `ws://IP:PORT`, or `off`"
|
||||
),
|
||||
AppServerTransportParseError::InvalidUnixSocketPath {
|
||||
listen_url,
|
||||
@@ -73,6 +78,10 @@ impl std::fmt::Display for AppServerTransportParseError {
|
||||
f,
|
||||
"invalid unix socket --listen URL `{listen_url}`; failed to resolve socket path: {message}"
|
||||
),
|
||||
AppServerTransportParseError::InvalidWebSocketListenUrl(listen_url) => write!(
|
||||
f,
|
||||
"invalid websocket --listen URL `{listen_url}`; expected `ws://IP:PORT`"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -116,6 +125,13 @@ impl AppServerTransport {
|
||||
return Ok(Self::Off);
|
||||
}
|
||||
|
||||
if let Some(socket_addr) = listen_url.strip_prefix("ws://") {
|
||||
let bind_address = socket_addr.parse::<SocketAddr>().map_err(|_| {
|
||||
AppServerTransportParseError::InvalidWebSocketListenUrl(listen_url.to_string())
|
||||
})?;
|
||||
return Ok(Self::WebSocket { bind_address });
|
||||
}
|
||||
|
||||
Err(AppServerTransportParseError::UnsupportedListenUrl(
|
||||
listen_url.to_string(),
|
||||
))
|
||||
@@ -151,7 +167,7 @@ pub enum TransportEvent {
|
||||
pub enum ConnectionOrigin {
|
||||
Stdio,
|
||||
InProcess,
|
||||
UnixSocket,
|
||||
WebSocket,
|
||||
RemoteControl,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use super::protocol::EnrollRemoteServerRequest;
|
||||
use super::protocol::EnrollRemoteServerResponse;
|
||||
use super::protocol::RemoteControlTarget;
|
||||
use axum::http::HeaderMap;
|
||||
use codex_api::SharedAuthProvider;
|
||||
use codex_login::default_client::build_reqwest_client;
|
||||
use codex_state::RemoteControlEnrollmentRecord;
|
||||
@@ -8,7 +9,6 @@ use codex_state::StateRuntime;
|
||||
use gethostname::gethostname;
|
||||
use std::io;
|
||||
use std::io::ErrorKind;
|
||||
use tokio_tungstenite::tungstenite::http::HeaderMap;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
|
||||
@@ -19,7 +19,6 @@ const REQUEST_ID_HEADER: &str = "x-request-id";
|
||||
const OAI_REQUEST_ID_HEADER: &str = "x-oai-request-id";
|
||||
const CF_RAY_HEADER: &str = "cf-ray";
|
||||
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
|
||||
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) struct RemoteControlEnrollment {
|
||||
@@ -194,7 +193,6 @@ pub(crate) fn format_headers(headers: &HeaderMap) -> String {
|
||||
pub(super) async fn enroll_remote_control_server(
|
||||
remote_control_target: &RemoteControlTarget,
|
||||
auth: &RemoteControlConnectionAuth,
|
||||
installation_id: &str,
|
||||
) -> io::Result<RemoteControlEnrollment> {
|
||||
let enroll_url = &remote_control_target.enroll_url;
|
||||
let server_name = gethostname().to_string_lossy().trim().to_string();
|
||||
@@ -203,7 +201,6 @@ pub(super) async fn enroll_remote_control_server(
|
||||
os: std::env::consts::OS,
|
||||
arch: std::env::consts::ARCH,
|
||||
app_server_version: env!("CARGO_PKG_VERSION"),
|
||||
installation_id: installation_id.to_string(),
|
||||
};
|
||||
let client = build_reqwest_client();
|
||||
let mut auth_headers = HeaderMap::new();
|
||||
@@ -213,7 +210,6 @@ pub(super) async fn enroll_remote_control_server(
|
||||
.timeout(REMOTE_CONTROL_ENROLL_TIMEOUT)
|
||||
.headers(auth_headers)
|
||||
.header(REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)
|
||||
.header(REMOTE_CONTROL_INSTALLATION_ID_HEADER, installation_id)
|
||||
.json(&request);
|
||||
|
||||
let response = http_request.send().await.map_err(|err| {
|
||||
@@ -463,7 +459,6 @@ mod tests {
|
||||
auth_provider: codex_model_provider::unauthenticated_auth_provider(),
|
||||
account_id: "account_id".to_string(),
|
||||
},
|
||||
"11111111-1111-4111-8111-111111111111",
|
||||
)
|
||||
.await
|
||||
.expect_err("invalid response should fail to parse");
|
||||
|
||||
@@ -28,11 +28,6 @@ use tokio::task::JoinHandle;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::warn;
|
||||
|
||||
pub struct RemoteControlStartConfig {
|
||||
pub remote_control_url: String,
|
||||
pub installation_id: String,
|
||||
}
|
||||
|
||||
pub(super) struct QueuedServerEnvelope {
|
||||
pub(super) event: ServerEvent,
|
||||
pub(super) client_id: ClientId,
|
||||
@@ -67,7 +62,7 @@ impl RemoteControlHandle {
|
||||
}
|
||||
|
||||
pub async fn start_remote_control(
|
||||
config: RemoteControlStartConfig,
|
||||
remote_control_url: String,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
transport_event_tx: mpsc::Sender<TransportEvent>,
|
||||
@@ -82,7 +77,7 @@ pub async fn start_remote_control(
|
||||
warn!("remote control disabled because sqlite state db is unavailable");
|
||||
}
|
||||
let remote_control_target = if initial_enabled {
|
||||
Some(normalize_remote_control_url(&config.remote_control_url)?)
|
||||
Some(normalize_remote_control_url(&remote_control_url)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -94,18 +89,14 @@ pub async fn start_remote_control(
|
||||
} else {
|
||||
RemoteControlConnectionStatus::Disabled
|
||||
},
|
||||
installation_id: config.installation_id.clone(),
|
||||
environment_id: None,
|
||||
};
|
||||
let (status_tx, _status_rx) = watch::channel(initial_status);
|
||||
let status_publisher = RemoteControlStatusPublisher::new(status_tx.clone());
|
||||
let join_handle = tokio::spawn(async move {
|
||||
RemoteControlWebsocket::new(
|
||||
websocket::RemoteControlWebsocketConfig {
|
||||
remote_control_url: config.remote_control_url,
|
||||
installation_id: config.installation_id,
|
||||
remote_control_target,
|
||||
},
|
||||
remote_control_url,
|
||||
remote_control_target,
|
||||
state_db,
|
||||
auth_manager,
|
||||
RemoteControlChannels {
|
||||
|
||||
@@ -19,7 +19,6 @@ pub(super) struct EnrollRemoteServerRequest {
|
||||
pub(super) os: &'static str,
|
||||
pub(super) arch: &'static str,
|
||||
pub(super) app_server_version: &'static str,
|
||||
pub(super) installation_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use super::enroll::REMOTE_CONTROL_ACCOUNT_ID_HEADER;
|
||||
use super::enroll::REMOTE_CONTROL_INSTALLATION_ID_HEADER;
|
||||
use super::enroll::RemoteControlEnrollment;
|
||||
use super::enroll::load_persisted_remote_control_enrollment;
|
||||
use super::enroll::update_persisted_remote_control_enrollment;
|
||||
@@ -57,8 +56,6 @@ use tokio_tungstenite::accept_hdr_async;
|
||||
use tokio_tungstenite::tungstenite;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
|
||||
|
||||
fn remote_control_auth_manager() -> Arc<AuthManager> {
|
||||
auth_manager_from_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
|
||||
}
|
||||
@@ -134,7 +131,6 @@ async fn expect_remote_control_status(
|
||||
if let Some(expected_status) = expected_status {
|
||||
assert_eq!(status.status, expected_status);
|
||||
}
|
||||
assert_eq!(status.installation_id, TEST_INSTALLATION_ID);
|
||||
assert_eq!(status.environment_id.as_deref(), expected_environment_id);
|
||||
}
|
||||
|
||||
@@ -177,10 +173,7 @@ async fn remote_control_transport_manages_virtual_clients_and_routes_messages()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -456,10 +449,7 @@ async fn remote_control_transport_reconnects_after_disconnect() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -538,10 +528,7 @@ async fn remote_control_start_allows_remote_control_invalid_url_when_disabled()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url: "https://internal.example.com/backend-api/".to_string(),
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
"https://internal.example.com/backend-api/".to_string(),
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -577,10 +564,7 @@ async fn remote_control_start_allows_missing_auth_when_enabled() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
auth_manager,
|
||||
transport_event_tx,
|
||||
@@ -612,10 +596,7 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -630,7 +611,6 @@ async fn remote_control_start_reports_missing_state_db_as_disabled_when_enabled(
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -665,10 +645,7 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -695,7 +672,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
},
|
||||
)
|
||||
@@ -706,7 +682,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
},
|
||||
)
|
||||
@@ -723,7 +698,6 @@ async fn remote_control_handle_set_enabled_stops_and_restarts_connections() {
|
||||
&mut status_rx,
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
},
|
||||
)
|
||||
@@ -755,10 +729,7 @@ async fn remote_control_transport_clears_outgoing_buffer_when_backend_acks() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -933,10 +904,7 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
let expected_server_name = gethostname().to_string_lossy().trim().to_string();
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_state_runtime(&codex_home).await),
|
||||
remote_control_auth_manager(),
|
||||
transport_event_tx,
|
||||
@@ -961,12 +929,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
enroll_request.headers.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
|
||||
Some(&"account_id".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
enroll_request
|
||||
.headers
|
||||
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
|
||||
Some(&TEST_INSTALLATION_ID.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
serde_json::from_str::<serde_json::Value>(&enroll_request.body)
|
||||
.expect("enroll body should deserialize"),
|
||||
@@ -975,7 +937,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
"os": std::env::consts::OS,
|
||||
"arch": std::env::consts::ARCH,
|
||||
"app_server_version": env!("CARGO_PKG_VERSION"),
|
||||
"installation_id": TEST_INSTALLATION_ID,
|
||||
})
|
||||
);
|
||||
respond_with_json(
|
||||
@@ -1006,12 +967,6 @@ async fn remote_control_http_mode_enrolls_before_connecting() {
|
||||
.get(REMOTE_CONTROL_ACCOUNT_ID_HEADER),
|
||||
Some(&"account_id".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
handshake_request
|
||||
.headers
|
||||
.get(REMOTE_CONTROL_INSTALLATION_ID_HEADER),
|
||||
Some(&TEST_INSTALLATION_ID.to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
handshake_request.headers.get("x-codex-server-id"),
|
||||
Some(&"srv_e_test".to_string())
|
||||
@@ -1173,10 +1128,7 @@ async fn remote_control_http_mode_reuses_persisted_enrollment_before_reenrolling
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
@@ -1244,10 +1196,7 @@ async fn remote_control_stdio_mode_waits_for_client_name_before_connecting() {
|
||||
let (app_server_client_name_tx, app_server_client_name_rx) = oneshot::channel::<String>();
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
@@ -1306,10 +1255,7 @@ async fn remote_control_waits_for_account_id_before_enrolling() {
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, _remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(state_db.clone()),
|
||||
auth_manager,
|
||||
transport_event_tx,
|
||||
@@ -1392,10 +1338,7 @@ async fn remote_control_http_mode_clears_stale_persisted_enrollment_after_404()
|
||||
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
|
||||
let shutdown_token = CancellationToken::new();
|
||||
let (remote_task, remote_handle) = start_remote_control(
|
||||
RemoteControlStartConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(state_db.clone()),
|
||||
remote_control_auth_manager_with_home(&codex_home),
|
||||
transport_event_tx,
|
||||
|
||||
@@ -19,6 +19,7 @@ use super::segment::ClientSegmentObservation;
|
||||
use super::segment::ClientSegmentReassembler;
|
||||
use super::segment::REMOTE_CONTROL_SEGMENT_MAX_BYTES;
|
||||
use super::segment::split_server_envelope_for_transport;
|
||||
use axum::http::HeaderValue;
|
||||
use base64::Engine;
|
||||
use codex_app_server_protocol::RemoteControlConnectionStatus;
|
||||
use codex_app_server_protocol::RemoteControlStatusChangedNotification;
|
||||
@@ -47,7 +48,6 @@ use tokio_tungstenite::WebSocketStream;
|
||||
use tokio_tungstenite::connect_async;
|
||||
use tokio_tungstenite::tungstenite;
|
||||
use tokio_tungstenite::tungstenite::client::IntoClientRequest;
|
||||
use tokio_tungstenite::tungstenite::http::HeaderValue;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
@@ -55,7 +55,6 @@ use tracing::warn;
|
||||
|
||||
pub(super) const REMOTE_CONTROL_PROTOCOL_VERSION: &str = "3";
|
||||
pub(super) const REMOTE_CONTROL_ACCOUNT_ID_HEADER: &str = "chatgpt-account-id";
|
||||
pub(super) const REMOTE_CONTROL_INSTALLATION_ID_HEADER: &str = "x-codex-installation-id";
|
||||
const REMOTE_CONTROL_SUBSCRIBE_CURSOR_HEADER: &str = "x-codex-subscribe-cursor";
|
||||
const REMOTE_CONTROL_WEBSOCKET_PING_INTERVAL: std::time::Duration =
|
||||
std::time::Duration::from_secs(10);
|
||||
@@ -215,7 +214,6 @@ impl WebsocketState {
|
||||
|
||||
pub(crate) struct RemoteControlWebsocket {
|
||||
remote_control_url: String,
|
||||
installation_id: String,
|
||||
remote_control_target: Option<RemoteControlTarget>,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
@@ -231,12 +229,6 @@ pub(crate) struct RemoteControlWebsocket {
|
||||
enabled_rx: watch::Receiver<bool>,
|
||||
}
|
||||
|
||||
pub(crate) struct RemoteControlWebsocketConfig {
|
||||
pub(crate) remote_control_url: String,
|
||||
pub(crate) installation_id: String,
|
||||
pub(crate) remote_control_target: Option<RemoteControlTarget>,
|
||||
}
|
||||
|
||||
enum ConnectOutcome {
|
||||
Connected(Box<WebSocketStream<MaybeTlsStream<TcpStream>>>),
|
||||
Disabled,
|
||||
@@ -262,7 +254,6 @@ impl RemoteControlStatusPublisher {
|
||||
self.tx.send_if_modified(|status| {
|
||||
let next_status = RemoteControlStatusChangedNotification {
|
||||
status: connection_status,
|
||||
installation_id: status.installation_id.clone(),
|
||||
environment_id: if connection_status == RemoteControlConnectionStatus::Disabled {
|
||||
None
|
||||
} else {
|
||||
@@ -285,7 +276,6 @@ impl RemoteControlStatusPublisher {
|
||||
}
|
||||
let next_status = RemoteControlStatusChangedNotification {
|
||||
status: status.status,
|
||||
installation_id: status.installation_id.clone(),
|
||||
environment_id,
|
||||
};
|
||||
if *status == next_status {
|
||||
@@ -300,14 +290,14 @@ impl RemoteControlStatusPublisher {
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(super) struct RemoteControlConnectOptions<'a> {
|
||||
installation_id: &'a str,
|
||||
subscribe_cursor: Option<&'a str>,
|
||||
app_server_client_name: Option<&'a str>,
|
||||
}
|
||||
|
||||
impl RemoteControlWebsocket {
|
||||
pub(crate) fn new(
|
||||
config: RemoteControlWebsocketConfig,
|
||||
remote_control_url: String,
|
||||
remote_control_target: Option<RemoteControlTarget>,
|
||||
state_db: Option<Arc<StateRuntime>>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
channels: RemoteControlChannels,
|
||||
@@ -325,9 +315,8 @@ impl RemoteControlWebsocket {
|
||||
let auth_recovery = auth_manager.unauthorized_recovery();
|
||||
|
||||
Self {
|
||||
remote_control_url: config.remote_control_url,
|
||||
installation_id: config.installation_id,
|
||||
remote_control_target: config.remote_control_target,
|
||||
remote_control_url,
|
||||
remote_control_target,
|
||||
state_db,
|
||||
auth_manager,
|
||||
status_publisher: channels.status_publisher,
|
||||
@@ -453,7 +442,6 @@ impl RemoteControlWebsocket {
|
||||
loop {
|
||||
let subscribe_cursor = self.state.lock().await.subscribe_cursor.clone();
|
||||
let connect_options = RemoteControlConnectOptions {
|
||||
installation_id: &self.installation_id,
|
||||
subscribe_cursor: subscribe_cursor.as_deref(),
|
||||
app_server_client_name,
|
||||
};
|
||||
@@ -930,7 +918,6 @@ fn build_remote_control_websocket_request(
|
||||
websocket_url: &str,
|
||||
enrollment: &RemoteControlEnrollment,
|
||||
auth: &RemoteControlConnectionAuth,
|
||||
installation_id: &str,
|
||||
subscribe_cursor: Option<&str>,
|
||||
) -> io::Result<tungstenite::http::Request<()>> {
|
||||
let mut request = websocket_url.into_client_request().map_err(|err| {
|
||||
@@ -955,11 +942,6 @@ fn build_remote_control_websocket_request(
|
||||
auth.auth_provider.add_auth_headers(&mut auth_headers);
|
||||
headers.extend(auth_headers);
|
||||
set_remote_control_header(headers, REMOTE_CONTROL_ACCOUNT_ID_HEADER, &auth.account_id)?;
|
||||
set_remote_control_header(
|
||||
headers,
|
||||
REMOTE_CONTROL_INSTALLATION_ID_HEADER,
|
||||
installation_id,
|
||||
)?;
|
||||
if let Some(subscribe_cursor) = subscribe_cursor {
|
||||
set_remote_control_header(
|
||||
headers,
|
||||
@@ -1084,12 +1066,7 @@ pub(super) async fn connect_remote_control_websocket(
|
||||
"creating new remote control enrollment: websocket_url={}, enroll_url={}, account_id={}",
|
||||
remote_control_target.websocket_url, remote_control_target.enroll_url, auth.account_id
|
||||
);
|
||||
let new_enrollment = match enroll_remote_control_server(
|
||||
remote_control_target,
|
||||
&auth,
|
||||
connect_options.installation_id,
|
||||
)
|
||||
.await
|
||||
let new_enrollment = match enroll_remote_control_server(remote_control_target, &auth).await
|
||||
{
|
||||
Ok(new_enrollment) => new_enrollment,
|
||||
Err(err)
|
||||
@@ -1133,7 +1110,6 @@ pub(super) async fn connect_remote_control_websocket(
|
||||
&remote_control_target.websocket_url,
|
||||
enrollment_ref,
|
||||
&auth,
|
||||
connect_options.installation_id,
|
||||
connect_options.subscribe_cursor,
|
||||
)?;
|
||||
|
||||
@@ -1271,7 +1247,6 @@ mod tests {
|
||||
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
#[cfg(not(windows))]
|
||||
const TEST_HTTP_ACCEPT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
const TEST_INSTALLATION_ID: &str = "11111111-1111-4111-8111-111111111111";
|
||||
|
||||
fn remote_control_status_channel() -> (
|
||||
RemoteControlStatusPublisher,
|
||||
@@ -1279,7 +1254,6 @@ mod tests {
|
||||
) {
|
||||
let (status_tx, status_rx) = watch::channel(RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
});
|
||||
(RemoteControlStatusPublisher::new(status_tx), status_rx)
|
||||
@@ -1385,7 +1359,6 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1403,7 +1376,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1463,7 +1435,6 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1477,7 +1448,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_test".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1545,7 +1515,6 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1598,7 +1567,6 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1646,7 +1614,6 @@ mod tests {
|
||||
&mut auth_recovery,
|
||||
&mut enrollment,
|
||||
RemoteControlConnectOptions {
|
||||
installation_id: TEST_INSTALLATION_ID,
|
||||
subscribe_cursor: None,
|
||||
app_server_client_name: None,
|
||||
},
|
||||
@@ -1665,7 +1632,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -1690,11 +1656,8 @@ mod tests {
|
||||
let shutdown_token = shutdown_token.clone();
|
||||
async move {
|
||||
RemoteControlWebsocket::new(
|
||||
RemoteControlWebsocketConfig {
|
||||
remote_control_url,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
remote_control_target: Some(remote_control_target),
|
||||
},
|
||||
remote_control_url,
|
||||
Some(remote_control_target),
|
||||
/*state_db*/ None,
|
||||
remote_control_auth_manager(),
|
||||
RemoteControlChannels {
|
||||
@@ -1738,7 +1701,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connecting,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_first".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1759,7 +1721,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: Some("env_first".to_string()),
|
||||
}
|
||||
);
|
||||
@@ -1773,7 +1734,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Connected,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
@@ -1788,7 +1748,6 @@ mod tests {
|
||||
status_rx.borrow().clone(),
|
||||
RemoteControlStatusChangedNotification {
|
||||
status: RemoteControlConnectionStatus::Disabled,
|
||||
installation_id: TEST_INSTALLATION_ID.to_string(),
|
||||
environment_id: None,
|
||||
}
|
||||
);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user