Mirror of https://github.com/openai/codex.git, synced 2026-05-08 13:26:34 +00:00.

Compare commits: `dev/rasmus` ... `codex/fix-` (133 commits)
Commit SHAs in this range:

9d9a2d451f, 3377afd84a, de2ccf9473, 640a1b23ea, 9e26613657, 480ef646aa,
3afb185a4f, 4c68bd728f, a036584104, bc5a1b961e, c6bcd27832, 273c2e21a9,
01de13b7e6, 0670d8971a, f6797c3ac6, 6138063656, ccec84b148, 4e0cf945b7,
087c9c1f1f, 5b7d6f5c4f, 0156b1e61f, 5e737372ee, a61c785040, 598bbcdb58,
21e19912e0, 5a79dfab7c, 1b74360365, 0e8d6b8765, a9e5c34083, fa127be25f,
54d1401170, b7c0f26910, 431ebeaef7, fd36838cf3, 803705f795, 7d72fc8f53,
b985768dc1, 0a32c8b396, 341550c275, 92fb848065, fc2a69107c, bf38def44e,
5ba908d179, b7e5588d18, 6a8df2b61d, c08177f7d0, 2307aa8d98, af95662a70,
4e05f3053c, 7e8594fc19, a3350de855, 2f3b5ed81a, 755880ef9c, c5a495c2cd,
dcd139b7c4, e64c765673, e903d000b0, 739ab6bc51, 30c5c768de, 2be9fd5a93,
0f40261e86, 4ded800374, 5c30d79afb, 798de22637, c5e2921e1d, 4b55979755,
52c06b8759, 0bd25ab374, 850f035b8c, 277186ec85, 215d5a8f7c, 85c1500569,
e5709db6dc, cafe717dca, c2084552d9, 2009f6e894, 4ed22fc7d2, 48dd7b58f0,
0e2300c02c, 6c51bf0c7c, bb83eec825, f431ec12c9, 79b4f691a6, 5d314f324c,
01ab25dbb5, f8c527e529, 4f1d5f00f0, a6ca39c630, 523e4aa8e3, 0ccd659b4b,
8033b6a449, 0d8cdc0510, c3e60849e5, ad57a3fee2, 1f304dd1f2, 2cb8746457,
35bc6e3d01, 0bda8161a2, 4c58e64f08, ba159cbc79, dda8199b73, 9c3abcd46c,
2a020f1a0a, deaa307fb2, 4d7ce3447d, fed0a8f4fa, ac2bffa443, 87bc72408c,
355c40ad7e, 5591912f0b, 4e30281a13, 9881dc7306, d54493ba1c, 9aaa5d9358,
0c785598b3, f41306b4f3, bce74c70ce, 88f300d74d, 022f81df1f, 706490ab1b,
6e838a19fa, a2db6f97fb, f5497f4d65, f1c963d77e, 4167628622, 32ace07ac5,
6c874f9b34, 0ee737cea6, 8a559e7938, cf02e9c052, 1c3287125f, 789f387982,
d19de6d150
.codespellrc

@@ -1,6 +1,6 @@
 [codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new,*meriyah.umd.min.js
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
 ignore-words-list = ratatui,ser,iTerm,iterm2,iterm,te,TE,PASE,SEH
.codex/skills/codex-issue-digest/SKILL.md (new file, 102 lines)

@@ -0,0 +1,102 @@
---
name: codex-issue-digest
description: Run a GitHub issue digest for openai/codex by feature-area labels, all areas, and configurable time windows. Use when asked to summarize recent Codex bug reports or enhancement requests, especially for owner-specific labels such as tui, exec, app, or similar areas.
---

# Codex Issue Digest

## Objective

Produce a concise, insight-oriented digest of `openai/codex` issues for the requested feature-area labels over the previous 24 hours by default. Honor a different duration when the user asks for one, for example "past week" or "48 hours".

Include only issues that currently have `bug` or `enhancement` plus at least one requested owner label. If the user asks for all areas or all labels, collect `bug`/`enhancement` issues across all labels.

## Inputs

- Feature-area labels, for example `tui exec`
- `all areas` / `all labels` to scan all current feature labels
- Optional repo override, default `openai/codex`
- Optional time window, default previous 24 hours; examples: `48h`, `7d`, `1w`, `past week`

## Workflow

1. Run the collector from a current Codex repo checkout:

   ```bash
   python3 .codex/skills/codex-issue-digest/scripts/collect_issue_digest.py --labels tui exec --window-hours 24
   ```

   Use `--window "past week"` or `--window-hours 168` when the user asks for a non-default duration. Use `--all-labels` when the user says all areas or all labels.

2. Use the JSON as the source of truth; see the sketch after this list. It includes new issues, new issue comments, new reactions/upvotes, current labels, current reaction counts, model-ready `summary_inputs`, and detailed `digest_rows`.
3. Start the report with `## Summary`, then `## Details`.
4. In `## Summary`, write skim-first headlines:
   - Lead with the most important fact or judgment. Do not start with aggregate counts unless the aggregate itself is the story.
   - Make the first 1-3 bullets answer "what should owners pay attention to right now?"
   - Bold only the critical insight phrase in each high-priority bullet, for example `**GPT-5.5 context is the dominant pressure point**`.
   - Keep summary bullets short enough to scan in about 20 seconds.
   - Put broad stats near the end of the summary, after the owner-relevant takeaways.
   - Say clearly when there is nothing significant to act on.
   - Call out any areas or themes receiving lots of user attention.
   - Cluster and name themes yourself from `summary_inputs`; the collector intentionally does not hard-code issue categories.
   - Use a cluster only when the issues genuinely share the same product problem. If several issues merely share a broad platform or label, describe them individually.
   - Do not omit a repeated theme just because its individual issues fall below the details table cutoff. Several similar reports should be called out as a repeated customer concern.
   - For single-issue rows, summarize the concern directly instead of calling it a cluster.
   - Use inline numbered issue links from each relevant row's `ref_markdown`.
5. In `## Details`, include a compact table only when useful:
   - Prefer rows from `digest_rows`; include a `Refs` column using each row's `ref_markdown`.
   - Keep the table short; omit low-signal rows when the summary already covers them.
   - Use compact columns such as marker, area, type, description, interactions, and refs.
   - The `Description` cell should be a short owner-readable phrase. Use row `description`, title, body excerpts, and recent comments, but do not mechanically copy the raw GitHub issue title when it contains incidental details.
   - Add a clear quiet/no-concern sentence when there is no meaningful signal.
6. Use the JSON `attention_marker` exactly. It is empty for normal rows, `🔥` for elevated rows, and `🔥🔥` for very high-attention rows. The actual cutoffs are in `attention_thresholds`.
7. Use inline numbered references where a row or bullet points to issues, for example `Compaction bugs [1](https://github.com/openai/codex/issues/123), [2](https://github.com/openai/codex/issues/456)`. Do not add a separate footnotes section.
8. Label `interactions` as `Interactions`; it counts posts/comments/reactions during the requested window, not unique people.
9. Mention the collector `script_version`, repo checkout `git_head`, and time window in the digest footer or final line.
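As a minimal sketch of consuming that JSON, the `## Details` table can be rendered straight from `digest_rows` (the `render_details.py` name and the exact column order here are illustrative, not part of the collector):

```python
#!/usr/bin/env python3
"""Render a `## Details` table from collector JSON on stdin (illustrative sketch)."""
import json
import sys

digest = json.load(sys.stdin)

print("| Marker | Area | Type | Description | Interactions | Refs |")
print("|---|---|---|---|---|---|")
for row in digest["digest_rows"]:
    # Each row is already table-ready, including a pre-built ref_markdown link.
    print(
        f"| {row['marker']} | {row['area']} | {row['kind']} | "
        f"{row['description']} | {row['interactions']} | {row['ref_markdown']} |"
    )
```

For example: `python3 collect_issue_digest.py --labels tui | python3 render_details.py`.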
## Reaction Handling

The collector uses GitHub reactions endpoints, which include `created_at`, to count reactions created during the digest window for hydrated issues. It reports both in-window reaction counts and current reaction totals. Treat current reaction totals as standing engagement, and treat `new_reactions` / `new_upvotes` as windowed activity.

By default, the collector fetches issue comments with `since=<window start>` and caps the number of comment pages per issue. This keeps very long historical threads from dominating a digest run and focuses the report on recent posts. Use `--fetch-all-comments` only when exhaustive comment history is more important than runtime.

GitHub issue search is still seeded by issue `updated_at`, so a purely reaction-only issue may be missed if reactions do not bump `updated_at`. Covering every reaction-only case would require either a persisted snapshot store or a broader scan of labeled issues.
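To make the split concrete, here is a toy check of the windowed rule (an event counts only when its `created_at` falls in `[since, until)`), using made-up reaction events:

```python
from datetime import datetime


def parse(ts: str) -> datetime:
    return datetime.fromisoformat(ts.replace("Z", "+00:00"))


since, until = parse("2026-04-25T00:00:00Z"), parse("2026-04-26T00:00:00Z")
reaction_events = [
    {"content": "+1", "created_at": "2026-04-25T03:00:00Z"},  # counted: in window
    {"content": "+1", "created_at": "2026-04-20T03:00:00Z"},  # standing total only
]

new_upvotes = sum(
    1
    for event in reaction_events
    if event["content"] == "+1" and since <= parse(event["created_at"]) < until
)
assert new_upvotes == 1
```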
## Attention Markers

The collector scales attention markers by the requested time window. The baseline is 10 human user interactions for 🔥 and 20 for 🔥🔥 over 24 hours; longer or shorter windows scale those cutoffs linearly and round up. For example, a one-week report uses 70 and 140 interactions.

Human user interactions are human-authored new issue posts, human-authored new comments, and human reactions created during the window, including upvotes. Bot posts and bot reactions are excluded. In prose, explain this as high user interaction rather than naming the emoji.
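A worked check of that arithmetic, mirroring the collector's `attention_thresholds_for_window`:

```python
import math

BASE_WINDOW_HOURS = 24.0  # baseline window for the 10/20 cutoffs
ELEVATED_AT_BASE, VERY_HIGH_AT_BASE = 10, 20


def thresholds(window_hours: float) -> tuple[int, int]:
    scale = window_hours / BASE_WINDOW_HOURS
    elevated = max(1, math.ceil(ELEVATED_AT_BASE * scale))
    very_high = max(elevated + 1, math.ceil(VERY_HIGH_AT_BASE * scale))
    return elevated, very_high


assert thresholds(24) == (10, 20)    # default daily digest
assert thresholds(168) == (70, 140)  # one-week report
assert thresholds(12) == (5, 10)     # shorter windows scale down too
```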
## Freshness

The automation should run from a repo checkout that contains this skill. For shared daily use, prefer one of these patterns:

- Run the automation in a checkout that is refreshed before the automation starts, for example with `git pull --ff-only`.
- If the automation cannot safely mutate the checkout, have it report the current `git_head` from the collector output so readers know which skill/script version produced the digest.

## Sample Owner Prompt

```text
Use $codex-issue-digest to run the Codex issue digest for labels tui and exec over the previous 24 hours.
```

```text
Use $codex-issue-digest to run the Codex issue digest for all areas over the past week.
```

## Validation

Dry run the collector against recent issues:

```bash
python3 .codex/skills/codex-issue-digest/scripts/collect_issue_digest.py --labels tui exec --window-hours 24
```

```bash
python3 .codex/skills/codex-issue-digest/scripts/collect_issue_digest.py --all-labels --window "past week" --limit-issues 10
```

Run the focused script tests:

```bash
pytest .codex/skills/codex-issue-digest/scripts/test_collect_issue_digest.py
```
.codex/skills/codex-issue-digest/agents/openai.yaml (new file, 4 lines)

@@ -0,0 +1,4 @@
interface:
  display_name: "Codex Issue Digest"
  short_description: "Summarize Codex issues by labels or all areas"
  default_prompt: "Use $codex-issue-digest to run the Codex issue digest for labels tui and exec over the previous 24 hours."
.codex/skills/codex-issue-digest/scripts/collect_issue_digest.py (executable file, 988 lines)

@@ -0,0 +1,988 @@
#!/usr/bin/env python3
"""Collect recent openai/codex issue activity for owner-focused digests."""

import argparse
import json
import math
import re
import subprocess
import sys
from datetime import datetime, timedelta, timezone
from pathlib import Path
from urllib.parse import quote

SCRIPT_VERSION = 2
QUALIFYING_KIND_LABELS = ("bug", "enhancement")
REACTION_KEYS = ("+1", "-1", "laugh", "hooray", "confused", "heart", "rocket", "eyes")
BASE_ATTENTION_WINDOW_HOURS = 24.0
ONE_ATTENTION_INTERACTION_THRESHOLD = 10
TWO_ATTENTION_INTERACTION_THRESHOLD = 20
ALL_LABEL_PHRASES = {"all", "all areas", "all labels", "all-areas", "all-labels", "*"}


class GhCommandError(RuntimeError):
    pass


def parse_args():
    parser = argparse.ArgumentParser(
        description="Collect recent GitHub issue activity for a Codex owner digest."
    )
    parser.add_argument(
        "--repo", default="openai/codex", help="OWNER/REPO, default openai/codex"
    )
    parser.add_argument(
        "--labels",
        nargs="+",
        default=[],
        help="Feature-area labels owned by the digest recipient, for example: tui exec",
    )
    parser.add_argument(
        "--all-labels",
        action="store_true",
        help="Collect bug/enhancement issues across all feature-area labels",
    )
    parser.add_argument(
        "--window",
        help='Lookback duration such as "24h", "7d", "1w", or "past week"',
    )
    parser.add_argument(
        "--window-hours", type=float, default=24.0, help="Lookback window in hours"
    )
    parser.add_argument(
        "--since", help="UTC ISO timestamp override for the window start"
    )
    parser.add_argument("--until", help="UTC ISO timestamp override for the window end")
    parser.add_argument(
        "--limit-issues",
        type=int,
        default=200,
        help="Maximum candidate issues to hydrate after search",
    )
    parser.add_argument(
        "--body-chars", type=int, default=1200, help="Issue body excerpt length"
    )
    parser.add_argument(
        "--comment-chars", type=int, default=900, help="Comment excerpt length"
    )
    parser.add_argument(
        "--max-comment-pages",
        type=int,
        default=3,
        help=(
            "Maximum pages of issue comments to hydrate per issue after applying the "
            "window filter. Use 0 with --fetch-all-comments for no page cap."
        ),
    )
    parser.add_argument(
        "--fetch-all-comments",
        action="store_true",
        help="Hydrate complete issue comment histories instead of only window-updated comments.",
    )
    return parser.parse_args()


def parse_timestamp(value, arg_name):
    if value is None:
        return None
    normalized = value.strip()
    if not normalized:
        return None
    if normalized.endswith("Z"):
        normalized = f"{normalized[:-1]}+00:00"
    try:
        parsed = datetime.fromisoformat(normalized)
    except ValueError as err:
        raise ValueError(f"{arg_name} must be an ISO timestamp") from err
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed.astimezone(timezone.utc)


def format_timestamp(value):
    return (
        value.astimezone(timezone.utc)
        .replace(microsecond=0)
        .isoformat()
        .replace("+00:00", "Z")
    )


def resolve_window(args):
    until = parse_timestamp(args.until, "--until") or datetime.now(timezone.utc)
    since = parse_timestamp(args.since, "--since")
    if since is None:
        hours = parse_duration_hours(getattr(args, "window", None))
        if hours is None:
            hours = getattr(args, "window_hours", 24.0)
        if hours <= 0:
            raise ValueError("window duration must be > 0")
        since = until - timedelta(hours=hours)
    if since >= until:
        raise ValueError("--since must be before --until")
    return since, until
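# Duration parsing: accepts bare aliases ("day", "week"), "past"/"last"
# prefixes, and numeric forms with h/d/w suffixes; always returns hours.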
def parse_duration_hours(value):
    if value is None:
        return None
    text = value.strip().casefold().replace("_", " ")
    if not text:
        return None
    text = re.sub(r"^(past|last)\s+", "", text)
    aliases = {
        "day": 24.0,
        "24h": 24.0,
        "week": 168.0,
        "7d": 168.0,
    }
    if text in aliases:
        return aliases[text]
    match = re.fullmatch(r"(\d+(?:\.\d+)?)\s*(h|hr|hrs|hour|hours)", text)
    if match:
        return float(match.group(1))
    match = re.fullmatch(r"(\d+(?:\.\d+)?)\s*(d|day|days)", text)
    if match:
        return float(match.group(1)) * 24.0
    match = re.fullmatch(r"(\d+(?:\.\d+)?)\s*(w|week|weeks)", text)
    if match:
        return float(match.group(1)) * 168.0
    raise ValueError(f"Unsupported duration: {value}")


def normalize_requested_labels(labels, all_labels=False):
    out = []
    seen = set()
    for raw in labels:
        for piece in raw.split(","):
            label = piece.strip()
            if not label:
                continue
            key = label.casefold()
            if key not in seen:
                out.append(label)
                seen.add(key)
    phrase = " ".join(label.casefold() for label in out)
    if all_labels or phrase in ALL_LABEL_PHRASES:
        return [], True
    if not out:
        raise ValueError(
            "At least one feature-area label is required, or use --all-labels"
        )
    return out, False


def quote_label(label):
    if re.fullmatch(r"[A-Za-z0-9_.:-]+", label):
        return f"label:{label}"
    escaped = label.replace('"', '\\"')
    return f'label:"{escaped}"'
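# One search query per (owner label, kind label) pair; in all-labels mode,
# one query per kind label only.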
def build_search_queries(
    repo, owner_labels, since, kind_labels=QUALIFYING_KIND_LABELS, all_labels=False
):
    since_date = since.date().isoformat()
    queries = []
    if all_labels:
        for kind_label in kind_labels:
            queries.append(
                " ".join(
                    [
                        f"repo:{repo}",
                        "is:issue",
                        f"updated:>={since_date}",
                        quote_label(kind_label),
                    ]
                )
            )
        return queries
    for owner_label in owner_labels:
        for kind_label in kind_labels:
            queries.append(
                " ".join(
                    [
                        f"repo:{repo}",
                        "is:issue",
                        f"updated:>={since_date}",
                        quote_label(owner_label),
                        quote_label(kind_label),
                    ]
                )
            )
    return queries


def _format_gh_error(cmd, err):
    stdout = (err.stdout or "").strip()
    stderr = (err.stderr or "").strip()
    parts = [f"GitHub CLI command failed: {' '.join(cmd)}"]
    if stdout:
        parts.append(f"stdout: {stdout}")
    if stderr:
        parts.append(f"stderr: {stderr}")
    return "\n".join(parts)


def gh_json(args):
    cmd = ["gh", *args]
    try:
        proc = subprocess.run(cmd, check=True, capture_output=True, text=True)
    except FileNotFoundError as err:
        raise GhCommandError("`gh` command not found") from err
    except subprocess.CalledProcessError as err:
        raise GhCommandError(_format_gh_error(cmd, err)) from err
    raw = proc.stdout.strip()
    if not raw:
        return None
    try:
        return json.loads(raw)
    except json.JSONDecodeError as err:
        raise GhCommandError(
            f"Failed to parse JSON from gh output for {' '.join(args)}"
        ) from err


def gh_text(args):
    cmd = ["gh", *args]
    try:
        proc = subprocess.run(cmd, check=True, capture_output=True, text=True)
    except (FileNotFoundError, subprocess.CalledProcessError):
        return ""
    return proc.stdout.strip()


def git_head():
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--short=12", "HEAD"],
            check=True,
            capture_output=True,
            text=True,
        )
    except (FileNotFoundError, subprocess.CalledProcessError):
        return None
    return proc.stdout.strip() or None


def skill_relative_path():
    try:
        return str(Path(__file__).resolve().relative_to(Path.cwd().resolve()))
    except ValueError:
        return str(Path(__file__).resolve())


def gh_api_list_paginated(endpoint, per_page=100, max_pages=None, with_metadata=False):
    items = []
    page = 1
    truncated = False
    while True:
        sep = "&" if "?" in endpoint else "?"
        page_endpoint = f"{endpoint}{sep}per_page={per_page}&page={page}"
        payload = gh_json(["api", page_endpoint])
        if payload is None:
            break
        if not isinstance(payload, list):
            raise GhCommandError(f"Unexpected paginated payload from gh api {endpoint}")
        items.extend(payload)
        if len(payload) < per_page:
            break
        if max_pages is not None and page >= max_pages:
            truncated = True
            break
        page += 1
    if with_metadata:
        return {
            "items": items,
            "truncated": truncated,
            "pages": page,
            "max_pages": max_pages,
        }
    return items
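# Merge search hits across all queries, dedupe by issue number, and keep the
# most recently updated `limit` issues for hydration.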
def search_issue_numbers(queries, limit):
    numbers = {}
    for query in queries:
        page = 1
        while True:
            payload = gh_json(
                [
                    "api",
                    "search/issues",
                    "-X",
                    "GET",
                    "-f",
                    f"q={query}",
                    "-f",
                    "per_page=100",
                    "-f",
                    f"page={page}",
                ]
            )
            if not isinstance(payload, dict):
                raise GhCommandError("Unexpected payload from GitHub issue search")
            items = payload.get("items") or []
            if not isinstance(items, list):
                raise GhCommandError("Expected search `items` to be a list")
            for item in items:
                if not isinstance(item, dict):
                    continue
                number = item.get("number")
                if isinstance(number, int):
                    numbers[number] = str(item.get("updated_at") or "")
            if len(items) < 100 or len(numbers) >= limit:
                break
            page += 1
    ordered = sorted(
        numbers, key=lambda number: (numbers[number], number), reverse=True
    )
    return ordered[:limit]


def fetch_issue(repo, number):
    payload = gh_json(["api", f"repos/{repo}/issues/{number}"])
    if not isinstance(payload, dict):
        raise GhCommandError(f"Unexpected issue payload for #{number}")
    return payload


def fetch_comments(repo, number, since=None, max_pages=None):
    endpoint = f"repos/{repo}/issues/{number}/comments"
    if since is not None:
        endpoint = f"{endpoint}?since={quote(format_timestamp(since), safe='')}"
    return gh_api_list_paginated(
        endpoint,
        max_pages=max_pages,
        with_metadata=True,
    )


def fetch_reactions_for_item(endpoint, item):
    if reaction_summary(item)["total"] <= 0:
        return []
    return gh_api_list_paginated(endpoint)


def fetch_comment_reactions(repo, comments):
    reactions_by_comment_id = {}
    for comment in comments:
        comment_id = comment.get("id")
        if comment_id in (None, ""):
            continue
        endpoint = f"repos/{repo}/issues/comments/{comment_id}/reactions"
        reactions_by_comment_id[comment_id] = fetch_reactions_for_item(
            endpoint, comment
        )
    return reactions_by_comment_id


def extract_login(user_obj):
    if isinstance(user_obj, dict):
        return str(user_obj.get("login") or "")
    return ""


def is_bot_login(login):
    return bool(login) and login.lower().endswith("[bot]")


def is_human_user(user_obj):
    login = extract_login(user_obj)
    return bool(login) and not is_bot_login(login)


def label_names(issue):
    labels = []
    for label in issue.get("labels") or []:
        if isinstance(label, dict) and label.get("name"):
            labels.append(str(label["name"]))
    return sorted(labels, key=str.casefold)


def matching_labels(labels, requested):
    labels_by_key = {label.casefold(): label for label in labels}
    return [label for label in requested if label.casefold() in labels_by_key]


def area_labels(labels):
    kind_keys = {label.casefold() for label in QUALIFYING_KIND_LABELS}
    return [label for label in labels if label.casefold() not in kind_keys]
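# Attention cutoffs scale linearly with the window length relative to the
# 24-hour baseline and round up (e.g. a one-week window uses 70/140).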
def attention_thresholds_for_window(window_hours):
    if window_hours <= 0:
        raise ValueError("window_hours must be > 0")
    window_hours = round(window_hours, 6)
    scale = window_hours / BASE_ATTENTION_WINDOW_HOURS
    elevated = max(1, math.ceil(ONE_ATTENTION_INTERACTION_THRESHOLD * scale))
    very_high = max(
        elevated + 1, math.ceil(TWO_ATTENTION_INTERACTION_THRESHOLD * scale)
    )
    return {
        "base_window_hours": BASE_ATTENTION_WINDOW_HOURS,
        "window_hours": round(window_hours, 3),
        "scale": round(scale, 3),
        "elevated": elevated,
        "very_high": very_high,
    }


def attention_level_for(user_interactions, attention_thresholds=None):
    thresholds = attention_thresholds or attention_thresholds_for_window(
        BASE_ATTENTION_WINDOW_HOURS
    )
    if user_interactions >= thresholds["very_high"]:
        return 2
    if user_interactions >= thresholds["elevated"]:
        return 1
    return 0


def attention_marker_for(user_interactions, attention_thresholds=None):
    return "🔥" * attention_level_for(user_interactions, attention_thresholds)


def reaction_summary(item):
    reactions = item.get("reactions")
    if not isinstance(reactions, dict):
        return {"total": 0, "counts": {}}
    counts = {}
    for key in REACTION_KEYS:
        value = reactions.get(key, 0)
        if isinstance(value, int) and value:
            counts[key] = value
    total = reactions.get("total_count")
    if not isinstance(total, int):
        total = sum(counts.values())
    return {"total": total, "counts": counts}
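# Count reaction events whose created_at falls inside [since, until),
# skipping bot reactions; "+1" events are also surfaced as upvotes.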
def reaction_event_summary(reactions, since, until):
    counts = {}
    total = 0
    for reaction in reactions or []:
        if not isinstance(reaction, dict):
            continue
        if not is_in_window(str(reaction.get("created_at") or ""), since, until):
            continue
        if not is_human_user(reaction.get("user")):
            continue
        content = str(reaction.get("content") or "")
        if not content:
            continue
        counts[content] = counts.get(content, 0) + 1
        total += 1
    return {
        "total": total,
        "counts": counts,
        "upvotes": counts.get("+1", 0),
    }


def compact_text(value, limit):
    text = re.sub(r"\s+", " ", str(value or "")).strip()
    if limit <= 0:
        return ""
    if len(text) <= limit:
        return text
    return f"{text[: max(limit - 1, 0)].rstrip()}..."


def clean_title_for_description(title):
    cleaned = re.sub(r"\s+", " ", str(title or "")).strip()
    cleaned = re.sub(
        r"^(codex(?: desktop| app|\.app| cli)?|desktop|windows codex app)\s*[:,-]\s*",
        "",
        cleaned,
        flags=re.IGNORECASE,
    )
    cleaned = re.sub(r"^on windows,\s*", "Windows: ", cleaned, flags=re.IGNORECASE)
    cleaned = cleaned.strip(" -:;")
    return compact_text(cleaned, 80) or "Issue needs owner review"


def issue_description(issue):
    return clean_title_for_description(issue.get("title"))


def is_in_window(timestamp, since, until):
    parsed = parse_timestamp(timestamp, "timestamp")
    if parsed is None:
        return False
    return since <= parsed < until


def summarize_comment(
    comment, comment_chars, reaction_events=None, since=None, until=None
):
    reactions = reaction_summary(comment)
    new_reactions = (
        reaction_event_summary(reaction_events, since, until)
        if since is not None and until is not None
        else {"total": 0, "counts": {}, "upvotes": 0}
    )
    human_user_interaction = is_human_user(comment.get("user"))
    return {
        "id": comment.get("id"),
        "author": extract_login(comment.get("user")),
        "author_association": str(comment.get("author_association") or ""),
        "created_at": str(comment.get("created_at") or ""),
        "updated_at": str(comment.get("updated_at") or ""),
        "url": str(comment.get("html_url") or ""),
        "human_user_interaction": human_user_interaction,
        "reactions": reactions["counts"],
        "reaction_total": reactions["total"],
        "new_reactions": new_reactions["total"],
        "new_upvotes": new_reactions["upvotes"],
        "new_reaction_counts": new_reactions["counts"],
        "body_excerpt": compact_text(comment.get("body"), comment_chars),
    }
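# Core filter + summary step: returns None unless the issue carries a kind
# label (bug/enhancement) plus a requested owner label and was updated in
# the window; otherwise builds the full per-issue digest record.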
def summarize_issue(
    issue,
    comments,
    requested_labels,
    since,
    until,
    body_chars,
    comment_chars,
    issue_reaction_events=None,
    comment_reactions_by_id=None,
    all_labels=False,
    comments_hydration=None,
    attention_thresholds=None,
):
    labels = label_names(issue)
    labels_by_key = {label.casefold() for label in labels}
    kind_labels = [
        label for label in QUALIFYING_KIND_LABELS if label.casefold() in labels_by_key
    ]
    if all_labels:
        owner_labels = area_labels(labels) or ["unlabeled"]
    else:
        owner_labels = matching_labels(labels, requested_labels)
    if not kind_labels or not owner_labels:
        return None

    updated_at = str(issue.get("updated_at") or "")
    if not is_in_window(updated_at, since, until):
        return None

    new_issue = is_in_window(str(issue.get("created_at") or ""), since, until)
    comment_reactions_by_id = comment_reactions_by_id or {}
    new_comments = [
        summarize_comment(
            comment,
            comment_chars,
            reaction_events=comment_reactions_by_id.get(comment.get("id")),
            since=since,
            until=until,
        )
        for comment in comments
        if is_in_window(str(comment.get("created_at") or ""), since, until)
    ]
    new_comments.sort(key=lambda item: (item["created_at"], str(item["id"])))

    issue_reactions = reaction_summary(issue)
    issue_reaction_events_summary = reaction_event_summary(
        issue_reaction_events, since, until
    )
    comment_reaction_events_summary = reaction_event_summary(
        [
            reaction
            for reactions in comment_reactions_by_id.values()
            for reaction in reactions
        ],
        since,
        until,
    )
    new_reactions = (
        issue_reaction_events_summary["total"]
        + comment_reaction_events_summary["total"]
    )
    new_upvotes = (
        issue_reaction_events_summary["upvotes"]
        + comment_reaction_events_summary["upvotes"]
    )
    all_comment_reaction_total = sum(
        reaction_summary(comment)["total"] for comment in comments
    )
    new_comment_reaction_total = sum(
        comment["reaction_total"] for comment in new_comments
    )
    new_issue_user_interaction = new_issue and is_human_user(issue.get("user"))
    new_comment_user_interactions = sum(
        1 for comment in new_comments if comment["human_user_interaction"]
    )
    user_interactions = (
        int(new_issue_user_interaction) + new_comment_user_interactions + new_reactions
    )
    attention_level = attention_level_for(user_interactions, attention_thresholds)
    attention_marker = attention_marker_for(user_interactions, attention_thresholds)
    updated_without_visible_new_post = (
        not new_issue and not new_comments and new_reactions == 0
    )

    engagement_score = (
        len(new_comments) * 3
        + new_reactions
        + issue_reactions["total"]
        + new_comment_reaction_total
        + min(int(issue.get("comments") or len(comments) or 0), 10)
    )

    return {
        "number": issue.get("number"),
        "title": str(issue.get("title") or ""),
        "description": issue_description(issue),
        "url": str(issue.get("html_url") or ""),
        "state": str(issue.get("state") or ""),
        "author": extract_login(issue.get("user")),
        "author_association": str(issue.get("author_association") or ""),
        "created_at": str(issue.get("created_at") or ""),
        "updated_at": updated_at,
        "labels": labels,
        "kind_labels": kind_labels,
        "owner_labels": owner_labels,
        "comments_total": int(issue.get("comments") or len(comments) or 0),
        "comments_hydration": comments_hydration
        or {
            "fetched": len(comments),
            "since": None,
            "truncated": False,
            "max_pages": None,
        },
        "issue_reactions": issue_reactions["counts"],
        "issue_reaction_total": issue_reactions["total"],
        "comment_reaction_total": all_comment_reaction_total,
        "new_comment_reaction_total": new_comment_reaction_total,
        "new_issue_reactions": issue_reaction_events_summary["total"],
        "new_issue_upvotes": issue_reaction_events_summary["upvotes"],
        "new_comment_reactions": comment_reaction_events_summary["total"],
        "new_comment_upvotes": comment_reaction_events_summary["upvotes"],
        "new_reactions": new_reactions,
        "new_upvotes": new_upvotes,
        "user_interactions": user_interactions,
        "attention": attention_level > 0,
        "attention_level": attention_level,
        "attention_marker": attention_marker,
        "engagement_score": engagement_score,
        "activity": {
            "new_issue": new_issue,
            "new_comments": len(new_comments),
            "new_human_comments": new_comment_user_interactions,
            "new_reactions": new_reactions,
            "new_upvotes": new_upvotes,
            "updated_without_visible_new_post": updated_without_visible_new_post,
        },
        "body_excerpt": compact_text(issue.get("body"), body_chars),
        "new_comments": new_comments,
    }


def count_by_label(issues, labels):
    out = {}
    for label in labels:
        matching = [issue for issue in issues if label in issue["owner_labels"]]
        out[label] = {
            "issues": len(matching),
            "new_issues": sum(
                1 for issue in matching if issue["activity"]["new_issue"]
            ),
            "new_comments": sum(
                issue["activity"]["new_comments"] for issue in matching
            ),
        }
    return out


def count_by_kind(issues):
    out = {}
    for kind in QUALIFYING_KIND_LABELS:
        matching = [issue for issue in issues if kind in issue["kind_labels"]]
        out[kind] = {
            "issues": len(matching),
            "new_issues": sum(
                1 for issue in matching if issue["activity"]["new_issue"]
            ),
            "new_comments": sum(
                issue["activity"]["new_comments"] for issue in matching
            ),
        }
    return out


def hot_items(issues, limit=8):
    ranked = sorted(
        issues,
        key=lambda issue: (
            issue["attention"],
            issue["attention_level"],
            issue["user_interactions"],
            issue["engagement_score"],
            issue["activity"]["new_comments"],
            issue["issue_reaction_total"] + issue["comment_reaction_total"],
            issue["updated_at"],
        ),
        reverse=True,
    )
    return [
        {
            "number": issue["number"],
            "title": issue["title"],
            "url": issue["url"],
            "owner_labels": issue["owner_labels"],
            "kind_labels": issue["kind_labels"],
            "attention": issue["attention"],
            "attention_level": issue["attention_level"],
            "attention_marker": issue["attention_marker"],
            "user_interactions": issue["user_interactions"],
            "new_reactions": issue["new_reactions"],
            "new_upvotes": issue["new_upvotes"],
            "engagement_score": issue["engagement_score"],
            "new_comments": issue["activity"]["new_comments"],
            "reaction_total": issue["issue_reaction_total"]
            + issue["comment_reaction_total"],
        }
        for issue in ranked[:limit]
        if issue["engagement_score"] > 0
    ]


def ranked_digest_issues(issues):
    return sorted(
        issues,
        key=lambda issue: (
            issue["attention"],
            issue["attention_level"],
            issue["user_interactions"],
            issue["engagement_score"],
            issue["activity"]["new_comments"],
            issue["updated_at"],
        ),
        reverse=True,
    )


def digest_rows(issues, limit=10, ref_map=None):
    ranked = ranked_digest_issues(issues)
    if ref_map is None:
        ref_map = {issue["number"]: ref for ref, issue in enumerate(ranked, start=1)}
    rows = []
    for issue in ranked[:limit]:
        ref = ref_map[issue["number"]]
        reaction_total = issue["issue_reaction_total"] + issue["comment_reaction_total"]
        rows.append(
            {
                "ref": ref,
                "ref_markdown": f"[{ref}]({issue['url']})",
                "marker": issue["attention_marker"],
                "attention_marker": issue["attention_marker"],
                "number": issue["number"],
                "description": issue["description"],
                "title": issue["title"],
                "url": issue["url"],
                "area": ", ".join(issue["owner_labels"]),
                "kind": ", ".join(issue["kind_labels"]),
                "state": issue["state"],
                "interactions": issue["user_interactions"],
                "user_interactions": issue["user_interactions"],
                "new_reactions": issue["new_reactions"],
                "new_upvotes": issue["new_upvotes"],
                "current_reactions": reaction_total,
            }
        )
    return rows


def issue_ref_markdown(issue, ref_map):
    ref = ref_map[issue["number"]]
    return f"[{ref}]({issue['url']})"


def summary_inputs(issues, limit=80, ref_map=None):
    ranked = ranked_digest_issues(issues)
    if ref_map is None:
        ref_map = {issue["number"]: ref for ref, issue in enumerate(ranked, start=1)}
    rows = []
    for issue in ranked[:limit]:
        rows.append(
            {
                "ref": ref_map[issue["number"]],
                "ref_markdown": issue_ref_markdown(issue, ref_map),
                "number": issue["number"],
                "title": issue["title"],
                "description": issue["description"],
                "url": issue["url"],
                "labels": issue["labels"],
                "owner_labels": issue["owner_labels"],
                "kind_labels": issue["kind_labels"],
                "state": issue.get("state", ""),
                "attention_marker": issue.get("attention_marker", ""),
                "interactions": issue["user_interactions"],
                "new_comments": issue["activity"].get("new_comments", 0),
                "new_reactions": issue.get("new_reactions", 0),
                "new_upvotes": issue.get("new_upvotes", 0),
                "current_reactions": issue.get("issue_reaction_total", 0)
                + issue.get("comment_reaction_total", 0),
            }
        )
    return rows
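# End-to-end pipeline: resolve the window, search candidate issues, hydrate
# each one (comments and reactions), then assemble totals, rankings, and
# model-ready summary inputs into a single JSON document.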
def collect_digest(args):
    since, until = resolve_window(args)
    window_hours = (until - since).total_seconds() / 3600
    attention_thresholds = attention_thresholds_for_window(window_hours)
    requested_labels, all_labels = normalize_requested_labels(
        args.labels, all_labels=args.all_labels
    )
    queries = build_search_queries(
        args.repo, requested_labels, since, all_labels=all_labels
    )
    numbers = search_issue_numbers(queries, args.limit_issues)
    gh_version_output = gh_text(["--version"])

    issues = []
    max_comment_pages = None if args.max_comment_pages <= 0 else args.max_comment_pages
    for number in numbers:
        issue = fetch_issue(args.repo, number)
        comments_since = None if args.fetch_all_comments else since
        comments_payload = fetch_comments(
            args.repo,
            number,
            since=comments_since,
            max_pages=max_comment_pages,
        )
        comments = comments_payload["items"]
        issue_reaction_events = fetch_reactions_for_item(
            f"repos/{args.repo}/issues/{number}/reactions", issue
        )
        comment_reactions_by_id = fetch_comment_reactions(args.repo, comments)
        comments_hydration = {
            "fetched": len(comments),
            "total": int(issue.get("comments") or len(comments) or 0),
            "since": format_timestamp(comments_since) if comments_since else None,
            "truncated": comments_payload["truncated"],
            "max_pages": comments_payload["max_pages"],
            "fetch_all_comments": args.fetch_all_comments,
        }
        summary = summarize_issue(
            issue,
            comments,
            requested_labels,
            since,
            until,
            args.body_chars,
            args.comment_chars,
            issue_reaction_events=issue_reaction_events,
            comment_reactions_by_id=comment_reactions_by_id,
            all_labels=all_labels,
            comments_hydration=comments_hydration,
            attention_thresholds=attention_thresholds,
        )
        if summary is not None:
            issues.append(summary)

    issues.sort(
        key=lambda issue: (issue["updated_at"], int(issue["number"] or 0)), reverse=True
    )
    totals = {
        "candidate_issues": len(numbers),
        "included_issues": len(issues),
        "new_issues": sum(1 for issue in issues if issue["activity"]["new_issue"]),
        "issues_with_new_comments": sum(
            1 for issue in issues if issue["activity"]["new_comments"] > 0
        ),
        "new_comments": sum(issue["activity"]["new_comments"] for issue in issues),
        "comments_fetched": sum(
            issue["comments_hydration"]["fetched"] for issue in issues
        ),
        "issues_with_truncated_comment_hydration": sum(
            1 for issue in issues if issue["comments_hydration"]["truncated"]
        ),
        "updated_without_visible_new_post": sum(
            1
            for issue in issues
            if issue["activity"]["updated_without_visible_new_post"]
        ),
        "issue_reactions_current_total": sum(
            issue["issue_reaction_total"] for issue in issues
        ),
        "comment_reactions_current_total": sum(
            issue["comment_reaction_total"] for issue in issues
        ),
        "new_reactions": sum(issue["new_reactions"] for issue in issues),
        "new_upvotes": sum(issue["new_upvotes"] for issue in issues),
        "user_interactions": sum(issue["user_interactions"] for issue in issues),
    }
    ranked = ranked_digest_issues(issues)
    ref_map = {issue["number"]: ref for ref, issue in enumerate(ranked, start=1)}
    filter_label = "all" if all_labels else requested_labels

    return {
        "generated_at": format_timestamp(datetime.now(timezone.utc)),
        "source": {
            "repo": args.repo,
            "skill": "codex-issue-digest",
            "collector": skill_relative_path(),
            "script_version": SCRIPT_VERSION,
            "git_head": git_head(),
            "gh_version": gh_version_output.splitlines()[0]
            if gh_version_output
            else None,
        },
        "window": {
            "since": format_timestamp(since),
            "until": format_timestamp(until),
            "hours": round(window_hours, 3),
        },
        "attention_thresholds": attention_thresholds,
        "filters": {
            "owner_labels": filter_label,
            "all_labels": all_labels,
            "kind_labels": list(QUALIFYING_KIND_LABELS),
        },
        "collection_notes": [
            "Issues are selected when they currently have bug or enhancement plus at least one requested owner label and were updated during the window.",
            "By default, issue comments are fetched with since=window_start and a max page cap to avoid long historical threads; use --fetch-all-comments when exhaustive comment history is needed.",
            "New issue comments are filtered by comment creation time within the window from the fetched comment set.",
            "Reaction events are counted by GitHub reaction created_at timestamps for hydrated issues and fetched comments.",
            "Current reaction totals are standing engagement signals; new_reactions and new_upvotes are windowed activity.",
            "The collector does not assign semantic clusters; use summary_inputs as model-ready evidence for report-time clustering.",
            "Pure reaction-only issues may be missed if GitHub issue search does not surface them via updated_at.",
            "Issues updated during the window without a new issue body or new comment are retained because label/status edits can still be useful owner signals.",
        ],
        "totals": totals,
        "by_owner_label": count_by_label(
            issues,
            sorted(
                {area for issue in issues for area in issue["owner_labels"]},
                key=str.casefold,
            )
            if all_labels
            else requested_labels,
        ),
        "by_kind_label": count_by_kind(issues),
        "hot_items": hot_items(issues),
        "summary_inputs": summary_inputs(issues, ref_map=ref_map),
        "digest_rows": digest_rows(issues, ref_map=ref_map),
        "issues": issues,
    }


def main():
    args = parse_args()
    try:
        digest = collect_digest(args)
    except (GhCommandError, RuntimeError, ValueError) as err:
        sys.stderr.write(f"collect_issue_digest.py error: {err}\n")
        return 1
    sys.stdout.write(json.dumps(digest, indent=2, sort_keys=True) + "\n")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
.codex/skills/codex-issue-digest/scripts/test_collect_issue_digest.py (new file, 614 lines)

@@ -0,0 +1,614 @@
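# Load the collector directly from its file path; the scripts directory is
# not an importable package.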
import importlib.util
|
||||
from datetime import timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
MODULE_PATH = Path(__file__).with_name("collect_issue_digest.py")
|
||||
MODULE_SPEC = importlib.util.spec_from_file_location(
|
||||
"collect_issue_digest", MODULE_PATH
|
||||
)
|
||||
collect_issue_digest = importlib.util.module_from_spec(MODULE_SPEC)
|
||||
assert MODULE_SPEC.loader is not None
|
||||
MODULE_SPEC.loader.exec_module(collect_issue_digest)
|
||||
|
||||
|
||||
def test_build_search_queries_uses_each_owner_and_kind_label():
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T12:34:56Z", "--since")
|
||||
|
||||
queries = collect_issue_digest.build_search_queries(
|
||||
"openai/codex", ["tui", "exec"], since
|
||||
)
|
||||
|
||||
assert queries == [
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:tui label:bug",
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:tui label:enhancement",
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:exec label:bug",
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:exec label:enhancement",
|
||||
]
|
||||
|
||||
|
||||
def test_build_search_queries_can_scan_all_labels():
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T12:34:56Z", "--since")
|
||||
|
||||
queries = collect_issue_digest.build_search_queries(
|
||||
"openai/codex", [], since, all_labels=True
|
||||
)
|
||||
|
||||
assert queries == [
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:bug",
|
||||
"repo:openai/codex is:issue updated:>=2026-04-25 label:enhancement",
|
||||
]
|
||||
|
||||
|
||||
def test_normalize_requested_labels_accepts_all_area_phrases():
|
||||
assert collect_issue_digest.normalize_requested_labels(["all", "areas"]) == (
|
||||
[],
|
||||
True,
|
||||
)
|
||||
assert collect_issue_digest.normalize_requested_labels(["all-labels"]) == (
|
||||
[],
|
||||
True,
|
||||
)
|
||||
|
||||
|
||||
def test_summarize_issue_keeps_new_comments_and_reaction_signals():
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
|
||||
until = collect_issue_digest.parse_timestamp("2026-04-26T00:00:00Z", "--until")
|
||||
issue = {
|
||||
"number": 123,
|
||||
"title": "TUI does not redraw",
|
||||
"html_url": "https://github.com/openai/codex/issues/123",
|
||||
"state": "open",
|
||||
"created_at": "2026-04-24T20:00:00Z",
|
||||
"updated_at": "2026-04-25T10:00:00Z",
|
||||
"user": {"login": "alice"},
|
||||
"author_association": "NONE",
|
||||
"comments": 2,
|
||||
"body": "The terminal freezes after resize.",
|
||||
"labels": [{"name": "bug"}, {"name": "tui"}],
|
||||
"reactions": {"total_count": 3, "+1": 2, "rocket": 1},
|
||||
}
|
||||
comments = [
|
||||
{
|
||||
"id": 1,
|
||||
"created_at": "2026-04-25T11:00:00Z",
|
||||
"updated_at": "2026-04-25T11:00:00Z",
|
||||
"html_url": "https://github.com/openai/codex/issues/123#issuecomment-1",
|
||||
"user": {"login": "bob"},
|
||||
"author_association": "MEMBER",
|
||||
"body": "I can reproduce this on main.",
|
||||
"reactions": {"total_count": 4, "heart": 1, "+1": 3},
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"created_at": "2026-04-24T11:00:00Z",
|
||||
"updated_at": "2026-04-24T11:00:00Z",
|
||||
"html_url": "https://github.com/openai/codex/issues/123#issuecomment-2",
|
||||
"user": {"login": "carol"},
|
||||
"author_association": "NONE",
|
||||
"body": "Older comment.",
|
||||
"reactions": {"total_count": 1, "eyes": 1},
|
||||
},
|
||||
]
|
||||
|
||||
summary = collect_issue_digest.summarize_issue(
|
||||
issue,
|
||||
comments,
|
||||
["tui", "exec"],
|
||||
since,
|
||||
until,
|
||||
body_chars=200,
|
||||
comment_chars=200,
|
||||
)
|
||||
|
||||
assert summary == {
|
||||
"number": 123,
|
||||
"title": "TUI does not redraw",
|
||||
"description": "TUI does not redraw",
|
||||
"url": "https://github.com/openai/codex/issues/123",
|
||||
"state": "open",
|
||||
"author": "alice",
|
||||
"author_association": "NONE",
|
||||
"created_at": "2026-04-24T20:00:00Z",
|
||||
"updated_at": "2026-04-25T10:00:00Z",
|
||||
"labels": ["bug", "tui"],
|
||||
"kind_labels": ["bug"],
|
||||
"owner_labels": ["tui"],
|
||||
"comments_total": 2,
|
||||
"comments_hydration": {
|
||||
"fetched": 2,
|
||||
"since": None,
|
||||
"truncated": False,
|
||||
"max_pages": None,
|
||||
},
|
||||
"issue_reactions": {"+1": 2, "rocket": 1},
|
||||
"issue_reaction_total": 3,
|
||||
"comment_reaction_total": 5,
|
||||
"new_comment_reaction_total": 4,
|
||||
"new_issue_reactions": 0,
|
||||
"new_issue_upvotes": 0,
|
||||
"new_comment_reactions": 0,
|
||||
"new_comment_upvotes": 0,
|
||||
"new_reactions": 0,
|
||||
"new_upvotes": 0,
|
||||
"user_interactions": 1,
|
||||
"attention": False,
|
||||
"attention_level": 0,
|
||||
"attention_marker": "",
|
||||
"engagement_score": 12,
|
||||
"activity": {
|
||||
"new_issue": False,
|
||||
"new_comments": 1,
|
||||
"new_human_comments": 1,
|
||||
"new_reactions": 0,
|
||||
"new_upvotes": 0,
|
||||
"updated_without_visible_new_post": False,
|
||||
},
|
||||
"body_excerpt": "The terminal freezes after resize.",
|
||||
"new_comments": [
|
||||
{
|
||||
"id": 1,
|
||||
"author": "bob",
|
||||
"author_association": "MEMBER",
|
||||
"created_at": "2026-04-25T11:00:00Z",
|
||||
"updated_at": "2026-04-25T11:00:00Z",
|
||||
"url": "https://github.com/openai/codex/issues/123#issuecomment-1",
|
||||
"human_user_interaction": True,
|
||||
"reactions": {"+1": 3, "heart": 1},
|
||||
"reaction_total": 4,
|
||||
"new_reactions": 0,
|
||||
"new_upvotes": 0,
|
||||
"new_reaction_counts": {},
|
||||
"body_excerpt": "I can reproduce this on main.",
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def test_summarize_issue_filters_non_owner_or_non_kind_labels():
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
|
||||
until = collect_issue_digest.parse_timestamp("2026-04-26T00:00:00Z", "--until")
|
||||
base_issue = {
|
||||
"number": 1,
|
||||
"title": "Question",
|
||||
"created_at": "2026-04-25T01:00:00Z",
|
||||
"updated_at": "2026-04-25T01:00:00Z",
|
||||
"labels": [{"name": "question"}, {"name": "tui"}],
|
||||
}
|
||||
|
||||
assert (
|
||||
collect_issue_digest.summarize_issue(
|
||||
base_issue,
|
||||
[],
|
||||
["tui"],
|
||||
since,
|
||||
until,
|
||||
body_chars=100,
|
||||
comment_chars=100,
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
||||
issue_without_owner = dict(base_issue)
|
||||
issue_without_owner["labels"] = [{"name": "bug"}, {"name": "app"}]
|
||||
|
||||
assert (
|
||||
collect_issue_digest.summarize_issue(
|
||||
issue_without_owner,
|
||||
[],
|
||||
["tui"],
|
||||
since,
|
||||
until,
|
||||
body_chars=100,
|
||||
comment_chars=100,
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
||||
|
||||
def test_resolve_window_defaults_to_previous_hours():
|
||||
class Args:
|
||||
since = None
|
||||
until = "2026-04-26T12:00:00Z"
|
||||
window_hours = 24
|
||||
|
||||
since, until = collect_issue_digest.resolve_window(Args())
|
||||
|
||||
assert since.isoformat() == "2026-04-25T12:00:00+00:00"
|
||||
assert until.tzinfo == timezone.utc
|
||||
|
||||
|
||||
def test_parse_duration_hours_accepts_common_phrases():
|
||||
assert collect_issue_digest.parse_duration_hours("past week") == 168
|
||||
assert collect_issue_digest.parse_duration_hours("48h") == 48
|
||||
assert collect_issue_digest.parse_duration_hours("2 days") == 48
|
||||
assert collect_issue_digest.parse_duration_hours("1w") == 168
|
||||
|
||||
|
||||
def test_attention_thresholds_scale_by_window_length():
|
||||
one_day = collect_issue_digest.attention_thresholds_for_window(24)
|
||||
assert one_day["elevated"] == 10
|
||||
assert one_day["very_high"] == 20
|
||||
|
||||
half_day = collect_issue_digest.attention_thresholds_for_window(12)
|
||||
assert half_day["elevated"] == 5
|
||||
assert half_day["very_high"] == 10
|
||||
|
||||
week = collect_issue_digest.attention_thresholds_for_window(168)
|
||||
assert week["elevated"] == 70
|
||||
assert week["very_high"] == 140
|
||||
assert collect_issue_digest.attention_marker_for(69, week) == ""
|
||||
assert collect_issue_digest.attention_marker_for(107, week) == "🔥"
|
||||
assert collect_issue_digest.attention_marker_for(140, week) == "🔥🔥"
|
||||
|
||||
|
||||
def test_fetch_comments_uses_since_filter_and_page_cap(monkeypatch):
|
||||
calls = []
|
||||
|
||||
def fake_gh_json(args):
|
||||
calls.append(args)
|
||||
return [{"id": idx} for idx in range(100)]
|
||||
|
||||
monkeypatch.setattr(collect_issue_digest, "gh_json", fake_gh_json)
|
||||
since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
|
||||
|
||||
payload = collect_issue_digest.fetch_comments(
|
||||
"openai/codex", 123, since=since, max_pages=1
|
||||
)
|
||||
|
||||
assert len(payload["items"]) == 100
|
||||
assert payload["truncated"] is True
|
||||
assert payload["max_pages"] == 1
|
||||
assert calls == [
|
||||
[
|
||||
"api",
|
||||
"repos/openai/codex/issues/123/comments?since=2026-04-25T00%3A00%3A00Z&per_page=100&page=1",
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
def test_issue_description_prefers_title_over_body_noise():
|
||||
issue = {
|
||||
"title": "Codex.app GUI: MCP child processes not reaped after task completion",
|
||||
"body": "A later crash mention should not override the title-level symptom.",
|
||||
"labels": [{"name": "app"}, {"name": "bug"}],
|
||||
}
|
||||
|
||||
description = collect_issue_digest.issue_description(issue)
|
||||
assert "MCP child processes" in description
|
||||
assert "crash" not in description.casefold()
|
||||
|
||||
|
||||
def test_attention_markers_count_human_user_interactions():
    since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
    until = collect_issue_digest.parse_timestamp("2026-04-26T00:00:00Z", "--until")
    issue = {
        "number": 456,
        "title": "Agent context is exploding",
        "html_url": "https://github.com/openai/codex/issues/456",
        "state": "open",
        "created_at": "2026-04-25T01:00:00Z",
        "updated_at": "2026-04-25T12:00:00Z",
        "user": {"login": "alice"},
        "labels": [{"name": "bug"}, {"name": "agent"}],
    }
    comments = [
        {
            "id": idx,
            "created_at": "2026-04-25T02:00:00Z",
            "updated_at": "2026-04-25T02:00:00Z",
            "user": {"login": f"user-{idx}"},
            "body": "same here",
        }
        for idx in range(9)
    ]
    comments.append(
        {
            "id": 99,
            "created_at": "2026-04-25T02:00:00Z",
            "updated_at": "2026-04-25T02:00:00Z",
            "user": {"login": "github-actions[bot]"},
            "body": "duplicate bot note",
        }
    )

    summary = collect_issue_digest.summarize_issue(
        issue,
        comments,
        ["agent"],
        since,
        until,
        body_chars=100,
        comment_chars=100,
    )

    assert summary["user_interactions"] == 10
    assert summary["activity"]["new_human_comments"] == 9
    assert summary["attention"] is True
    assert summary["attention_level"] == 1
    assert summary["attention_marker"] == "🔥"

    issue["created_at"] = "2026-04-24T01:00:00Z"
    comments.extend(
        {
            "id": idx,
            "created_at": "2026-04-25T03:00:00Z",
            "updated_at": "2026-04-25T03:00:00Z",
            "user": {"login": f"extra-user-{idx}"},
            "body": "also seeing this",
        }
        for idx in range(11)
    )

    summary = collect_issue_digest.summarize_issue(
        issue,
        comments,
        ["agent"],
        since,
        until,
        body_chars=100,
        comment_chars=100,
    )

    assert summary["user_interactions"] == 20
    assert summary["attention_level"] == 2
    assert summary["attention_marker"] == "🔥🔥"

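# Hedged sketch of the human filter the counts above imply: bot accounts
# (logins ending in "[bot]") are dropped before interactions are tallied,
# which is why the github-actions[bot] comment does not count.
def human_comments_sketch(comments: list) -> list:
    return [c for c in comments if not c["user"]["login"].endswith("[bot]")]
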
def test_reactions_count_toward_attention_markers():
    since = collect_issue_digest.parse_timestamp("2026-04-25T00:00:00Z", "--since")
    until = collect_issue_digest.parse_timestamp("2026-04-26T00:00:00Z", "--until")
    issue = {
        "number": 789,
        "title": "Support 1M token context",
        "html_url": "https://github.com/openai/codex/issues/789",
        "state": "open",
        "created_at": "2026-04-24T01:00:00Z",
        "updated_at": "2026-04-25T12:00:00Z",
        "user": {"login": "alice"},
        "labels": [{"name": "enhancement"}, {"name": "context"}],
        "reactions": {"total_count": 20, "+1": 20},
    }
    comments = [
        {
            "id": 1,
            "created_at": "2026-04-25T02:00:00Z",
            "updated_at": "2026-04-25T02:00:00Z",
            "user": {"login": "commenter"},
            "body": "please",
            "reactions": {"total_count": 2, "+1": 2},
        }
    ]
    issue_reactions = [
        {
            "content": "+1",
            "created_at": "2026-04-25T03:00:00Z",
            "user": {"login": f"reactor-{idx}"},
        }
        for idx in range(18)
    ]
    comment_reactions_by_id = {
        1: [
            {
                "content": "heart",
                "created_at": "2026-04-25T04:00:00Z",
                "user": {"login": "human-reactor"},
            },
            {
                "content": "+1",
                "created_at": "2026-04-25T04:00:00Z",
                "user": {"login": "github-actions[bot]"},
            },
        ]
    }

    summary = collect_issue_digest.summarize_issue(
        issue,
        comments,
        ["context"],
        since,
        until,
        body_chars=100,
        comment_chars=100,
        issue_reaction_events=issue_reactions,
        comment_reactions_by_id=comment_reactions_by_id,
    )

    assert summary["new_reactions"] == 19
    assert summary["new_upvotes"] == 18
    assert summary["user_interactions"] == 20
    assert summary["attention_level"] == 2
    assert summary["attention_marker"] == "🔥🔥"
    assert summary["new_comments"][0]["new_reactions"] == 1
    assert summary["new_comments"][0]["new_upvotes"] == 0

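# Hedged sketch of the reaction accounting asserted above: bot reactions are
# dropped, every human reaction counts toward new_reactions, and only "+1"
# reactions count as upvotes (the lone "heart" above is not an upvote).
def count_reactions_sketch(events: list) -> tuple:
    humans = [e for e in events if not e["user"]["login"].endswith("[bot]")]
    new_reactions = len(humans)
    new_upvotes = sum(1 for e in humans if e["content"] == "+1")
    return new_reactions, new_upvotes
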
def test_digest_rows_are_table_ready_with_concise_descriptions():
    rows = collect_issue_digest.digest_rows(
        [
            {
                "number": 1,
                "title": "Quiet bug",
                "description": "Quiet bug",
                "url": "https://github.com/openai/codex/issues/1",
                "owner_labels": ["context"],
                "kind_labels": ["bug"],
                "state": "open",
                "attention": False,
                "attention_level": 0,
                "attention_marker": "",
                "user_interactions": 1,
                "new_reactions": 0,
                "new_upvotes": 0,
                "engagement_score": 3,
                "issue_reaction_total": 0,
                "comment_reaction_total": 0,
                "updated_at": "2026-04-25T01:00:00Z",
                "activity": {
                    "new_issue": True,
                    "new_comments": 0,
                    "new_reactions": 0,
                    "updated_without_visible_new_post": False,
                },
            },
            {
                "number": 2,
                "title": "Busy bug",
                "description": "High-volume bug report",
                "url": "https://github.com/openai/codex/issues/2",
                "owner_labels": ["agent"],
                "kind_labels": ["bug"],
                "state": "open",
                "attention": True,
                "attention_level": 1,
                "attention_marker": "🔥",
                "user_interactions": 17,
                "new_reactions": 3,
                "new_upvotes": 2,
                "engagement_score": 20,
                "issue_reaction_total": 5,
                "comment_reaction_total": 2,
                "updated_at": "2026-04-25T02:00:00Z",
                "activity": {
                    "new_issue": False,
                    "new_comments": 16,
                    "new_reactions": 3,
                    "updated_without_visible_new_post": False,
                },
            },
        ]
    )

    assert rows[0] == {
        "ref": 1,
        "ref_markdown": "[1](https://github.com/openai/codex/issues/2)",
        "marker": "🔥",
        "attention_marker": "🔥",
        "number": 2,
        "description": "High-volume bug report",
        "title": "Busy bug",
        "url": "https://github.com/openai/codex/issues/2",
        "area": "agent",
        "kind": "bug",
        "state": "open",
        "interactions": 17,
        "user_interactions": 17,
        "new_reactions": 3,
        "new_upvotes": 2,
        "current_reactions": 7,
    }

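# Hedged sketch of the ordering and ref linking shown above: rows appear to
# sort by engagement_score descending before refs are assigned, which is why
# the busier issue #2 becomes rows[0] with ref 1 and a markdown link to its
# own URL. Illustration only; the script may assign refs differently.
def digest_order_sketch(issues: list) -> list:
    ordered = sorted(issues, key=lambda issue: issue["engagement_score"], reverse=True)
    return [
        {"ref": ref, "ref_markdown": f"[{ref}]({issue['url']})", **issue}
        for ref, issue in enumerate(ordered, start=1)
    ]
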
def test_summary_inputs_are_model_ready_without_preclustering():
    issues = [
        {
            "number": 20,
            "title": "Windows app Browser Use external navigation fails",
            "description": "Browser Use navigation or app-server failure",
            "url": "https://github.com/openai/codex/issues/20",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "attention": False,
            "attention_level": 0,
            "attention_marker": "",
            "user_interactions": 3,
            "new_reactions": 1,
            "engagement_score": 8,
            "updated_at": "2026-04-25T04:00:00Z",
            "activity": {"new_comments": 2},
        },
        {
            "number": 21,
            "title": "On Windows, cmake output waits until timeout",
            "description": "Windows command timeout/capture problem",
            "url": "https://github.com/openai/codex/issues/21",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "attention": False,
            "attention_level": 0,
            "attention_marker": "",
            "user_interactions": 3,
            "new_reactions": 0,
            "engagement_score": 7,
            "updated_at": "2026-04-25T03:00:00Z",
            "activity": {"new_comments": 3},
        },
        {
            "number": 22,
            "title": "Windows computer use tool fails to click buttons",
            "description": "Computer-use workflow failure",
            "url": "https://github.com/openai/codex/issues/22",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "attention": False,
            "attention_level": 0,
            "attention_marker": "",
            "user_interactions": 3,
            "new_reactions": 0,
            "engagement_score": 6,
            "updated_at": "2026-04-25T02:00:00Z",
            "activity": {"new_comments": 3},
        },
    ]

    rows = collect_issue_digest.summary_inputs(issues, ref_map={20: 1, 21: 2, 22: 3})

    assert rows == [
        {
            "ref": 1,
            "ref_markdown": "[1](https://github.com/openai/codex/issues/20)",
            "number": 20,
            "title": "Windows app Browser Use external navigation fails",
            "description": "Browser Use navigation or app-server failure",
            "url": "https://github.com/openai/codex/issues/20",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "state": "",
            "attention_marker": "",
            "interactions": 3,
            "new_comments": 2,
            "new_reactions": 1,
            "new_upvotes": 0,
            "current_reactions": 0,
        },
        {
            "ref": 2,
            "ref_markdown": "[2](https://github.com/openai/codex/issues/21)",
            "number": 21,
            "title": "On Windows, cmake output waits until timeout",
            "description": "Windows command timeout/capture problem",
            "url": "https://github.com/openai/codex/issues/21",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "state": "",
            "attention_marker": "",
            "interactions": 3,
            "new_comments": 3,
            "new_reactions": 0,
            "new_upvotes": 0,
            "current_reactions": 0,
        },
        {
            "ref": 3,
            "ref_markdown": "[3](https://github.com/openai/codex/issues/22)",
            "number": 22,
            "title": "Windows computer use tool fails to click buttons",
            "description": "Computer-use workflow failure",
            "url": "https://github.com/openai/codex/issues/22",
            "labels": ["app", "bug"],
            "owner_labels": ["app"],
            "kind_labels": ["bug"],
            "state": "",
            "attention_marker": "",
            "interactions": 3,
            "new_comments": 3,
            "new_reactions": 0,
            "new_upvotes": 0,
            "current_reactions": 0,
        },
    ]

.github/actions/prepare-bazel-ci/action.yml (2 changes, vendored)
@@ -8,7 +8,7 @@ inputs:
    description: Logical namespace used to keep concurrent Bazel jobs from reserving the same repository cache key.
    required: true
  install-test-prereqs:
    description: Install Node.js and DotSlash for Bazel-backed test jobs.
    description: Install DotSlash for Bazel-backed test jobs.
    required: false
    default: "false"
outputs:

.github/actions/setup-bazel-ci/action.yml (10 changes, vendored)
@@ -5,7 +5,7 @@ inputs:
    description: Target triple used for cache namespacing.
    required: true
  install-test-prereqs:
    description: Install Node.js and DotSlash for Bazel-backed test jobs.
    description: Install DotSlash for Bazel-backed test jobs.
    required: false
    default: "false"
outputs:
@@ -16,12 +16,6 @@ outputs:
runs:
  using: composite
  steps:
    - name: Set up Node.js for js_repl tests
      if: inputs.install-test-prereqs == 'true'
      uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
      with:
        node-version-file: codex-rs/node-version.txt

    # Some integration tests rely on DotSlash being installed.
    # See https://github.com/openai/codex/pull/7617.
    - name: Install DotSlash
@@ -39,7 +33,7 @@ runs:
      run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"

    - name: Set up Bazel
      uses: bazelbuild/setup-bazelisk@b39c379c82683a5f25d34f0d062761f62693e0b2 # v3
      uses: bazel-contrib/setup-bazel@c5acdfb288317d0b5c0bbd7a396a3dc868bb0f86 # 0.19.0

    - name: Configure Bazel repository cache
      id: configure_bazel_repository_cache

.github/pull_request_template.md (2 changes, vendored)
@@ -1,6 +1,6 @@
# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the dedicated "Contributing" markdown file or your PR may be closed:
External code contributions are by invitation only. Please read the dedicated "Contributing" markdown file for details:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.

.github/scripts/run-bazel-ci.sh (17 changes, vendored)
@@ -4,7 +4,6 @@ set -euo pipefail

print_failed_bazel_test_logs=0
print_failed_bazel_action_summary=0
use_node_test_env=0
remote_download_toplevel=0
windows_msvc_host_platform=0

@@ -18,10 +17,6 @@ while [[ $# -gt 0 ]]; do
      print_failed_bazel_action_summary=1
      shift
      ;;
    --use-node-test-env)
      use_node_test_env=1
      shift
      ;;
    --remote-download-toplevel)
      remote_download_toplevel=1
      shift
@@ -42,7 +37,7 @@ while [[ $# -gt 0 ]]; do
done

if [[ $# -eq 0 ]]; then
  echo "Usage: $0 [--print-failed-test-logs] [--print-failed-action-summary] [--use-node-test-env] [--remote-download-toplevel] [--windows-msvc-host-platform] -- <bazel args> -- <targets>" >&2
  echo "Usage: $0 [--print-failed-test-logs] [--print-failed-action-summary] [--remote-download-toplevel] [--windows-msvc-host-platform] -- <bazel args> -- <targets>" >&2
  exit 1
fi

@@ -249,16 +244,6 @@ if [[ ${#bazel_args[@]} -eq 0 || ${#bazel_targets[@]} -eq 0 ]]; then
  exit 1
fi

if [[ $use_node_test_env -eq 1 ]]; then
  # Bazel test sandboxes on macOS may resolve an older Homebrew `node`
  # before the `actions/setup-node` runtime on PATH.
  node_bin="$(which node)"
  if [[ "${RUNNER_OS:-}" == "Windows" ]]; then
    node_bin="$(cygpath -w "${node_bin}")"
  fi
  bazel_args+=("--test_env=CODEX_JS_REPL_NODE_PATH=${node_bin}")
fi

post_config_bazel_args=()
if [[ "${RUNNER_OS:-}" == "Windows" && $windows_msvc_host_platform -eq 1 ]]; then
  has_host_platform_override=0

.github/workflows/Dockerfile.bazel (18 changes, vendored)
@@ -8,25 +8,9 @@ FROM ubuntu:24.04

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    curl git python3 ca-certificates xz-utils && \
    curl git python3 ca-certificates && \
    rm -rf /var/lib/apt/lists/*

COPY codex-rs/node-version.txt /tmp/node-version.txt

RUN set -eux; \
    node_arch="$(dpkg --print-architecture)"; \
    case "${node_arch}" in \
    amd64) node_dist_arch="x64" ;; \
    arm64) node_dist_arch="arm64" ;; \
    *) echo "unsupported architecture: ${node_arch}"; exit 1 ;; \
    esac; \
    node_version="$(tr -d '[:space:]' </tmp/node-version.txt)"; \
    curl -fsSLO "https://nodejs.org/dist/v${node_version}/node-v${node_version}-linux-${node_dist_arch}.tar.xz"; \
    tar -xJf "node-v${node_version}-linux-${node_dist_arch}.tar.xz" -C /usr/local --strip-components=1; \
    rm "node-v${node_version}-linux-${node_dist_arch}.tar.xz" /tmp/node-version.txt; \
    node --version; \
    npm --version

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

.github/workflows/bazel.yml (8 changes, vendored)
@@ -17,6 +17,13 @@ concurrency:
  cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
  test:
    # Even though a no-cache-hit Windows build seems to exceed the 30-minute
    # limit on occasion, the more common reason for exceeding the limit is a
    # true test failure in a rust_test() marked "flaky" that gets run 3x.
    # In that case, extra time generally does not give us more signal.
    #
    # Ultimately we need true distributed builds (e.g.,
    # https://www.buildbuddy.io/docs/rbe-setup/) to speed things up.
    timeout-minutes: 30
    strategy:
      fail-fast: false
@@ -85,7 +92,6 @@ jobs:

          bazel_wrapper_args=(
            --print-failed-test-logs
            --use-node-test-env
          )
          bazel_test_args=(
            test

.github/workflows/ci.yml (9 changes, vendored)
@@ -45,11 +45,16 @@ jobs:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          # Use a rust-release version that includes all native binaries.
          CODEX_VERSION=0.115.0
          # Use a recent successful rust-release run that published the full
          # cross-platform native payload required by the npm package layout.
          # Passing the workflow URL directly avoids relying on old rust-v*
          # branches remaining discoverable via `gh run list --branch ...`.
          CODEX_VERSION=0.125.0
          WORKFLOW_URL="https://github.com/openai/codex/actions/runs/24901475298"
          OUTPUT_DIR="${RUNNER_TEMP}"
          python3 ./scripts/stage_npm_packages.py \
            --release-version "$CODEX_VERSION" \
            --workflow-url "$WORKFLOW_URL" \
            --package codex \
            --output-dir "$OUTPUT_DIR"
          PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"

.github/workflows/issue-deduplicator.yml (4 changes, vendored)
@@ -61,7 +61,7 @@ jobs:
      # .github/prompts/issue-deduplicator.txt file is obsolete and removed.
      - id: codex-all
        name: Find duplicates (pass 1, all issues)
        uses: openai/codex-action@0b91f4a2703c23df3102c3f0967d3c6db34eedef # v1
        uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"
@@ -195,7 +195,7 @@ jobs:

      - id: codex-open
        name: Find duplicates (pass 2, open issues)
        uses: openai/codex-action@0b91f4a2703c23df3102c3f0967d3c6db34eedef # v1
        uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"

.github/workflows/issue-labeler.yml (2 changes, vendored)
@@ -20,7 +20,7 @@ jobs:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - id: codex
        uses: openai/codex-action@0b91f4a2703c23df3102c3f0967d3c6db34eedef # v1
        uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"

.github/workflows/rust-ci-full.yml (4 changes, vendored)
@@ -560,10 +560,6 @@ jobs:

    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Set up Node.js for js_repl tests
        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
        with:
          node-version-file: codex-rs/node-version.txt
      - name: Install Linux build dependencies
        if: ${{ runner.os == 'Linux' }}
        shell: bash

.github/workflows/rust-release.yml (56 changes, vendored)
@@ -651,11 +651,59 @@ jobs:
            prefix="${NPM_TAG}-"
          fi

          root_tarball="dist/npm/codex-npm-${VERSION}.tgz"
          sdk_tarball="dist/npm/codex-sdk-npm-${VERSION}.tgz"
          # Keep this list in sync with CODEX_PLATFORM_PACKAGES in
          # codex-cli/scripts/build_npm_package.py. The root wrapper advances
          # @openai/codex@latest as soon as it publishes, so every platform
          # package it aliases must already exist in the registry first.
          platform_tarballs=(
            "dist/npm/codex-npm-linux-x64-${VERSION}.tgz"
            "dist/npm/codex-npm-linux-arm64-${VERSION}.tgz"
            "dist/npm/codex-npm-darwin-x64-${VERSION}.tgz"
            "dist/npm/codex-npm-darwin-arm64-${VERSION}.tgz"
            "dist/npm/codex-npm-win32-x64-${VERSION}.tgz"
            "dist/npm/codex-npm-win32-arm64-${VERSION}.tgz"
          )

          for required_tarball in "${platform_tarballs[@]}" "${root_tarball}"; do
            if [[ ! -f "${required_tarball}" ]]; then
              echo "Missing npm tarball: ${required_tarball}"
              exit 1
            fi
          done

          shopt -s nullglob
          tarballs=(dist/npm/*-"${VERSION}".tgz)
          if [[ ${#tarballs[@]} -eq 0 ]]; then
          echo "No npm tarballs found in dist/npm for version ${VERSION}"
          exit 1
          other_tarballs=()
          for tarball in dist/npm/*-"${VERSION}".tgz; do
            if [[ "${tarball}" == "${root_tarball}" || "${tarball}" == "${sdk_tarball}" ]]; then
              continue
            fi

            is_platform_tarball=false
            for platform_tarball in "${platform_tarballs[@]}"; do
              if [[ "${tarball}" == "${platform_tarball}" ]]; then
                is_platform_tarball=true
                break
              fi
            done
            if [[ "${is_platform_tarball}" == true ]]; then
              continue
            fi

            other_tarballs+=("${tarball}")
          done

          # Publish the platform packages before the root CLI wrapper. The root
          # wrapper advances @openai/codex@latest, so it should only publish
          # after the optional dependency versions it references exist.
          tarballs=(
            "${platform_tarballs[@]}"
            "${other_tarballs[@]}"
            "${root_tarball}"
          )
          if [[ -f "${sdk_tarball}" ]]; then
            tarballs+=("${sdk_tarball}")
          fi

          for tarball in "${tarballs[@]}"; do

.github/workflows/rusty-v8-release.yml (4 changes, vendored)
@@ -78,7 +78,9 @@ jobs:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Set up Bazel
        uses: bazelbuild/setup-bazelisk@b39c379c82683a5f25d34f0d062761f62693e0b2 # v3
        uses: ./.github/actions/setup-bazel-ci
        with:
          target: ${{ matrix.target }}

      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6

.github/workflows/v8-canary.yml (6 changes, vendored)
@@ -3,6 +3,7 @@ name: v8-canary
on:
  pull_request:
    paths:
      - ".github/actions/setup-bazel-ci/**"
      - ".github/scripts/rusty_v8_bazel.py"
      - ".github/workflows/rusty-v8-release.yml"
      - ".github/workflows/v8-canary.yml"
@@ -16,6 +17,7 @@ on:
    branches:
      - main
    paths:
      - ".github/actions/setup-bazel-ci/**"
      - ".github/scripts/rusty_v8_bazel.py"
      - ".github/workflows/rusty-v8-release.yml"
      - ".github/workflows/v8-canary.yml"
@@ -75,7 +77,9 @@ jobs:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Set up Bazel
        uses: bazelbuild/setup-bazelisk@b39c379c82683a5f25d34f0d062761f62693e0b2 # v3
        uses: ./.github/actions/setup-bazel-ci
        with:
          target: ${{ matrix.target }}

      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6

MODULE.bazel.lock (1 change, generated)
@@ -1560,6 +1560,7 @@
"system-deps_7.0.7": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"assert_matches\",\"req\":\"^1.5\"},{\"features\":[\"targets\"],\"name\":\"cfg-expr\",\"req\":\">=0.17, <0.21\"},{\"name\":\"heck\",\"req\":\"^0.5\"},{\"kind\":\"dev\",\"name\":\"itertools\",\"req\":\"^0.14\"},{\"kind\":\"dev\",\"name\":\"lazy_static\",\"req\":\"^1\"},{\"name\":\"pkg-config\",\"req\":\"^0.3.25\"},{\"default_features\":false,\"features\":[\"parse\",\"std\"],\"name\":\"toml\",\"req\":\"^0.9\"},{\"name\":\"version-compare\",\"req\":\"^0.2\"}],\"features\":{}}",
"tagptr_0.2.0": "{\"dependencies\":[],\"features\":{}}",
"tar_0.4.44": "{\"dependencies\":[{\"name\":\"filetime\",\"req\":\"^0.2.8\"},{\"name\":\"libc\",\"req\":\"^0.2\",\"target\":\"cfg(unix)\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3\"},{\"name\":\"xattr\",\"optional\":true,\"req\":\"^1.1.3\",\"target\":\"cfg(unix)\"}],\"features\":{\"default\":[\"xattr\"]}}",
"tar_0.4.45": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"astral-tokio-tar\",\"req\":\"^0.5\"},{\"name\":\"filetime\",\"req\":\"^0.2.8\"},{\"name\":\"libc\",\"req\":\"^0.2\",\"target\":\"cfg(unix)\"},{\"features\":[\"small_rng\"],\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3\"},{\"features\":[\"macros\",\"rt\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-stream\",\"req\":\"^0.1\"},{\"name\":\"xattr\",\"optional\":true,\"req\":\"^1.1.3\",\"target\":\"cfg(unix)\"}],\"features\":{\"default\":[\"xattr\"]}}",
"target-lexicon_0.13.3": "{\"dependencies\":[{\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0\"}],\"features\":{\"arch_z80\":[],\"arch_zkasm\":[],\"default\":[],\"serde_support\":[\"serde\",\"std\"],\"std\":[]}}",
"tempfile_3.27.0": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"doc-comment\",\"req\":\"^0.3\"},{\"name\":\"fastrand\",\"req\":\"^2.1.1\"},{\"default_features\":false,\"name\":\"getrandom\",\"optional\":true,\"req\":\">=0.3.0, <0.5\",\"target\":\"cfg(any(unix, windows, target_os = \\\"wasi\\\"))\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"once_cell\",\"req\":\"^1.19.0\"},{\"features\":[\"fs\"],\"name\":\"rustix\",\"req\":\"^1.1.4\",\"target\":\"cfg(any(unix, target_os = \\\"wasi\\\"))\"},{\"features\":[\"Win32_Storage_FileSystem\",\"Win32_Foundation\"],\"name\":\"windows-sys\",\"req\":\">=0.52, <0.62\",\"target\":\"cfg(windows)\"}],\"features\":{\"default\":[\"getrandom\"],\"nightly\":[]}}",
"temporal_capi_0.1.2": "{\"dependencies\":[{\"default_features\":false,\"name\":\"diplomat\",\"req\":\"^0.14.0\"},{\"default_features\":false,\"name\":\"diplomat-runtime\",\"req\":\"^0.14.0\"},{\"default_features\":false,\"features\":[\"unstable\"],\"name\":\"icu_calendar\",\"req\":\"^2.1.0\"},{\"name\":\"icu_locale\",\"req\":\"^2.1.0\"},{\"default_features\":false,\"name\":\"num-traits\",\"req\":\"^0.2.19\"},{\"default_features\":false,\"name\":\"temporal_rs\",\"req\":\"^0.1.2\"},{\"name\":\"timezone_provider\",\"req\":\"^0.1.2\"},{\"name\":\"writeable\",\"req\":\"^0.6.0\"},{\"name\":\"zoneinfo64\",\"optional\":true,\"req\":\"^0.2.0\"}],\"features\":{\"compiled_data\":[\"temporal_rs/compiled_data\"],\"zoneinfo64\":[\"dep:zoneinfo64\",\"timezone_provider/zoneinfo64\"]}}",

NOTICE (3 changes)
@@ -4,6 +4,3 @@ Copyright 2025 OpenAI
This project includes code derived from [Ratatui](https://github.com/ratatui/ratatui), licensed under the MIT license.
Copyright (c) 2016-2022 Florian Dehau
Copyright (c) 2023-2025 The Ratatui Developers

This project includes Meriyah parser assets from [meriyah](https://github.com/meriyah/meriyah), licensed under the ISC license.
Copyright (c) 2019 and later, KFlash and others.

@@ -1,6 +1,5 @@
exports_files([
    "clippy.toml",
    "node-version.txt",
])

filegroup(

codex-rs/Cargo.lock (135 changes, generated)
@@ -1748,26 +1748,6 @@ dependencies = [
 "unicode-width 0.2.1",
]

[[package]]
name = "codex-agent-graph-store"
version = "0.0.0"
dependencies = [
 "async-trait",
 "codex-protocol",
 "codex-state",
 "pretty_assertions",
 "prost 0.14.3",
 "serde",
 "serde_json",
 "tempfile",
 "thiserror 2.0.18",
 "tokio",
 "tokio-stream",
 "tonic",
 "tonic-prost",
 "tonic-prost-build",
]

[[package]]
name = "codex-agent-identity"
version = "0.0.0"
@@ -1778,6 +1758,7 @@ dependencies = [
 "codex-protocol",
 "crypto_box",
 "ed25519-dalek",
 "jsonwebtoken",
 "pretty_assertions",
 "rand 0.9.3",
 "reqwest",
@@ -1872,12 +1853,14 @@ dependencies = [
 "codex-core-plugins",
 "codex-device-key",
 "codex-exec-server",
 "codex-external-agent-sessions",
 "codex-features",
 "codex-feedback",
 "codex-file-search",
 "codex-git-utils",
 "codex-login",
 "codex-mcp",
 "codex-memories-write",
 "codex-model-provider",
 "codex-model-provider-info",
 "codex-models-manager",
@@ -1899,6 +1882,7 @@ dependencies = [
 "codex-utils-rustls-provider",
 "constant_time_eq 0.3.1",
 "core_test_support",
 "flate2",
 "futures",
 "gethostname",
 "hmac",
@@ -1914,6 +1898,7 @@ dependencies = [
 "serial_test",
 "sha2",
 "shlex",
 "tar",
 "tempfile",
 "thiserror 2.0.18",
 "time",
@@ -2119,7 +2104,6 @@ dependencies = [
 "assert_matches",
 "clap",
 "clap_complete",
 "codex-api",
 "codex-app-server",
 "codex-app-server-protocol",
 "codex-app-server-test-client",
@@ -2136,7 +2120,7 @@ dependencies = [
 "codex-login",
 "codex-mcp",
 "codex-mcp-server",
 "codex-model-provider",
 "codex-memories-write",
 "codex-models-manager",
 "codex-protocol",
 "codex-responses-api-proxy",
@@ -2307,15 +2291,20 @@ version = "0.0.0"
dependencies = [
 "anyhow",
 "async-trait",
 "base64 0.22.1",
 "codex-app-server-protocol",
 "codex-execpolicy",
 "codex-features",
 "codex-file-system",
 "codex-git-utils",
 "codex-model-provider-info",
 "codex-network-proxy",
 "codex-protocol",
 "codex-utils-absolute-path",
 "codex-utils-path",
 "core-foundation 0.9.4",
 "dns-lookup",
 "dunce",
 "futures",
 "gethostname",
 "libc",
@@ -2339,6 +2328,7 @@ dependencies = [
 "tracing",
 "wildmatch",
 "winapi-util",
 "windows-sys 0.52.0",
]

[[package]]
@@ -2385,6 +2375,7 @@ dependencies = [
 "codex-hooks",
 "codex-login",
 "codex-mcp",
 "codex-memories-read",
 "codex-model-provider",
 "codex-model-provider-info",
 "codex-models-manager",
@@ -2397,7 +2388,6 @@ dependencies = [
 "codex-rollout",
 "codex-rollout-trace",
 "codex-sandboxing",
 "codex-secrets",
 "codex-shell-command",
 "codex-shell-escalation",
 "codex-state",
@@ -2419,7 +2409,6 @@ dependencies = [
 "codex-utils-string",
 "codex-utils-template",
 "codex-windows-sandbox",
 "core-foundation 0.9.4",
 "core_test_support",
 "csv",
 "ctor 0.6.3",
@@ -2470,7 +2459,6 @@ dependencies = [
 "walkdir",
 "which 8.0.0",
 "whoami",
 "windows-sys 0.52.0",
 "wiremock",
 "zstd 0.13.3",
]
@@ -2494,11 +2482,13 @@ dependencies = [
 "codex-utils-absolute-path",
 "codex-utils-plugins",
 "dirs",
 "flate2",
 "libc",
 "pretty_assertions",
 "reqwest",
 "serde",
 "serde_json",
 "tar",
 "tempfile",
 "thiserror 2.0.18",
 "tokio",
@@ -2580,6 +2570,7 @@ dependencies = [
 "codex-apply-patch",
 "codex-arg0",
 "codex-cloud-requirements",
 "codex-config",
 "codex-core",
 "codex-feedback",
 "codex-git-utils",
@@ -2623,7 +2614,7 @@ dependencies = [
 "bytes",
 "codex-app-server-protocol",
 "codex-client",
 "codex-config",
 "codex-file-system",
 "codex-protocol",
 "codex-sandboxing",
 "codex-test-binary-support",
@@ -2692,6 +2683,20 @@ dependencies = [
 "syn 2.0.114",
]

[[package]]
name = "codex-external-agent-sessions"
version = "0.0.0"
dependencies = [
 "chrono",
 "codex-app-server-protocol",
 "codex-protocol",
 "codex-utils-output-truncation",
 "serde",
 "serde_json",
 "sha2",
 "tempfile",
]

[[package]]
name = "codex-features"
version = "0.0.0"
@@ -2734,14 +2739,23 @@ dependencies = [
 "tokio",
]

[[package]]
name = "codex-file-system"
version = "0.0.0"
dependencies = [
 "async-trait",
 "codex-protocol",
 "codex-utils-absolute-path",
 "serde",
]

[[package]]
name = "codex-git-utils"
version = "0.0.0"
dependencies = [
 "anyhow",
 "assert_matches",
 "chrono",
 "codex-exec-server",
 "codex-file-system",
 "codex-protocol",
 "codex-utils-absolute-path",
 "futures",
@@ -2801,8 +2815,8 @@ version = "0.0.0"
dependencies = [
 "cc",
 "clap",
 "codex-config",
 "codex-core",
 "codex-process-hardening",
 "codex-protocol",
 "codex-sandboxing",
 "codex-utils-absolute-path",
@@ -2852,6 +2866,7 @@ dependencies = [
 "codex-terminal-detection",
 "codex-utils-template",
 "core_test_support",
 "jsonwebtoken",
 "keyring",
 "once_cell",
 "os_info",
@@ -2890,7 +2905,6 @@ dependencies = [
 "codex-plugin",
 "codex-protocol",
 "codex-rmcp-client",
 "codex-utils-absolute-path",
 "codex-utils-plugins",
 "futures",
 "pretty_assertions",
@@ -2940,6 +2954,55 @@ dependencies = [
 "wiremock",
]

[[package]]
name = "codex-memories-read"
version = "0.0.0"
dependencies = [
 "codex-protocol",
 "codex-shell-command",
 "codex-utils-absolute-path",
 "codex-utils-output-truncation",
 "codex-utils-template",
 "pretty_assertions",
 "tempfile",
 "tokio",
]

[[package]]
name = "codex-memories-write"
version = "0.0.0"
dependencies = [
 "anyhow",
 "chrono",
 "codex-backend-client",
 "codex-config",
 "codex-core",
 "codex-features",
 "codex-git-utils",
 "codex-login",
 "codex-models-manager",
 "codex-otel",
 "codex-protocol",
 "codex-rollout",
 "codex-rollout-trace",
 "codex-secrets",
 "codex-state",
 "codex-terminal-detection",
 "codex-utils-absolute-path",
 "codex-utils-output-truncation",
 "codex-utils-template",
 "core_test_support",
 "futures",
 "pretty_assertions",
 "serde",
 "serde_json",
 "tempfile",
 "tokio",
 "tracing",
 "uuid",
 "wiremock",
]

[[package]]
name = "codex-model-provider"
version = "0.0.0"
@@ -3139,6 +3202,7 @@ dependencies = [
 "tracing",
 "ts-rs",
 "uuid",
 "wildmatch",
]

[[package]]
@@ -4031,6 +4095,7 @@ dependencies = [
 "assert_cmd",
 "base64 0.22.1",
 "codex-arg0",
 "codex-config",
 "codex-core",
 "codex-exec-server",
 "codex-features",
@@ -12250,6 +12315,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"

[[package]]
name = "tar"
version = "0.4.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22692a6476a21fa75fdfc11d452fda482af402c008cdbaf3476414e122040973"
dependencies = [
 "filetime",
 "libc",
]

[[package]]
name = "target-lexicon"
version = "0.13.3"

@@ -2,7 +2,6 @@
members = [
    "aws-auth",
    "analytics",
    "agent-graph-store",
    "agent-identity",
    "backend-client",
    "ansi-escape",
@@ -37,9 +36,11 @@ members = [
    "hooks",
    "secrets",
    "exec",
    "file-system",
    "exec-server",
    "execpolicy",
    "execpolicy-legacy",
    "external-agent-sessions",
    "keyring-store",
    "file-search",
    "linux-sandbox",
@@ -47,6 +48,8 @@ members = [
    "login",
    "codex-mcp",
    "mcp-server",
    "memories/read",
    "memories/write",
    "model-provider-info",
    "models-manager",
    "network-proxy",
@@ -114,7 +117,6 @@ license = "Apache-2.0"
# Internal
app_test_support = { path = "app-server/tests/common" }
codex-analytics = { path = "analytics" }
codex-agent-graph-store = { path = "agent-graph-store" }
codex-agent-identity = { path = "agent-identity" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
@@ -142,8 +144,10 @@ codex-core-plugins = { path = "core-plugins" }
codex-core-skills = { path = "core-skills" }
codex-device-key = { path = "device-key" }
codex-exec = { path = "exec" }
codex-file-system = { path = "file-system" }
codex-exec-server = { path = "exec-server" }
codex-execpolicy = { path = "execpolicy" }
codex-external-agent-sessions = { path = "external-agent-sessions" }
codex-experimental-api-macros = { path = "codex-experimental-api-macros" }
codex-features = { path = "features" }
codex-feedback = { path = "feedback" }
@@ -155,6 +159,8 @@ codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-memories-read = { path = "memories/read" }
codex-memories-write = { path = "memories/write" }
codex-mcp = { path = "codex-mcp" }
codex-mcp-server = { path = "mcp-server" }
codex-model-provider-info = { path = "model-provider-info" }
@@ -256,6 +262,7 @@ encoding_rs = "0.8.35"
env-flags = "0.1.1"
env_logger = "0.11.9"
eventsource-stream = "0.2.3"
flate2 = "1.1.8"
futures = { version = "0.3", default-features = false }
gethostname = "1.1.0"
gix = { version = "0.81.0", default-features = false, features = ["sha1"] }
@@ -348,6 +355,7 @@ strum_macros = "0.28.0"
supports-color = "3.0.2"
syntect = "5"
sys-locale = "0.3.2"
tar = { version = "=0.4.45", default-features = false }
tempfile = "3.23.0"
test-log = "0.2.19"
textwrap = "0.16.2"
@@ -439,7 +447,6 @@ unwrap_used = "deny"
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = [
    "codex-agent-graph-store",
    "icu_provider",
    "openssl-sys",
    "codex-utils-readiness",

@@ -94,7 +94,7 @@ In `workspace-write`, Codex also includes `~/.codex/memories` in its writable ro

This folder is the root of a Cargo workspace. It contains quite a bit of experimental code, but here are the key crates:

- [`core/`](./core) contains the business logic for Codex. Ultimately, we hope this to be a library crate that is generally useful for building other Rust/native applications that use Codex.
- [`core/`](./core) contains the business logic for Codex. Ultimately, we hope this becomes a library crate that is generally useful for building other Rust/native applications that use Codex.
- [`exec/`](./exec) "headless" CLI for use in automation.
- [`tui/`](./tui) CLI that launches a fullscreen TUI built with [Ratatui](https://ratatui.rs/).
- [`cli/`](./cli) CLI multitool that provides the aforementioned CLIs via subcommands.

@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "agent-graph-store",
    crate_name = "codex_agent_graph_store",
)

@@ -1,35 +0,0 @@
[package]
edition.workspace = true
license.workspace = true
name = "codex-agent-graph-store"
version.workspace = true

[lib]
name = "codex_agent_graph_store"
path = "src/lib.rs"

[[example]]
name = "generate-proto"
path = "examples/generate-proto.rs"

[lints]
workspace = true

[dependencies]
async-trait = { workspace = true }
codex-protocol = { workspace = true }
codex-state = { workspace = true }
prost = "0.14.3"
serde = { workspace = true, features = ["derive"] }
thiserror = { workspace = true }
tonic = { workspace = true }
tonic-prost = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
tokio-stream = { workspace = true, features = ["net"] }
tonic = { workspace = true, features = ["router", "transport"] }
tonic-prost-build = { version = "=0.14.3", default-features = false, features = ["transport"] }

@@ -1,19 +0,0 @@
use std::path::PathBuf;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let Some(proto_dir_arg) = std::env::args().nth(1) else {
        eprintln!("Usage: generate-proto <proto-dir>");
        std::process::exit(1);
    };

    let proto_dir = PathBuf::from(proto_dir_arg);
    let proto_file = proto_dir.join("codex.agent_graph_store.v1.proto");

    tonic_prost_build::configure()
        .build_client(true)
        .build_server(true)
        .out_dir(&proto_dir)
        .compile_protos(&[proto_file], &[proto_dir])?;

    Ok(())
}

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
repo_root="$(cd "$script_dir/../../.." && pwd)"
proto_dir="$repo_root/codex-rs/agent-graph-store/src/remote/proto"
generated="$proto_dir/codex.agent_graph_store.v1.rs"
tmpdir="$(mktemp -d)"

cleanup() {
  rm -rf "$tmpdir"
}
trap cleanup EXIT

(
  cd "$repo_root/codex-rs"
  CARGO_TARGET_DIR="$tmpdir/target" cargo run \
    -p codex-agent-graph-store \
    --example generate-proto \
    -- "$proto_dir"
)

if ! sed -n '2p' "$generated" | grep -q 'clippy::trivially_copy_pass_by_ref'; then
  {
    sed -n '1p' "$generated"
    printf '#![allow(clippy::trivially_copy_pass_by_ref)]\n'
    sed '1d' "$generated"
  } > "$tmpdir/generated.rs"
  mv "$tmpdir/generated.rs" "$generated"
fi

rustfmt --edition 2024 "$generated"

awk '
  NR == 3 && previous ~ /clippy::trivially_copy_pass_by_ref/ && $0 != "" { print "" }
  { print; previous = $0 }
' "$generated" > "$tmpdir/formatted.rs"
mv "$tmpdir/formatted.rs" "$generated"

@@ -1,20 +0,0 @@
/// Result type returned by agent graph store operations.
pub type AgentGraphStoreResult<T> = Result<T, AgentGraphStoreError>;

/// Error type shared by agent graph store implementations.
#[derive(Debug, thiserror::Error)]
pub enum AgentGraphStoreError {
    /// The caller supplied invalid request data.
    #[error("invalid agent graph store request: {message}")]
    InvalidRequest {
        /// User-facing explanation of the invalid request.
        message: String,
    },

    /// Catch-all for implementation failures that do not fit a more specific category.
    #[error("agent graph store internal error: {message}")]
    Internal {
        /// User-facing explanation of the implementation failure.
        message: String,
    },
}

@@ -1,14 +0,0 @@
//! Storage-neutral parent/child topology for thread-spawned agents.

mod error;
mod local;
mod remote;
mod store;
mod types;

pub use error::AgentGraphStoreError;
pub use error::AgentGraphStoreResult;
pub use local::LocalAgentGraphStore;
pub use remote::RemoteAgentGraphStore;
pub use store::AgentGraphStore;
pub use types::ThreadSpawnEdgeStatus;

@@ -1,325 +0,0 @@
use async_trait::async_trait;
use codex_protocol::ThreadId;
use codex_state::StateRuntime;
use std::sync::Arc;

use crate::AgentGraphStore;
use crate::AgentGraphStoreError;
use crate::AgentGraphStoreResult;
use crate::ThreadSpawnEdgeStatus;

/// SQLite-backed implementation of [`AgentGraphStore`] using an existing state runtime.
#[derive(Clone)]
pub struct LocalAgentGraphStore {
    state_db: Arc<StateRuntime>,
}

impl std::fmt::Debug for LocalAgentGraphStore {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LocalAgentGraphStore")
            .field("codex_home", &self.state_db.codex_home())
            .finish_non_exhaustive()
    }
}

impl LocalAgentGraphStore {
    /// Create a local graph store from an already-initialized state runtime.
    pub fn new(state_db: Arc<StateRuntime>) -> Self {
        Self { state_db }
    }
}

#[async_trait]
impl AgentGraphStore for LocalAgentGraphStore {
    async fn upsert_thread_spawn_edge(
        &self,
        parent_thread_id: ThreadId,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()> {
        self.state_db
            .upsert_thread_spawn_edge(parent_thread_id, child_thread_id, to_state_status(status))
            .await
            .map_err(internal_error)
    }

    async fn set_thread_spawn_edge_status(
        &self,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()> {
        self.state_db
            .set_thread_spawn_edge_status(child_thread_id, to_state_status(status))
            .await
            .map_err(internal_error)
    }

    async fn list_thread_spawn_children(
        &self,
        parent_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>> {
        if let Some(status) = status_filter {
            return self
                .state_db
                .list_thread_spawn_children_with_status(parent_thread_id, to_state_status(status))
                .await
                .map_err(internal_error);
        }

        self.state_db
            .list_thread_spawn_children(parent_thread_id)
            .await
            .map_err(internal_error)
    }

    async fn list_thread_spawn_descendants(
        &self,
        root_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>> {
        match status_filter {
            Some(status) => self
                .state_db
                .list_thread_spawn_descendants_with_status(root_thread_id, to_state_status(status))
                .await
                .map_err(internal_error),
            None => self
                .state_db
                .list_thread_spawn_descendants(root_thread_id)
                .await
                .map_err(internal_error),
        }
    }
}

fn to_state_status(status: ThreadSpawnEdgeStatus) -> codex_state::DirectionalThreadSpawnEdgeStatus {
    match status {
        ThreadSpawnEdgeStatus::Open => codex_state::DirectionalThreadSpawnEdgeStatus::Open,
        ThreadSpawnEdgeStatus::Closed => codex_state::DirectionalThreadSpawnEdgeStatus::Closed,
    }
}

fn internal_error(err: impl std::fmt::Display) -> AgentGraphStoreError {
    AgentGraphStoreError::Internal {
        message: err.to_string(),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_state::DirectionalThreadSpawnEdgeStatus;
    use pretty_assertions::assert_eq;
    use tempfile::TempDir;

    struct TestRuntime {
        state_db: Arc<StateRuntime>,
        _codex_home: TempDir,
    }

    fn thread_id(suffix: u128) -> ThreadId {
        ThreadId::from_string(&format!("00000000-0000-0000-0000-{suffix:012}"))
            .expect("valid thread id")
    }

    async fn state_runtime() -> TestRuntime {
        let codex_home = TempDir::new().expect("tempdir should be created");
        let state_db =
            StateRuntime::init(codex_home.path().to_path_buf(), "test-provider".to_string())
                .await
                .expect("state db should initialize");
        TestRuntime {
            state_db,
            _codex_home: codex_home,
        }
    }

    #[tokio::test]
    async fn local_store_upserts_and_lists_direct_children_with_status_filters() {
        let fixture = state_runtime().await;
        let state_db = fixture.state_db;
        let store = LocalAgentGraphStore::new(state_db.clone());
        let parent_thread_id = thread_id(/*suffix*/ 1);
        let first_child_thread_id = thread_id(/*suffix*/ 2);
        let second_child_thread_id = thread_id(/*suffix*/ 3);

        store
            .upsert_thread_spawn_edge(
                parent_thread_id,
                second_child_thread_id,
                ThreadSpawnEdgeStatus::Closed,
            )
            .await
            .expect("closed child edge should insert");
        store
            .upsert_thread_spawn_edge(
                parent_thread_id,
                first_child_thread_id,
                ThreadSpawnEdgeStatus::Open,
            )
            .await
            .expect("open child edge should insert");

        let all_children = store
            .list_thread_spawn_children(parent_thread_id, /*status_filter*/ None)
            .await
            .expect("all children should load");
        assert_eq!(
            all_children,
            vec![first_child_thread_id, second_child_thread_id]
        );

        let open_children = store
            .list_thread_spawn_children(parent_thread_id, Some(ThreadSpawnEdgeStatus::Open))
            .await
            .expect("open children should load");
        let state_open_children = state_db
            .list_thread_spawn_children_with_status(
                parent_thread_id,
                DirectionalThreadSpawnEdgeStatus::Open,
            )
            .await
            .expect("state open children should load");
        assert_eq!(open_children, state_open_children);
        assert_eq!(open_children, vec![first_child_thread_id]);

        let closed_children = store
            .list_thread_spawn_children(parent_thread_id, Some(ThreadSpawnEdgeStatus::Closed))
            .await
            .expect("closed children should load");
        assert_eq!(closed_children, vec![second_child_thread_id]);
    }

    #[tokio::test]
    async fn local_store_updates_edge_status() {
        let fixture = state_runtime().await;
        let state_db = fixture.state_db;
        let store = LocalAgentGraphStore::new(state_db);
        let parent_thread_id = thread_id(/*suffix*/ 10);
        let child_thread_id = thread_id(/*suffix*/ 11);

        store
            .upsert_thread_spawn_edge(
                parent_thread_id,
                child_thread_id,
                ThreadSpawnEdgeStatus::Open,
            )
            .await
            .expect("child edge should insert");
        store
            .set_thread_spawn_edge_status(child_thread_id, ThreadSpawnEdgeStatus::Closed)
            .await
            .expect("child edge should close");

        let open_children = store
            .list_thread_spawn_children(parent_thread_id, Some(ThreadSpawnEdgeStatus::Open))
            .await
            .expect("open children should load");
        assert_eq!(open_children, Vec::<ThreadId>::new());

        let closed_children = store
            .list_thread_spawn_children(parent_thread_id, Some(ThreadSpawnEdgeStatus::Closed))
            .await
            .expect("closed children should load");
        assert_eq!(closed_children, vec![child_thread_id]);
    }

    #[tokio::test]
    async fn local_store_lists_descendants_breadth_first_with_status_filters() {
        let fixture = state_runtime().await;
        let state_db = fixture.state_db;
        let store = LocalAgentGraphStore::new(state_db.clone());
        let root_thread_id = thread_id(/*suffix*/ 20);
        let later_child_thread_id = thread_id(/*suffix*/ 22);
        let earlier_child_thread_id = thread_id(/*suffix*/ 21);
        let closed_grandchild_thread_id = thread_id(/*suffix*/ 23);
        let open_grandchild_thread_id = thread_id(/*suffix*/ 24);
        let closed_child_thread_id = thread_id(/*suffix*/ 25);
        let closed_great_grandchild_thread_id = thread_id(/*suffix*/ 26);

        for (parent_thread_id, child_thread_id, status) in [
            (
                root_thread_id,
                later_child_thread_id,
                ThreadSpawnEdgeStatus::Open,
            ),
            (
                root_thread_id,
                earlier_child_thread_id,
                ThreadSpawnEdgeStatus::Open,
            ),
            (
                earlier_child_thread_id,
                open_grandchild_thread_id,
                ThreadSpawnEdgeStatus::Open,
            ),
            (
                later_child_thread_id,
                closed_grandchild_thread_id,
                ThreadSpawnEdgeStatus::Closed,
            ),
            (
                root_thread_id,
                closed_child_thread_id,
                ThreadSpawnEdgeStatus::Closed,
            ),
            (
                closed_child_thread_id,
                closed_great_grandchild_thread_id,
                ThreadSpawnEdgeStatus::Closed,
            ),
        ] {
            store
                .upsert_thread_spawn_edge(parent_thread_id, child_thread_id, status)
                .await
                .expect("edge should insert");
        }

        let all_descendants = store
            .list_thread_spawn_descendants(root_thread_id, /*status_filter*/ None)
            .await
            .expect("all descendants should load");
        assert_eq!(
            all_descendants,
            vec![
                earlier_child_thread_id,
                later_child_thread_id,
                closed_child_thread_id,
                closed_grandchild_thread_id,
                open_grandchild_thread_id,
                closed_great_grandchild_thread_id,
            ]
        );

        let open_descendants = store
            .list_thread_spawn_descendants(root_thread_id, Some(ThreadSpawnEdgeStatus::Open))
            .await
            .expect("open descendants should load");
        let state_open_descendants = state_db
            .list_thread_spawn_descendants_with_status(
                root_thread_id,
                DirectionalThreadSpawnEdgeStatus::Open,
            )
            .await
            .expect("state open descendants should load");
        assert_eq!(open_descendants, state_open_descendants);
        assert_eq!(
            open_descendants,
            vec![
                earlier_child_thread_id,
                later_child_thread_id,
                open_grandchild_thread_id,
            ]
        );

        let closed_descendants = store
            .list_thread_spawn_descendants(root_thread_id, Some(ThreadSpawnEdgeStatus::Closed))
            .await
            .expect("closed descendants should load");
        assert_eq!(
            closed_descendants,
            vec![closed_child_thread_id, closed_great_grandchild_thread_id]
        );
    }
}

@@ -1,13 +0,0 @@
# Remote Agent Graph Store

- The Rust protobuf output in `proto/codex.agent_graph_store.v1.rs` is checked in.
- Do not add build-time protobuf generation to `codex-agent-graph-store` unless the Bazel/Cargo story is intentionally changed.
- When `proto/codex.agent_graph_store.v1.proto` changes, regenerate the Rust file manually and include both files in the same commit.

Run this from the repository root:

```sh
./codex-rs/agent-graph-store/scripts/generate-proto.sh
```

The command requires `protoc` to be available on `PATH`.

@@ -1,42 +0,0 @@
use codex_protocol::ThreadId;

use super::proto;
use crate::AgentGraphStoreError;
use crate::AgentGraphStoreResult;
use crate::ThreadSpawnEdgeStatus;

pub(super) fn proto_status(status: ThreadSpawnEdgeStatus) -> proto::ThreadSpawnEdgeStatus {
    match status {
        ThreadSpawnEdgeStatus::Open => proto::ThreadSpawnEdgeStatus::Open,
        ThreadSpawnEdgeStatus::Closed => proto::ThreadSpawnEdgeStatus::Closed,
    }
}

pub(super) fn proto_status_filter(status: Option<ThreadSpawnEdgeStatus>) -> Option<i32> {
    status.map(proto_status).map(Into::into)
}

pub(super) fn thread_ids_from_proto(
    thread_ids: Vec<String>,
    field_name: &str,
) -> AgentGraphStoreResult<Vec<ThreadId>> {
    thread_ids
        .into_iter()
        .map(|thread_id| {
            ThreadId::from_string(&thread_id).map_err(|err| AgentGraphStoreError::InvalidRequest {
                message: format!("remote agent graph store returned invalid {field_name}: {err}"),
            })
        })
        .collect()
}

pub(super) fn remote_status_to_error(status: tonic::Status) -> AgentGraphStoreError {
    match status.code() {
        tonic::Code::InvalidArgument => AgentGraphStoreError::InvalidRequest {
            message: status.message().to_string(),
        },
        _ => AgentGraphStoreError::Internal {
            message: format!("remote agent graph store request failed: {status}"),
        },
    }
}

@@ -1,339 +0,0 @@
mod helpers;

use async_trait::async_trait;
use codex_protocol::ThreadId;
use proto::agent_graph_store_client::AgentGraphStoreClient;

use crate::AgentGraphStore;
use crate::AgentGraphStoreError;
use crate::AgentGraphStoreResult;
use crate::ThreadSpawnEdgeStatus;

#[path = "proto/codex.agent_graph_store.v1.rs"]
mod proto;

/// gRPC-backed [`AgentGraphStore`] implementation for deployments whose durable
/// subagent graph lives outside the app-server process.
#[derive(Clone, Debug)]
pub struct RemoteAgentGraphStore {
    endpoint: String,
}

impl RemoteAgentGraphStore {
    pub fn new(endpoint: impl Into<String>) -> Self {
        Self {
            endpoint: endpoint.into(),
        }
    }

    async fn client(
        &self,
    ) -> AgentGraphStoreResult<AgentGraphStoreClient<tonic::transport::Channel>> {
        AgentGraphStoreClient::connect(self.endpoint.clone())
            .await
            .map_err(|err| AgentGraphStoreError::Internal {
                message: format!("failed to connect to remote agent graph store: {err}"),
            })
    }
}

#[async_trait]
impl AgentGraphStore for RemoteAgentGraphStore {
    async fn upsert_thread_spawn_edge(
        &self,
        parent_thread_id: ThreadId,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()> {
        let request = proto::UpsertThreadSpawnEdgeRequest {
            parent_thread_id: parent_thread_id.to_string(),
            child_thread_id: child_thread_id.to_string(),
            status: helpers::proto_status(status).into(),
        };
        self.client()
            .await?
            .upsert_thread_spawn_edge(request)
            .await
            .map_err(helpers::remote_status_to_error)?;
        Ok(())
    }

    async fn set_thread_spawn_edge_status(
        &self,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()> {
        let request = proto::SetThreadSpawnEdgeStatusRequest {
            child_thread_id: child_thread_id.to_string(),
            status: helpers::proto_status(status).into(),
        };
        self.client()
            .await?
            .set_thread_spawn_edge_status(request)
            .await
            .map_err(helpers::remote_status_to_error)?;
        Ok(())
    }

    async fn list_thread_spawn_children(
        &self,
        parent_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>> {
        let response = self
            .client()
            .await?
            .list_thread_spawn_children(proto::ListThreadSpawnChildrenRequest {
                parent_thread_id: parent_thread_id.to_string(),
                status_filter: helpers::proto_status_filter(status_filter),
            })
            .await
            .map_err(helpers::remote_status_to_error)?
            .into_inner();
        helpers::thread_ids_from_proto(response.thread_ids, "child thread_id")
    }

    async fn list_thread_spawn_descendants(
        &self,
        root_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>> {
        let response = self
            .client()
            .await?
            .list_thread_spawn_descendants(proto::ListThreadSpawnDescendantsRequest {
                root_thread_id: root_thread_id.to_string(),
                status_filter: helpers::proto_status_filter(status_filter),
            })
            .await
            .map_err(helpers::remote_status_to_error)?
            .into_inner();
        helpers::thread_ids_from_proto(response.thread_ids, "descendant thread_id")
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::proto;
|
||||
use super::proto::agent_graph_store_server;
|
||||
use super::proto::agent_graph_store_server::AgentGraphStoreServer;
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tonic::Request;
|
||||
use tonic::Response;
|
||||
use tonic::Status;
|
||||
use tonic::transport::Server;
|
||||
|
||||
fn thread_id(suffix: u128) -> ThreadId {
|
||||
ThreadId::from_string(&format!("00000000-0000-0000-0000-{suffix:012}"))
|
||||
.expect("valid thread id")
|
||||
}
|
||||
|
||||
async fn serve_test_server(server: TestServer) -> (RemoteAgentGraphStore, ServerShutdown) {
|
||||
let listener = tokio::net::TcpListener::bind("127.0.0.1:0")
|
||||
.await
|
||||
.expect("bind test server");
|
||||
let addr = listener.local_addr().expect("test server addr");
|
||||
let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel();
|
||||
let handle = tokio::spawn(async move {
|
||||
Server::builder()
|
||||
.add_service(AgentGraphStoreServer::new(server))
|
||||
.serve_with_incoming_shutdown(
|
||||
tokio_stream::wrappers::TcpListenerStream::new(listener),
|
||||
async {
|
||||
let _ = shutdown_rx.await;
|
||||
},
|
||||
)
|
||||
.await
|
||||
});
|
||||
|
||||
(
|
||||
RemoteAgentGraphStore::new(format!("http://{addr}")),
|
||||
ServerShutdown {
|
||||
shutdown_tx,
|
||||
handle,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
struct ServerShutdown {
|
||||
shutdown_tx: tokio::sync::oneshot::Sender<()>,
|
||||
handle: tokio::task::JoinHandle<Result<(), tonic::transport::Error>>,
|
||||
}
|
||||
|
||||
impl ServerShutdown {
|
||||
async fn shutdown(self) {
|
||||
let _ = self.shutdown_tx.send(());
|
||||
self.handle.await.expect("join server").expect("server");
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
enum TestServer {
|
||||
HappyPath,
|
||||
InvalidThreadIdResponse,
|
||||
InvalidArgumentStatus,
|
||||
}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl agent_graph_store_server::AgentGraphStore for TestServer {
|
||||
async fn upsert_thread_spawn_edge(
|
||||
&self,
|
||||
request: Request<proto::UpsertThreadSpawnEdgeRequest>,
|
||||
) -> Result<Response<proto::Empty>, Status> {
|
||||
match self {
|
||||
TestServer::InvalidArgumentStatus => {
|
||||
Err(Status::invalid_argument("status must be specified"))
|
||||
}
|
||||
TestServer::HappyPath | TestServer::InvalidThreadIdResponse => {
|
||||
let request = request.into_inner();
|
||||
assert_eq!(
|
||||
request.parent_thread_id,
|
||||
"00000000-0000-0000-0000-000000000001"
|
||||
);
|
||||
assert_eq!(
|
||||
request.child_thread_id,
|
||||
"00000000-0000-0000-0000-000000000002"
|
||||
);
|
||||
assert_eq!(
|
||||
proto::ThreadSpawnEdgeStatus::try_from(request.status),
|
||||
Ok(proto::ThreadSpawnEdgeStatus::Open)
|
||||
);
|
||||
Ok(Response::new(proto::Empty {}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn set_thread_spawn_edge_status(
|
||||
&self,
|
||||
request: Request<proto::SetThreadSpawnEdgeStatusRequest>,
|
||||
) -> Result<Response<proto::Empty>, Status> {
|
||||
let request = request.into_inner();
|
||||
assert_eq!(
|
||||
request.child_thread_id,
|
||||
"00000000-0000-0000-0000-000000000002"
|
||||
);
|
||||
assert_eq!(
|
||||
proto::ThreadSpawnEdgeStatus::try_from(request.status),
|
||||
Ok(proto::ThreadSpawnEdgeStatus::Closed)
|
||||
);
|
||||
Ok(Response::new(proto::Empty {}))
|
||||
}
|
||||
|
||||
async fn list_thread_spawn_children(
|
||||
&self,
|
||||
request: Request<proto::ListThreadSpawnChildrenRequest>,
|
||||
) -> Result<Response<proto::ListThreadSpawnChildrenResponse>, Status> {
|
||||
let request = request.into_inner();
|
||||
assert_eq!(
|
||||
request.parent_thread_id,
|
||||
"00000000-0000-0000-0000-000000000001"
|
||||
);
|
||||
assert_eq!(
|
||||
request
|
||||
.status_filter
|
||||
.map(proto::ThreadSpawnEdgeStatus::try_from),
|
||||
Some(Ok(proto::ThreadSpawnEdgeStatus::Open))
|
||||
);
|
||||
let thread_ids = match self {
|
||||
TestServer::InvalidThreadIdResponse => vec!["not-a-thread-id".to_string()],
|
||||
TestServer::HappyPath | TestServer::InvalidArgumentStatus => {
|
||||
vec![
|
||||
"00000000-0000-0000-0000-000000000002".to_string(),
|
||||
"00000000-0000-0000-0000-000000000003".to_string(),
|
||||
]
|
||||
}
|
||||
};
|
||||
Ok(Response::new(proto::ListThreadSpawnChildrenResponse {
|
||||
thread_ids,
|
||||
}))
|
||||
}
|
||||
|
||||
async fn list_thread_spawn_descendants(
|
||||
&self,
|
||||
request: Request<proto::ListThreadSpawnDescendantsRequest>,
|
||||
) -> Result<Response<proto::ListThreadSpawnDescendantsResponse>, Status> {
|
||||
let request = request.into_inner();
|
||||
assert_eq!(
|
||||
request.root_thread_id,
|
||||
"00000000-0000-0000-0000-000000000001"
|
||||
);
|
||||
assert_eq!(request.status_filter, None);
|
||||
Ok(Response::new(proto::ListThreadSpawnDescendantsResponse {
|
||||
thread_ids: vec![
|
||||
"00000000-0000-0000-0000-000000000002".to_string(),
|
||||
"00000000-0000-0000-0000-000000000003".to_string(),
|
||||
"00000000-0000-0000-0000-000000000004".to_string(),
|
||||
],
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn remote_store_calls_agent_graph_service() {
|
||||
let (store, shutdown) = serve_test_server(TestServer::HappyPath).await;
|
||||
let parent_thread_id = thread_id(1);
|
||||
let child_thread_id = thread_id(2);
|
||||
|
||||
store
|
||||
.upsert_thread_spawn_edge(
|
||||
parent_thread_id,
|
||||
child_thread_id,
|
||||
ThreadSpawnEdgeStatus::Open,
|
||||
)
|
||||
.await
|
||||
.expect("upsert should succeed");
|
||||
store
|
||||
.set_thread_spawn_edge_status(child_thread_id, ThreadSpawnEdgeStatus::Closed)
|
||||
.await
|
||||
.expect("status update should succeed");
|
||||
|
||||
let children = store
|
||||
.list_thread_spawn_children(parent_thread_id, Some(ThreadSpawnEdgeStatus::Open))
|
||||
.await
|
||||
.expect("children should load");
|
||||
assert_eq!(children, vec![thread_id(2), thread_id(3)]);
|
||||
|
||||
let descendants = store
|
||||
.list_thread_spawn_descendants(parent_thread_id, None)
|
||||
.await
|
||||
.expect("descendants should load");
|
||||
assert_eq!(descendants, vec![thread_id(2), thread_id(3), thread_id(4)]);
|
||||
|
||||
shutdown.shutdown().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn remote_store_maps_invalid_response_thread_id_to_invalid_request() {
|
||||
let (store, shutdown) = serve_test_server(TestServer::InvalidThreadIdResponse).await;
|
||||
|
||||
let err = store
|
||||
.list_thread_spawn_children(thread_id(1), Some(ThreadSpawnEdgeStatus::Open))
|
||||
.await
|
||||
.expect_err("invalid response thread id should fail");
|
||||
|
||||
assert!(matches!(
|
||||
err,
|
||||
AgentGraphStoreError::InvalidRequest { message } if message.contains("invalid child thread_id")
|
||||
));
|
||||
|
||||
shutdown.shutdown().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn remote_store_maps_invalid_argument_status_to_invalid_request() {
|
||||
let (store, shutdown) = serve_test_server(TestServer::InvalidArgumentStatus).await;
|
||||
|
||||
let err = store
|
||||
.upsert_thread_spawn_edge(thread_id(1), thread_id(2), ThreadSpawnEdgeStatus::Open)
|
||||
.await
|
||||
.expect_err("invalid argument should fail");
|
||||
|
||||
assert!(matches!(
|
||||
err,
|
||||
AgentGraphStoreError::InvalidRequest { message } if message == "status must be specified"
|
||||
));
|
||||
|
||||
shutdown.shutdown().await;
|
||||
}
|
||||
}
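A hedged usage sketch of the client above, mirroring the happy-path test; the endpoint and the `parent`/`child` thread ids are placeholders, not values from this repository:

```rust
// Inside an async context; failures surface as AgentGraphStoreError.
let store = RemoteAgentGraphStore::new("http://127.0.0.1:50051"); // hypothetical endpoint
store
    .upsert_thread_spawn_edge(parent, child, ThreadSpawnEdgeStatus::Open)
    .await?;
let open_children = store
    .list_thread_spawn_children(parent, Some(ThreadSpawnEdgeStatus::Open))
    .await?;
```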
@@ -1,53 +0,0 @@
syntax = "proto3";

package codex.agent_graph_store.v1;

service AgentGraphStore {
  rpc UpsertThreadSpawnEdge(UpsertThreadSpawnEdgeRequest) returns (Empty);
  rpc SetThreadSpawnEdgeStatus(SetThreadSpawnEdgeStatusRequest) returns (Empty);
  rpc ListThreadSpawnChildren(ListThreadSpawnChildrenRequest) returns (ListThreadSpawnChildrenResponse);
  rpc ListThreadSpawnDescendants(ListThreadSpawnDescendantsRequest) returns (ListThreadSpawnDescendantsResponse);
}

message Empty {}

message UpsertThreadSpawnEdgeRequest {
  string parent_thread_id = 1;
  string child_thread_id = 2;
  // Servers must reject THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED.
  ThreadSpawnEdgeStatus status = 3;
}

message SetThreadSpawnEdgeStatusRequest {
  string child_thread_id = 1;
  // Servers must reject THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED.
  ThreadSpawnEdgeStatus status = 2;
}

message ListThreadSpawnChildrenRequest {
  string parent_thread_id = 1;
  // Absent means all statuses.
  optional ThreadSpawnEdgeStatus status_filter = 2;
}

message ListThreadSpawnChildrenResponse {
  // Ordered child thread ids.
  repeated string thread_ids = 1;
}

message ListThreadSpawnDescendantsRequest {
  string root_thread_id = 1;
  // Absent means all statuses.
  optional ThreadSpawnEdgeStatus status_filter = 2;
}

message ListThreadSpawnDescendantsResponse {
  // Ordered descendant thread ids.
  repeated string thread_ids = 1;
}

enum ThreadSpawnEdgeStatus {
  THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED = 0;
  THREAD_SPAWN_EDGE_STATUS_OPEN = 1;
  THREAD_SPAWN_EDGE_STATUS_CLOSED = 2;
}
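Note that the `optional` enum fields above surface as `Option<i32>` on the prost side, as the generated file below shows. A minimal request-construction sketch, with a placeholder thread id:

```rust
// Assumes the generated `proto` module from this package is in scope.
let request = proto::ListThreadSpawnChildrenRequest {
    parent_thread_id: "00000000-0000-0000-0000-000000000001".to_string(),
    // Absent means all statuses; Some(...) narrows to a single status.
    status_filter: Some(proto::ThreadSpawnEdgeStatus::Open.into()),
};
```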
@@ -1,569 +0,0 @@
// This file is @generated by prost-build.
#![allow(clippy::trivially_copy_pass_by_ref)]

#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)]
pub struct Empty {}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct UpsertThreadSpawnEdgeRequest {
    #[prost(string, tag = "1")]
    pub parent_thread_id: ::prost::alloc::string::String,
    #[prost(string, tag = "2")]
    pub child_thread_id: ::prost::alloc::string::String,
    /// Servers must reject THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED.
    #[prost(enumeration = "ThreadSpawnEdgeStatus", tag = "3")]
    pub status: i32,
}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct SetThreadSpawnEdgeStatusRequest {
    #[prost(string, tag = "1")]
    pub child_thread_id: ::prost::alloc::string::String,
    /// Servers must reject THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED.
    #[prost(enumeration = "ThreadSpawnEdgeStatus", tag = "2")]
    pub status: i32,
}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct ListThreadSpawnChildrenRequest {
    #[prost(string, tag = "1")]
    pub parent_thread_id: ::prost::alloc::string::String,
    /// Absent means all statuses.
    #[prost(enumeration = "ThreadSpawnEdgeStatus", optional, tag = "2")]
    pub status_filter: ::core::option::Option<i32>,
}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct ListThreadSpawnChildrenResponse {
    /// Ordered child thread ids.
    #[prost(string, repeated, tag = "1")]
    pub thread_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct ListThreadSpawnDescendantsRequest {
    #[prost(string, tag = "1")]
    pub root_thread_id: ::prost::alloc::string::String,
    /// Absent means all statuses.
    #[prost(enumeration = "ThreadSpawnEdgeStatus", optional, tag = "2")]
    pub status_filter: ::core::option::Option<i32>,
}
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)]
pub struct ListThreadSpawnDescendantsResponse {
    /// Ordered descendant thread ids.
    #[prost(string, repeated, tag = "1")]
    pub thread_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum ThreadSpawnEdgeStatus {
    Unspecified = 0,
    Open = 1,
    Closed = 2,
}
impl ThreadSpawnEdgeStatus {
    /// String value of the enum field names used in the ProtoBuf definition.
    ///
    /// The values are not transformed in any way and thus are considered stable
    /// (if the ProtoBuf definition does not change) and safe for programmatic use.
    pub fn as_str_name(&self) -> &'static str {
        match self {
            Self::Unspecified => "THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED",
            Self::Open => "THREAD_SPAWN_EDGE_STATUS_OPEN",
            Self::Closed => "THREAD_SPAWN_EDGE_STATUS_CLOSED",
        }
    }
    /// Creates an enum from field names used in the ProtoBuf definition.
    pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
        match value {
            "THREAD_SPAWN_EDGE_STATUS_UNSPECIFIED" => Some(Self::Unspecified),
            "THREAD_SPAWN_EDGE_STATUS_OPEN" => Some(Self::Open),
            "THREAD_SPAWN_EDGE_STATUS_CLOSED" => Some(Self::Closed),
            _ => None,
        }
    }
}
/// Generated client implementations.
pub mod agent_graph_store_client {
    #![allow(
        unused_variables,
        dead_code,
        missing_docs,
        clippy::wildcard_imports,
        clippy::let_unit_value
    )]
    use tonic::codegen::http::Uri;
    use tonic::codegen::*;
    #[derive(Debug, Clone)]
    pub struct AgentGraphStoreClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl AgentGraphStoreClient<tonic::transport::Channel> {
        /// Attempt to create a new client by connecting to a given endpoint.
        pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
        where
            D: TryInto<tonic::transport::Endpoint>,
            D::Error: Into<StdError>,
        {
            let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
            Ok(Self::new(conn))
        }
    }
    impl<T> AgentGraphStoreClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::Body>,
        T::Error: Into<StdError>,
        T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
        <T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
    {
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        pub fn with_origin(inner: T, origin: Uri) -> Self {
            let inner = tonic::client::Grpc::with_origin(inner, origin);
            Self { inner }
        }
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> AgentGraphStoreClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T::ResponseBody: Default,
            T: tonic::codegen::Service<
                http::Request<tonic::body::Body>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::Body>>>::Error:
                Into<StdError> + std::marker::Send + std::marker::Sync,
        {
            AgentGraphStoreClient::new(InterceptedService::new(inner, interceptor))
        }
        /// Compress requests with the given encoding.
        ///
        /// This requires the server to support it otherwise it might respond with an
        /// error.
        #[must_use]
        pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.inner = self.inner.send_compressed(encoding);
            self
        }
        /// Enable decompressing responses.
        #[must_use]
        pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.inner = self.inner.accept_compressed(encoding);
            self
        }
        /// Limits the maximum size of a decoded message.
        ///
        /// Default: `4MB`
        #[must_use]
        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
            self.inner = self.inner.max_decoding_message_size(limit);
            self
        }
        /// Limits the maximum size of an encoded message.
        ///
        /// Default: `usize::MAX`
        #[must_use]
        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
            self.inner = self.inner.max_encoding_message_size(limit);
            self
        }
        pub async fn upsert_thread_spawn_edge(
            &mut self,
            request: impl tonic::IntoRequest<super::UpsertThreadSpawnEdgeRequest>,
        ) -> std::result::Result<tonic::Response<super::Empty>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
            })?;
            let codec = tonic_prost::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/codex.agent_graph_store.v1.AgentGraphStore/UpsertThreadSpawnEdge",
            );
            let mut req = request.into_request();
            req.extensions_mut().insert(GrpcMethod::new(
                "codex.agent_graph_store.v1.AgentGraphStore",
                "UpsertThreadSpawnEdge",
            ));
            self.inner.unary(req, path, codec).await
        }
        pub async fn set_thread_spawn_edge_status(
            &mut self,
            request: impl tonic::IntoRequest<super::SetThreadSpawnEdgeStatusRequest>,
        ) -> std::result::Result<tonic::Response<super::Empty>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
            })?;
            let codec = tonic_prost::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/codex.agent_graph_store.v1.AgentGraphStore/SetThreadSpawnEdgeStatus",
            );
            let mut req = request.into_request();
            req.extensions_mut().insert(GrpcMethod::new(
                "codex.agent_graph_store.v1.AgentGraphStore",
                "SetThreadSpawnEdgeStatus",
            ));
            self.inner.unary(req, path, codec).await
        }
        pub async fn list_thread_spawn_children(
            &mut self,
            request: impl tonic::IntoRequest<super::ListThreadSpawnChildrenRequest>,
        ) -> std::result::Result<
            tonic::Response<super::ListThreadSpawnChildrenResponse>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
            })?;
            let codec = tonic_prost::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/codex.agent_graph_store.v1.AgentGraphStore/ListThreadSpawnChildren",
            );
            let mut req = request.into_request();
            req.extensions_mut().insert(GrpcMethod::new(
                "codex.agent_graph_store.v1.AgentGraphStore",
                "ListThreadSpawnChildren",
            ));
            self.inner.unary(req, path, codec).await
        }
        pub async fn list_thread_spawn_descendants(
            &mut self,
            request: impl tonic::IntoRequest<super::ListThreadSpawnDescendantsRequest>,
        ) -> std::result::Result<
            tonic::Response<super::ListThreadSpawnDescendantsResponse>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
            })?;
            let codec = tonic_prost::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/codex.agent_graph_store.v1.AgentGraphStore/ListThreadSpawnDescendants",
            );
            let mut req = request.into_request();
            req.extensions_mut().insert(GrpcMethod::new(
                "codex.agent_graph_store.v1.AgentGraphStore",
                "ListThreadSpawnDescendants",
            ));
            self.inner.unary(req, path, codec).await
        }
    }
}
/// Generated server implementations.
pub mod agent_graph_store_server {
    #![allow(
        unused_variables,
        dead_code,
        missing_docs,
        clippy::wildcard_imports,
        clippy::let_unit_value
    )]
    use tonic::codegen::*;
    /// Generated trait containing gRPC methods that should be implemented for use with AgentGraphStoreServer.
    #[async_trait]
    pub trait AgentGraphStore: std::marker::Send + std::marker::Sync + 'static {
        async fn upsert_thread_spawn_edge(
            &self,
            request: tonic::Request<super::UpsertThreadSpawnEdgeRequest>,
        ) -> std::result::Result<tonic::Response<super::Empty>, tonic::Status>;
        async fn set_thread_spawn_edge_status(
            &self,
            request: tonic::Request<super::SetThreadSpawnEdgeStatusRequest>,
        ) -> std::result::Result<tonic::Response<super::Empty>, tonic::Status>;
        async fn list_thread_spawn_children(
            &self,
            request: tonic::Request<super::ListThreadSpawnChildrenRequest>,
        ) -> std::result::Result<
            tonic::Response<super::ListThreadSpawnChildrenResponse>,
            tonic::Status,
        >;
        async fn list_thread_spawn_descendants(
            &self,
            request: tonic::Request<super::ListThreadSpawnDescendantsRequest>,
        ) -> std::result::Result<
            tonic::Response<super::ListThreadSpawnDescendantsResponse>,
            tonic::Status,
        >;
    }
    #[derive(Debug)]
    pub struct AgentGraphStoreServer<T> {
        inner: Arc<T>,
        accept_compression_encodings: EnabledCompressionEncodings,
        send_compression_encodings: EnabledCompressionEncodings,
        max_decoding_message_size: Option<usize>,
        max_encoding_message_size: Option<usize>,
    }
    impl<T> AgentGraphStoreServer<T> {
        pub fn new(inner: T) -> Self {
            Self::from_arc(Arc::new(inner))
        }
        pub fn from_arc(inner: Arc<T>) -> Self {
            Self {
                inner,
                accept_compression_encodings: Default::default(),
                send_compression_encodings: Default::default(),
                max_decoding_message_size: None,
                max_encoding_message_size: None,
            }
        }
        pub fn with_interceptor<F>(inner: T, interceptor: F) -> InterceptedService<Self, F>
        where
            F: tonic::service::Interceptor,
        {
            InterceptedService::new(Self::new(inner), interceptor)
        }
        /// Enable decompressing requests with the given encoding.
        #[must_use]
        pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.accept_compression_encodings.enable(encoding);
            self
        }
        /// Compress responses with the given encoding, if the client supports it.
        #[must_use]
        pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
            self.send_compression_encodings.enable(encoding);
            self
        }
        /// Limits the maximum size of a decoded message.
        ///
        /// Default: `4MB`
        #[must_use]
        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
            self.max_decoding_message_size = Some(limit);
            self
        }
        /// Limits the maximum size of an encoded message.
        ///
        /// Default: `usize::MAX`
        #[must_use]
        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
            self.max_encoding_message_size = Some(limit);
            self
        }
    }
    impl<T, B> tonic::codegen::Service<http::Request<B>> for AgentGraphStoreServer<T>
    where
        T: AgentGraphStore,
        B: Body + std::marker::Send + 'static,
        B::Error: Into<StdError> + std::marker::Send + 'static,
    {
        type Response = http::Response<tonic::body::Body>;
        type Error = std::convert::Infallible;
        type Future = BoxFuture<Self::Response, Self::Error>;
        fn poll_ready(
            &mut self,
            _cx: &mut Context<'_>,
        ) -> Poll<std::result::Result<(), Self::Error>> {
            Poll::Ready(Ok(()))
        }
        fn call(&mut self, req: http::Request<B>) -> Self::Future {
            match req.uri().path() {
                "/codex.agent_graph_store.v1.AgentGraphStore/UpsertThreadSpawnEdge" => {
                    #[allow(non_camel_case_types)]
                    struct UpsertThreadSpawnEdgeSvc<T: AgentGraphStore>(pub Arc<T>);
                    impl<T: AgentGraphStore>
                        tonic::server::UnaryService<super::UpsertThreadSpawnEdgeRequest>
                        for UpsertThreadSpawnEdgeSvc<T>
                    {
                        type Response = super::Empty;
                        type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
                        fn call(
                            &mut self,
                            request: tonic::Request<super::UpsertThreadSpawnEdgeRequest>,
                        ) -> Self::Future {
                            let inner = Arc::clone(&self.0);
                            let fut = async move {
                                <T as AgentGraphStore>::upsert_thread_spawn_edge(&inner, request)
                                    .await
                            };
                            Box::pin(fut)
                        }
                    }
                    let accept_compression_encodings = self.accept_compression_encodings;
                    let send_compression_encodings = self.send_compression_encodings;
                    let max_decoding_message_size = self.max_decoding_message_size;
                    let max_encoding_message_size = self.max_encoding_message_size;
                    let inner = self.inner.clone();
                    let fut = async move {
                        let method = UpsertThreadSpawnEdgeSvc(inner);
                        let codec = tonic_prost::ProstCodec::default();
                        let mut grpc = tonic::server::Grpc::new(codec)
                            .apply_compression_config(
                                accept_compression_encodings,
                                send_compression_encodings,
                            )
                            .apply_max_message_size_config(
                                max_decoding_message_size,
                                max_encoding_message_size,
                            );
                        let res = grpc.unary(method, req).await;
                        Ok(res)
                    };
                    Box::pin(fut)
                }
                "/codex.agent_graph_store.v1.AgentGraphStore/SetThreadSpawnEdgeStatus" => {
                    #[allow(non_camel_case_types)]
                    struct SetThreadSpawnEdgeStatusSvc<T: AgentGraphStore>(pub Arc<T>);
                    impl<T: AgentGraphStore>
                        tonic::server::UnaryService<super::SetThreadSpawnEdgeStatusRequest>
                        for SetThreadSpawnEdgeStatusSvc<T>
                    {
                        type Response = super::Empty;
                        type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
                        fn call(
                            &mut self,
                            request: tonic::Request<super::SetThreadSpawnEdgeStatusRequest>,
                        ) -> Self::Future {
                            let inner = Arc::clone(&self.0);
                            let fut = async move {
                                <T as AgentGraphStore>::set_thread_spawn_edge_status(
                                    &inner, request,
                                )
                                .await
                            };
                            Box::pin(fut)
                        }
                    }
                    let accept_compression_encodings = self.accept_compression_encodings;
                    let send_compression_encodings = self.send_compression_encodings;
                    let max_decoding_message_size = self.max_decoding_message_size;
                    let max_encoding_message_size = self.max_encoding_message_size;
                    let inner = self.inner.clone();
                    let fut = async move {
                        let method = SetThreadSpawnEdgeStatusSvc(inner);
                        let codec = tonic_prost::ProstCodec::default();
                        let mut grpc = tonic::server::Grpc::new(codec)
                            .apply_compression_config(
                                accept_compression_encodings,
                                send_compression_encodings,
                            )
                            .apply_max_message_size_config(
                                max_decoding_message_size,
                                max_encoding_message_size,
                            );
                        let res = grpc.unary(method, req).await;
                        Ok(res)
                    };
                    Box::pin(fut)
                }
                "/codex.agent_graph_store.v1.AgentGraphStore/ListThreadSpawnChildren" => {
                    #[allow(non_camel_case_types)]
                    struct ListThreadSpawnChildrenSvc<T: AgentGraphStore>(pub Arc<T>);
                    impl<T: AgentGraphStore>
                        tonic::server::UnaryService<super::ListThreadSpawnChildrenRequest>
                        for ListThreadSpawnChildrenSvc<T>
                    {
                        type Response = super::ListThreadSpawnChildrenResponse;
                        type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
                        fn call(
                            &mut self,
                            request: tonic::Request<super::ListThreadSpawnChildrenRequest>,
                        ) -> Self::Future {
                            let inner = Arc::clone(&self.0);
                            let fut = async move {
                                <T as AgentGraphStore>::list_thread_spawn_children(&inner, request)
                                    .await
                            };
                            Box::pin(fut)
                        }
                    }
                    let accept_compression_encodings = self.accept_compression_encodings;
                    let send_compression_encodings = self.send_compression_encodings;
                    let max_decoding_message_size = self.max_decoding_message_size;
                    let max_encoding_message_size = self.max_encoding_message_size;
                    let inner = self.inner.clone();
                    let fut = async move {
                        let method = ListThreadSpawnChildrenSvc(inner);
                        let codec = tonic_prost::ProstCodec::default();
                        let mut grpc = tonic::server::Grpc::new(codec)
                            .apply_compression_config(
                                accept_compression_encodings,
                                send_compression_encodings,
                            )
                            .apply_max_message_size_config(
                                max_decoding_message_size,
                                max_encoding_message_size,
                            );
                        let res = grpc.unary(method, req).await;
                        Ok(res)
                    };
                    Box::pin(fut)
                }
                "/codex.agent_graph_store.v1.AgentGraphStore/ListThreadSpawnDescendants" => {
                    #[allow(non_camel_case_types)]
                    struct ListThreadSpawnDescendantsSvc<T: AgentGraphStore>(pub Arc<T>);
                    impl<T: AgentGraphStore>
                        tonic::server::UnaryService<super::ListThreadSpawnDescendantsRequest>
                        for ListThreadSpawnDescendantsSvc<T>
                    {
                        type Response = super::ListThreadSpawnDescendantsResponse;
                        type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
                        fn call(
                            &mut self,
                            request: tonic::Request<super::ListThreadSpawnDescendantsRequest>,
                        ) -> Self::Future {
                            let inner = Arc::clone(&self.0);
                            let fut = async move {
                                <T as AgentGraphStore>::list_thread_spawn_descendants(
                                    &inner, request,
                                )
                                .await
                            };
                            Box::pin(fut)
                        }
                    }
                    let accept_compression_encodings = self.accept_compression_encodings;
                    let send_compression_encodings = self.send_compression_encodings;
                    let max_decoding_message_size = self.max_decoding_message_size;
                    let max_encoding_message_size = self.max_encoding_message_size;
                    let inner = self.inner.clone();
                    let fut = async move {
                        let method = ListThreadSpawnDescendantsSvc(inner);
                        let codec = tonic_prost::ProstCodec::default();
                        let mut grpc = tonic::server::Grpc::new(codec)
                            .apply_compression_config(
                                accept_compression_encodings,
                                send_compression_encodings,
                            )
                            .apply_max_message_size_config(
                                max_decoding_message_size,
                                max_encoding_message_size,
                            );
                        let res = grpc.unary(method, req).await;
                        Ok(res)
                    };
                    Box::pin(fut)
                }
                _ => Box::pin(async move {
                    let mut response = http::Response::new(tonic::body::Body::default());
                    let headers = response.headers_mut();
                    headers.insert(
                        tonic::Status::GRPC_STATUS,
                        (tonic::Code::Unimplemented as i32).into(),
                    );
                    headers.insert(
                        http::header::CONTENT_TYPE,
                        tonic::metadata::GRPC_CONTENT_TYPE,
                    );
                    Ok(response)
                }),
            }
        }
    }
    impl<T> Clone for AgentGraphStoreServer<T> {
        fn clone(&self) -> Self {
            let inner = self.inner.clone();
            Self {
                inner,
                accept_compression_encodings: self.accept_compression_encodings,
                send_compression_encodings: self.send_compression_encodings,
                max_decoding_message_size: self.max_decoding_message_size,
                max_encoding_message_size: self.max_encoding_message_size,
            }
        }
    }
    /// Generated gRPC service name
    pub const SERVICE_NAME: &str = "codex.agent_graph_store.v1.AgentGraphStore";
    impl<T> tonic::server::NamedService for AgentGraphStoreServer<T> {
        const NAME: &'static str = SERVICE_NAME;
    }
}
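Serving the generated service follows the usual tonic shape, as in the test harness earlier in this diff; `MyStore` here is a hypothetical type implementing `agent_graph_store_server::AgentGraphStore`, and the address is a placeholder:

```rust
// Sketch only, inside an async fn returning anyhow::Result<()>.
tonic::transport::Server::builder()
    .add_service(agent_graph_store_server::AgentGraphStoreServer::new(MyStore))
    .serve("127.0.0.1:50051".parse()?)
    .await?;
```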
@@ -1,55 +0,0 @@
use async_trait::async_trait;
use codex_protocol::ThreadId;

use crate::AgentGraphStoreResult;
use crate::ThreadSpawnEdgeStatus;

/// Storage-neutral boundary for persisted thread-spawn parent/child topology.
///
/// Implementations are expected to return stable ordering for list methods so callers can merge
/// persisted graph state with live in-memory state without introducing nondeterministic output.
#[async_trait]
pub trait AgentGraphStore: Send + Sync {
    /// Insert or replace the directional parent/child edge for a spawned thread.
    ///
    /// `child_thread_id` has at most one persisted parent. Re-inserting the same child should
    /// update both the parent and status to match the supplied values.
    async fn upsert_thread_spawn_edge(
        &self,
        parent_thread_id: ThreadId,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()>;

    /// Update the persisted lifecycle status of a spawned thread's incoming edge.
    ///
    /// Implementations should treat missing children as a successful no-op.
    async fn set_thread_spawn_edge_status(
        &self,
        child_thread_id: ThreadId,
        status: ThreadSpawnEdgeStatus,
    ) -> AgentGraphStoreResult<()>;

    /// List direct spawned children of a parent thread.
    ///
    /// When `status_filter` is `Some`, only child edges with that exact status are returned. When
    /// it is `None`, all direct child edges are returned regardless of status, including statuses
    /// that may be added by a future store implementation.
    async fn list_thread_spawn_children(
        &self,
        parent_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>>;

    /// List spawned descendants breadth-first by depth, then by thread id.
    ///
    /// `status_filter` is applied to every traversed edge, not just to the returned descendants.
    /// For example, `Some(Open)` walks only open edges, so descendants under a closed edge are not
    /// included even if their own incoming edge is open. `None` walks and returns every persisted
    /// edge regardless of status.
    async fn list_thread_spawn_descendants(
        &self,
        root_thread_id: ThreadId,
        status_filter: Option<ThreadSpawnEdgeStatus>,
    ) -> AgentGraphStoreResult<Vec<ThreadId>>;
}
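An illustrative in-memory walk of the `list_thread_spawn_descendants` contract (not code from this crate): the status filter gates traversal itself, so a closed edge prunes its whole subtree.

```rust
use std::collections::{BTreeMap, VecDeque};

// parent -> sorted (child, status) edges; statuses are plain strs for brevity.
fn descendants(
    edges: &BTreeMap<String, Vec<(String, &'static str)>>,
    root: &str,
    status_filter: Option<&str>,
) -> Vec<String> {
    let mut out = Vec::new();
    let mut queue = VecDeque::from([root.to_string()]);
    while let Some(parent) = queue.pop_front() {
        for (child, status) in edges.get(&parent).into_iter().flatten().cloned() {
            // The filter applies to every traversed edge: a non-matching edge
            // is neither returned nor descended into.
            if status_filter.map_or(true, |wanted| wanted == status) {
                out.push(child.clone());
                queue.push_back(child);
            }
        }
    }
    out // breadth-first by depth; per-level order follows the sorted edges
}
```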
@@ -1,42 +0,0 @@
use serde::Deserialize;
use serde::Serialize;

/// Lifecycle status attached to a directional thread-spawn edge.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ThreadSpawnEdgeStatus {
    /// The child thread is still live or resumable as an open spawned agent.
    Open,
    /// The child thread has been closed from the parent/child graph's perspective.
    Closed,
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn thread_spawn_edge_status_serializes_as_snake_case() {
        assert_eq!(
            serde_json::to_string(&ThreadSpawnEdgeStatus::Open)
                .expect("open status should serialize"),
            "\"open\""
        );
        assert_eq!(
            serde_json::to_string(&ThreadSpawnEdgeStatus::Closed)
                .expect("closed status should serialize"),
            "\"closed\""
        );
        assert_eq!(
            serde_json::from_str::<ThreadSpawnEdgeStatus>("\"open\"")
                .expect("open status should deserialize"),
            ThreadSpawnEdgeStatus::Open
        );
        assert_eq!(
            serde_json::from_str::<ThreadSpawnEdgeStatus>("\"closed\"")
                .expect("closed status should deserialize"),
            ThreadSpawnEdgeStatus::Closed
        );
    }
}
@@ -19,6 +19,7 @@ chrono = { workspace = true }
codex-protocol = { workspace = true }
crypto_box = { workspace = true }
ed25519-dalek = { workspace = true }
jsonwebtoken = { workspace = true }
rand = { workspace = true }
reqwest = { workspace = true, features = ["json"] }
serde = { workspace = true, features = ["derive"] }

@@ -8,6 +8,7 @@ use base64::engine::general_purpose::STANDARD as BASE64_STANDARD
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::SecondsFormat;
use chrono::Utc;
use codex_protocol::account::PlanType as AccountPlanType;
use codex_protocol::protocol::SessionSource;
use crypto_box::SecretKey as Curve25519SecretKey;
use ed25519_dalek::Signer as _;
@@ -15,14 +16,24 @@ use ed25519_dalek::SigningKey;
use ed25519_dalek::VerifyingKey;
use ed25519_dalek::pkcs8::DecodePrivateKey;
use ed25519_dalek::pkcs8::EncodePrivateKey;
use jsonwebtoken::Algorithm;
use jsonwebtoken::DecodingKey;
use jsonwebtoken::Validation;
use jsonwebtoken::decode;
use jsonwebtoken::decode_header;
use jsonwebtoken::jwk::JwkSet;
use rand::TryRngCore;
use rand::rngs::OsRng;
use serde::Deserialize;
use serde::Serialize;
use serde::de::DeserializeOwned;
use sha2::Digest as _;
use sha2::Sha512;

const AGENT_TASK_REGISTRATION_TIMEOUT: Duration = Duration::from_secs(30);
const AGENT_IDENTITY_JWKS_TIMEOUT: Duration = Duration::from_secs(10);
const AGENT_IDENTITY_JWT_AUDIENCE: &str = "codex-app-server";
const AGENT_IDENTITY_JWT_ISSUER: &str = "https://chatgpt.com/codex-backend/agent-identity";

/// Stored key material for a registered agent identity.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -50,6 +61,22 @@ pub struct GeneratedAgentKeyMaterial {
    pub public_key_ssh: String,
}

/// Claims carried by an Agent Identity JWT.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
pub struct AgentIdentityJwtClaims {
    pub iss: String,
    pub aud: String,
    pub iat: usize,
    pub exp: usize,
    pub agent_runtime_id: String,
    pub agent_private_key: String,
    pub account_id: String,
    pub chatgpt_user_id: String,
    pub email: String,
    pub plan_type: AccountPlanType,
    pub chatgpt_account_is_fedramp: bool,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
struct AgentAssertionEnvelope {
    agent_runtime_id: String,
@@ -98,6 +125,65 @@ pub fn authorization_header_for_agent_task(
    Ok(format!("AgentAssertion {serialized_assertion}"))
}

pub async fn fetch_agent_identity_jwks(
    client: &reqwest::Client,
    chatgpt_base_url: &str,
) -> Result<JwkSet> {
    let response = client
        .get(agent_identity_jwks_url(chatgpt_base_url))
        .timeout(AGENT_IDENTITY_JWKS_TIMEOUT)
        .send()
        .await
        .context("failed to request agent identity JWKS")?
        .error_for_status()
        .context("agent identity JWKS endpoint returned an error")?;

    response
        .json()
        .await
        .context("failed to decode agent identity JWKS")
}

pub fn decode_agent_identity_jwt(
    jwt: &str,
    jwks: Option<&JwkSet>,
) -> Result<AgentIdentityJwtClaims> {
    let Some(jwks) = jwks else {
        return decode_agent_identity_jwt_payload(jwt);
    };

    let header = decode_header(jwt).context("failed to decode agent identity JWT header")?;
    let kid = header
        .kid
        .context("agent identity JWT header does not include a kid")?;
    let jwk = jwks
        .find(&kid)
        .with_context(|| format!("agent identity JWT kid {kid} is not trusted"))?;
    let decoding_key = DecodingKey::from_jwk(jwk).context("failed to build JWT decoding key")?;
    let mut validation = Validation::new(Algorithm::RS256);
    validation.set_audience(&[AGENT_IDENTITY_JWT_AUDIENCE]);
    validation.set_issuer(&[AGENT_IDENTITY_JWT_ISSUER]);
    validation.required_spec_claims.insert("iss".to_string());
    validation.required_spec_claims.insert("aud".to_string());
    decode::<AgentIdentityJwtClaims>(jwt, &decoding_key, &validation)
        .map(|data| data.claims)
        .context("failed to verify agent identity JWT")
}

fn decode_agent_identity_jwt_payload<T: DeserializeOwned>(jwt: &str) -> Result<T> {
    let mut parts = jwt.split('.');
    let (_header_b64, payload_b64, _sig_b64) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => anyhow::bail!("invalid agent identity JWT format"),
    };
    anyhow::ensure!(parts.next().is_none(), "invalid agent identity JWT format");

    let payload_bytes = URL_SAFE_NO_PAD
        .decode(payload_b64)
        .context("agent identity JWT payload is not valid base64url")?;
    serde_json::from_slice(&payload_bytes).context("agent identity JWT payload is not valid JSON")
}

pub fn sign_task_registration_payload(
    key: AgentIdentityKey<'_>,
    timestamp: &str,
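A hedged call-site sketch for the two decode paths above; `client`, `chatgpt_base_url`, and `jwt` are assumed to be in scope:

```rust
// Verified decode when the JWKS fetch succeeds, unverified payload decode otherwise.
let jwks = fetch_agent_identity_jwks(&client, chatgpt_base_url).await.ok();
let claims = decode_agent_identity_jwt(&jwt, jwks.as_ref())?;
println!("agent runtime: {}", claims.agent_runtime_id);
```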
@@ -117,19 +203,27 @@ pub async fn register_agent_task(
        signature: sign_task_registration_payload(key, &timestamp)?,
        timestamp,
    };
    let url = agent_task_registration_url(chatgpt_base_url, key.agent_runtime_id);

    let response = client
        .post(agent_task_registration_url(
            chatgpt_base_url,
            key.agent_runtime_id,
        ))
        .post(url)
        .timeout(AGENT_TASK_REGISTRATION_TIMEOUT)
        .json(&request)
        .send()
        .await
        .context("failed to register agent task")?
        .error_for_status()
        .context("failed to register agent task")?
        .context("failed to register agent task")?;
    if !response.status().is_success() {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        let body = if body.len() > 512 {
            format!("{}...", body.chars().take(512).collect::<String>())
        } else {
            body
        };
        anyhow::bail!("failed to register agent task with status {status}: {body}");
    }

    let response = response
        .json()
        .await
        .context("failed to decode agent task registration response")?;
@@ -217,6 +311,15 @@ pub fn agent_identity_biscuit_url(chatgpt_base_url: &str) -> String {
    format!("{trimmed}/authenticate_app_v2")
}

pub fn agent_identity_jwks_url(chatgpt_base_url: &str) -> String {
    let trimmed = chatgpt_base_url.trim_end_matches('/');
    if trimmed.contains("/backend-api") {
        format!("{trimmed}/wham/agent-identities/jwks")
    } else {
        format!("{trimmed}/agent-identities/jwks")
    }
}

pub fn agent_identity_request_id() -> Result<String> {
    let mut request_id_bytes = [0u8; 16];
    OsRng
@@ -228,29 +331,6 @@
    ))
}

pub fn normalize_chatgpt_base_url(chatgpt_base_url: &str) -> String {
    let mut base_url = chatgpt_base_url.trim_end_matches('/').to_string();
    for suffix in [
        "/wham/remote/control/server/enroll",
        "/wham/remote/control/server",
    ] {
        if let Some(stripped) = base_url.strip_suffix(suffix) {
            base_url = stripped.to_string();
            break;
        }
    }
    if let Some(stripped) = base_url.strip_suffix("/codex") {
        base_url = stripped.to_string();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    base_url
}
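Two worked examples of the normalization above; the first mirrors the test removed later in this diff, the second follows from the suffix-stripping rules:

```rust
assert_eq!(
    normalize_chatgpt_base_url("https://chatgpt.com/codex"),
    "https://chatgpt.com/backend-api"
);
// Enrollment suffixes are stripped; non-ChatGPT hosts get no /backend-api appended.
assert_eq!(
    normalize_chatgpt_base_url("https://example.com/wham/remote/control/server/enroll"),
    "https://example.com"
);
```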
|
||||
|
||||
pub fn build_abom(session_source: SessionSource) -> AgentBillOfMaterials {
|
||||
AgentBillOfMaterials {
|
||||
agent_version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
@@ -260,6 +340,7 @@ pub fn build_abom(session_source: SessionSource) -> AgentBillOfMaterials {
|
||||
| SessionSource::Exec
|
||||
| SessionSource::Mcp
|
||||
| SessionSource::Custom(_)
|
||||
| SessionSource::Internal(_)
|
||||
| SessionSource::SubAgent(_)
|
||||
| SessionSource::Unknown => "codex-cli".to_string(),
|
||||
},
|
||||
@@ -323,6 +404,8 @@ mod tests {
|
||||
use base64::Engine as _;
|
||||
use ed25519_dalek::Signature;
|
||||
use ed25519_dalek::Verifier as _;
|
||||
use jsonwebtoken::EncodingKey;
|
||||
use jsonwebtoken::Header;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -405,10 +488,227 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_chatgpt_base_url_strips_codex_before_backend_api() {
|
||||
fn decode_agent_identity_jwt_reads_claims() {
|
||||
let jwt = jwt_with_payload(serde_json::json!({
|
||||
"iss": AGENT_IDENTITY_JWT_ISSUER,
|
||||
"aud": AGENT_IDENTITY_JWT_AUDIENCE,
|
||||
"iat": 1_700_000_000usize,
|
||||
"exp": 4_000_000_000usize,
|
||||
"agent_runtime_id": "agent-runtime-id",
|
||||
"agent_private_key": "private-key",
|
||||
"account_id": "account-id",
|
||||
"chatgpt_user_id": "user-id",
|
||||
"email": "user@example.com",
|
||||
"plan_type": "pro",
|
||||
"chatgpt_account_is_fedramp": false,
|
||||
}));
|
||||
|
||||
let claims = decode_agent_identity_jwt(&jwt, /*jwks*/ None).expect("JWT should decode");
|
||||
|
||||
assert_eq!(
|
||||
normalize_chatgpt_base_url("https://chatgpt.com/codex"),
|
||||
"https://chatgpt.com/backend-api"
|
||||
claims,
|
||||
AgentIdentityJwtClaims {
|
||||
iss: AGENT_IDENTITY_JWT_ISSUER.to_string(),
|
||||
aud: AGENT_IDENTITY_JWT_AUDIENCE.to_string(),
|
||||
iat: 1_700_000_000,
|
||||
exp: 4_000_000_000,
|
||||
agent_runtime_id: "agent-runtime-id".to_string(),
|
||||
agent_private_key: "private-key".to_string(),
|
||||
account_id: "account-id".to_string(),
|
||||
chatgpt_user_id: "user-id".to_string(),
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
chatgpt_account_is_fedramp: false,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn decode_agent_identity_jwt_verifies_when_jwks_is_present() {
|
||||
let jwks = test_jwks("test-key");
|
||||
let claims = AgentIdentityJwtClaims {
|
||||
iss: AGENT_IDENTITY_JWT_ISSUER.to_string(),
|
||||
aud: AGENT_IDENTITY_JWT_AUDIENCE.to_string(),
|
||||
iat: 1_700_000_000,
|
||||
exp: 4_000_000_000,
|
||||
agent_runtime_id: "agent-runtime-id".to_string(),
|
||||
agent_private_key: "private-key".to_string(),
|
||||
account_id: "account-id".to_string(),
|
||||
chatgpt_user_id: "user-id".to_string(),
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
chatgpt_account_is_fedramp: false,
|
||||
};
|
||||
let jwt = jsonwebtoken::encode(
|
||||
&test_jwt_header("test-key"),
|
||||
&serde_json::json!({
|
||||
"iss": claims.iss,
|
||||
"aud": claims.aud,
|
||||
"iat": claims.iat,
|
||||
"exp": claims.exp,
|
||||
"agent_runtime_id": claims.agent_runtime_id,
|
||||
"agent_private_key": claims.agent_private_key,
|
||||
"account_id": claims.account_id,
|
||||
"chatgpt_user_id": claims.chatgpt_user_id,
|
||||
"email": claims.email,
|
||||
"plan_type": "pro",
|
||||
"chatgpt_account_is_fedramp": claims.chatgpt_account_is_fedramp,
|
||||
}),
|
||||
&test_rsa_encoding_key(),
|
||||
)
|
||||
.expect("JWT should encode");
|
||||
|
||||
let expected_claims = AgentIdentityJwtClaims {
|
||||
iss: AGENT_IDENTITY_JWT_ISSUER.to_string(),
|
||||
aud: AGENT_IDENTITY_JWT_AUDIENCE.to_string(),
|
||||
iat: 1_700_000_000,
|
||||
exp: 4_000_000_000,
|
||||
agent_runtime_id: "agent-runtime-id".to_string(),
|
||||
agent_private_key: "private-key".to_string(),
|
||||
account_id: "account-id".to_string(),
|
||||
chatgpt_user_id: "user-id".to_string(),
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
chatgpt_account_is_fedramp: false,
|
||||
};
|
||||
assert_eq!(
|
||||
decode_agent_identity_jwt(&jwt, Some(&jwks)).expect("JWT should verify"),
|
||||
expected_claims
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn decode_agent_identity_jwt_rejects_untrusted_kid() {
|
||||
let jwks = test_jwks("other-key");
|
||||
|
||||
let jwt = jsonwebtoken::encode(
|
||||
&test_jwt_header("test-key"),
|
||||
&serde_json::json!({
|
||||
"iss": AGENT_IDENTITY_JWT_ISSUER,
|
||||
"aud": AGENT_IDENTITY_JWT_AUDIENCE,
|
||||
"iat": 1_700_000_000,
|
||||
"exp": 4_000_000_000usize,
|
||||
"agent_runtime_id": "agent-runtime-id",
|
||||
"agent_private_key": "private-key",
|
||||
"account_id": "account-id",
|
||||
"chatgpt_user_id": "user-id",
|
||||
"email": "user@example.com",
|
||||
"plan_type": "pro",
|
||||
"chatgpt_account_is_fedramp": false,
|
||||
}),
|
||||
&test_rsa_encoding_key(),
|
||||
)
|
||||
.expect("JWT should encode");
|
||||
|
||||
decode_agent_identity_jwt(&jwt, Some(&jwks)).expect_err("JWT should not verify");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn decode_agent_identity_jwt_requires_issuer_and_audience() {
|
||||
let jwks = test_jwks("test-key");
|
||||
let jwt = jsonwebtoken::encode(
|
||||
&test_jwt_header("test-key"),
|
||||
&serde_json::json!({
|
||||
"iat": 1_700_000_000,
|
||||
"exp": 4_000_000_000usize,
|
||||
"agent_runtime_id": "agent-runtime-id",
|
||||
"agent_private_key": "private-key",
|
||||
"account_id": "account-id",
|
||||
"chatgpt_user_id": "user-id",
|
||||
"email": "user@example.com",
|
||||
"plan_type": "pro",
|
||||
"chatgpt_account_is_fedramp": false,
|
||||
}),
|
||||
&test_rsa_encoding_key(),
|
||||
)
|
||||
.expect("JWT should encode");
|
||||
|
||||
decode_agent_identity_jwt(&jwt, Some(&jwks)).expect_err("JWT should not verify");
|
||||
}
|
||||
|
||||
fn test_jwt_header(kid: &str) -> Header {
|
||||
        let mut header = Header::new(Algorithm::RS256);
        header.kid = Some(kid.to_string());
        header
    }

    fn test_rsa_encoding_key() -> EncodingKey {
        EncodingKey::from_rsa_pem(
            br#"-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDWpAXYypOsYAwO
bvBduMk/mxaoYDze0AZSzaSzLuIlcsl2EKDgC3AabhIWXh/qTGEJLOU3VB1e5mO9
FPbBlmIZSL3FQTbyt/hYutPFKfCou5PLmScw/TzILS3/RhT8UY9kxxZvXiEbTki9
mvxRuZFpVqDFJHwfitIjKZGhXDCYVKurPTrxetYZJg0h8sQBLKjkZ0BqqaTUkAsg
0eBgZAlXEzG3By8PGhUqYLt6W1Q3KYw0FmGy/gTyzH1g0ukGgSJvOd8SkNT8MbOs
zl5kKxDNqpuEE6UZ3jbuJ+5382d31w+rOAJRzbf7QVdI9+luCSwJcDACYPQ4WNBa
uCpV0ovpAgMBAAECggEAVu84LwZdqYN9XpswX8VoPYrjMm9IODapWQBRpQFoNyK2
1ksF3bjEPvA2Azk8U/l7k+vLKw22l6lY3EyRZPcz5GnB8xLm3ogE3mtNOp4yCyVu
RxhQ91aaN7mU17/a4BdorLi2LYVCg3zBmYociD1Q2AluNGsCmwPu+K7tfR2J0Sg8
NjqiTbDG1XDpR/icwgC9t6vh8lZpCHDhF4tbQfLLVLeA/OdcuzXDyMCXbmdVIdBQ
rm4aIFmr2e1/2ctTbCg85S6AGFTH+pSLjrwTzyvf+F6NW5uNjLQAQLFj+EznBDxj
Xdx90cySrjsKK6PVWQF4RiTvkSW8eWL7R6B2FZbGwQKBgQDuVQRj72hWloR7mbEL
aUEEv3pIXTMXWEsoMBNczos/1L1RnAN1AI44TurznasPZAWvQj+kVbLDR+TAeZrL
iA8HIWswQUI18hFmgKzSkwIXGtubcKVrgsKeS4lMDKCM/Ef6WAYdeq6ronoY5lCN
YrJFmGp81W5zcV7lyiycgbSiGwKBgQDmjWYf6pZjrK7Z+OJ3X1AZfi2vss15SCvL
3fPgzIDbViztpGyQhc3DQZIsBNIu0xZp/veGce9TEeTds2ro9NfdJFeou8+fC7Pq
sOsM3amGFFi+ZW/9BWyjZEM88bgWWAjqLHbpfHDxjAf5CSxddqxgHlbP0Ytyb1Vg
gmPDn9YKSwKBgQDbTi3hC35WFuDHn0/zcSHcDZmnFuOZeqyFyV83yfMGhGrEuqvP
sPgtRikajJ3IZsB4WZyYSidZXEFY/0z6NjOl2xF38MTNQPbT/FmK1q1Yt2UWrlv5
BvSwlk87RG9D7C0LZo4R+D7cPoDdgqjiwMvMEIkEX5zn641oI1ZTmWKuuwKBgQCD
KF+3unnRvHRAVoFnTZbA2fJdqMeRvogD04GhGlYX8V9f1hFY6nXTJaNlXVzA/J8c
r8ra9kgjJuPfZ+ljG58OFFW2DRohLcQtuHYPfK6rMzoFHqnl9EcIcMp7ijuionR3
29HOJFgQYgxLFXfit9d6WugiE+BTupiEbckZif13HwKBgE/lAlkVHP6YahOO2Ljc
J1bwkqKZTB5dHolX9A58e/xXnfZ5P8f3Z83+Izap3FwqQulk7b1WO1MQcHuVg2NN
5da0D4h2rYOXnbYIg0BVu4spQbaM6ewsp66b8+MzLOBvj8SzWdt1Oyw0q/MRyQAR
8U4M2TSWCKUY/A6sT4W8+mT9
-----END PRIVATE KEY-----"#,
        )
        .expect("test RSA key should parse")
    }

    fn test_jwks(kid: &str) -> jsonwebtoken::jwk::JwkSet {
        serde_json::from_value(serde_json::json!({
            "keys": [{
                "kty": "RSA",
                "kid": kid,
                "use": "sig",
                "alg": "RS256",
                "n": "1qQF2MqTrGAMDm7wXbjJP5sWqGA83tAGUs2ksy7iJXLJdhCg4AtwGm4SFl4f6kxhCSzlN1QdXuZjvRT2wZZiGUi9xUE28rf4WLrTxSnwqLuTy5knMP08yC0t_0YU_FGPZMcWb14hG05IvZr8UbmRaVagxSR8H4rSIymRoVwwmFSrqz068XrWGSYNIfLEASyo5GdAaqmk1JALINHgYGQJVxMxtwcvDxoVKmC7eltUNymMNBZhsv4E8sx9YNLpBoEibznfEpDU_DGzrM5eZCsQzaqbhBOlGd427ifud_Nnd9cPqzgCUc23-0FXSPfpbgksCXAwAmD0OFjQWrgqVdKL6Q",
                "e": "AQAB",
            }]
        }))
        .expect("test JWKS should parse")
    }

    #[test]
    fn agent_identity_jwks_url_uses_backend_api_base_url() {
        assert_eq!(
            agent_identity_jwks_url("https://chatgpt.com/backend-api"),
            "https://chatgpt.com/backend-api/wham/agent-identities/jwks"
        );
        assert_eq!(
            agent_identity_jwks_url("https://chatgpt.com/backend-api/"),
            "https://chatgpt.com/backend-api/wham/agent-identities/jwks"
        );
    }

    #[test]
    fn agent_identity_jwks_url_uses_codex_api_base_url() {
        assert_eq!(
            agent_identity_jwks_url("http://localhost:8080/api/codex"),
            "http://localhost:8080/api/codex/agent-identities/jwks"
        );
        assert_eq!(
            agent_identity_jwks_url("http://localhost:8080/api/codex/"),
            "http://localhost:8080/api/codex/agent-identities/jwks"
        );
    }

    fn jwt_with_payload(payload: serde_json::Value) -> String {
        let encode = |bytes: &[u8]| URL_SAFE_NO_PAD.encode(bytes);
        let header_b64 = encode(br#"{"alg":"none","typ":"JWT"}"#);
        let payload_b64 = encode(&serde_json::to_vec(&payload).expect("payload should serialize"));
        let signature_b64 = encode(b"sig");
        format!("{header_b64}.{payload_b64}.{signature_b64}")
    }
}

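For context on how helpers like `test_jwks` and `jwt_with_payload` are typically consumed, here is a minimal sketch (not part of the diff) that verifies an RS256 token against a JWKS using the `jsonwebtoken` crate; the `Claims` struct and error handling are hypothetical.

```rust
use jsonwebtoken::jwk::JwkSet;
use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
use serde::Deserialize;

// Hypothetical claims; a real test would match whatever payload it minted.
#[derive(Deserialize)]
struct Claims {
    sub: String,
    exp: usize,
}

fn verify(token: &str, jwks: &JwkSet, kid: &str) -> Result<Claims, Box<dyn std::error::Error>> {
    // Look the signing key up by `kid`, as any JWKS consumer would.
    let jwk = jwks.find(kid).ok_or("no key with matching kid")?;
    let key = DecodingKey::from_jwk(jwk)?;
    let validation = Validation::new(Algorithm::RS256);
    Ok(decode::<Claims>(token, &key, &validation)?.claims)
}
```
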
@@ -161,7 +161,7 @@ fn sample_thread_start_response(thread_id: &str, ephemeral: bool, model: &str) -
}

fn sample_permission_profile() -> AppServerPermissionProfile {
CorePermissionProfile::from_legacy_sandbox_policy(&SandboxPolicy::DangerFullAccess).into()
CorePermissionProfile::Disabled.into()
}

fn sample_app_server_client_metadata() -> CodexAppServerClientMetadata {
@@ -315,7 +315,10 @@ fn sample_turn_resolved_config(turn_id: &str) -> TurnResolvedConfigFact {
session_source: SessionSource::Exec,
model: "gpt-5".to_string(),
model_provider: "openai".to_string(),
sandbox_policy: SandboxPolicy::new_read_only_policy(),
permission_profile: CorePermissionProfile::from_legacy_sandbox_policy(
&SandboxPolicy::new_read_only_policy(),
),
permission_profile_cwd: PathBuf::from("/tmp"),
reasoning_effort: None,
reasoning_summary: None,
service_tier: None,

@@ -13,12 +13,12 @@ use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::ServiceTier;
use codex_protocol::models::PermissionProfile;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::HookEventName;
use codex_protocol::protocol::HookRunStatus;
use codex_protocol::protocol::HookSource;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SkillScope;
use codex_protocol::protocol::SubAgentSource;
@@ -62,7 +62,8 @@ pub struct TurnResolvedConfigFact {
pub session_source: SessionSource,
pub model: String,
pub model_provider: String,
pub sandbox_policy: SandboxPolicy,
pub permission_profile: PermissionProfile,
pub permission_profile_cwd: PathBuf,
pub reasoning_effort: Option<ReasoningEffort>,
pub reasoning_summary: Option<ReasoningSummary>,
pub service_tier: Option<ServiceTier>,

|
||||
use codex_protocol::config_types::ModeKind;
|
||||
use codex_protocol::config_types::Personality;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::models::PermissionProfile;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::SkillScope;
|
||||
use codex_protocol::protocol::TokenUsage;
|
||||
@@ -106,6 +106,7 @@ impl ThreadMetadataState {
|
||||
| SessionSource::Exec
|
||||
| SessionSource::Mcp
|
||||
| SessionSource::Custom(_)
|
||||
| SessionSource::Internal(_)
|
||||
| SessionSource::Unknown => (None, None),
|
||||
};
|
||||
Self {
|
||||
@@ -884,7 +885,8 @@ fn codex_turn_event_params(
|
||||
session_source: _session_source,
|
||||
model,
|
||||
model_provider,
|
||||
sandbox_policy,
|
||||
permission_profile,
|
||||
permission_profile_cwd,
|
||||
reasoning_effort,
|
||||
reasoning_summary,
|
||||
service_tier,
|
||||
@@ -909,7 +911,10 @@ fn codex_turn_event_params(
|
||||
parent_thread_id: thread_metadata.parent_thread_id.clone(),
|
||||
model: Some(model),
|
||||
model_provider,
|
||||
sandbox_policy: Some(sandbox_policy_mode(&sandbox_policy)),
|
||||
sandbox_policy: Some(sandbox_policy_mode(
|
||||
&permission_profile,
|
||||
permission_profile_cwd.as_path(),
|
||||
)),
|
||||
reasoning_effort: reasoning_effort.map(|value| value.to_string()),
|
||||
reasoning_summary: reasoning_summary_mode(reasoning_summary),
|
||||
service_tier: service_tier
|
||||
@@ -954,12 +959,27 @@ fn codex_turn_event_params(
|
||||
}
|
||||
}
|
||||
|
||||
fn sandbox_policy_mode(sandbox_policy: &SandboxPolicy) -> &'static str {
|
||||
match sandbox_policy {
|
||||
SandboxPolicy::DangerFullAccess => "full_access",
|
||||
SandboxPolicy::ReadOnly { .. } => "read_only",
|
||||
SandboxPolicy::WorkspaceWrite { .. } => "workspace_write",
|
||||
SandboxPolicy::ExternalSandbox { .. } => "external_sandbox",
|
||||
fn sandbox_policy_mode(permission_profile: &PermissionProfile, cwd: &Path) -> &'static str {
|
||||
match permission_profile {
|
||||
PermissionProfile::Disabled => "full_access",
|
||||
PermissionProfile::External { .. } => "external_sandbox",
|
||||
PermissionProfile::Managed { .. } => {
|
||||
let file_system_policy = permission_profile.file_system_sandbox_policy();
|
||||
if file_system_policy.has_full_disk_write_access() {
|
||||
if permission_profile.network_sandbox_policy().is_enabled() {
|
||||
"full_access"
|
||||
} else {
|
||||
"external_sandbox"
|
||||
}
|
||||
} else if file_system_policy
|
||||
.get_writable_roots_with_cwd(cwd)
|
||||
.is_empty()
|
||||
{
|
||||
"read_only"
|
||||
} else {
|
||||
"workspace_write"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
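To make the new mapping easier to scan, here is a hypothetical, simplified restatement of `sandbox_policy_mode` with stand-in fields in place of the real `PermissionProfile` accessors; the reported strings match the hunk above.

```rust
// Stand-in for the real PermissionProfile; the fields summarize what the
// accessors used in the hunk above would report.
enum Profile {
    Disabled,
    External,
    Managed {
        full_disk_write: bool,
        network_enabled: bool,
        writable_roots: usize,
    },
}

fn mode(profile: &Profile) -> &'static str {
    match profile {
        Profile::Disabled => "full_access",
        Profile::External => "external_sandbox",
        // Full disk write plus an open network is effectively unsandboxed...
        Profile::Managed { full_disk_write: true, network_enabled: true, .. } => "full_access",
        // ...while full disk write behind a restricted network reports as external.
        Profile::Managed { full_disk_write: true, .. } => "external_sandbox",
        Profile::Managed { writable_roots: 0, .. } => "read_only",
        Profile::Managed { .. } => "workspace_write",
    }
}
```
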
@@ -1050,3 +1070,25 @@ pub(crate) fn normalize_path_for_skill_id(
_ => resolved_path.to_string_lossy().replace('\\', "/"),
}
}

#[cfg(test)]
mod tests {
use super::*;
use codex_protocol::models::SandboxEnforcement;
use codex_protocol::permissions::FileSystemSandboxPolicy;
use codex_protocol::permissions::NetworkSandboxPolicy;

#[test]
fn managed_full_disk_with_restricted_network_reports_external_sandbox() {
let permission_profile = PermissionProfile::from_runtime_permissions_with_enforcement(
SandboxEnforcement::Managed,
&FileSystemSandboxPolicy::unrestricted(),
NetworkSandboxPolicy::Restricted,
);

assert_eq!(
sandbox_policy_mode(&permission_profile, Path::new("/")),
"external_sandbox"
);
}
}

@@ -41,12 +41,12 @@ use codex_app_server_protocol::Result as JsonRpcResult;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_arg0::Arg0DispatchPaths;
use codex_config::CloudRequirementsLoader;
use codex_config::LoaderOverrides;
use codex_config::NoopThreadConfigLoader;
use codex_config::RemoteThreadConfigLoader;
use codex_config::ThreadConfigLoader;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
pub use codex_exec_server::EnvironmentManager;
pub use codex_exec_server::EnvironmentManagerArgs;
pub use codex_exec_server::ExecServerRuntimePaths;
@@ -1396,6 +1396,55 @@ mod tests {
client.shutdown().await.expect("shutdown should complete");
}

#[tokio::test]
async fn remote_typed_request_accepts_large_single_frame_response() {
let padding = "x".repeat((17 << 20) + 1024);
let websocket_url = start_test_remote_server(move |mut websocket| async move {
expect_remote_initialize(&mut websocket).await;
let JSONRPCMessage::Request(request) = read_websocket_message(&mut websocket).await
else {
panic!("expected account/read request");
};
assert_eq!(request.method, "account/read");
write_websocket_message(
&mut websocket,
JSONRPCMessage::Response(JSONRPCResponse {
id: request.id,
result: serde_json::json!({
"account": null,
"requiresOpenaiAuth": false,
"padding": padding,
}),
}),
)
.await;
websocket.close(None).await.expect("close should succeed");
})
.await;
let client = RemoteAppServerClient::connect(test_remote_connect_args(websocket_url))
.await
.expect("remote client should connect");

let response: GetAccountResponse = client
.request_typed(ClientRequest::GetAccount {
request_id: RequestId::Integer(1),
params: codex_app_server_protocol::GetAccountParams {
refresh_token: false,
},
})
.await
.expect("large typed request should succeed");
assert_eq!(
response,
GetAccountResponse {
account: None,
requires_openai_auth: false,
}
);

client.shutdown().await.expect("shutdown should complete");
}

|
||||
async fn remote_connect_includes_auth_header_when_configured() {
|
||||
let auth_token = "remote-bearer-token".to_string();
|
||||
|
||||
@@ -45,16 +45,18 @@ use tokio::sync::oneshot;
|
||||
use tokio::time::timeout;
|
||||
use tokio_tungstenite::MaybeTlsStream;
|
||||
use tokio_tungstenite::WebSocketStream;
|
||||
use tokio_tungstenite::connect_async;
|
||||
use tokio_tungstenite::connect_async_with_config;
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
use tokio_tungstenite::tungstenite::client::IntoClientRequest;
|
||||
use tokio_tungstenite::tungstenite::http::HeaderValue;
|
||||
use tokio_tungstenite::tungstenite::http::header::AUTHORIZATION;
|
||||
use tokio_tungstenite::tungstenite::protocol::WebSocketConfig;
|
||||
use tracing::warn;
|
||||
use url::Url;
|
||||
|
||||
const CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const INITIALIZE_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const REMOTE_APP_SERVER_MAX_WEBSOCKET_MESSAGE_SIZE: usize = 128 << 20;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RemoteAppServerConnectArgs {
|
||||
@@ -170,20 +172,32 @@ impl RemoteAppServerClient {
|
||||
request.headers_mut().insert(AUTHORIZATION, header_value);
|
||||
}
|
||||
ensure_rustls_crypto_provider();
|
||||
let stream = timeout(CONNECT_TIMEOUT, connect_async(request))
|
||||
.await
|
||||
.map_err(|_| {
|
||||
IoError::new(
|
||||
ErrorKind::TimedOut,
|
||||
format!("timed out connecting to remote app server at `{websocket_url}`"),
|
||||
)
|
||||
})?
|
||||
.map(|(stream, _response)| stream)
|
||||
.map_err(|err| {
|
||||
IoError::other(format!(
|
||||
"failed to connect to remote app server at `{websocket_url}`: {err}"
|
||||
))
|
||||
})?;
|
||||
// Remote resume responses can legitimately carry large thread histories.
|
||||
// Keep a bounded cap, but raise it above tungstenite's 16 MiB frame default.
|
||||
let websocket_config = WebSocketConfig::default()
|
||||
.max_frame_size(Some(REMOTE_APP_SERVER_MAX_WEBSOCKET_MESSAGE_SIZE))
|
||||
.max_message_size(Some(REMOTE_APP_SERVER_MAX_WEBSOCKET_MESSAGE_SIZE));
|
||||
let stream = timeout(
|
||||
CONNECT_TIMEOUT,
|
||||
connect_async_with_config(
|
||||
request,
|
||||
Some(websocket_config),
|
||||
/*disable_nagle*/ false,
|
||||
),
|
||||
)
|
||||
.await
|
||||
.map_err(|_| {
|
||||
IoError::new(
|
||||
ErrorKind::TimedOut,
|
||||
format!("timed out connecting to remote app server at `{websocket_url}`"),
|
||||
)
|
||||
})?
|
||||
.map(|(stream, _response)| stream)
|
||||
.map_err(|err| {
|
||||
IoError::other(format!(
|
||||
"failed to connect to remote app server at `{websocket_url}`: {err}"
|
||||
))
|
||||
})?;
|
||||
let mut stream = stream;
|
||||
let pending_events = initialize_remote_connection(
|
||||
&mut stream,
|
||||
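As a standalone sketch of the connection change above (placeholder URL; the size comes from the diff's constant), assuming tokio-tungstenite's builder-style `WebSocketConfig`:

```rust
use tokio_tungstenite::connect_async_with_config;
use tokio_tungstenite::tungstenite::protocol::WebSocketConfig;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Raise both caps past tungstenite's 16 MiB frame default, but keep them bounded.
    let max: usize = 128 << 20;
    let config = WebSocketConfig::default()
        .max_frame_size(Some(max))
        .max_message_size(Some(max));
    // The third argument disables Nagle's algorithm when `true`; the diff passes `false`.
    let (stream, _response) =
        connect_async_with_config("ws://127.0.0.1:9000", Some(config), false).await?;
    drop(stream);
    Ok(())
}
```
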
@@ -198,6 +212,7 @@ impl RemoteAppServerClient {
let worker_handle = tokio::spawn(async move {
let mut pending_requests =
HashMap::<RequestId, oneshot::Sender<IoResult<RequestResult>>>::new();
let mut worker_exit_error: Option<(ErrorKind, String)> = None;
loop {
tokio::select! {
command = command_rx.recv() => {
@@ -224,17 +239,19 @@ impl RemoteAppServerClient {
.await
{
let err_message = err.to_string();
let message = format!(
"remote app server at `{websocket_url}` write failed: {err_message}"
);
if let Some(response_tx) = pending_requests.remove(&request_id) {
let _ = response_tx.send(Err(err));
}
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` write failed: {err_message}"
),
message: message.clone(),
},
);
worker_exit_error = Some((ErrorKind::BrokenPipe, message));
break;
}
}
@@ -351,28 +368,34 @@ impl RemoteAppServerClient {
.await
{
let err_message = reject_err.to_string();
let message = format!(
"remote app server at `{websocket_url}` write failed: {err_message}"
);
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` write failed: {err_message}"
),
message: message.clone(),
},
);
worker_exit_error =
Some((ErrorKind::BrokenPipe, message));
break;
}
}
}
}
Err(err) => {
let message = format!(
"remote app server at `{websocket_url}` sent invalid JSON-RPC: {err}"
);
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` sent invalid JSON-RPC: {err}"
),
message: message.clone(),
},
);
worker_exit_error =
Some((ErrorKind::InvalidData, message));
break;
}
}
@@ -383,14 +406,19 @@ impl RemoteAppServerClient {
.map(|frame| frame.reason.to_string())
.filter(|reason| !reason.is_empty())
.unwrap_or_else(|| "connection closed".to_string());
let message = format!(
"remote app server at `{websocket_url}` disconnected: {reason}"
);
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` disconnected: {reason}"
),
message: message.clone(),
},
);
worker_exit_error = Some((
ErrorKind::ConnectionAborted,
message,
));
break;
}
Some(Ok(Message::Binary(_)))
@@ -398,25 +426,29 @@ impl RemoteAppServerClient {
| Some(Ok(Message::Pong(_)))
| Some(Ok(Message::Frame(_))) => {}
Some(Err(err)) => {
let message = format!(
"remote app server at `{websocket_url}` transport failed: {err}"
);
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` transport failed: {err}"
),
message: message.clone(),
},
);
worker_exit_error = Some((ErrorKind::InvalidData, message));
break;
}
None => {
let message = format!(
"remote app server at `{websocket_url}` closed the connection"
);
let _ = deliver_event(
&event_tx,
AppServerEvent::Disconnected {
message: format!(
"remote app server at `{websocket_url}` closed the connection"
),
message: message.clone(),
},
);
worker_exit_error = Some((ErrorKind::UnexpectedEof, message));
break;
}
}
@@ -424,12 +456,14 @@ impl RemoteAppServerClient {
}
}

let err = IoError::new(
ErrorKind::BrokenPipe,
"remote app-server worker channel is closed",
);
let (err_kind, err_message) = worker_exit_error.unwrap_or_else(|| {
(
ErrorKind::BrokenPipe,
"remote app-server worker channel is closed".to_string(),
)
});
for (_, response_tx) in pending_requests {
let _ = response_tx.send(Err(IoError::new(err.kind(), err.to_string())));
let _ = response_tx.send(Err(IoError::new(err_kind, err_message.clone())));
}
});

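The worker changes above thread a single `worker_exit_error` through every exit path so that pending callers see why the transport died. A minimal sketch of that fan-out pattern, with simplified stand-in types and hypothetical names:

```rust
use std::collections::HashMap;
use std::io::{Error as IoError, ErrorKind};
use tokio::sync::oneshot;

// On worker exit, every still-pending request gets a clone of the same error,
// instead of a generic "channel closed" message.
fn fail_pending(
    pending: HashMap<u64, oneshot::Sender<Result<String, IoError>>>,
    exit: Option<(ErrorKind, String)>,
) {
    let (kind, message) = exit
        .unwrap_or_else(|| (ErrorKind::BrokenPipe, "worker channel is closed".to_string()));
    for (_, tx) in pending {
        // Receivers may already be dropped; ignore send failures.
        let _ = tx.send(Err(IoError::new(kind, message.clone())));
    }
}
```
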
@@ -218,17 +218,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Optional full permissions profile for this command.\n\nDefaults to the user's configured permissions when omitted. Cannot be combined with `sandboxPolicy`."
},
"processId": {
"description": "Optional client-supplied, connection-scoped process id.\n\nRequired for `tty`, `streamStdin`, `streamStdoutStderr`, and follow-up `command/exec/write`, `command/exec/resize`, and `command/exec/terminate` calls. When omitted, buffered execution gets an internal id that is not exposed to the client.",
"type": [
@@ -860,7 +849,8 @@
"CONFIG",
"SKILLS",
"PLUGINS",
"MCP_SERVER_CONFIG"
"MCP_SERVER_CONFIG",
"SESSIONS"
],
"type": "string"
},
@@ -1028,21 +1018,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -1415,38 +1390,6 @@
},
"type": "object"
},
"GhostCommit": {
"description": "Details of a ghost commit created from a repository state.",
"properties": {
"id": {
"type": "string"
},
"parent": {
"type": [
"string",
"null"
]
},
"preexisting_untracked_dirs": {
"items": {
"type": "string"
},
"type": "array"
},
"preexisting_untracked_files": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"id",
"preexisting_untracked_dirs",
"preexisting_untracked_files"
],
"type": "object"
},
"ImageDetail": {
"enum": [
"auto",
@@ -1837,15 +1780,20 @@
"MigrationDetails": {
"properties": {
"plugins": {
"default": [],
"items": {
"$ref": "#/definitions/PluginsMigration"
},
"type": "array"
},
"sessions": {
"default": [],
"items": {
"$ref": "#/definitions/SessionMigration"
},
"type": "array"
}
},
"required": [
"plugins"
],
"type": "object"
},
"ModeKind": {
@@ -2126,53 +2074,6 @@
],
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"RealtimeOutputModality": {
"enum": [
"text",
@@ -2339,12 +2240,6 @@
},
"type": "array"
},
"end_turn": {
"type": [
"boolean",
"null"
]
},
"id": {
"type": [
"string",
@@ -2744,26 +2639,6 @@
"title": "ImageGenerationCallResponseItem",
"type": "object"
},
{
"properties": {
"ghost_commit": {
"$ref": "#/definitions/GhostCommit"
},
"type": {
"enum": [
"ghost_snapshot"
],
"title": "GhostSnapshotResponseItemType",
"type": "string"
}
},
"required": [
"ghost_commit",
"type"
],
"title": "GhostSnapshotResponseItem",
"type": "object"
},
{
"properties": {
"encrypted_content": {
@@ -3056,16 +2931,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -3122,16 +2987,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -3173,6 +3028,27 @@
],
"type": "string"
},
"SessionMigration": {
"properties": {
"cwd": {
"type": "string"
},
"path": {
"type": "string"
},
"title": {
"type": [
"string",
"null"
]
}
},
"required": [
"cwd",
"path"
],
"type": "object"
},
"Settings": {
"description": "Settings for a collaboration mode.",
"properties": {
@@ -3411,17 +3287,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the forked thread. Cannot be combined with `sandbox`."
},
"sandbox": {
"anyOf": [
{
@@ -3458,6 +3323,15 @@
],
"type": "object"
},
"ThreadGoalStatus": {
"enum": [
"active",
"paused",
"budgetLimited",
"complete"
],
"type": "string"
},
"ThreadInjectItemsParams": {
"properties": {
"items": {
@@ -3818,17 +3692,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the resumed thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -4012,17 +3875,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for this thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -4234,17 +4086,6 @@
"outputSchema": {
"description": "Optional JSON Schema used to constrain the final assistant message for this turn."
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Override the full permissions profile for this turn and subsequent turns. Cannot be combined with `sandboxPolicy`."
},
"personality": {
"anyOf": [
{

@@ -392,21 +392,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -177,21 +177,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -177,21 +177,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -1199,21 +1199,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -3028,6 +3013,93 @@
],
"type": "object"
},
"ThreadGoal": {
"properties": {
"createdAt": {
"format": "int64",
"type": "integer"
},
"objective": {
"type": "string"
},
"status": {
"$ref": "#/definitions/ThreadGoalStatus"
},
"threadId": {
"type": "string"
},
"timeUsedSeconds": {
"format": "int64",
"type": "integer"
},
"tokenBudget": {
"format": "int64",
"type": [
"integer",
"null"
]
},
"tokensUsed": {
"format": "int64",
"type": "integer"
},
"updatedAt": {
"format": "int64",
"type": "integer"
}
},
"required": [
"createdAt",
"objective",
"status",
"threadId",
"timeUsedSeconds",
"tokensUsed",
"updatedAt"
],
"type": "object"
},
"ThreadGoalClearedNotification": {
"properties": {
"threadId": {
"type": "string"
}
},
"required": [
"threadId"
],
"type": "object"
},
"ThreadGoalStatus": {
"enum": [
"active",
"paused",
"budgetLimited",
"complete"
],
"type": "string"
},
"ThreadGoalUpdatedNotification": {
"properties": {
"goal": {
"$ref": "#/definitions/ThreadGoal"
},
"threadId": {
"type": "string"
},
"turnId": {
"type": [
"string",
"null"
]
}
},
"required": [
"goal",
"threadId"
],
"type": "object"
},
"ThreadId": {
"type": "string"
},
@@ -4727,6 +4799,46 @@
"title": "Thread/name/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/updated"
],
"title": "Thread/goal/updatedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/ThreadGoalUpdatedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/cleared"
],
"title": "Thread/goal/clearedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/ThreadGoalClearedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/clearedNotification",
"type": "object"
},
{
"properties": {
"method": {

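For readers wiring up clients, a hypothetical Rust mirror of the `ThreadGoal` payload added above; field names and casing come straight from the schema, while the serde derive and module placement are assumptions.

```rust
use serde::{Deserialize, Serialize};

// Mirrors the ThreadGoal definition in the schema hunk above (camelCase on the wire).
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ThreadGoal {
    created_at: i64,
    objective: String,
    status: ThreadGoalStatus,
    thread_id: String,
    time_used_seconds: i64,
    token_budget: Option<i64>,
    tokens_used: i64,
    updated_at: i64,
}

// "active" | "paused" | "budgetLimited" | "complete"
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum ThreadGoalStatus {
    Active,
    Paused,
    BudgetLimited,
    Complete,
}
```
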
@@ -731,21 +731,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -3806,6 +3806,46 @@
"title": "Thread/name/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/updated"
],
"title": "Thread/goal/updatedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/ThreadGoalUpdatedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/cleared"
],
"title": "Thread/goal/clearedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/ThreadGoalClearedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/clearedNotification",
"type": "object"
},
{
"properties": {
"method": {
@@ -6496,17 +6536,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Optional full permissions profile for this command.\n\nDefaults to the user's configured permissions when omitted. Cannot be combined with `sandboxPolicy`."
},
"processId": {
"description": "Optional client-supplied, connection-scoped process id.\n\nRequired for `tty`, `streamStdin`, `streamStdoutStderr`, and follow-up `command/exec/write`, `command/exec/resize`, and `command/exec/terminate` calls. When omitted, buffered execution gets an internal id that is not exposed to the client.",
"type": [
@@ -8297,7 +8326,8 @@
"CONFIG",
"SKILLS",
"PLUGINS",
"MCP_SERVER_CONFIG"
"MCP_SERVER_CONFIG",
"SESSIONS"
],
"type": "string"
},
@@ -8533,21 +8563,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -9158,38 +9173,6 @@
"title": "GetAccountResponse",
"type": "object"
},
"GhostCommit": {
"description": "Details of a ghost commit created from a repository state.",
"properties": {
"id": {
"type": "string"
},
"parent": {
"type": [
"string",
"null"
]
},
"preexisting_untracked_dirs": {
"items": {
"type": "string"
},
"type": "array"
},
"preexisting_untracked_files": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"id",
"preexisting_untracked_dirs",
"preexisting_untracked_files"
],
"type": "object"
},
"GitInfo": {
"properties": {
"branch": {
@@ -10790,15 +10773,20 @@
"MigrationDetails": {
"properties": {
"plugins": {
"default": [],
"items": {
"$ref": "#/definitions/v2/PluginsMigration"
},
"type": "array"
},
"sessions": {
"default": [],
"items": {
"$ref": "#/definitions/v2/SessionMigration"
},
"type": "array"
}
},
"required": [
"plugins"
],
"type": "object"
},
"ModeKind": {
@@ -12228,53 +12216,6 @@
"title": "RawResponseItemCompletedNotification",
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/v2/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"RealtimeConversationVersion": {
"enum": [
"v1",
@@ -12746,12 +12687,6 @@
},
"type": "array"
},
"end_turn": {
"type": [
"boolean",
"null"
]
},
"id": {
"type": [
"string",
@@ -13151,26 +13086,6 @@
"title": "ImageGenerationCallResponseItem",
"type": "object"
},
{
"properties": {
"ghost_commit": {
"$ref": "#/definitions/v2/GhostCommit"
},
"type": {
"enum": [
"ghost_snapshot"
],
"title": "GhostSnapshotResponseItemType",
"type": "string"
}
},
"required": [
"ghost_commit",
"type"
],
"title": "GhostSnapshotResponseItem",
"type": "object"
},
{
"properties": {
"encrypted_content": {
@@ -13483,16 +13398,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/v2/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -13549,16 +13454,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/v2/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -13656,6 +13551,27 @@
],
"type": "string"
},
"SessionMigration": {
"properties": {
"cwd": {
"type": "string"
},
"path": {
"type": "string"
},
"title": {
"type": [
"string",
"null"
]
}
},
"required": [
"cwd",
"path"
],
"type": "object"
},
"SessionSource": {
"oneOf": [
{
@@ -14543,17 +14459,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the forked thread. Cannot be combined with `sandbox`."
},
"sandbox": {
"anyOf": [
{
@@ -14622,18 +14527,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -14678,6 +14571,97 @@
"title": "ThreadForkResponse",
"type": "object"
},
"ThreadGoal": {
"properties": {
"createdAt": {
"format": "int64",
"type": "integer"
},
"objective": {
"type": "string"
},
"status": {
"$ref": "#/definitions/v2/ThreadGoalStatus"
},
"threadId": {
"type": "string"
},
"timeUsedSeconds": {
"format": "int64",
"type": "integer"
},
"tokenBudget": {
"format": "int64",
"type": [
"integer",
"null"
]
},
"tokensUsed": {
"format": "int64",
"type": "integer"
},
"updatedAt": {
"format": "int64",
"type": "integer"
}
},
"required": [
"createdAt",
"objective",
"status",
"threadId",
"timeUsedSeconds",
"tokensUsed",
"updatedAt"
],
"type": "object"
},
"ThreadGoalClearedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"threadId": {
"type": "string"
}
},
"required": [
"threadId"
],
"title": "ThreadGoalClearedNotification",
"type": "object"
},
"ThreadGoalStatus": {
"enum": [
"active",
"paused",
"budgetLimited",
"complete"
],
"type": "string"
},
"ThreadGoalUpdatedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"goal": {
"$ref": "#/definitions/v2/ThreadGoal"
},
"threadId": {
"type": "string"
},
"turnId": {
"type": [
"string",
"null"
]
}
},
"required": [
"goal",
"threadId"
],
"title": "ThreadGoalUpdatedNotification",
"type": "object"
},
"ThreadId": {
"type": "string"
},
@@ -15976,17 +15960,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the resumed thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -16065,18 +16038,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -16293,17 +16254,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for this thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -16392,18 +16342,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -17109,17 +17047,6 @@
"outputSchema": {
"description": "Optional JSON Schema used to constrain the final assistant message for this turn."
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/v2/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Override the full permissions profile for this turn and subsequent turns. Cannot be combined with `sandboxPolicy`."
},
"personality": {
"anyOf": [
{

@@ -3055,17 +3055,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Optional full permissions profile for this command.\n\nDefaults to the user's configured permissions when omitted. Cannot be combined with `sandboxPolicy`."
},
"processId": {
"description": "Optional client-supplied, connection-scoped process id.\n\nRequired for `tty`, `streamStdin`, `streamStdoutStderr`, and follow-up `command/exec/write`, `command/exec/resize`, and `command/exec/terminate` calls. When omitted, buffered execution gets an internal id that is not exposed to the client.",
"type": [
@@ -4856,7 +4845,8 @@
"CONFIG",
"SKILLS",
"PLUGINS",
"MCP_SERVER_CONFIG"
"MCP_SERVER_CONFIG",
"SESSIONS"
],
"type": "string"
},
@@ -5092,21 +5082,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -5828,38 +5803,6 @@
"title": "GetAccountResponse",
"type": "object"
},
"GhostCommit": {
"description": "Details of a ghost commit created from a repository state.",
"properties": {
"id": {
"type": "string"
},
"parent": {
"type": [
"string",
"null"
]
},
"preexisting_untracked_dirs": {
"items": {
"type": "string"
},
"type": "array"
},
"preexisting_untracked_files": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"id",
"preexisting_untracked_dirs",
"preexisting_untracked_files"
],
"type": "object"
},
"GitInfo": {
"properties": {
"branch": {
@@ -7504,15 +7447,20 @@
"MigrationDetails": {
"properties": {
"plugins": {
"default": [],
"items": {
"$ref": "#/definitions/PluginsMigration"
},
"type": "array"
},
"sessions": {
"default": [],
"items": {
"$ref": "#/definitions/SessionMigration"
},
"type": "array"
}
},
"required": [
"plugins"
],
"type": "object"
},
"ModeKind": {
@@ -8942,53 +8890,6 @@
"title": "RawResponseItemCompletedNotification",
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"RealtimeConversationVersion": {
"enum": [
"v1",
@@ -9460,12 +9361,6 @@
},
"type": "array"
},
"end_turn": {
"type": [
"boolean",
"null"
]
},
"id": {
"type": [
"string",
@@ -9865,26 +9760,6 @@
"title": "ImageGenerationCallResponseItem",
"type": "object"
},
{
"properties": {
"ghost_commit": {
"$ref": "#/definitions/GhostCommit"
},
"type": {
"enum": [
"ghost_snapshot"
],
"title": "GhostSnapshotResponseItemType",
"type": "string"
}
},
"required": [
"ghost_commit",
"type"
],
"title": "GhostSnapshotResponseItem",
"type": "object"
},
{
"properties": {
"encrypted_content": {
@@ -10197,16 +10072,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -10263,16 +10128,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -10511,6 +10366,46 @@
"title": "Thread/name/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/updated"
],
"title": "Thread/goal/updatedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/ThreadGoalUpdatedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/updatedNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"thread/goal/cleared"
],
"title": "Thread/goal/clearedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/ThreadGoalClearedNotification"
}
},
"required": [
"method",
"params"
],
"title": "Thread/goal/clearedNotification",
"type": "object"
},
{
"properties": {
"method": {
@@ -11542,6 +11437,27 @@
],
"type": "string"
},
"SessionMigration": {
"properties": {
"cwd": {
"type": "string"
},
"path": {
"type": "string"
},
"title": {
"type": [
"string",
"null"
]
}
},
"required": [
"cwd",
"path"
],
"type": "object"
},
"SessionSource": {
"oneOf": [
{
@@ -12429,17 +12345,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the forked thread. Cannot be combined with `sandbox`."
},
"sandbox": {
"anyOf": [
{
@@ -12508,18 +12413,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -12564,6 +12457,97 @@
"title": "ThreadForkResponse",
"type": "object"
},
"ThreadGoal": {
"properties": {
"createdAt": {
"format": "int64",
"type": "integer"
},
"objective": {
"type": "string"
},
"status": {
"$ref": "#/definitions/ThreadGoalStatus"
},
"threadId": {
"type": "string"
},
"timeUsedSeconds": {
"format": "int64",
"type": "integer"
},
"tokenBudget": {
"format": "int64",
"type": [
"integer",
"null"
]
},
"tokensUsed": {
"format": "int64",
"type": "integer"
},
"updatedAt": {
"format": "int64",
"type": "integer"
}
},
"required": [
"createdAt",
"objective",
"status",
"threadId",
"timeUsedSeconds",
"tokensUsed",
"updatedAt"
],
"type": "object"
},
"ThreadGoalClearedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"threadId": {
"type": "string"
}
},
"required": [
"threadId"
],
"title": "ThreadGoalClearedNotification",
"type": "object"
},
"ThreadGoalStatus": {
"enum": [
"active",
"paused",
"budgetLimited",
"complete"
],
"type": "string"
},
"ThreadGoalUpdatedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"goal": {
"$ref": "#/definitions/ThreadGoal"
},
"threadId": {
"type": "string"
},
"turnId": {
"type": [
"string",
"null"
]
}
},
"required": [
"goal",
"threadId"
],
"title": "ThreadGoalUpdatedNotification",
"type": "object"
},
"ThreadId": {
"type": "string"
},
@@ -13862,17 +13846,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the resumed thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -13951,18 +13924,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -14179,17 +14140,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for this thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{
@@ -14278,18 +14228,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{
@@ -14995,17 +14933,6 @@
"outputSchema": {
"description": "Optional JSON Schema used to constrain the final assistant message for this turn."
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Override the full permissions profile for this turn and subsequent turns. Cannot be combined with `sandboxPolicy`."
},
"personality": {
"anyOf": [
{

|
||||
"title": "MinimalFileSystemSpecialPath",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"kind": {
|
||||
"enum": [
|
||||
"current_working_directory"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind"
|
||||
],
|
||||
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"kind": {
|
||||
@@ -374,53 +359,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ReadOnlyAccess": {
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"includePlatformDefaults": {
|
||||
"default": true,
|
||||
"type": "boolean"
|
||||
},
|
||||
"readableRoots": {
|
||||
"default": [],
|
||||
"items": {
|
||||
"$ref": "#/definitions/AbsolutePathBuf"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"restricted"
|
||||
],
|
||||
"title": "RestrictedReadOnlyAccessType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"title": "RestrictedReadOnlyAccess",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"fullAccess"
|
||||
],
|
||||
"title": "FullAccessReadOnlyAccessType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"title": "FullAccessReadOnlyAccess",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"SandboxPolicy": {
|
||||
"oneOf": [
|
||||
{
|
||||
@@ -441,16 +379,6 @@
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"access": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ReadOnlyAccess"
|
||||
}
|
||||
],
|
||||
"default": {
|
||||
"type": "fullAccess"
|
||||
}
|
||||
},
|
||||
"networkAccess": {
|
||||
"default": false,
|
||||
"type": "boolean"
|
||||
@@ -507,16 +435,6 @@
|
||||
"default": false,
|
||||
"type": "boolean"
|
||||
},
|
||||
"readOnlyAccess": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ReadOnlyAccess"
|
||||
}
|
||||
],
|
||||
"default": {
|
||||
"type": "fullAccess"
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"workspaceWrite"
|
||||
@@ -587,17 +505,6 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"permissionProfile": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PermissionProfile"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Optional full permissions profile for this command.\n\nDefaults to the user's configured permissions when omitted. Cannot be combined with `sandboxPolicy`."
|
||||
},
|
||||
"processId": {
|
||||
"description": "Optional client-supplied, connection-scoped process id.\n\nRequired for `tty`, `streamStdin`, `streamStdoutStderr`, and follow-up `command/exec/write`, `command/exec/resize`, and `command/exec/terminate` calls. When omitted, buffered execution gets an internal id that is not exposed to the client.",
|
||||
"type": [
|
||||
|
||||
@@ -39,22 +39,28 @@
"CONFIG",
"SKILLS",
"PLUGINS",
"MCP_SERVER_CONFIG"
"MCP_SERVER_CONFIG",
"SESSIONS"
],
"type": "string"
},
"MigrationDetails": {
"properties": {
"plugins": {
"default": [],
"items": {
"$ref": "#/definitions/PluginsMigration"
},
"type": "array"
},
"sessions": {
"default": [],
"items": {
"$ref": "#/definitions/SessionMigration"
},
"type": "array"
}
},
"required": [
"plugins"
],
"type": "object"
},
"PluginsMigration": {
@@ -74,6 +80,27 @@
"pluginNames"
],
"type": "object"
},
"SessionMigration": {
"properties": {
"cwd": {
"type": "string"
},
"path": {
"type": "string"
},
"title": {
"type": [
"string",
"null"
]
}
},
"required": [
"cwd",
"path"
],
"type": "object"
}
},
"properties": {

@@ -39,22 +39,28 @@
"CONFIG",
"SKILLS",
"PLUGINS",
"MCP_SERVER_CONFIG"
"MCP_SERVER_CONFIG",
"SESSIONS"
],
"type": "string"
},
"MigrationDetails": {
"properties": {
"plugins": {
"default": [],
"items": {
"$ref": "#/definitions/PluginsMigration"
},
"type": "array"
},
"sessions": {
"default": [],
"items": {
"$ref": "#/definitions/SessionMigration"
},
"type": "array"
}
},
"required": [
"plugins"
],
"type": "object"
},
"PluginsMigration": {
@@ -74,6 +80,27 @@
"pluginNames"
],
"type": "object"
},
"SessionMigration": {
"properties": {
"cwd": {
"type": "string"
},
"path": {
"type": "string"
},
"title": {
"type": [
"string",
"null"
]
}
},
"required": [
"cwd",
"path"
],
"type": "object"
}
},
"properties": {

@@ -184,21 +184,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -177,21 +177,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {

@@ -143,38 +143,6 @@
}
]
},
"GhostCommit": {
"description": "Details of a ghost commit created from a repository state.",
"properties": {
"id": {
"type": "string"
},
"parent": {
"type": [
"string",
"null"
]
},
"preexisting_untracked_dirs": {
"items": {
"type": "string"
},
"type": "array"
},
"preexisting_untracked_files": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"id",
"preexisting_untracked_dirs",
"preexisting_untracked_files"
],
"type": "object"
},
"ImageDetail": {
"enum": [
"auto",
@@ -345,12 +313,6 @@
},
"type": "array"
},
"end_turn": {
"type": [
"boolean",
"null"
]
},
"id": {
"type": [
"string",
@@ -750,26 +712,6 @@
"title": "ImageGenerationCallResponseItem",
"type": "object"
},
{
"properties": {
"ghost_commit": {
"$ref": "#/definitions/GhostCommit"
},
"type": {
"enum": [
"ghost_snapshot"
],
"title": "GhostSnapshotResponseItemType",
"type": "string"
}
},
"required": [
"ghost_commit",
"type"
],
"title": "GhostSnapshotResponseItem",
"type": "object"
},
{
"properties": {
"encrypted_content": {

@@ -183,21 +183,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -488,17 +473,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the forked thread. Cannot be combined with `sandbox`."
},
"sandbox": {
"anyOf": [
{

@@ -569,21 +569,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -1028,53 +1013,6 @@
],
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"ReasoningEffort": {
"description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
"enum": [
@@ -1107,16 +1045,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -1173,16 +1101,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -2567,18 +2485,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{

13 codex-rs/app-server-protocol/schema/json/v2/ThreadGoalClearedNotification.json generated Normal file
@@ -0,0 +1,13 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"threadId": {
"type": "string"
}
},
"required": [
"threadId"
],
"title": "ThreadGoalClearedNotification",
"type": "object"
}

80 codex-rs/app-server-protocol/schema/json/v2/ThreadGoalUpdatedNotification.json generated Normal file
@@ -0,0 +1,80 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"ThreadGoal": {
"properties": {
"createdAt": {
"format": "int64",
"type": "integer"
},
"objective": {
"type": "string"
},
"status": {
"$ref": "#/definitions/ThreadGoalStatus"
},
"threadId": {
"type": "string"
},
"timeUsedSeconds": {
"format": "int64",
"type": "integer"
},
"tokenBudget": {
"format": "int64",
"type": [
"integer",
"null"
]
},
"tokensUsed": {
"format": "int64",
"type": "integer"
},
"updatedAt": {
"format": "int64",
"type": "integer"
}
},
"required": [
"createdAt",
"objective",
"status",
"threadId",
"timeUsedSeconds",
"tokensUsed",
"updatedAt"
],
"type": "object"
},
"ThreadGoalStatus": {
"enum": [
"active",
"paused",
"budgetLimited",
"complete"
],
"type": "string"
}
},
"properties": {
"goal": {
"$ref": "#/definitions/ThreadGoal"
},
"threadId": {
"type": "string"
},
"turnId": {
"type": [
"string",
"null"
]
}
},
"required": [
"goal",
"threadId"
],
"title": "ThreadGoalUpdatedNotification",
"type": "object"
}
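Editor's note: the two new notification schemas above are small enough to illustrate with a concrete payload. The TypeScript literal below is a hypothetical `threadGoalUpdated` body that conforms to the ThreadGoalUpdatedNotification schema; all ids, the objective, and the budget numbers are invented for illustration.

import type { ThreadGoalUpdatedNotification } from "./ThreadGoalUpdatedNotification";

// Hypothetical payload: every concrete value here is invented.
const example: ThreadGoalUpdatedNotification = {
  threadId: "thr_123",
  turnId: "turn_456", // may be null when the update is not tied to a turn
  goal: {
    threadId: "thr_123",
    objective: "Migrate sessions into the new layout",
    status: "active", // "active" | "paused" | "budgetLimited" | "complete"
    tokenBudget: 500_000, // nullable: null means no budget cap
    tokensUsed: 12_345,
    timeUsedSeconds: 90,
    createdAt: 1_700_000_000, // int64 timestamps per the schema
    updatedAt: 1_700_000_090,
  },
};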
@@ -257,21 +257,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -417,38 +402,6 @@
}
]
},
"GhostCommit": {
"description": "Details of a ghost commit created from a repository state.",
"properties": {
"id": {
"type": "string"
},
"parent": {
"type": [
"string",
"null"
]
},
"preexisting_untracked_dirs": {
"items": {
"type": "string"
},
"type": "array"
},
"preexisting_untracked_files": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"id",
"preexisting_untracked_dirs",
"preexisting_untracked_files"
],
"type": "object"
},
"ImageDetail": {
"enum": [
"auto",
@@ -756,12 +709,6 @@
},
"type": "array"
},
"end_turn": {
"type": [
"boolean",
"null"
]
},
"id": {
"type": [
"string",
@@ -1161,26 +1108,6 @@
"title": "ImageGenerationCallResponseItem",
"type": "object"
},
{
"properties": {
"ghost_commit": {
"$ref": "#/definitions/GhostCommit"
},
"type": {
"enum": [
"ghost_snapshot"
],
"title": "GhostSnapshotResponseItemType",
"type": "string"
}
},
"required": [
"ghost_commit",
"type"
],
"title": "GhostSnapshotResponseItem",
"type": "object"
},
{
"properties": {
"encrypted_content": {
@@ -1401,17 +1328,6 @@
"null"
]
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Full permissions override for the resumed thread. Cannot be combined with `sandbox`."
},
"personality": {
"anyOf": [
{

@@ -569,21 +569,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -1028,53 +1013,6 @@
],
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"ReasoningEffort": {
"description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
"enum": [
@@ -1107,16 +1045,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -1173,16 +1101,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -2567,18 +2485,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{

|
||||
"title": "MinimalFileSystemSpecialPath",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"kind": {
|
||||
"enum": [
|
||||
"current_working_directory"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind"
|
||||
],
|
||||
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"kind": {
|
||||
@@ -541,17 +526,6 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"permissionProfile": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PermissionProfile"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Full permissions override for this thread. Cannot be combined with `sandbox`."
|
||||
},
|
||||
"personality": {
|
||||
"anyOf": [
|
||||
{
|
||||
|
||||
@@ -569,21 +569,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -1028,53 +1013,6 @@
],
"type": "object"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"ReasoningEffort": {
"description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
"enum": [
@@ -1107,16 +1045,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -1173,16 +1101,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -2567,18 +2485,6 @@
"modelProvider": {
"type": "string"
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"default": null,
"description": "Canonical active permissions view for this thread."
},
"reasoningEffort": {
"anyOf": [
{

@@ -218,21 +218,6 @@
"title": "MinimalFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
"enum": [
"current_working_directory"
],
"type": "string"
}
},
"required": [
"kind"
],
"title": "CurrentWorkingDirectoryFileSystemSpecialPath",
"type": "object"
},
{
"properties": {
"kind": {
@@ -462,53 +447,6 @@
],
"type": "string"
},
"ReadOnlyAccess": {
"oneOf": [
{
"properties": {
"includePlatformDefaults": {
"default": true,
"type": "boolean"
},
"readableRoots": {
"default": [],
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"type": {
"enum": [
"restricted"
],
"title": "RestrictedReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "RestrictedReadOnlyAccess",
"type": "object"
},
{
"properties": {
"type": {
"enum": [
"fullAccess"
],
"title": "FullAccessReadOnlyAccessType",
"type": "string"
}
},
"required": [
"type"
],
"title": "FullAccessReadOnlyAccess",
"type": "object"
}
]
},
"ReasoningEffort": {
"description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
"enum": [
@@ -561,16 +499,6 @@
},
{
"properties": {
"access": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"networkAccess": {
"default": false,
"type": "boolean"
@@ -627,16 +555,6 @@
"default": false,
"type": "boolean"
},
"readOnlyAccess": {
"allOf": [
{
"$ref": "#/definitions/ReadOnlyAccess"
}
],
"default": {
"type": "fullAccess"
}
},
"type": {
"enum": [
"workspaceWrite"
@@ -911,17 +829,6 @@
"outputSchema": {
"description": "Optional JSON Schema used to constrain the final assistant message for this turn."
},
"permissionProfile": {
"anyOf": [
{
"$ref": "#/definitions/PermissionProfile"
},
{
"type": "null"
}
],
"description": "Override the full permissions profile for this turn and subsequent turns. Cannot be combined with `sandboxPolicy`."
},
"personality": {
"anyOf": [
{

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

/**
 * Details of a ghost commit created from a repository state.
 */
export type GhostCommit = { id: string, parent: string | null, preexisting_untracked_files: Array<string>, preexisting_untracked_dirs: Array<string>, };

5 codex-rs/app-server-protocol/schema/typescript/InternalSessionSource.ts generated Normal file
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type InternalSessionSource = "memory_consolidation";

@@ -3,7 +3,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ContentItem } from "./ContentItem";
import type { FunctionCallOutputBody } from "./FunctionCallOutputBody";
import type { GhostCommit } from "./GhostCommit";
import type { LocalShellAction } from "./LocalShellAction";
import type { LocalShellStatus } from "./LocalShellStatus";
import type { MessagePhase } from "./MessagePhase";
@@ -11,8 +10,8 @@ import type { ReasoningItemContent } from "./ReasoningItemContent";
import type { ReasoningItemReasoningSummary } from "./ReasoningItemReasoningSummary";
import type { WebSearchAction } from "./WebSearchAction";

export type ResponseItem = { "type": "message", role: string, content: Array<ContentItem>, end_turn?: boolean, phase?: MessagePhase, } | { "type": "reasoning", summary: Array<ReasoningItemReasoningSummary>, content?: Array<ReasoningItemContent>, encrypted_content: string | null, } | { "type": "local_shell_call",
export type ResponseItem = { "type": "message", role: string, content: Array<ContentItem>, phase?: MessagePhase, } | { "type": "reasoning", summary: Array<ReasoningItemReasoningSummary>, content?: Array<ReasoningItemContent>, encrypted_content: string | null, } | { "type": "local_shell_call",
/**
 * Set when using the Responses API.
 */
call_id: string | null, status: LocalShellStatus, action: LocalShellAction, } | { "type": "function_call", name: string, namespace?: string, arguments: string, call_id: string, } | { "type": "tool_search_call", call_id: string | null, status?: string, execution: string, arguments: unknown, } | { "type": "function_call_output", call_id: string, output: FunctionCallOutputBody, } | { "type": "custom_tool_call", status?: string, call_id: string, name: string, input: string, } | { "type": "custom_tool_call_output", call_id: string, name?: string, output: FunctionCallOutputBody, } | { "type": "tool_search_output", call_id: string | null, status: string, execution: string, tools: unknown[], } | { "type": "web_search_call", status?: string, action?: WebSearchAction, } | { "type": "image_generation_call", id: string, status: string, revised_prompt?: string, result: string, } | { "type": "ghost_snapshot", ghost_commit: GhostCommit, } | { "type": "compaction", encrypted_content: string, } | { "type": "other" };
call_id: string | null, status: LocalShellStatus, action: LocalShellAction, } | { "type": "function_call", name: string, namespace?: string, arguments: string, call_id: string, } | { "type": "tool_search_call", call_id: string | null, status?: string, execution: string, arguments: unknown, } | { "type": "function_call_output", call_id: string, output: FunctionCallOutputBody, } | { "type": "custom_tool_call", status?: string, call_id: string, name: string, input: string, } | { "type": "custom_tool_call_output", call_id: string, name?: string, output: FunctionCallOutputBody, } | { "type": "tool_search_output", call_id: string | null, status: string, execution: string, tools: unknown[], } | { "type": "web_search_call", status?: string, action?: WebSearchAction, } | { "type": "image_generation_call", id: string, status: string, revised_prompt?: string, result: string, } | { "type": "compaction", encrypted_content: string, } | { "type": "other" };

File diff suppressed because one or more lines are too long
@@ -1,6 +1,7 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { InternalSessionSource } from "./InternalSessionSource";
import type { SubAgentSource } from "./SubAgentSource";

export type SessionSource = "cli" | "vscode" | "exec" | "mcp" | { "custom": string } | { "subagent": SubAgentSource } | "unknown";
export type SessionSource = "cli" | "vscode" | "exec" | "mcp" | { "custom": string } | { "internal": InternalSessionSource } | { "subagent": SubAgentSource } | "unknown";
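Editor's note: one practical consequence of the new `{ internal: ... }` variant is that narrowing code which assumed every object-shaped SessionSource was either `custom` or `subagent` needs another branch. A minimal sketch of such narrowing follows; the helper name and label format are invented.

import type { SessionSource } from "./SessionSource";

// Hypothetical helper: turns a SessionSource into a display label.
function sessionSourceLabel(source: SessionSource): string {
  if (typeof source === "string") {
    return source; // "cli" | "vscode" | "exec" | "mcp" | "unknown"
  }
  if ("custom" in source) {
    return `custom:${source.custom}`;
  }
  if ("internal" in source) {
    return `internal:${source.internal}`; // e.g. "internal:memory_consolidation"
  }
  return `subagent:${JSON.stringify(source.subagent)}`;
}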
@@ -29,7 +29,6 @@ export type { GetAuthStatusParams } from "./GetAuthStatusParams";
export type { GetAuthStatusResponse } from "./GetAuthStatusResponse";
export type { GetConversationSummaryParams } from "./GetConversationSummaryParams";
export type { GetConversationSummaryResponse } from "./GetConversationSummaryResponse";
export type { GhostCommit } from "./GhostCommit";
export type { GitDiffToRemoteParams } from "./GitDiffToRemoteParams";
export type { GitDiffToRemoteResponse } from "./GitDiffToRemoteResponse";
export type { GitSha } from "./GitSha";
@@ -38,6 +37,7 @@ export type { InitializeCapabilities } from "./InitializeCapabilities";
export type { InitializeParams } from "./InitializeParams";
export type { InitializeResponse } from "./InitializeResponse";
export type { InputModality } from "./InputModality";
export type { InternalSessionSource } from "./InternalSessionSource";
export type { LocalShellAction } from "./LocalShellAction";
export type { LocalShellExecAction } from "./LocalShellExecAction";
export type { LocalShellStatus } from "./LocalShellStatus";

@@ -2,7 +2,6 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CommandExecTerminalSize } from "./CommandExecTerminalSize";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";

/**
@@ -13,12 +12,10 @@ import type { SandboxPolicy } from "./SandboxPolicy";
 * sent only after all `command/exec/outputDelta` notifications for that
 * connection have been emitted.
 */
export type CommandExecParams = {
/**
export type CommandExecParams = {/**
 * Command argv vector. Empty arrays are rejected.
 */
command: Array<string>,
/**
command: Array<string>, /**
 * Optional client-supplied, connection-scoped process id.
 *
 * Required for `tty`, `streamStdin`, `streamStdoutStderr`, and follow-up
@@ -26,81 +23,63 @@ command: Array<string>,
 * `command/exec/terminate` calls. When omitted, buffered execution gets an
 * internal id that is not exposed to the client.
 */
processId?: string | null,
/**
processId?: string | null, /**
 * Enable PTY mode.
 *
 * This implies `streamStdin` and `streamStdoutStderr`.
 */
tty?: boolean,
/**
tty?: boolean, /**
 * Allow follow-up `command/exec/write` requests to write stdin bytes.
 *
 * Requires a client-supplied `processId`.
 */
streamStdin?: boolean,
/**
streamStdin?: boolean, /**
 * Stream stdout/stderr via `command/exec/outputDelta` notifications.
 *
 * Streamed bytes are not duplicated into the final response and require a
 * client-supplied `processId`.
 */
streamStdoutStderr?: boolean,
/**
streamStdoutStderr?: boolean, /**
 * Optional per-stream stdout/stderr capture cap in bytes.
 *
 * When omitted, the server default applies. Cannot be combined with
 * `disableOutputCap`.
 */
outputBytesCap?: number | null,
/**
outputBytesCap?: number | null, /**
 * Disable stdout/stderr capture truncation for this request.
 *
 * Cannot be combined with `outputBytesCap`.
 */
disableOutputCap?: boolean,
/**
disableOutputCap?: boolean, /**
 * Disable the timeout entirely for this request.
 *
 * Cannot be combined with `timeoutMs`.
 */
disableTimeout?: boolean,
/**
disableTimeout?: boolean, /**
 * Optional timeout in milliseconds.
 *
 * When omitted, the server default applies. Cannot be combined with
 * `disableTimeout`.
 */
timeoutMs?: number | null,
/**
timeoutMs?: number | null, /**
 * Optional working directory. Defaults to the server cwd.
 */
cwd?: string | null,
/**
cwd?: string | null, /**
 * Optional environment overrides merged into the server-computed
 * environment.
 *
 * Matching names override inherited values. Set a key to `null` to unset
 * an inherited variable.
 */
env?: { [key in string]?: string | null } | null,
/**
env?: { [key in string]?: string | null } | null, /**
 * Optional initial PTY size in character cells. Only valid when `tty` is
 * true.
 */
size?: CommandExecTerminalSize | null,
/**
size?: CommandExecTerminalSize | null, /**
 * Optional sandbox policy for this command.
 *
 * Uses the same shape as thread/turn execution sandbox configuration and
 * defaults to the user's configured policy when omitted. Cannot be
 * combined with `permissionProfile`.
 */
sandboxPolicy?: SandboxPolicy | null,
/**
 * Optional full permissions profile for this command.
 *
 * Defaults to the user's configured permissions when omitted. Cannot be
 * combined with `sandboxPolicy`.
 */
permissionProfile?: PermissionProfile | null, };
sandboxPolicy?: SandboxPolicy | null};

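Editor's note: as a concrete reading of these parameters, here is a hypothetical `command/exec` request body for a streamed PTY session. The argv, size, and timeout values are invented, and the shape of CommandExecTerminalSize is an assumption; the example respects the documented constraints (a client-supplied `processId` is required because `tty` implies `streamStdin` and `streamStdoutStderr`).

import type { CommandExecParams } from "./CommandExecParams";

// Hypothetical request: every concrete value below is invented for illustration.
const params: CommandExecParams = {
  command: ["bash", "-lc", "htop"], // argv vector; empty arrays are rejected
  processId: "proc-1",              // required because tty mode streams I/O
  tty: true,                        // implies streamStdin and streamStdoutStderr
  size: { rows: 40, cols: 120 },    // assumes CommandExecTerminalSize has this shape
  timeoutMs: 60_000,                // cannot be combined with disableTimeout
  env: { TERM: "xterm-256color", UNWANTED_VAR: null }, // null unsets an inherited var
};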
@@ -2,15 +2,12 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { AdditionalPermissionProfile } from "./AdditionalPermissionProfile";
import type { CommandAction } from "./CommandAction";
import type { CommandExecutionApprovalDecision } from "./CommandExecutionApprovalDecision";
import type { ExecPolicyAmendment } from "./ExecPolicyAmendment";
import type { NetworkApprovalContext } from "./NetworkApprovalContext";
import type { NetworkPolicyAmendment } from "./NetworkPolicyAmendment";

export type CommandExecutionRequestApprovalParams = { threadId: string, turnId: string, itemId: string,
/**
export type CommandExecutionRequestApprovalParams = {threadId: string, turnId: string, itemId: string, /**
 * Unique identifier for this specific approval callback.
 *
 * For regular shell/unified_exec approvals, this is null.
@@ -19,40 +16,25 @@ export type CommandExecutionRequestApprovalParams = { threadId: string, turnId:
 * one parent `itemId`, so `approvalId` is a distinct opaque callback id
 * (a UUID) used to disambiguate routing.
 */
approvalId?: string | null,
/**
approvalId?: string | null, /**
 * Optional explanatory reason (e.g. request for network access).
 */
reason?: string | null,
/**
reason?: string | null, /**
 * Optional context for a managed-network approval prompt.
 */
networkApprovalContext?: NetworkApprovalContext | null,
/**
networkApprovalContext?: NetworkApprovalContext | null, /**
 * The command to be executed.
 */
command?: string | null,
/**
command?: string | null, /**
 * The command's working directory.
 */
cwd?: AbsolutePathBuf | null,
/**
cwd?: AbsolutePathBuf | null, /**
 * Best-effort parsed command actions for friendly display.
 */
commandActions?: Array<CommandAction> | null,
/**
 * Optional additional permissions requested for this command.
 */
additionalPermissions?: AdditionalPermissionProfile | null,
/**
commandActions?: Array<CommandAction> | null, /**
 * Optional proposed execpolicy amendment to allow similar commands without prompting.
 */
proposedExecpolicyAmendment?: ExecPolicyAmendment | null,
/**
proposedExecpolicyAmendment?: ExecPolicyAmendment | null, /**
 * Optional proposed network policy amendments (allow/deny host) for future requests.
 */
proposedNetworkPolicyAmendments?: Array<NetworkPolicyAmendment> | null,
/**
 * Ordered list of decisions the client may present for this prompt.
 */
availableDecisions?: Array<CommandExecutionApprovalDecision> | null, };
proposedNetworkPolicyAmendments?: Array<NetworkPolicyAmendment> | null};

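Editor's note: a client rendering this approval prompt mostly reads the optional display fields. A minimal sketch of such a renderer follows; the function name and fallback strings are invented, and `cwd` is assumed to stringify cleanly (AbsolutePathBuf is typically a string alias in ts-rs output).

import type { CommandExecutionRequestApprovalParams } from "./CommandExecutionRequestApprovalParams";

// Hypothetical prompt renderer; only reads fields defined by the type above.
function describeApproval(p: CommandExecutionRequestApprovalParams): string {
  const lines = [
    `Thread ${p.threadId}, turn ${p.turnId}, item ${p.itemId}`,
    `Command: ${p.command ?? "(not provided)"}`,
    `Cwd: ${p.cwd ?? "(server default)"}`,
  ];
  if (p.reason) {
    lines.push(`Reason: ${p.reason}`);
  }
  if (p.proposedExecpolicyAmendment) {
    lines.push("Offers an execpolicy amendment to allow similar commands.");
  }
  return lines.join("\n");
}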
@@ -2,4 +2,4 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type ExternalAgentConfigMigrationItemType = "AGENTS_MD" | "CONFIG" | "SKILLS" | "PLUGINS" | "MCP_SERVER_CONFIG";
export type ExternalAgentConfigMigrationItemType = "AGENTS_MD" | "CONFIG" | "SKILLS" | "PLUGINS" | "MCP_SERVER_CONFIG" | "SESSIONS";

@@ -2,4 +2,4 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type FileSystemSpecialPath = { "kind": "root" } | { "kind": "minimal" } | { "kind": "current_working_directory" } | { "kind": "project_roots", subpath: string | null, } | { "kind": "tmpdir" } | { "kind": "slash_tmp" } | { "kind": "unknown", path: string, subpath: string | null, };
export type FileSystemSpecialPath = { "kind": "root" } | { "kind": "minimal" } | { "kind": "project_roots", subpath: string | null, } | { "kind": "tmpdir" } | { "kind": "slash_tmp" } | { "kind": "unknown", path: string, subpath: string | null, };

@@ -2,5 +2,6 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginsMigration } from "./PluginsMigration";
import type { SessionMigration } from "./SessionMigration";

export type MigrationDetails = { plugins: Array<PluginsMigration>, };
export type MigrationDetails = { plugins: Array<PluginsMigration>, sessions: Array<SessionMigration>, };

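Editor's note: together with the new "SESSIONS" item type above, MigrationDetails now carries the sessions to migrate, each matching the SessionMigration schema (path, cwd, nullable title). A hypothetical value, with all paths and titles invented:

import type { MigrationDetails } from "./MigrationDetails";

// Hypothetical migration report; all paths and titles are invented.
const details: MigrationDetails = {
  plugins: [], // PluginsMigration entries would go here
  sessions: [
    { path: "/home/user/.codex/sessions/abc.jsonl", cwd: "/home/user/project", title: "Fix flaky test" },
    { path: "/home/user/.codex/sessions/def.jsonl", cwd: "/home/user/project", title: null }, // title is nullable
  ],
};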
@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AbsolutePathBuf } from "../AbsolutePathBuf";

export type ReadOnlyAccess = { "type": "restricted", includePlatformDefaults: boolean, readableRoots: Array<AbsolutePathBuf>, } | { "type": "fullAccess" };
@@ -3,6 +3,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { NetworkAccess } from "./NetworkAccess";
import type { ReadOnlyAccess } from "./ReadOnlyAccess";

export type SandboxPolicy = { "type": "dangerFullAccess" } | { "type": "readOnly", access: ReadOnlyAccess, networkAccess: boolean, } | { "type": "externalSandbox", networkAccess: NetworkAccess, } | { "type": "workspaceWrite", writableRoots: Array<AbsolutePathBuf>, readOnlyAccess: ReadOnlyAccess, networkAccess: boolean, excludeTmpdirEnvVar: boolean, excludeSlashTmp: boolean, };
export type SandboxPolicy = { "type": "dangerFullAccess" } | { "type": "readOnly", networkAccess: boolean, } | { "type": "externalSandbox", networkAccess: NetworkAccess, } | { "type": "workspaceWrite", writableRoots: Array<AbsolutePathBuf>, networkAccess: boolean, excludeTmpdirEnvVar: boolean, excludeSlashTmp: boolean, };

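Editor's note: this is the client-visible effect of deleting ReadOnlyAccess throughout the schemas above: `readOnly` and `workspaceWrite` policies no longer carry an `access`/`readOnlyAccess` field. A sketch of a policy value against the new shape; the root path is invented, and AbsolutePathBuf is assumed to be a string alias as ts-rs typically generates.

import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { SandboxPolicy } from "./SandboxPolicy";

// Hypothetical workspace-write policy under the new, ReadOnlyAccess-free shape.
const root = "/home/user/project" as AbsolutePathBuf; // assumption: string alias

const policy: SandboxPolicy = {
  type: "workspaceWrite",
  writableRoots: [root],
  networkAccess: false, // workspace writes allowed, network still off
  excludeTmpdirEnvVar: false,
  excludeSlashTmp: false,
};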
5 codex-rs/app-server-protocol/schema/typescript/v2/SessionMigration.ts generated Normal file
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SessionMigration = { path: string, cwd: string, title: string | null, };
@@ -5,7 +5,6 @@ import type { ServiceTier } from "../ServiceTier";
import type { JsonValue } from "../serde_json/JsonValue";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxMode } from "./SandboxMode";

/**
@@ -18,27 +17,15 @@ import type { SandboxMode } from "./SandboxMode";
 * Prefer using thread_id whenever possible.
 */
export type ThreadForkParams = {threadId: string, /**
 * [UNSTABLE] Specify the rollout path to fork from.
 * If specified, the thread_id param will be ignored.
 */
path?: string | null, /**
 * Configuration overrides for the forked thread, if any.
 */
model?: string | null, modelProvider?: string | null, serviceTier?: ServiceTier | null | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, /**
 * Override where approval requests are routed for review on this thread
 * and subsequent turns.
 */
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, /**
 * Full permissions override for the forked thread. Cannot be combined
 * with `sandbox`.
 */
permissionProfile?: PermissionProfile | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, ephemeral?: boolean, /**
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, ephemeral?: boolean, /**
 * When true, return only thread metadata and live fork state without
 * populating `thread.turns`. This is useful when the client plans to call
 * `thread/turns/list` immediately after forking.
 */
excludeTurns?: boolean, /**
 * If true, persist additional rollout EventMsg variants required to
 * reconstruct a richer thread history on subsequent resume/fork/read.
 */
persistExtendedHistory: boolean};
excludeTurns?: boolean};

@@ -6,26 +6,18 @@ import type { ReasoningEffort } from "../ReasoningEffort";
import type { ServiceTier } from "../ServiceTier";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";
import type { Thread } from "./Thread";

export type ThreadForkResponse = { thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf,
/**
export type ThreadForkResponse = {thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf, /**
 * Instruction source files currently loaded for this thread.
 */
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval,
/**
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval, /**
 * Reviewer currently used for approval requests on this thread.
 */
approvalsReviewer: ApprovalsReviewer,
/**
approvalsReviewer: ApprovalsReviewer, /**
 * Legacy sandbox policy retained for compatibility. New clients should use
 * `permissionProfile` when present as the canonical active permissions
 * view.
 */
sandbox: SandboxPolicy,
/**
 * Canonical active permissions view for this thread.
 */
permissionProfile: PermissionProfile | null, reasoningEffort: ReasoningEffort | null, };
sandbox: SandboxPolicy, reasoningEffort: ReasoningEffort | null};

6 codex-rs/app-server-protocol/schema/typescript/v2/ThreadGoal.ts generated Normal file
@@ -0,0 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ThreadGoalStatus } from "./ThreadGoalStatus";

export type ThreadGoal = { threadId: string, objective: string, status: ThreadGoalStatus, tokenBudget: number | null, tokensUsed: number, timeUsedSeconds: number, createdAt: number, updatedAt: number, };

5 codex-rs/app-server-protocol/schema/typescript/v2/ThreadGoalClearedNotification.ts generated Normal file
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type ThreadGoalClearedNotification = { threadId: string, };

5 codex-rs/app-server-protocol/schema/typescript/v2/ThreadGoalStatus.ts generated Normal file
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type ThreadGoalStatus = "active" | "paused" | "budgetLimited" | "complete";

6 codex-rs/app-server-protocol/schema/typescript/v2/ThreadGoalUpdatedNotification.ts generated Normal file
@@ -0,0 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ThreadGoal } from "./ThreadGoal";

export type ThreadGoalUpdatedNotification = { threadId: string, turnId: string | null, goal: ThreadGoal, };
@@ -2,12 +2,10 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Personality } from "../Personality";
import type { ResponseItem } from "../ResponseItem";
import type { ServiceTier } from "../ServiceTier";
import type { JsonValue } from "../serde_json/JsonValue";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxMode } from "./SandboxMode";

/**
@@ -22,32 +20,15 @@ import type { SandboxMode } from "./SandboxMode";
 * Prefer using thread_id whenever possible.
 */
export type ThreadResumeParams = {threadId: string, /**
 * [UNSTABLE] FOR CODEX CLOUD - DO NOT USE.
 * If specified, the thread will be resumed with the provided history
 * instead of loaded from disk.
 */
history?: Array<ResponseItem> | null, /**
 * [UNSTABLE] Specify the rollout path to resume from.
 * If specified, the thread_id param will be ignored.
 */
path?: string | null, /**
 * Configuration overrides for the resumed thread, if any.
 */
model?: string | null, modelProvider?: string | null, serviceTier?: ServiceTier | null | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, /**
 * Override where approval requests are routed for review on this thread
 * and subsequent turns.
 */
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, /**
 * Full permissions override for the resumed thread. Cannot be combined
 * with `sandbox`.
 */
permissionProfile?: PermissionProfile | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, /**
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, /**
 * When true, return only thread metadata and live-resume state without
 * populating `thread.turns`. This is useful when the client plans to call
 * `thread/turns/list` immediately after resuming.
 */
excludeTurns?: boolean, /**
 * If true, persist additional rollout EventMsg variants required to
 * reconstruct a richer thread history on subsequent resume/fork/read.
 */
persistExtendedHistory: boolean};
excludeTurns?: boolean};

@@ -6,26 +6,18 @@ import type { ReasoningEffort } from "../ReasoningEffort";
import type { ServiceTier } from "../ServiceTier";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";
import type { Thread } from "./Thread";

export type ThreadResumeResponse = { thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf,
/**
export type ThreadResumeResponse = {thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf, /**
 * Instruction source files currently loaded for this thread.
 */
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval,
/**
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval, /**
 * Reviewer currently used for approval requests on this thread.
 */
approvalsReviewer: ApprovalsReviewer,
/**
approvalsReviewer: ApprovalsReviewer, /**
 * Legacy sandbox policy retained for compatibility. New clients should use
 * `permissionProfile` when present as the canonical active permissions
 * view.
 */
sandbox: SandboxPolicy,
/**
 * Canonical active permissions view for this thread.
 */
permissionProfile: PermissionProfile | null, reasoningEffort: ReasoningEffort | null, };
sandbox: SandboxPolicy, reasoningEffort: ReasoningEffort | null};

@@ -6,7 +6,6 @@ import type { ServiceTier } from "../ServiceTier";
import type { JsonValue } from "../serde_json/JsonValue";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxMode } from "./SandboxMode";
import type { ThreadStartSource } from "./ThreadStartSource";

@@ -14,16 +13,4 @@ export type ThreadStartParams = {model?: string | null, modelProvider?: string |
 * Override where approval requests are routed for review on this thread
 * and subsequent turns.
 */
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, /**
 * Full permissions override for this thread. Cannot be combined with
 * `sandbox`.
 */
permissionProfile?: PermissionProfile | null, config?: { [key in string]?: JsonValue } | null, serviceName?: string | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, ephemeral?: boolean | null, sessionStartSource?: ThreadStartSource | null, /**
 * If true, opt into emitting raw Responses API items on the event stream.
 * This is for internal use only (e.g. Codex Cloud).
 */
experimentalRawEvents: boolean, /**
 * If true, persist additional rollout EventMsg variants required to
 * reconstruct a richer thread history on resume/fork/read.
 */
persistExtendedHistory: boolean};
approvalsReviewer?: ApprovalsReviewer | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, serviceName?: string | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, ephemeral?: boolean | null, sessionStartSource?: ThreadStartSource | null};

@@ -6,26 +6,18 @@ import type { ReasoningEffort } from "../ReasoningEffort";
import type { ServiceTier } from "../ServiceTier";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";
import type { Thread } from "./Thread";

export type ThreadStartResponse = { thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf,
/**
export type ThreadStartResponse = {thread: Thread, model: string, modelProvider: string, serviceTier: ServiceTier | null, cwd: AbsolutePathBuf, /**
 * Instruction source files currently loaded for this thread.
 */
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval,
/**
instructionSources: Array<AbsolutePathBuf>, approvalPolicy: AskForApproval, /**
 * Reviewer currently used for approval requests on this thread.
 */
approvalsReviewer: ApprovalsReviewer,
/**
approvalsReviewer: ApprovalsReviewer, /**
 * Legacy sandbox policy retained for compatibility. New clients should use
 * `permissionProfile` when present as the canonical active permissions
 * view.
 */
sandbox: SandboxPolicy,
/**
 * Canonical active permissions view for this thread.
 */
permissionProfile: PermissionProfile | null, reasoningEffort: ReasoningEffort | null, };
sandbox: SandboxPolicy, reasoningEffort: ReasoningEffort | null};

@@ -1,7 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CollaborationMode } from "../CollaborationMode";
import type { Personality } from "../Personality";
import type { ReasoningEffort } from "../ReasoningEffort";
import type { ReasoningSummary } from "../ReasoningSummary";
@@ -9,7 +8,6 @@ import type { ServiceTier } from "../ServiceTier";
import type { JsonValue } from "../serde_json/JsonValue";
import type { ApprovalsReviewer } from "./ApprovalsReviewer";
import type { AskForApproval } from "./AskForApproval";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";
import type { UserInput } from "./UserInput";

@@ -27,10 +25,6 @@ approvalsReviewer?: ApprovalsReviewer | null, /**
 * Override the sandbox policy for this turn and subsequent turns.
 */
sandboxPolicy?: SandboxPolicy | null, /**
 * Override the full permissions profile for this turn and subsequent
 * turns. Cannot be combined with `sandboxPolicy`.
 */
permissionProfile?: PermissionProfile | null, /**
 * Override the model for this turn and subsequent turns.
 */
model?: string | null, /**
@@ -49,11 +43,4 @@ personality?: Personality | null, /**
 * Optional JSON Schema used to constrain the final assistant message for
 * this turn.
 */
outputSchema?: JsonValue | null, /**
 * EXPERIMENTAL - Set a pre-set collaboration mode.
 * Takes precedence over model, reasoning_effort, and developer instructions if set.
 *
 * For `collaboration_mode.settings.developer_instructions`, `null` means
 * "use the built-in instructions for the selected mode".
 */
collaborationMode?: CollaborationMode | null};
outputSchema?: JsonValue | null};

@@ -276,7 +276,6 @@ export type { RateLimitReachedType } from "./RateLimitReachedType";
export type { RateLimitSnapshot } from "./RateLimitSnapshot";
export type { RateLimitWindow } from "./RateLimitWindow";
export type { RawResponseItemCompletedNotification } from "./RawResponseItemCompletedNotification";
export type { ReadOnlyAccess } from "./ReadOnlyAccess";
export type { ReasoningEffortOption } from "./ReasoningEffortOption";
export type { ReasoningSummaryPartAddedNotification } from "./ReasoningSummaryPartAddedNotification";
export type { ReasoningSummaryTextDeltaNotification } from "./ReasoningSummaryTextDeltaNotification";
@@ -295,6 +294,7 @@ export type { SandboxWorkspaceWrite } from "./SandboxWorkspaceWrite";
export type { SendAddCreditsNudgeEmailParams } from "./SendAddCreditsNudgeEmailParams";
export type { SendAddCreditsNudgeEmailResponse } from "./SendAddCreditsNudgeEmailResponse";
export type { ServerRequestResolvedNotification } from "./ServerRequestResolvedNotification";
export type { SessionMigration } from "./SessionMigration";
export type { SessionSource } from "./SessionSource";
export type { SkillDependencies } from "./SkillDependencies";
export type { SkillErrorInfo } from "./SkillErrorInfo";
@@ -327,6 +327,10 @@ export type { ThreadCompactStartParams } from "./ThreadCompactStartParams";
export type { ThreadCompactStartResponse } from "./ThreadCompactStartResponse";
export type { ThreadForkParams } from "./ThreadForkParams";
export type { ThreadForkResponse } from "./ThreadForkResponse";
export type { ThreadGoal } from "./ThreadGoal";
export type { ThreadGoalClearedNotification } from "./ThreadGoalClearedNotification";
export type { ThreadGoalStatus } from "./ThreadGoalStatus";
export type { ThreadGoalUpdatedNotification } from "./ThreadGoalUpdatedNotification";
export type { ThreadInjectItemsParams } from "./ThreadInjectItemsParams";
export type { ThreadInjectItemsResponse } from "./ThreadInjectItemsResponse";
export type { ThreadItem } from "./ThreadItem";

@@ -736,11 +736,11 @@ fn find_top_level_brace_span(input: &str) -> Option<(usize, usize)> {
let mut state = ScanState::default();
let mut open_index = None;
for (index, ch) in input.char_indices() {
if !state.in_string() && ch == '{' && state.depth.is_top_level() {
if !state.in_ignored_syntax() && ch == '{' && state.depth.is_top_level() {
open_index = Some(index);
}
state.observe(ch);
if !state.in_string()
if !state.in_ignored_syntax()
&& ch == '}'
&& state.depth.is_top_level()
&& let Some(open) = open_index
@@ -760,7 +760,7 @@ fn split_top_level_multi(input: &str, delimiters: &[char]) -> Vec<String> {
let mut start = 0usize;
let mut parts = Vec::new();
for (index, ch) in input.char_indices() {
if !state.in_string() && state.depth.is_top_level() && delimiters.contains(&ch) {
if !state.in_ignored_syntax() && state.depth.is_top_level() && delimiters.contains(&ch) {
let part = input[start..index].trim();
if !part.is_empty() {
parts.push(part.to_string());
@@ -882,22 +882,58 @@ struct ScanState {
depth: Depth,
string_delim: Option<char>,
escape: bool,
block_comment: bool,
line_comment: bool,
previous_char: Option<char>,
}

impl ScanState {
fn observe(&mut self, ch: char) {
if self.line_comment {
if ch == '\n' {
self.line_comment = false;
}
self.previous_char = Some(ch);
return;
}

if self.block_comment {
if self.previous_char == Some('*') && ch == '/' {
self.block_comment = false;
self.previous_char = None;
} else {
self.previous_char = Some(ch);
}
return;
}

if let Some(delim) = self.string_delim {
if self.escape {
self.escape = false;
self.previous_char = Some(ch);
return;
}
if ch == '\\' {
self.escape = true;
self.previous_char = Some(ch);
return;
}
if ch == delim {
self.string_delim = None;
}
self.previous_char = Some(ch);
return;
}

if self.previous_char == Some('/') && ch == '/' {
self.line_comment = true;
self.previous_char = Some(ch);
return;
}

if self.previous_char == Some('/') && ch == '*' {
self.block_comment = true;
self.previous_char = Some(ch);
return;
}

@@ -919,10 +955,11 @@ impl ScanState {
}
_ => {}
}
self.previous_char = Some(ch);
}

fn in_string(&self) -> bool {
self.string_delim.is_some()
fn in_ignored_syntax(&self) -> bool {
self.string_delim.is_some() || self.block_comment || self.line_comment
}
}

@@ -2694,6 +2731,79 @@ export type Config = { stableField: Keep, unstableField: string | null } & ({ [k
Ok(())
}

#[test]
fn experimental_type_fields_ts_filter_handles_generated_command_params_shape() -> Result<()> {
let output_dir = std::env::temp_dir().join(format!("codex_ts_filter_{}", Uuid::now_v7()));
fs::create_dir_all(&output_dir)?;

struct TempDirGuard(PathBuf);

impl Drop for TempDirGuard {
fn drop(&mut self) {
let _ = fs::remove_dir_all(&self.0);
}
}

let _guard = TempDirGuard(output_dir.clone());
let path = output_dir.join("CommandExecParams.ts");
let content = r#"import type { CommandExecTerminalSize } from "./CommandExecTerminalSize";
import type { PermissionProfile } from "./PermissionProfile";
import type { SandboxPolicy } from "./SandboxPolicy";

export type CommandExecParams = {/**
* Command argv vector. Empty arrays are rejected.
*/
command: Array<string>, /**
* Optional environment overrides merged into the server-computed
* environment.
*/
env?: { [key in string]?: string | null } | null, /**
* Optional initial PTY size in character cells. Only valid when `tty` is
* true.
*/
size?: CommandExecTerminalSize | null, /**
* Optional sandbox policy for this command.
*
* Uses the same shape as thread/turn execution sandbox configuration and
* defaults to the user's configured policy when omitted. Cannot be
* combined with `permissionProfile`.
*/
sandboxPolicy?: SandboxPolicy | null,
/**
* Optional full permissions profile for this command.
*
* Defaults to the user's configured permissions when omitted. Cannot be
* combined with `sandboxPolicy`.
*/
permissionProfile?: PermissionProfile | null};
"#;
fs::write(&path, content)?;

static CUSTOM_FIELD: crate::experimental_api::ExperimentalField =
crate::experimental_api::ExperimentalField {
type_name: "CommandExecParams",
field_name: "permissionProfile",
reason: "command/exec.permissionProfile",
};
filter_experimental_type_fields_ts(&output_dir, &[&CUSTOM_FIELD])?;

let filtered = fs::read_to_string(&path)?;
assert_eq!(
filtered.contains("permissionProfile?: PermissionProfile"),
false
);
assert_eq!(
filtered.contains(r#"import type { PermissionProfile } from "./PermissionProfile";"#),
false
);
assert_eq!(filtered.contains("sandboxPolicy?: SandboxPolicy"), true);
assert_eq!(
filtered.contains(r#"import type { SandboxPolicy } from "./SandboxPolicy";"#),
true
);
Ok(())
}

#[test]
fn stable_schema_filter_removes_mock_experimental_method() -> Result<()> {
let output_dir = std::env::temp_dir().join(format!("codex_schema_{}", Uuid::now_v7()));

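The ScanState change above widens the scanner from skipping only string literals (`in_string`) to skipping strings plus `//` line comments and `/* */` block comments (`in_ignored_syntax`), so delimiters that appear inside comments no longer confuse the top-level split. A self-contained sketch of the same technique, not the crate's actual code (the real implementation also tracks a `Depth` type that the diff does not show):

    // Split on top-level commas while ignoring string literals, `//` line
    // comments, and `/* */` block comments -- the same scanning idea as
    // ScanState::in_ignored_syntax above.
    fn split_top_level(input: &str) -> Vec<String> {
        let (mut depth, mut string_delim, mut escape) = (0i32, None::<char>, false);
        let (mut line_comment, mut block_comment, mut prev) = (false, false, None::<char>);
        let mut start = 0usize;
        let mut parts = Vec::new();
        for (i, ch) in input.char_indices() {
            if line_comment {
                if ch == '\n' {
                    line_comment = false;
                }
            } else if block_comment {
                if prev == Some('*') && ch == '/' {
                    block_comment = false;
                    prev = None;
                    continue;
                }
            } else if let Some(d) = string_delim {
                if escape {
                    escape = false;
                } else if ch == '\\' {
                    escape = true;
                } else if ch == d {
                    string_delim = None;
                }
            } else if prev == Some('/') && ch == '/' {
                line_comment = true;
            } else if prev == Some('/') && ch == '*' {
                block_comment = true;
            } else {
                match ch {
                    '"' | '\'' => string_delim = Some(ch),
                    '{' | '(' | '[' => depth += 1,
                    '}' | ')' | ']' => depth -= 1,
                    ',' if depth == 0 => {
                        parts.push(input[start..i].trim().to_string());
                        start = i + 1;
                    }
                    _ => {}
                }
            }
            prev = Some(ch);
        }
        parts.push(input[start..].trim().to_string());
        parts
    }

    fn main() {
        // Commas inside the comment and the string are ignored; only the
        // top-level commas split the input.
        let fields = split_top_level(r#"a: Foo /* x, y */, b: "1,2", c: Bar"#);
        assert_eq!(fields, vec!["a: Foo /* x, y */", r#"b: "1,2""#, "c: Bar"]);
    }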
@@ -285,6 +285,21 @@ client_request_definitions! {
params: v2::ThreadSetNameParams,
response: v2::ThreadSetNameResponse,
},
#[experimental("thread/goal/set")]
ThreadGoalSet => "thread/goal/set" {
params: v2::ThreadGoalSetParams,
response: v2::ThreadGoalSetResponse,
},
#[experimental("thread/goal/get")]
ThreadGoalGet => "thread/goal/get" {
params: v2::ThreadGoalGetParams,
response: v2::ThreadGoalGetResponse,
},
#[experimental("thread/goal/clear")]
ThreadGoalClear => "thread/goal/clear" {
params: v2::ThreadGoalClearParams,
response: v2::ThreadGoalClearResponse,
},
ThreadMetadataUpdate => "thread/metadata/update" {
params: v2::ThreadMetadataUpdateParams,
response: v2::ThreadMetadataUpdateResponse,
@@ -566,6 +581,7 @@ client_request_definitions! {
/// Execute a standalone command (argv vector) under the server's sandbox.
OneOffCommandExec => "command/exec" {
params: v2::CommandExecParams,
inspect_params: true,
response: v2::CommandExecResponse,
},
/// Write stdin bytes to a running `command/exec` session or close stdin.
@@ -1027,6 +1043,10 @@ server_notification_definitions! {
ThreadClosed => "thread/closed" (v2::ThreadClosedNotification),
SkillsChanged => "skills/changed" (v2::SkillsChangedNotification),
ThreadNameUpdated => "thread/name/updated" (v2::ThreadNameUpdatedNotification),
#[experimental("thread/goal/updated")]
ThreadGoalUpdated => "thread/goal/updated" (v2::ThreadGoalUpdatedNotification),
#[experimental("thread/goal/cleared")]
ThreadGoalCleared => "thread/goal/cleared" (v2::ThreadGoalClearedNotification),
ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
TurnStarted => "turn/started" (v2::TurnStartedNotification),
HookStarted => "hook/started" (v2::HookStartedNotification),
@@ -2030,6 +2050,33 @@ mod tests {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("mock/experimentalMethod"));
}

#[test]
fn command_exec_permission_profile_is_marked_experimental() {
let request = ClientRequest::OneOffCommandExec {
request_id: RequestId::Integer(1),
params: v2::CommandExecParams {
command: vec!["pwd".to_string()],
process_id: None,
tty: false,
stream_stdin: false,
stream_stdout_stderr: false,
output_bytes_cap: None,
disable_output_cap: false,
disable_timeout: false,
timeout_ms: None,
cwd: None,
env: None,
size: None,
sandbox_policy: None,
permission_profile: Some(v2::PermissionProfile::Disabled),
},
};

let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("command/exec.permissionProfile"));
}

#[test]
fn thread_realtime_start_is_marked_experimental() {
let request = ClientRequest::ThreadRealtimeStart {
@@ -2046,6 +2093,76 @@ mod tests {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("thread/realtime/start"));
}

#[test]
fn thread_goal_methods_are_marked_experimental() {
let set_request = ClientRequest::ThreadGoalSet {
request_id: RequestId::Integer(1),
params: v2::ThreadGoalSetParams {
thread_id: "thr_123".to_string(),
objective: Some("ship goal mode".to_string()),
status: Some(v2::ThreadGoalStatus::Active),
token_budget: Some(Some(10_000)),
},
};
let get_request = ClientRequest::ThreadGoalGet {
request_id: RequestId::Integer(2),
params: v2::ThreadGoalGetParams {
thread_id: "thr_123".to_string(),
},
};
let clear_request = ClientRequest::ThreadGoalClear {
request_id: RequestId::Integer(3),
params: v2::ThreadGoalClearParams {
thread_id: "thr_123".to_string(),
},
};

assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(&set_request),
Some("thread/goal/set")
);
assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(&get_request),
Some("thread/goal/get")
);
assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(&clear_request),
Some("thread/goal/clear")
);
}

#[test]
fn thread_goal_notifications_are_marked_experimental() {
let goal = v2::ThreadGoal {
thread_id: "thr_123".to_string(),
objective: "ship goal mode".to_string(),
status: v2::ThreadGoalStatus::Active,
token_budget: Some(10_000),
tokens_used: 123,
time_used_seconds: 45,
created_at: 1_700_000_000,
updated_at: 1_700_000_123,
};
let updated = ServerNotification::ThreadGoalUpdated(v2::ThreadGoalUpdatedNotification {
thread_id: "thr_123".to_string(),
turn_id: None,
goal,
});
let cleared = ServerNotification::ThreadGoalCleared(v2::ThreadGoalClearedNotification {
thread_id: "thr_123".to_string(),
});

assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(&updated),
Some("thread/goal/updated")
);
assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(&cleared),
Some("thread/goal/cleared")
);
}

#[test]
fn thread_realtime_started_notification_is_marked_experimental() {
let notification =

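The `#[experimental(...)]` attributes above tag each new thread/goal method and notification with a reason string that `experimental_reason` surfaces at runtime. A hedged sketch of how a server might gate such methods (only the `Option<&str>` shape of `experimental_reason` is taken from the diff; the `opted_in` flag is hypothetical):

    // Reject experimental methods unless the client has opted in; the reason
    // string doubles as the error's method identifier.
    fn gate_experimental(reason: Option<&'static str>, opted_in: bool) -> Result<(), String> {
        match reason {
            Some(r) if !opted_in => Err(format!(
                "{r} is experimental; enable experimental API access to call it"
            )),
            _ => Ok(()),
        }
    }

    fn main() {
        assert!(gate_experimental(Some("thread/goal/set"), false).is_err());
        assert!(gate_experimental(Some("thread/goal/set"), true).is_ok());
        assert!(gate_experimental(None, false).is_ok()); // stable methods pass
    }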
@@ -3096,7 +3096,6 @@ mod tests {
content: vec![codex_protocol::models::ContentItem::InputText {
text: "plain text".into(),
}],
end_turn: None,
phase: None,
}),
RolloutItem::EventMsg(EventMsg::TurnComplete(TurnCompleteEvent {

@@ -83,7 +83,6 @@ use codex_protocol::protocol::PatchApplyStatus as CorePatchApplyStatus;
use codex_protocol::protocol::RateLimitReachedType as CoreRateLimitReachedType;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::ReadOnlyAccess as CoreReadOnlyAccess;
use codex_protocol::protocol::RealtimeAudioFrame as CoreRealtimeAudioFrame;
use codex_protocol::protocol::RealtimeConversationVersion;
use codex_protocol::protocol::RealtimeOutputModality;
@@ -97,6 +96,7 @@ use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
use codex_protocol::protocol::SkillScope as CoreSkillScope;
use codex_protocol::protocol::SkillToolDependency as CoreSkillToolDependency;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
use codex_protocol::protocol::ThreadGoalStatus as CoreThreadGoalStatus;
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
use codex_protocol::request_permissions::PermissionGrantScope as CorePermissionGrantScope;
@@ -808,10 +808,6 @@ const fn default_enabled() -> bool {
true
}

const fn default_include_platform_defaults() -> bool {
true
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, ExperimentalApi)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
@@ -1095,6 +1091,9 @@ pub enum ExternalAgentConfigMigrationItemType {
#[serde(rename = "MCP_SERVER_CONFIG")]
#[ts(rename = "MCP_SERVER_CONFIG")]
McpServerConfig,
#[serde(rename = "SESSIONS")]
#[ts(rename = "SESSIONS")]
Sessions,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
@@ -1109,11 +1108,23 @@ pub struct PluginsMigration {
pub plugin_names: Vec<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SessionMigration {
pub path: PathBuf,
pub cwd: PathBuf,
pub title: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct MigrationDetails {
#[serde(default)]
pub plugins: Vec<PluginsMigration>,
#[serde(default)]
pub sessions: Vec<SessionMigration>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
@@ -1438,7 +1449,7 @@ v2_enum_from_core!(
pub enum FileSystemSpecialPath {
Root,
Minimal,
CurrentWorkingDirectory,
#[serde(alias = "current_working_directory")]
ProjectRoots {
subpath: Option<PathBuf>,
},
@@ -1455,7 +1466,6 @@ impl From<CoreFileSystemSpecialPath> for FileSystemSpecialPath {
match value {
CoreFileSystemSpecialPath::Root => Self::Root,
CoreFileSystemSpecialPath::Minimal => Self::Minimal,
CoreFileSystemSpecialPath::CurrentWorkingDirectory => Self::CurrentWorkingDirectory,
CoreFileSystemSpecialPath::ProjectRoots { subpath } => Self::ProjectRoots { subpath },
CoreFileSystemSpecialPath::Tmpdir => Self::Tmpdir,
CoreFileSystemSpecialPath::SlashTmp => Self::SlashTmp,
@@ -1469,7 +1479,6 @@ impl From<FileSystemSpecialPath> for CoreFileSystemSpecialPath {
match value {
FileSystemSpecialPath::Root => Self::Root,
FileSystemSpecialPath::Minimal => Self::Minimal,
FileSystemSpecialPath::CurrentWorkingDirectory => Self::CurrentWorkingDirectory,
FileSystemSpecialPath::ProjectRoots { subpath } => Self::ProjectRoots { subpath },
FileSystemSpecialPath::Tmpdir => Self::Tmpdir,
FileSystemSpecialPath::SlashTmp => Self::SlashTmp,
@@ -1719,54 +1728,7 @@ pub enum NetworkAccess {
Enabled,
}

#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum ReadOnlyAccess {
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
Restricted {
#[serde(default = "default_include_platform_defaults")]
include_platform_defaults: bool,
#[serde(default)]
readable_roots: Vec<AbsolutePathBuf>,
},
#[default]
FullAccess,
}

impl ReadOnlyAccess {
pub fn to_core(&self) -> CoreReadOnlyAccess {
match self {
ReadOnlyAccess::Restricted {
include_platform_defaults,
readable_roots,
} => CoreReadOnlyAccess::Restricted {
include_platform_defaults: *include_platform_defaults,
readable_roots: readable_roots.clone(),
},
ReadOnlyAccess::FullAccess => CoreReadOnlyAccess::FullAccess,
}
}
}

impl From<CoreReadOnlyAccess> for ReadOnlyAccess {
fn from(value: CoreReadOnlyAccess) -> Self {
match value {
CoreReadOnlyAccess::Restricted {
include_platform_defaults,
readable_roots,
} => ReadOnlyAccess::Restricted {
include_platform_defaults,
readable_roots,
},
CoreReadOnlyAccess::FullAccess => ReadOnlyAccess::FullAccess,
}
}
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[derive(Serialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
@@ -1775,8 +1737,6 @@ pub enum SandboxPolicy {
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
ReadOnly {
#[serde(default)]
access: ReadOnlyAccess,
#[serde(default)]
network_access: bool,
},
@@ -1792,7 +1752,36 @@ pub enum SandboxPolicy {
#[serde(default)]
writable_roots: Vec<AbsolutePathBuf>,
#[serde(default)]
read_only_access: ReadOnlyAccess,
network_access: bool,
#[serde(default)]
exclude_tmpdir_env_var: bool,
#[serde(default)]
exclude_slash_tmp: bool,
},
}

#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
enum SandboxPolicyDeserialize {
DangerFullAccess,
#[serde(rename_all = "camelCase")]
ReadOnly {
#[serde(default)]
network_access: bool,
#[serde(default)]
access: Option<LegacyReadOnlyAccess>,
},
#[serde(rename_all = "camelCase")]
ExternalSandbox {
#[serde(default)]
network_access: NetworkAccess,
},
#[serde(rename_all = "camelCase")]
WorkspaceWrite {
#[serde(default)]
writable_roots: Vec<AbsolutePathBuf>,
#[serde(default)]
read_only_access: Option<LegacyReadOnlyAccess>,
#[serde(default)]
network_access: bool,
#[serde(default)]
@@ -1802,19 +1791,68 @@ pub enum SandboxPolicy {
},
}

#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
enum LegacyReadOnlyAccess {
FullAccess,
Restricted,
}

impl<'de> Deserialize<'de> for SandboxPolicy {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
match SandboxPolicyDeserialize::deserialize(deserializer)? {
SandboxPolicyDeserialize::DangerFullAccess => Ok(SandboxPolicy::DangerFullAccess),
SandboxPolicyDeserialize::ReadOnly {
network_access,
access,
} => {
if matches!(access, Some(LegacyReadOnlyAccess::Restricted)) {
return Err(serde::de::Error::custom(
"readOnly.access is no longer supported; use permissionProfile for restricted reads",
));
}
Ok(SandboxPolicy::ReadOnly { network_access })
}
SandboxPolicyDeserialize::ExternalSandbox { network_access } => {
Ok(SandboxPolicy::ExternalSandbox { network_access })
}
SandboxPolicyDeserialize::WorkspaceWrite {
writable_roots,
read_only_access,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
} => {
if matches!(read_only_access, Some(LegacyReadOnlyAccess::Restricted)) {
return Err(serde::de::Error::custom(
"workspaceWrite.readOnlyAccess is no longer supported; use permissionProfile for restricted reads",
));
}
Ok(SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
})
}
}
}
}

impl SandboxPolicy {
pub fn to_core(&self) -> codex_protocol::protocol::SandboxPolicy {
match self {
SandboxPolicy::DangerFullAccess => {
codex_protocol::protocol::SandboxPolicy::DangerFullAccess
}
SandboxPolicy::ReadOnly {
access,
network_access,
} => codex_protocol::protocol::SandboxPolicy::ReadOnly {
access: access.to_core(),
network_access: *network_access,
},
SandboxPolicy::ReadOnly { network_access } => {
codex_protocol::protocol::SandboxPolicy::ReadOnly {
network_access: *network_access,
}
}
SandboxPolicy::ExternalSandbox { network_access } => {
codex_protocol::protocol::SandboxPolicy::ExternalSandbox {
network_access: match network_access {
@@ -1825,13 +1863,11 @@ impl SandboxPolicy {
}
SandboxPolicy::WorkspaceWrite {
writable_roots,
read_only_access,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
} => codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots: writable_roots.clone(),
read_only_access: read_only_access.to_core(),
network_access: *network_access,
exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
exclude_slash_tmp: *exclude_slash_tmp,
@@ -1846,13 +1882,9 @@ impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
codex_protocol::protocol::SandboxPolicy::DangerFullAccess => {
SandboxPolicy::DangerFullAccess
}
codex_protocol::protocol::SandboxPolicy::ReadOnly {
access,
network_access,
} => SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::from(access),
network_access,
},
codex_protocol::protocol::SandboxPolicy::ReadOnly { network_access } => {
SandboxPolicy::ReadOnly { network_access }
}
codex_protocol::protocol::SandboxPolicy::ExternalSandbox { network_access } => {
SandboxPolicy::ExternalSandbox {
network_access: match network_access {
@@ -1863,13 +1895,11 @@ impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
}
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots,
read_only_access,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
} => SandboxPolicy::WorkspaceWrite {
writable_roots,
read_only_access: ReadOnlyAccess::from(read_only_access),
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
@@ -1981,6 +2011,8 @@ impl From<CoreSessionSource> for SessionSource {
CoreSessionSource::Exec => SessionSource::Exec,
CoreSessionSource::Mcp => SessionSource::AppServer,
CoreSessionSource::Custom(source) => SessionSource::Custom(source),
// We do not want to render those at the app-server level.
CoreSessionSource::Internal(_) => SessionSource::Unknown,
CoreSessionSource::SubAgent(sub) => SessionSource::SubAgent(sub),
CoreSessionSource::Unknown => SessionSource::Unknown,
}
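The custom Deserialize impl above uses a shadow type (`SandboxPolicyDeserialize`) that still knows the removed legacy fields, then rejects the `restricted` value while silently tolerating `fullAccess`. A self-contained sketch of that migration pattern, with illustrative types rather than the crate's real ones (assumes serde and serde_json as dependencies):

    use serde::Deserialize;

    #[derive(Debug, PartialEq)]
    struct Policy {
        network_access: bool,
    }

    // Legacy value that old clients may still send.
    #[derive(Deserialize)]
    #[serde(tag = "type", rename_all = "camelCase")]
    enum LegacyAccess {
        FullAccess,
        Restricted,
    }

    // Shadow type: same wire shape, plus the removed field.
    #[derive(Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct PolicyShadow {
        #[serde(default)]
        network_access: bool,
        #[serde(default)]
        access: Option<LegacyAccess>,
    }

    impl<'de> Deserialize<'de> for Policy {
        fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
            let shadow = PolicyShadow::deserialize(d)?;
            // Reject the removed restricted form; drop a harmless fullAccess.
            if matches!(shadow.access, Some(LegacyAccess::Restricted)) {
                return Err(serde::de::Error::custom(
                    "access is no longer supported; use permissionProfile",
                ));
            }
            Ok(Policy { network_access: shadow.network_access })
        }
    }

    fn main() {
        let ok: Policy =
            serde_json::from_str(r#"{"networkAccess":true,"access":{"type":"fullAccess"}}"#).unwrap();
        assert_eq!(ok, Policy { network_access: true });
        assert!(serde_json::from_str::<Policy>(r#"{"access":{"type":"restricted"}}"#).is_err());
    }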
@@ -3148,7 +3180,7 @@ pub struct CommandExecTerminalSize {
/// The final `command/exec` response is deferred until the process exits and is
/// sent only after all `command/exec/outputDelta` notifications for that
/// connection have been emitted.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, ExperimentalApi)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CommandExecParams {
@@ -3227,6 +3259,7 @@ pub struct CommandExecParams {
///
/// Defaults to the user's configured permissions when omitted. Cannot be
/// combined with `sandboxPolicy`.
#[experimental("command/exec.permissionProfile")]
#[ts(optional = nullable)]
pub permission_profile: Option<PermissionProfile>,
}
@@ -3349,6 +3382,7 @@ pub struct ThreadStartParams {
pub sandbox: Option<SandboxMode>,
/// Full permissions override for this thread. Cannot be combined with
/// `sandbox`.
#[experimental("thread/start.permissionProfile")]
#[ts(optional = nullable)]
pub permission_profile: Option<PermissionProfile>,
#[ts(optional = nullable)]
@@ -3432,6 +3466,7 @@ pub struct ThreadStartResponse {
/// view.
pub sandbox: SandboxPolicy,
/// Canonical active permissions view for this thread.
#[experimental("thread/start.permissionProfile")]
#[serde(default)]
pub permission_profile: Option<PermissionProfile>,
pub reasoning_effort: Option<ReasoningEffort>,
@@ -3493,6 +3528,7 @@ pub struct ThreadResumeParams {
pub sandbox: Option<SandboxMode>,
/// Full permissions override for the resumed thread. Cannot be combined
/// with `sandbox`.
#[experimental("thread/resume.permissionProfile")]
#[ts(optional = nullable)]
pub permission_profile: Option<PermissionProfile>,
#[ts(optional = nullable)]
@@ -3536,6 +3572,7 @@ pub struct ThreadResumeResponse {
/// view.
pub sandbox: SandboxPolicy,
/// Canonical active permissions view for this thread.
#[experimental("thread/resume.permissionProfile")]
#[serde(default)]
pub permission_profile: Option<PermissionProfile>,
pub reasoning_effort: Option<ReasoningEffort>,
@@ -3588,6 +3625,7 @@ pub struct ThreadForkParams {
pub sandbox: Option<SandboxMode>,
/// Full permissions override for the forked thread. Cannot be combined
/// with `sandbox`.
#[experimental("thread/fork.permissionProfile")]
#[ts(optional = nullable)]
pub permission_profile: Option<PermissionProfile>,
#[ts(optional = nullable)]
@@ -3631,6 +3669,7 @@ pub struct ThreadForkResponse {
/// view.
pub sandbox: SandboxPolicy,
/// Canonical active permissions view for this thread.
#[experimental("thread/fork.permissionProfile")]
#[serde(default)]
pub permission_profile: Option<PermissionProfile>,
pub reasoning_effort: Option<ReasoningEffort>,
@@ -3731,6 +3770,103 @@ pub struct ThreadUnarchiveParams {
#[ts(export_to = "v2/")]
pub struct ThreadSetNameResponse {}

v2_enum_from_core! {
pub enum ThreadGoalStatus from CoreThreadGoalStatus {
Active,
Paused,
BudgetLimited,
Complete,
}
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoal {
pub thread_id: String,
pub objective: String,
pub status: ThreadGoalStatus,
#[ts(type = "number | null")]
pub token_budget: Option<i64>,
#[ts(type = "number")]
pub tokens_used: i64,
#[ts(type = "number")]
pub time_used_seconds: i64,
#[ts(type = "number")]
pub created_at: i64,
#[ts(type = "number")]
pub updated_at: i64,
}

impl From<codex_protocol::protocol::ThreadGoal> for ThreadGoal {
fn from(value: codex_protocol::protocol::ThreadGoal) -> Self {
Self {
thread_id: value.thread_id.to_string(),
objective: value.objective,
status: value.status.into(),
token_budget: value.token_budget,
tokens_used: value.tokens_used,
time_used_seconds: value.time_used_seconds,
created_at: value.created_at,
updated_at: value.updated_at,
}
}
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalSetParams {
pub thread_id: String,
#[ts(optional = nullable)]
pub objective: Option<String>,
#[ts(optional = nullable)]
pub status: Option<ThreadGoalStatus>,
#[serde(
default,
deserialize_with = "super::serde_helpers::deserialize_double_option",
serialize_with = "super::serde_helpers::serialize_double_option",
skip_serializing_if = "Option::is_none"
)]
#[ts(optional = nullable, type = "number | null")]
pub token_budget: Option<Option<i64>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalSetResponse {
pub goal: ThreadGoal,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalGetParams {
pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalGetResponse {
pub goal: Option<ThreadGoal>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalClearParams {
pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalClearResponse {
pub cleared: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
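`ThreadGoalSetParams.token_budget` above is an `Option<Option<i64>>`, a tri-state: a missing key means "leave the budget unchanged", an explicit JSON null means "clear it", and a number means "set it". The crate's `deserialize_double_option` helper presumably behaves like the sketch below (an assumption; only the field shape is taken from the diff):

    use serde::Deserialize;

    // Called by serde only when the key is present, so wrapping the
    // (possibly null) value in an outer Some distinguishes "present" from
    // the #[serde(default)] None used when the key is absent.
    fn double_option<'de, T, D>(d: D) -> Result<Option<Option<T>>, D::Error>
    where
        T: Deserialize<'de>,
        D: serde::Deserializer<'de>,
    {
        Ok(Some(Option::<T>::deserialize(d)?))
    }

    #[derive(Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct Params {
        #[serde(default, deserialize_with = "double_option")]
        token_budget: Option<Option<i64>>,
    }

    fn main() {
        let absent: Params = serde_json::from_str(r#"{}"#).unwrap();
        let cleared: Params = serde_json::from_str(r#"{"tokenBudget":null}"#).unwrap();
        let set: Params = serde_json::from_str(r#"{"tokenBudget":10000}"#).unwrap();
        assert_eq!(absent.token_budget, None);           // leave unchanged
        assert_eq!(cleared.token_budget, Some(None));    // clear the budget
        assert_eq!(set.token_budget, Some(Some(10000))); // set the budget
    }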
@@ -5072,6 +5208,7 @@ pub struct TurnStartParams {
pub sandbox_policy: Option<SandboxPolicy>,
/// Override the full permissions profile for this turn and subsequent
/// turns. Cannot be combined with `sandboxPolicy`.
#[experimental("turn/start.permissionProfile")]
#[ts(optional = nullable)]
pub permission_profile: Option<PermissionProfile>,
/// Override the model for this turn and subsequent turns.
@@ -6254,6 +6391,22 @@ pub struct ThreadNameUpdatedNotification {
pub thread_name: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalUpdatedNotification {
pub thread_id: String,
pub turn_id: Option<String>,
pub goal: ThreadGoal,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadGoalClearedNotification {
pub thread_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -7578,7 +7731,6 @@ mod tests {
use codex_protocol::items::WebSearchItem;
use codex_protocol::models::WebSearchAction as CoreWebSearchAction;
use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::ReadOnlyAccess as CoreReadOnlyAccess;
use codex_protocol::user_input::UserInput as CoreUserInput;
use codex_utils_absolute_path::test_support::PathBufExt;
use codex_utils_absolute_path::test_support::test_path_buf;
@@ -7704,11 +7856,50 @@ mod tests {
marketplace_name: "team-marketplace".to_string(),
plugin_names: vec!["asana".to_string()],
}],
sessions: Vec::new(),
}),
}
);
}

#[test]
fn external_agent_config_import_params_accept_legacy_plugin_details() {
let params: ExternalAgentConfigImportParams = serde_json::from_value(json!({
"migrationItems": [{
"itemType": "PLUGINS",
"description": "Install supported plugins from Claude settings",
"cwd": absolute_path_string("repo"),
"details": {
"plugins": [
{
"marketplaceName": "team-marketplace",
"pluginNames": ["asana"]
}
]
}
}]
}))
.expect("legacy plugin import params should deserialize");

assert_eq!(
params,
ExternalAgentConfigImportParams {
migration_items: vec![ExternalAgentConfigMigrationItem {
item_type: ExternalAgentConfigMigrationItemType::Plugins,
description: "Install supported plugins from Claude settings".to_string(),
cwd: Some(PathBuf::from(absolute_path_string("repo"))),
details: Some(MigrationDetails {
plugins: vec![PluginsMigration {
marketplace_name: "team-marketplace".to_string(),
plugin_names: vec!["asana".to_string()],
}],
sessions: Vec::new(),
}),
}],
}
);
}

#[test]
fn command_execution_request_approval_rejects_relative_additional_permission_paths() {
let err = serde_json::from_value::<CommandExecutionRequestApprovalParams>(json!({
@@ -7982,6 +8173,26 @@ mod tests {
.expect_err("zero glob scan depth should fail deserialization");
}

#[test]
fn legacy_current_working_directory_special_path_deserializes_as_project_roots() {
let special_path = serde_json::from_value::<FileSystemSpecialPath>(json!({
"kind": "current_working_directory",
}))
.expect("legacy cwd special path should deserialize");

assert_eq!(
special_path,
FileSystemSpecialPath::ProjectRoots { subpath: None }
);
assert_eq!(
serde_json::to_value(&special_path).expect("serialize special path"),
json!({
"kind": "project_roots",
"subpath": null,
})
);
}

#[test]
fn permissions_request_approval_response_uses_granted_permission_profile_without_macos() {
let read_only_path = if cfg!(windows) {
@@ -8784,13 +8995,8 @@ mod tests {
}

#[test]
fn sandbox_policy_round_trips_read_only_access() {
let readable_root = test_absolute_path();
fn sandbox_policy_round_trips_read_only_network_access() {
let v2_policy = SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::Restricted {
include_platform_defaults: false,
readable_roots: vec![readable_root.clone()],
},
network_access: true,
};

@@ -8798,10 +9004,6 @@ mod tests {
assert_eq!(
core_policy,
codex_protocol::protocol::SandboxPolicy::ReadOnly {
access: CoreReadOnlyAccess::Restricted {
include_platform_defaults: false,
readable_roots: vec![readable_root],
},
network_access: true,
}
);
@@ -9425,14 +9627,9 @@ mod tests {
}

#[test]
fn sandbox_policy_round_trips_workspace_write_read_only_access() {
let readable_root = test_absolute_path();
fn sandbox_policy_round_trips_workspace_write_access() {
let v2_policy = SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
read_only_access: ReadOnlyAccess::Restricted {
include_platform_defaults: false,
readable_roots: vec![readable_root.clone()],
},
network_access: true,
exclude_tmpdir_env_var: false,
exclude_slash_tmp: false,
@@ -9443,10 +9640,6 @@ mod tests {
core_policy,
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
read_only_access: CoreReadOnlyAccess::Restricted {
include_platform_defaults: false,
readable_roots: vec![readable_root],
},
network_access: true,
exclude_tmpdir_env_var: false,
exclude_slash_tmp: false,
@@ -9458,40 +9651,78 @@
}

#[test]
fn sandbox_policy_deserializes_legacy_read_only_without_access_field() {
let policy: SandboxPolicy = serde_json::from_value(json!({
"type": "readOnly"
fn sandbox_policy_deserializes_legacy_read_only_full_access_field() {
let policy = serde_json::from_value::<SandboxPolicy>(json!({
"type": "readOnly",
"access": {
"type": "fullAccess"
},
"networkAccess": true
}))
.expect("read-only policy should deserialize");
.expect("read-only policy should ignore legacy fullAccess field");
assert_eq!(
policy,
SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::FullAccess,
network_access: false,
network_access: true
}
);
}

#[test]
fn sandbox_policy_deserializes_legacy_workspace_write_without_read_only_access_field() {
let policy: SandboxPolicy = serde_json::from_value(json!({
fn sandbox_policy_deserializes_legacy_workspace_write_full_access_field() {
let writable_root = absolute_path("/workspace");
let policy = serde_json::from_value::<SandboxPolicy>(json!({
"type": "workspaceWrite",
"writableRoots": [writable_root],
"readOnlyAccess": {
"type": "fullAccess"
},
"networkAccess": true,
"excludeTmpdirEnvVar": true,
"excludeSlashTmp": true
}))
.expect("workspace-write policy should ignore legacy fullAccess field");
assert_eq!(
policy,
SandboxPolicy::WorkspaceWrite {
writable_roots: vec![absolute_path("/workspace")],
network_access: true,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: true,
}
);
}

#[test]
fn sandbox_policy_rejects_legacy_read_only_restricted_access_field() {
let err = serde_json::from_value::<SandboxPolicy>(json!({
"type": "readOnly",
"access": {
"type": "restricted",
"includePlatformDefaults": false,
"readableRoots": []
}
}))
.expect_err("read-only policy should reject removed restricted access field");
assert!(err.to_string().contains("readOnly.access"));
}

#[test]
fn sandbox_policy_rejects_legacy_workspace_write_restricted_read_access_field() {
let err = serde_json::from_value::<SandboxPolicy>(json!({
"type": "workspaceWrite",
"writableRoots": [],
"readOnlyAccess": {
"type": "restricted",
"includePlatformDefaults": false,
"readableRoots": []
},
"networkAccess": false,
"excludeTmpdirEnvVar": false,
"excludeSlashTmp": false
}))
.expect("workspace-write policy should deserialize");
assert_eq!(
policy,
SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
read_only_access: ReadOnlyAccess::FullAccess,
network_access: false,
exclude_tmpdir_env_var: false,
exclude_slash_tmp: false,
}
);
.expect_err("workspace-write policy should reject removed restricted readOnlyAccess field");
assert!(err.to_string().contains("workspaceWrite.readOnlyAccess"));
}

#[test]
@@ -10130,6 +10361,27 @@ mod tests {
plugin_id: "gmail@openai-curated".to_string(),
},
);

assert_eq!(
serde_json::to_value(PluginUninstallParams {
plugin_id: "plugins~Plugin_gmail".to_string(),
})
.unwrap(),
json!({
"pluginId": "plugins~Plugin_gmail",
}),
);

assert_eq!(
serde_json::from_value::<PluginUninstallParams>(json!({
"pluginId": "plugins~Plugin_gmail",
"forceRemoteSync": true,
}))
.unwrap(),
PluginUninstallParams {
plugin_id: "plugins~Plugin_gmail".to_string(),
},
);
}

#[test]

@@ -48,7 +48,6 @@ use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginAccountResponse;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::ReadOnlyAccess;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SandboxPolicy;
use codex_app_server_protocol::ServerNotification;
@@ -743,7 +742,6 @@ async fn trigger_zsh_fork_multi_cmd_approval(
};
turn_params.approval_policy = Some(AskForApproval::OnRequest);
turn_params.sandbox_policy = Some(SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::FullAccess,
network_access: false,
});

@@ -885,7 +883,6 @@ async fn trigger_cmd_approval(
experimental_api: true,
approval_policy: Some(AskForApproval::OnRequest),
sandbox_policy: Some(SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::FullAccess,
network_access: false,
}),
dynamic_tools,
@@ -912,7 +909,6 @@ async fn trigger_patch_approval(
experimental_api: true,
approval_policy: Some(AskForApproval::OnRequest),
sandbox_policy: Some(SandboxPolicy::ReadOnly {
access: ReadOnlyAccess::FullAccess,
network_access: false,
}),
dynamic_tools,

@@ -5,7 +5,13 @@ codex_rust_crate(
crate_name = "codex_app_server",
integration_test_timeout = "long",
test_shard_counts = {
"app-server-all-test": 8,
# Note app-server-all-test has a large number of integration tests, so
# even a single shard can be quite slow. When there is a legitimate
# test failure in a shard, it will still get run 3x in total, which
# can cause us to exhaust our CI timeout if the shard happens to run
# long. Using a higher shard count for app-server-all-test should help
# mitigate this risk.
"app-server-all-test": 16,
"app-server-unit-tests": 8,
},
test_tags = ["no-sandbox"],

Some files were not shown because too many files have changed in this diff.