Mirror of https://github.com/openai/codex.git, synced 2026-02-02 15:03:38 +00:00
Compare commits
366 Commits
fix-termin ... shell-tool
*(366 commits in this comparison; the mirror captured only their abbreviated SHA1 hashes, with no author, date, or commit-message metadata.)*
.github/codex/home/config.toml (vendored): 2 changes

@@ -1,3 +1,3 @@
-model = "gpt-5"
+model = "gpt-5.1"
 
 # Consider setting [mcp_servers] here!
.github/pull_request_template.md (vendored): 2 changes

@@ -4,3 +4,5 @@ Before opening this Pull Request, please read the dedicated "Contributing" markd
 https://github.com/openai/codex/blob/main/docs/contributing.md
 
 If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.
+
+Include a link to a bug report or enhancement request.
.github/workflows/ci.yml (vendored): 2 changes

@@ -46,7 +46,7 @@ jobs:
           echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
 
       - name: Upload staged npm package artifact
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: codex-npm-staging
           path: ${{ steps.stage_npm_package.outputs.pack_output }}
.github/workflows/cla.yml (vendored): 30 changes

@@ -13,17 +13,39 @@ permissions:
 
 jobs:
   cla:
+    # Only run the CLA assistant for the canonical openai repo so forks are not blocked
+    # and contributors who signed previously do not receive duplicate CLA notifications.
+    if: ${{ github.repository_owner == 'openai' }}
     runs-on: ubuntu-latest
     steps:
       - uses: contributor-assistant/github-action@v2.6.1
+        # Run on close only if the PR was merged. This will lock the PR to preserve
+        # the CLA agreement. We don't want to lock PRs that have been closed without
+        # merging because the contributor may want to respond with additional comments.
+        # This action has a "lock-pullrequest-aftermerge" option that can be set to false,
+        # but that would unconditionally skip locking even in cases where the PR was merged.
         if: |
-          github.event_name == 'pull_request_target' ||
-          github.event.comment.body == 'recheck' ||
-          github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+          (
+            github.event_name == 'pull_request_target' &&
+            (
+              github.event.action == 'opened' ||
+              github.event.action == 'synchronize' ||
+              (github.event.action == 'closed' && github.event.pull_request.merged == true)
+            )
+          ) ||
+          (
+            github.event_name == 'issue_comment' &&
+            (
+              github.event.comment.body == 'recheck' ||
+              github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+            )
+          )
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
           path-to-signatures: signatures/cla.json
           branch: cla-signatures
-          allowlist: dependabot[bot]
+          allowlist: |
+            codex
+            dependabot[bot]
.github/workflows/close-stale-contributor-prs.yml (vendored, new file): 105 additions

@@ -0,0 +1,105 @@
name: Close stale contributor PRs

on:
  workflow_dispatch:
  schedule:
    - cron: "0 6 * * *"

permissions:
  contents: read
  issues: write
  pull-requests: write

jobs:
  close-stale-contributor-prs:
    runs-on: ubuntu-latest
    steps:
      - name: Close inactive PRs from contributors
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const DAYS_INACTIVE = 14;
            const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000);
            const { owner, repo } = context.repo;
            const dryRun = false;
            const stalePrs = [];

            core.info(`Dry run mode: ${dryRun}`);

            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: "open",
              per_page: 100,
              sort: "updated",
              direction: "asc",
            });

            for (const pr of prs) {
              const lastUpdated = new Date(pr.updated_at);
              if (lastUpdated > cutoff) {
                core.info(`PR ${pr.number} is fresh`);
                continue;
              }

              if (!pr.user || pr.user.type !== "User") {
                core.info(`PR ${pr.number} wasn't created by a user`);
                continue;
              }

              let permission;
              try {
                const permissionResponse = await github.rest.repos.getCollaboratorPermissionLevel({
                  owner,
                  repo,
                  username: pr.user.login,
                });
                permission = permissionResponse.data.permission;
              } catch (error) {
                if (error.status === 404) {
                  core.info(`Author ${pr.user.login} is not a collaborator; skipping #${pr.number}`);
                  continue;
                }
                throw error;
              }

              const hasContributorAccess = ["admin", "maintain", "write"].includes(permission);
              if (!hasContributorAccess) {
                core.info(`Author ${pr.user.login} has ${permission} access; skipping #${pr.number}`);
                continue;
              }

              stalePrs.push(pr);
            }

            if (!stalePrs.length) {
              core.info("No stale contributor pull requests found.");
              return;
            }

            for (const pr of stalePrs) {
              const issue_number = pr.number;
              const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. If you plan to continue working on it, feel free to reopen or open a new PR.`;

              if (dryRun) {
                core.info(`[dry-run] Would close contributor PR #${issue_number} from ${pr.user.login}`);
                continue;
              }

              await github.rest.issues.createComment({
                owner,
                repo,
                issue_number,
                body: closeComment,
              });

              await github.rest.pulls.update({
                owner,
                repo,
                pull_number: issue_number,
                state: "closed",
              });

              core.info(`Closed contributor PR #${issue_number} from ${pr.user.login}`);
            }
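Because the workflow above declares a `workflow_dispatch` trigger alongside the nightly cron, it can also be exercised on demand. A minimal sketch using the GitHub CLI follows; it assumes `gh` is installed and authenticated against this repository, and note that the hard-coded `dryRun = false` in the script means a manual run really does comment on and close qualifying PRs.

```shell
# Trigger the scheduled cleanup job manually via its workflow_dispatch hook.
gh workflow run close-stale-contributor-prs.yml

# Optionally follow a recent run of the workflow (interactive run picker).
gh run watch
```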
.github/workflows/codespell.yml (vendored): 2 changes

@@ -22,6 +22,6 @@ jobs:
       - name: Annotate locations with typos
         uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
       - name: Codespell
-        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
+        uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
         with:
           ignore_words_file: .codespellignore
.github/workflows/issue-deduplicator.yml (vendored): 6 changes

@@ -16,7 +16,7 @@ jobs:
     outputs:
       codex_output: ${{ steps.codex.outputs.final-message }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
 
       - name: Prepare Codex inputs
         env:

@@ -46,7 +46,7 @@ jobs:
         with:
           openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
           allow-users: "*"
-          model: gpt-5
+          model: gpt-5.1
           prompt: |
             You are an assistant that triages new GitHub issues by identifying potential duplicates.
 

@@ -87,7 +87,7 @@ jobs:
       issues: write
     steps:
       - name: Comment on issue
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         env:
           CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
         with:
.github/workflows/issue-labeler.yml (vendored): 39 changes

@@ -16,7 +16,7 @@ jobs:
     outputs:
       codex_output: ${{ steps.codex.outputs.final-message }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
 
       - id: codex
         uses: openai/codex-action@main
@@ -26,21 +26,36 @@
prompt: |
  You are an assistant that reviews GitHub issues for the repository.

  Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
  Your job is to choose the most appropriate labels for the issue described later in this prompt.
  Follow these rules:
  - Only pick labels out of the list below.
  - Prefer a small set of precise labels over many broad ones.

  Labels to apply:
  - Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure.
  1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
  2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
  3. extension — VS Code (or other IDE) extension-specific issues.
  4. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
  5. mcp — Topics involving Model Context Protocol servers/clients.
  6. codex-web — Issues targeting the Codex web UI/Cloud experience.
  8. azure — Problems or requests tied to Azure OpenAI deployments.
  9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
  10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
  3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).

  - If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to.
  1. CLI — the Codex command line interface.
  2. extension — VS Code (or other IDE) extension-specific issues.
  3. codex-web — Issues targeting the Codex web UI/Cloud experience.
  4. github-action — Issues with the Codex GitHub action.
  5. iOS — Issues with the Codex iOS app.

  - Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones.
  1. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
  2. mcp — Topics involving Model Context Protocol servers/clients.
  3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server.
  4. azure — Problems or requests tied to Azure OpenAI deployments.
  5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
  6. code-review — Issues related to the code review feature or functionality.
  7. auth - Problems related to authentication, login, or access tokens.
  8. codex-exec - Problems related to the "codex exec" command or functionality.
  9. context-management - Problems related to compaction, context windows, or available context reporting.
  10. custom-model - Problems that involve using custom model providers, local models, or OSS models.
  11. rate-limits - Problems related to token limits, rate limits, or token usage reporting.
  12. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions.
  13. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs.
  14. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues.

  Issue number: ${{ github.event.issue.number }}
.github/workflows/rust-ci.yml (vendored): 49 changes
@@ -76,7 +76,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-shear
|
||||
version: 1.5.1
|
||||
@@ -95,8 +95,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
# Speed up repeated builds across CI runs by caching compiled objects.
|
||||
RUSTC_WRAPPER: sccache
|
||||
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
|
||||
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
@@ -170,12 +170,14 @@ jobs:
|
||||
|
||||
# Install and restore sccache cache
|
||||
- name: Install sccache
|
||||
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -188,8 +190,13 @@ jobs:
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Enable sccache wrapper
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
@@ -228,7 +235,7 @@ jobs:
|
||||
|
||||
- name: Install cargo-chef
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-chef
|
||||
version: 0.1.71
|
||||
@@ -274,7 +281,7 @@ jobs:
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
@@ -282,12 +289,12 @@ jobs:
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always()
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always()
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
@@ -326,7 +333,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
RUSTC_WRAPPER: sccache
|
||||
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
|
||||
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
@@ -370,12 +378,14 @@ jobs:
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- name: Install sccache
|
||||
uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -388,8 +398,13 @@ jobs:
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Enable sccache wrapper
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
@@ -399,17 +414,17 @@ jobs:
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: nextest
|
||||
version: 0.9.103
|
||||
|
||||
- name: tests
|
||||
id: test
|
||||
continue-on-error: true
|
||||
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
NEXTEST_STATUS_LEVEL: leak
|
||||
|
||||
- name: Save cargo home cache
|
||||
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
|
||||
@@ -424,7 +439,7 @@ jobs:
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
@@ -432,12 +447,12 @@ jobs:
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always()
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always()
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
|
||||
.github/workflows/rust-release.yml (vendored): 17 changes

@@ -295,6 +295,15 @@ jobs:
           # ${{ matrix.target }}
           dest="dist/${{ matrix.target }}"
 
+          # We want to ship the raw Windows executables in the GitHub Release
+          # in addition to the compressed archives. Keep the originals for
+          # Windows targets; remove them elsewhere to limit the number of
+          # artifacts that end up in the GitHub Release.
+          keep_originals=false
+          if [[ "${{ matrix.runner }}" == windows* ]]; then
+            keep_originals=true
+          fi
+
           # For compatibility with environments that lack the `zstd` tool we
           # additionally create a `.tar.gz` for all platforms and `.zip` for
           # Windows alongside every single binary that we publish. The end result is:

@@ -324,7 +333,11 @@
 
           # Also create .zst (existing behaviour) *and* remove the original
           # uncompressed binary to keep the directory small.
-          zstd -T0 -19 --rm "$dest/$base"
+          zstd_args=(-T0 -19)
+          if [[ "${keep_originals}" == false ]]; then
+            zstd_args+=(--rm)
+          fi
+          zstd "${zstd_args[@]}" "$dest/$base"
           done
 
       - name: Remove signing keychain

@@ -350,7 +363,7 @@
             fi
           fi
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
         with:
           name: ${{ matrix.target }}
           # Upload the per-binary .zst files as well as the new .tar.gz
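For anyone consuming the release artifacts this step produces, the per-binary `.zst` files (and, after this change, the raw Windows executables kept by `keep_originals`) sit alongside `.tar.gz`/`.zip` fallbacks. A hedged sketch of unpacking a `.zst` artifact on a machine that does have `zstd`; the artifact name here is illustrative, not taken from an actual release:

```shell
# Illustrative artifact name: decompress a single-binary .zst from a release
# and make the resulting binary executable.
zstd -d codex-x86_64-unknown-linux-musl.zst
chmod +x codex-x86_64-unknown-linux-musl
```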
.gitignore (vendored): 3 changes

@@ -64,6 +64,9 @@ apply_patch/
 # coverage
 coverage/
 
+# personal files
+personal/
+
 # os
 .DS_Store
 Thumbs.db
@@ -84,6 +84,8 @@ If you don’t have the tool:
 - Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
 - `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
 - Build SSE payloads with the provided `ev_*` constructors and the `sse(...)`.
+- Prefer `wait_for_event` over `wait_for_event_with_timeout`.
+- Prefer `mount_sse_once` over `mount_sse_once_match` or `mount_sse_sequence`
 
 - Typical pattern:
 
@@ -1 +1 @@
-The changelog can be found on the [releases page](https://github.com/openai/codex/releases)
+The changelog can be found on the [releases page](https://github.com/openai/codex/releases).
README.md: 39 changes

@@ -33,7 +33,7 @@ Then simply run `codex` to get started:
 codex
 ```
 
-If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-update-codex-isnt-upgrading-me).
+If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).
 
 <details>
 <summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>

@@ -69,17 +69,50 @@ Codex can access MCP servers. To configure them, refer to the [config docs](./do
 Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).
 
+---
+### Execpolicy Quickstart
+
+Codex can enforce your own rules-based execution policy before it runs shell commands.
+
+1. Create a policy directory: `mkdir -p ~/.codex/policy`.
+2. Create one or more `.codexpolicy` files in that folder. Codex automatically loads every `.codexpolicy` file in there on startup.
+3. Write `prefix_rule` entries to describe the commands you want to allow, prompt, or block:
+
+```starlark
+prefix_rule(
+    pattern = ["git", ["push", "fetch"]],
+    decision = "prompt", # allow | prompt | forbidden
+    match = [["git", "push", "origin", "main"]], # examples that must match
+    not_match = [["git", "status"]], # examples that must not match
+)
+```
+
+- `pattern` is a list of shell tokens, evaluated from left to right; wrap tokens in a nested list to express alternatives (e.g., match both `push` and `fetch`).
+- `decision` sets the severity; Codex picks the strictest decision when multiple rules match (forbidden > prompt > allow).
+- `match` and `not_match` act as (optional) unit tests. Codex validates them when it loads your policy, so you get feedback if an example has unexpected behavior.
+
+In this example rule, if Codex wants to run commands with the prefix `git push` or `git fetch`, it will first ask for user approval.
+
+Use the `codex execpolicy check` subcommand to preview decisions before you save a rule (see the [`codex-execpolicy` README](./codex-rs/execpolicy/README.md) for syntax details):
+
+```shell
+codex execpolicy check --policy ~/.codex/policy/default.codexpolicy git push origin main
+```
+
+Pass multiple `--policy` flags to test how several files combine, and use `--pretty` for formatted JSON output. See the [`codex-rs/execpolicy` README](./codex-rs/execpolicy/README.md) for a more detailed walkthrough of the available syntax.
+
+## Note: `execpolicy` commands are still in preview. The API may have breaking changes in the future.
+
 ### Docs & FAQ
 
 - [**Getting started**](./docs/getting-started.md)
   - [CLI usage](./docs/getting-started.md#cli-usage)
   - [Slash Commands](./docs/slash_commands.md)
   - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
   - [Example prompts](./docs/getting-started.md#example-prompts)
   - [Custom prompts](./docs/prompts.md)
   - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
-- [Configuration](./docs/config.md)
+- [**Configuration**](./docs/config.md)
   - [Example config](./docs/example-config.md)
 - [**Sandbox & approvals**](./docs/sandbox.md)
 - [**Authentication**](./docs/authentication.md)
   - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
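As a companion to the quickstart added above, here is a hedged sketch of the multi-policy check that its last paragraph mentions. The `codex execpolicy check` subcommand and the `--policy` and `--pretty` flags come straight from the text above; the second policy file name is hypothetical.

```shell
# Evaluate a command against two policy files at once and pretty-print the decision.
codex execpolicy check \
  --policy ~/.codex/policy/default.codexpolicy \
  --policy ~/.codex/policy/team.codexpolicy \
  --pretty \
  git push origin main
```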
codex-rs/.cargo/config.toml (new file): 5 additions

@@ -0,0 +1,5 @@
[target.'cfg(all(windows, target_env = "msvc"))']
rustflags = ["-C", "link-arg=/STACK:8388608"]

[target.'cfg(all(windows, target_env = "gnu"))']
rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
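One rough way to sanity-check that the new `.cargo/config.toml` linker flag is actually being applied is to look at the rustc invocations cargo prints in verbose mode. This is only a sketch, assuming a Windows MSVC toolchain and a Unix-style shell such as Git Bash for the `grep`:

```shell
# From codex-rs/: verbose cargo output echoes the rustc command lines,
# which should include the configured /STACK link argument.
cargo build -v 2>&1 | grep "STACK:8388608"
```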
codex-rs/.config/nextest.toml (new file): 13 additions

@@ -0,0 +1,13 @@
[profile.default]
# Do not increase, fix your test instead
slow-timeout = { period = "15s", terminate-after = 2 }


[[profile.default.overrides]]
# Do not add new tests here
filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
slow-timeout = { period = "1m", terminate-after = 4 }

[[profile.default.overrides]]
filter = 'test(approval_matrix_covers_all_modes)'
slow-timeout = { period = "30s", terminate-after = 2 }
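Nextest reads `.config/nextest.toml` from the workspace root automatically, so no extra flag is needed to pick up these timeouts. A minimal sketch of running the suite the same way the Rust CI workflow above does, minus the CI-only target and cargo-profile flags:

```shell
# From the repository root: run the workspace tests under nextest;
# the slow-timeout overrides above apply automatically.
cd codex-rs
cargo nextest run --all-features --no-fail-fast
```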
codex-rs/Cargo.lock (generated): 641 changes

File diff suppressed because it is too large.
@@ -5,6 +5,7 @@ members = [
|
||||
"async-utils",
|
||||
"app-server",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
"apply-patch",
|
||||
"arg0",
|
||||
"feedback",
|
||||
@@ -15,31 +16,31 @@ members = [
|
||||
"common",
|
||||
"core",
|
||||
"exec",
|
||||
"exec-server",
|
||||
"execpolicy",
|
||||
"execpolicy-legacy",
|
||||
"keyring-store",
|
||||
"file-search",
|
||||
"git-tooling",
|
||||
"linux-sandbox",
|
||||
"lmstudio",
|
||||
"login",
|
||||
"mcp-server",
|
||||
"mcp-types",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
"protocol",
|
||||
"protocol-ts",
|
||||
"rmcp-client",
|
||||
"responses-api-proxy",
|
||||
"stdio-to-uds",
|
||||
"otel",
|
||||
"tui",
|
||||
"git-apply",
|
||||
"utils/git",
|
||||
"utils/cache",
|
||||
"utils/image",
|
||||
"utils/json-to-toml",
|
||||
"utils/pty",
|
||||
"utils/readiness",
|
||||
"utils/string",
|
||||
"utils/tokenizer",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -65,18 +66,19 @@ codex-chatgpt = { path = "chatgpt" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
codex-exec = { path = "exec" }
|
||||
codex-execpolicy = { path = "execpolicy" }
|
||||
codex-feedback = { path = "feedback" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-git-tooling = { path = "git-tooling" }
|
||||
codex-git = { path = "utils/git" }
|
||||
codex-keyring-store = { path = "keyring-store" }
|
||||
codex-linux-sandbox = { path = "linux-sandbox" }
|
||||
codex-lmstudio = { path = "lmstudio" }
|
||||
codex-login = { path = "login" }
|
||||
codex-mcp-server = { path = "mcp-server" }
|
||||
codex-ollama = { path = "ollama" }
|
||||
codex-otel = { path = "otel" }
|
||||
codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-protocol-ts = { path = "protocol-ts" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
@@ -87,7 +89,7 @@ codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-pty = { path = "utils/pty" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-string = { path = "utils/string" }
|
||||
codex-utils-tokenizer = { path = "utils/tokenizer" }
|
||||
codex-windows-sandbox = { path = "windows-sandbox-rs" }
|
||||
core_test_support = { path = "core/tests/common" }
|
||||
mcp-types = { path = "mcp-types" }
|
||||
mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
@@ -96,8 +98,8 @@ mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
allocative = "0.3.3"
|
||||
ansi-to-tui = "7.0.0"
|
||||
anyhow = "1"
|
||||
arboard = "3"
|
||||
askama = "0.12"
|
||||
arboard = { version = "3", features = ["wayland-data-control"] }
|
||||
askama = "0.14"
|
||||
assert_cmd = "2"
|
||||
assert_matches = "1.5.0"
|
||||
async-channel = "2.3.1"
|
||||
@@ -107,6 +109,7 @@ axum = { version = "0.8", default-features = false }
|
||||
base64 = "0.22.1"
|
||||
bytes = "1.10.1"
|
||||
chrono = "0.4.42"
|
||||
chardetng = "0.1.17"
|
||||
clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
@@ -119,18 +122,20 @@ dotenvy = "0.15.7"
|
||||
dunce = "1.0.4"
|
||||
env-flags = "0.1.1"
|
||||
env_logger = "0.11.5"
|
||||
encoding_rs = "0.8.35"
|
||||
escargot = "0.5"
|
||||
eventsource-stream = "0.2.3"
|
||||
futures = { version = "0.3", default-features = false }
|
||||
http = "1.3.1"
|
||||
icu_decimal = "2.0.0"
|
||||
icu_locale_core = "2.0.0"
|
||||
icu_decimal = "2.1"
|
||||
icu_locale_core = "2.1"
|
||||
icu_provider = { version = "2.1", features = ["sync"] }
|
||||
ignore = "0.4.23"
|
||||
image = { version = "^0.25.8", default-features = false }
|
||||
indexmap = "2.6.0"
|
||||
indexmap = "2.12.0"
|
||||
insta = "1.43.2"
|
||||
itertools = "0.14.0"
|
||||
keyring = "3.6"
|
||||
keyring = { version = "3.6", default-features = false }
|
||||
landlock = "0.4.1"
|
||||
lazy_static = "1"
|
||||
libc = "0.2.175"
|
||||
@@ -141,6 +146,7 @@ mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
notify = "8.2.0"
|
||||
nucleo-matcher = "0.3.1"
|
||||
once_cell = "1"
|
||||
openssl-sys = "*"
|
||||
opentelemetry = "0.30.0"
|
||||
opentelemetry-appender-tracing = "0.30.0"
|
||||
@@ -149,7 +155,6 @@ opentelemetry-semantic-conventions = "0.30.0"
|
||||
opentelemetry_sdk = "0.30.0"
|
||||
os_info = "3.12.0"
|
||||
owo-colors = "4.2.0"
|
||||
paste = "1.0.15"
|
||||
path-absolutize = "3.1.1"
|
||||
pathdiff = "0.2"
|
||||
portable-pty = "0.9.0"
|
||||
@@ -161,10 +166,9 @@ ratatui = "0.29.0"
|
||||
ratatui-macros = "0.6.0"
|
||||
regex-lite = "0.1.7"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.8.3", default-features = false }
|
||||
rmcp = { version = "0.8.5", default-features = false }
|
||||
schemars = "0.8.22"
|
||||
seccompiler = "0.5.0"
|
||||
sentry = "0.34.0"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
serde_with = "3.14"
|
||||
@@ -173,6 +177,7 @@ sha1 = "0.10.6"
|
||||
sha2 = "0.10"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.7.0"
|
||||
socket2 = "0.6.0"
|
||||
starlark = "0.13.0"
|
||||
strum = "0.27.2"
|
||||
strum_macros = "0.27.2"
|
||||
@@ -181,7 +186,7 @@ sys-locale = "0.3.2"
|
||||
tempfile = "3.23.0"
|
||||
test-log = "0.2.18"
|
||||
textwrap = "0.16.2"
|
||||
thiserror = "2.0.16"
|
||||
thiserror = "2.0.17"
|
||||
time = "0.3"
|
||||
tiny_http = "0.12"
|
||||
tokio = "1"
|
||||
@@ -210,8 +215,9 @@ walkdir = "2.5.0"
|
||||
webbrowser = "1.0"
|
||||
which = "6"
|
||||
wildmatch = "2.5.0"
|
||||
|
||||
wiremock = "0.6"
|
||||
zeroize = "1.8.1"
|
||||
zeroize = "1.8.2"
|
||||
|
||||
[workspace.lints]
|
||||
rust = {}
|
||||
@@ -254,7 +260,11 @@ unwrap_used = "deny"
|
||||
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
|
||||
# silence the false positive here instead of deleting a real dependency.
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["openssl-sys", "codex-utils-readiness", "codex-utils-tokenizer"]
|
||||
ignored = [
|
||||
"icu_provider",
|
||||
"openssl-sys",
|
||||
"codex-utils-readiness",
|
||||
]
|
||||
|
||||
[profile.release]
|
||||
lto = "fat"
|
||||
@@ -273,6 +283,7 @@ opt-level = 0
|
||||
[patch.crates-io]
|
||||
# Uncomment to debug local changes.
|
||||
# ratatui = { path = "../../ratatui" }
|
||||
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
|
||||
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
|
||||
|
||||
# Uncomment to debug local changes.
|
||||
|
||||
@@ -58,13 +58,16 @@ To test to see what happens when a command is run under the sandbox provided by
 
 ```
 # macOS
-codex sandbox macos [--full-auto] [COMMAND]...
+codex sandbox macos [--full-auto] [--log-denials] [COMMAND]...
 
 # Linux
 codex sandbox linux [--full-auto] [COMMAND]...
 
+# Windows
+codex sandbox windows [--full-auto] [COMMAND]...
+
 # Legacy aliases
-codex debug seatbelt [--full-auto] [COMMAND]...
+codex debug seatbelt [--full-auto] [--log-denials] [COMMAND]...
 codex debug landlock [--full-auto] [COMMAND]...
 ```
 
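For illustration, a hedged example of the updated synopsis in use; the wrapped commands are arbitrary placeholders, and `--log-denials` is the flag this change documents for the macOS (Seatbelt) variant:

```shell
# macOS: run a command under the sandbox and log any seatbelt denials.
codex sandbox macos --full-auto --log-denials ls -la

# Windows: the newly documented equivalent subcommand.
codex sandbox windows --full-auto whoami
```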
@@ -14,11 +14,12 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
paste = { workspace = true }
mcp-types = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
thiserror = { workspace = true }
ts-rs = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }
||||
@@ -2,20 +2,25 @@ use crate::ClientNotification;
|
||||
use crate::ClientRequest;
|
||||
use crate::ServerNotification;
|
||||
use crate::ServerRequest;
|
||||
use crate::export_client_notification_schemas;
|
||||
use crate::export_client_param_schemas;
|
||||
use crate::export_client_response_schemas;
|
||||
use crate::export_client_responses;
|
||||
use crate::export_server_notification_schemas;
|
||||
use crate::export_server_param_schemas;
|
||||
use crate::export_server_response_schemas;
|
||||
use crate::export_server_responses;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use schemars::JsonSchema;
|
||||
use schemars::schema::RootSchema;
|
||||
use schemars::schema_for;
|
||||
use serde::Serialize;
|
||||
use serde_json::Map;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
@@ -27,91 +32,64 @@ use ts_rs::TS;
|
||||
|
||||
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
|
||||
|
||||
macro_rules! for_each_schema_type {
|
||||
($macro:ident) => {
|
||||
$macro!(crate::RequestId);
|
||||
$macro!(crate::JSONRPCMessage);
|
||||
$macro!(crate::JSONRPCRequest);
|
||||
$macro!(crate::JSONRPCNotification);
|
||||
$macro!(crate::JSONRPCResponse);
|
||||
$macro!(crate::JSONRPCError);
|
||||
$macro!(crate::JSONRPCErrorError);
|
||||
$macro!(crate::AddConversationListenerParams);
|
||||
$macro!(crate::AddConversationSubscriptionResponse);
|
||||
$macro!(crate::ApplyPatchApprovalParams);
|
||||
$macro!(crate::ApplyPatchApprovalResponse);
|
||||
$macro!(crate::ArchiveConversationParams);
|
||||
$macro!(crate::ArchiveConversationResponse);
|
||||
$macro!(crate::AuthMode);
|
||||
$macro!(crate::AuthStatusChangeNotification);
|
||||
$macro!(crate::CancelLoginChatGptParams);
|
||||
$macro!(crate::CancelLoginChatGptResponse);
|
||||
$macro!(crate::ClientInfo);
|
||||
$macro!(crate::ClientNotification);
|
||||
$macro!(crate::ClientRequest);
|
||||
$macro!(crate::ConversationSummary);
|
||||
$macro!(crate::ExecCommandApprovalParams);
|
||||
$macro!(crate::ExecCommandApprovalResponse);
|
||||
$macro!(crate::ExecOneOffCommandParams);
|
||||
$macro!(crate::ExecOneOffCommandResponse);
|
||||
$macro!(crate::FuzzyFileSearchParams);
|
||||
$macro!(crate::FuzzyFileSearchResponse);
|
||||
$macro!(crate::FuzzyFileSearchResult);
|
||||
$macro!(crate::GetAuthStatusParams);
|
||||
$macro!(crate::GetAuthStatusResponse);
|
||||
$macro!(crate::GetUserAgentResponse);
|
||||
$macro!(crate::GetUserSavedConfigResponse);
|
||||
$macro!(crate::GitDiffToRemoteParams);
|
||||
$macro!(crate::GitDiffToRemoteResponse);
|
||||
$macro!(crate::GitSha);
|
||||
$macro!(crate::InitializeParams);
|
||||
$macro!(crate::InitializeResponse);
|
||||
$macro!(crate::InputItem);
|
||||
$macro!(crate::InterruptConversationParams);
|
||||
$macro!(crate::InterruptConversationResponse);
|
||||
$macro!(crate::ListConversationsParams);
|
||||
$macro!(crate::ListConversationsResponse);
|
||||
$macro!(crate::LoginApiKeyParams);
|
||||
$macro!(crate::LoginApiKeyResponse);
|
||||
$macro!(crate::LoginChatGptCompleteNotification);
|
||||
$macro!(crate::LoginChatGptResponse);
|
||||
$macro!(crate::LogoutChatGptParams);
|
||||
$macro!(crate::LogoutChatGptResponse);
|
||||
$macro!(crate::NewConversationParams);
|
||||
$macro!(crate::NewConversationResponse);
|
||||
$macro!(crate::Profile);
|
||||
$macro!(crate::RemoveConversationListenerParams);
|
||||
$macro!(crate::RemoveConversationSubscriptionResponse);
|
||||
$macro!(crate::ResumeConversationParams);
|
||||
$macro!(crate::ResumeConversationResponse);
|
||||
$macro!(crate::SandboxSettings);
|
||||
$macro!(crate::SendUserMessageParams);
|
||||
$macro!(crate::SendUserMessageResponse);
|
||||
$macro!(crate::SendUserTurnParams);
|
||||
$macro!(crate::SendUserTurnResponse);
|
||||
$macro!(crate::ServerNotification);
|
||||
$macro!(crate::ServerRequest);
|
||||
$macro!(crate::SessionConfiguredNotification);
|
||||
$macro!(crate::SetDefaultModelParams);
|
||||
$macro!(crate::SetDefaultModelResponse);
|
||||
$macro!(crate::Tools);
|
||||
$macro!(crate::UserInfoResponse);
|
||||
$macro!(crate::UserSavedConfig);
|
||||
$macro!(codex_protocol::protocol::EventMsg);
|
||||
$macro!(codex_protocol::protocol::FileChange);
|
||||
$macro!(codex_protocol::parse_command::ParsedCommand);
|
||||
$macro!(codex_protocol::protocol::SandboxPolicy);
|
||||
};
|
||||
#[derive(Clone)]
|
||||
pub struct GeneratedSchema {
|
||||
namespace: Option<String>,
|
||||
logical_name: String,
|
||||
value: Value,
|
||||
in_v1_dir: bool,
|
||||
}
|
||||
|
||||
impl GeneratedSchema {
|
||||
fn namespace(&self) -> Option<&str> {
|
||||
self.namespace.as_deref()
|
||||
}
|
||||
|
||||
fn logical_name(&self) -> &str {
|
||||
&self.logical_name
|
||||
}
|
||||
|
||||
fn value(&self) -> &Value {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
|
||||
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts(out_dir, prettier)?;
|
||||
generate_json(out_dir)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct GenerateTsOptions {
|
||||
pub generate_indices: bool,
|
||||
pub ensure_headers: bool,
|
||||
pub run_prettier: bool,
|
||||
}
|
||||
|
||||
impl Default for GenerateTsOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
generate_indices: true,
|
||||
ensure_headers: true,
|
||||
run_prettier: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
|
||||
}
|
||||
|
||||
pub fn generate_ts_with_options(
|
||||
out_dir: &Path,
|
||||
prettier: Option<&Path>,
|
||||
options: GenerateTsOptions,
|
||||
) -> Result<()> {
|
||||
let v2_out_dir = out_dir.join("v2");
|
||||
ensure_dir(out_dir)?;
|
||||
ensure_dir(&v2_out_dir)?;
|
||||
|
||||
ClientRequest::export_all_to(out_dir)?;
|
||||
export_client_responses(out_dir)?;
|
||||
@@ -121,18 +99,34 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
export_server_responses(out_dir)?;
|
||||
ServerNotification::export_all_to(out_dir)?;
|
||||
|
||||
generate_index_ts(out_dir)?;
|
||||
|
||||
let ts_files = ts_files_in(out_dir)?;
|
||||
for file in &ts_files {
|
||||
prepend_header_if_missing(file)?;
|
||||
if options.generate_indices {
|
||||
generate_index_ts(out_dir)?;
|
||||
generate_index_ts(&v2_out_dir)?;
|
||||
}
|
||||
|
||||
if let Some(prettier_bin) = prettier
|
||||
// Ensure our header is present on all TS files (root + subdirs like v2/).
|
||||
let mut ts_files = Vec::new();
|
||||
let should_collect_ts_files =
|
||||
options.ensure_headers || (options.run_prettier && prettier.is_some());
|
||||
if should_collect_ts_files {
|
||||
ts_files = ts_files_in_recursive(out_dir)?;
|
||||
}
|
||||
|
||||
if options.ensure_headers {
|
||||
for file in &ts_files {
|
||||
prepend_header_if_missing(file)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Optionally run Prettier on all generated TS files.
|
||||
if options.run_prettier
|
||||
&& let Some(prettier_bin) = prettier
|
||||
&& !ts_files.is_empty()
|
||||
{
|
||||
let status = Command::new(prettier_bin)
|
||||
.arg("--write")
|
||||
.arg("--log-level")
|
||||
.arg("warn")
|
||||
.args(ts_files.iter().map(|p| p.as_os_str()))
|
||||
.status()
|
||||
.with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
|
||||
@@ -146,59 +140,115 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
|
||||
pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
ensure_dir(out_dir)?;
|
||||
let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();
|
||||
let envelope_emitters: &[JsonSchemaEmitter] = &[
|
||||
|d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
|
||||
|d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
|
||||
|d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
|
||||
|d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
|
||||
];
|
||||
|
||||
macro_rules! add_schema {
|
||||
($ty:path) => {{
|
||||
let name = type_basename(stringify!($ty));
|
||||
let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
|
||||
bundle.insert(name, schema);
|
||||
}};
|
||||
let mut schemas: Vec<GeneratedSchema> = Vec::new();
|
||||
for emit in envelope_emitters {
|
||||
schemas.push(emit(out_dir)?);
|
||||
}
|
||||
|
||||
for_each_schema_type!(add_schema);
|
||||
schemas.extend(export_client_param_schemas(out_dir)?);
|
||||
schemas.extend(export_client_response_schemas(out_dir)?);
|
||||
schemas.extend(export_server_param_schemas(out_dir)?);
|
||||
schemas.extend(export_server_response_schemas(out_dir)?);
|
||||
schemas.extend(export_client_notification_schemas(out_dir)?);
|
||||
schemas.extend(export_server_notification_schemas(out_dir)?);
|
||||
|
||||
export_client_response_schemas(out_dir)?;
|
||||
export_server_response_schemas(out_dir)?;
|
||||
let bundle = build_schema_bundle(schemas)?;
|
||||
write_pretty_json(
|
||||
out_dir.join("codex_app_server_protocol.schemas.json"),
|
||||
&bundle,
|
||||
)?;
|
||||
|
||||
let mut definitions = Map::new();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
|
||||
const SPECIAL_DEFINITIONS: &[&str] = &[
|
||||
"ClientNotification",
|
||||
"ClientRequest",
|
||||
"EventMsg",
|
||||
"FileChange",
|
||||
"InputItem",
|
||||
"ParsedCommand",
|
||||
"SandboxPolicy",
|
||||
"ServerNotification",
|
||||
"ServerRequest",
|
||||
];
|
||||
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
|
||||
|
||||
for (name, schema) in bundle {
|
||||
let mut schema_value = serde_json::to_value(schema)?;
|
||||
if let Value::Object(ref mut obj) = schema_value {
|
||||
if let Some(defs) = obj.remove("definitions")
|
||||
&& let Value::Object(defs_obj) = defs
|
||||
{
|
||||
for (def_name, def_schema) in defs_obj {
|
||||
if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
definitions.insert(def_name, def_schema);
|
||||
}
|
||||
let namespaced_types = collect_namespaced_types(&schemas);
|
||||
let mut definitions = Map::new();
|
||||
|
||||
for schema in schemas {
|
||||
let GeneratedSchema {
|
||||
namespace,
|
||||
logical_name,
|
||||
mut value,
|
||||
in_v1_dir,
|
||||
} = schema;
|
||||
|
||||
if IGNORED_DEFINITIONS.contains(&logical_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
rewrite_refs_to_namespace(&mut value, ns);
|
||||
}
|
||||
|
||||
let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
|
||||
if let Value::Object(ref mut obj) = value
|
||||
&& let Some(defs) = obj.remove("definitions")
|
||||
&& let Value::Object(defs_obj) = defs
|
||||
{
|
||||
for (def_name, mut def_schema) in defs_obj {
|
||||
if IGNORED_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(Value::Array(one_of)) = obj.get_mut("oneOf") {
|
||||
for variant in one_of.iter_mut() {
|
||||
if let Some(variant_name) = variant_definition_name(&name, variant)
|
||||
&& let Value::Object(variant_obj) = variant
|
||||
if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
annotate_schema(&mut def_schema, Some(def_name.as_str()));
|
||||
let target_namespace = match namespace {
|
||||
Some(ref ns) => Some(ns.clone()),
|
||||
None => namespace_for_definition(&def_name, &namespaced_types)
|
||||
.cloned()
|
||||
.filter(|_| !in_v1_dir),
|
||||
};
|
||||
if let Some(ref ns) = target_namespace {
|
||||
if namespace.as_deref() == Some(ns.as_str()) {
|
||||
rewrite_refs_to_namespace(&mut def_schema, ns);
|
||||
insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
|
||||
} else if !forced_namespace_refs
|
||||
.iter()
|
||||
.any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
|
||||
{
|
||||
variant_obj.insert("title".into(), Value::String(variant_name));
|
||||
forced_namespace_refs.push((def_name.clone(), ns.clone()));
|
||||
}
|
||||
} else {
|
||||
definitions.insert(def_name, def_schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
definitions.insert(name, schema_value);
|
||||
|
||||
for (name, ns) in forced_namespace_refs {
|
||||
rewrite_named_ref_to_namespace(&mut value, &ns, &name);
|
||||
}
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
|
||||
} else {
|
||||
definitions.insert(logical_name, value);
|
||||
}
|
||||
}
|
||||
|
||||
let mut root = Map::new();
|
||||
@@ -213,30 +263,66 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
root.insert("type".to_string(), Value::String("object".into()));
|
||||
root.insert("definitions".to_string(), Value::Object(definitions));
|
||||
|
||||
write_pretty_json(
|
||||
out_dir.join("codex_app_server_protocol.schemas.json"),
|
||||
&Value::Object(root),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
Ok(Value::Object(root))
|
||||
}
|
||||
|
||||
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
|
||||
fn insert_into_namespace(
|
||||
definitions: &mut Map<String, Value>,
|
||||
namespace: &str,
|
||||
name: String,
|
||||
schema: Value,
|
||||
) -> Result<()> {
|
||||
let entry = definitions
|
||||
.entry(namespace.to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()));
|
||||
match entry {
|
||||
Value::Object(map) => {
|
||||
map.insert(name, schema);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(anyhow!("expected namespace {namespace} to be an object")),
|
||||
}
|
||||
}
|
||||
|
||||
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
|
||||
where
|
||||
T: JsonSchema,
|
||||
{
|
||||
let file_stem = name.trim();
|
||||
let schema = schema_for!(T);
|
||||
write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema)
|
||||
let mut schema_value = serde_json::to_value(schema)?;
|
||||
annotate_schema(&mut schema_value, Some(file_stem));
|
||||
// If the name looks like a namespaced path (e.g., "v2::Type"), mirror
|
||||
// the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
|
||||
// write alongside the legacy files.
|
||||
let (raw_namespace, logical_name) = split_namespace(file_stem);
|
||||
let out_path = if let Some(ns) = raw_namespace {
|
||||
let dir = out_dir.join(ns);
|
||||
ensure_dir(&dir)?;
|
||||
dir.join(format!("{logical_name}.json"))
|
||||
} else {
|
||||
out_dir.join(format!("{file_stem}.json"))
|
||||
};
|
||||
|
||||
write_pretty_json(out_path, &schema_value)
|
||||
.with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
|
||||
Ok(schema)
|
||||
let namespace = match raw_namespace {
|
||||
Some("v1") | None => None,
|
||||
Some(ns) => Some(ns.to_string()),
|
||||
};
|
||||
Ok(GeneratedSchema {
|
||||
in_v1_dir: raw_namespace == Some("v1"),
|
||||
namespace,
|
||||
logical_name: logical_name.to_string(),
|
||||
value: schema_value,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<()>
|
||||
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
|
||||
where
|
||||
T: JsonSchema,
|
||||
{
|
||||
write_json_schema_with_return::<T>(out_dir, name).map(|_| ())
|
||||
write_json_schema_with_return::<T>(out_dir, name)
|
||||
}
|
||||
|
||||
fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
|
||||
@@ -245,13 +331,73 @@ fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
|
||||
fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
|
||||
Ok(())
|
||||
}
|
||||
fn type_basename(type_path: &str) -> String {
|
||||
type_path
|
||||
.rsplit_once("::")
|
||||
.map(|(_, name)| name)
|
||||
.unwrap_or(type_path)
|
||||
.trim()
|
||||
.to_string()
|
||||
|
||||
/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
|
||||
fn split_namespace(name: &str) -> (Option<&str>, &str) {
|
||||
name.split_once("::")
|
||||
.map_or((None, name), |(ns, rest)| (Some(ns), rest))
|
||||
}
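// Illustrative sketch added by the editor (not part of the upstream diff): names split
// on the first "::" only, and plain names keep a `None` namespace.
#[cfg(test)]
mod split_namespace_example {
    use super::split_namespace;

    #[test]
    fn splits_namespaced_and_plain_names() {
        assert_eq!(
            split_namespace("v2::ThreadStartParams"),
            (Some("v2"), "ThreadStartParams")
        );
        assert_eq!(split_namespace("InitializeParams"), (None, "InitializeParams"));
    }
}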
|
||||
|
||||
/// Recursively rewrite $ref values that point at "#/definitions/..." so that
|
||||
/// they point to a namespaced location under the bundle.
|
||||
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
|
||||
match value {
|
||||
Value::Object(obj) => {
|
||||
if let Some(Value::String(r)) = obj.get_mut("$ref")
|
||||
&& let Some(suffix) = r.strip_prefix("#/definitions/")
|
||||
{
|
||||
let prefix = format!("{ns}/");
|
||||
if !suffix.starts_with(&prefix) {
|
||||
*r = format!("#/definitions/{ns}/{suffix}");
|
||||
}
|
||||
}
|
||||
for v in obj.values_mut() {
|
||||
rewrite_refs_to_namespace(v, ns);
|
||||
}
|
||||
}
|
||||
Value::Array(items) => {
|
||||
for v in items.iter_mut() {
|
||||
rewrite_refs_to_namespace(v, ns);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
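// Illustrative sketch added by the editor (not part of the upstream diff): bare
// "#/definitions/..." refs gain the namespace prefix, while refs that already start
// with it are left untouched.
#[cfg(test)]
mod rewrite_refs_to_namespace_example {
    use super::*;

    #[test]
    fn prefixes_only_unqualified_refs() {
        let mut schema = serde_json::json!({
            "properties": {
                "turn": { "$ref": "#/definitions/Turn" },
                "thread": { "$ref": "#/definitions/v2/Thread" }
            }
        });
        rewrite_refs_to_namespace(&mut schema, "v2");
        assert_eq!(schema["properties"]["turn"]["$ref"], "#/definitions/v2/Turn");
        assert_eq!(schema["properties"]["thread"]["$ref"], "#/definitions/v2/Thread");
    }
}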
|
||||
|
||||
fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
|
||||
let mut types = HashMap::new();
|
||||
for schema in schemas {
|
||||
if let Some(ns) = schema.namespace() {
|
||||
types
|
||||
.entry(schema.logical_name().to_string())
|
||||
.or_insert_with(|| ns.to_string());
|
||||
if let Some(Value::Object(defs)) = schema.value().get("definitions") {
|
||||
for key in defs.keys() {
|
||||
types.entry(key.clone()).or_insert_with(|| ns.to_string());
|
||||
}
|
||||
}
|
||||
if let Some(Value::Object(defs)) = schema.value().get("$defs") {
|
||||
for key in defs.keys() {
|
||||
types.entry(key.clone()).or_insert_with(|| ns.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
types
|
||||
}
|
||||
|
||||
fn namespace_for_definition<'a>(
|
||||
name: &str,
|
||||
types: &'a HashMap<String, String>,
|
||||
) -> Option<&'a String> {
|
||||
if let Some(ns) = types.get(name) {
|
||||
return Some(ns);
|
||||
}
|
||||
let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
|
||||
if trimmed != name {
|
||||
return types.get(trimmed);
|
||||
}
|
||||
None
|
||||
}
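// Illustrative sketch added by the editor (not part of the upstream diff): exact names
// resolve directly; a name with a trailing numeric suffix (e.g. a deduplicated
// "Thing2" definition) falls back to the namespace recorded for the base name.
#[cfg(test)]
mod namespace_for_definition_example {
    use super::*;

    #[test]
    fn falls_back_on_trailing_digits() {
        let mut types = HashMap::new();
        types.insert("ThreadItem".to_string(), "v2".to_string());
        assert_eq!(
            namespace_for_definition("ThreadItem", &types),
            Some(&"v2".to_string())
        );
        assert_eq!(
            namespace_for_definition("ThreadItem2", &types),
            Some(&"v2".to_string())
        );
        assert_eq!(namespace_for_definition("Unknown", &types), None);
    }
}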
|
||||
|
||||
fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
|
||||
@@ -273,14 +419,6 @@ fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(mode_literal) = literal_from_property(props, "mode") {
|
||||
let pascal = to_pascal_case(mode_literal);
|
||||
return Some(match base {
|
||||
"SandboxPolicy" => format!("{pascal}SandboxPolicy"),
|
||||
_ => format!("{pascal}{base}"),
|
||||
});
|
||||
}
|
||||
|
||||
if props.len() == 1
|
||||
&& let Some(key) = props.keys().next()
|
||||
{
|
||||
@@ -301,11 +439,147 @@ fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
|
||||
}
|
||||
|
||||
fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
|
||||
props
|
||||
.get(key)
|
||||
.and_then(|value| value.get("enum"))
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|arr| arr.first())
|
||||
props.get(key).and_then(string_literal)
|
||||
}
|
||||
|
||||
fn string_literal(value: &Value) -> Option<&str> {
|
||||
value.get("const").and_then(Value::as_str).or_else(|| {
|
||||
value
|
||||
.get("enum")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|arr| arr.first())
|
||||
.and_then(Value::as_str)
|
||||
})
|
||||
}
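// Illustrative sketch added by the editor (not part of the upstream diff): `const`
// wins when present; otherwise the first `enum` entry is used.
#[cfg(test)]
mod string_literal_example {
    use super::*;

    #[test]
    fn reads_const_then_first_enum_entry() {
        assert_eq!(
            string_literal(&serde_json::json!({ "const": "apiKey" })),
            Some("apiKey")
        );
        assert_eq!(
            string_literal(&serde_json::json!({ "enum": ["chatgpt", "apiKey"] })),
            Some("chatgpt")
        );
        assert_eq!(string_literal(&serde_json::json!({ "type": "string" })), None);
    }
}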
|
||||
|
||||
fn annotate_schema(value: &mut Value, base: Option<&str>) {
|
||||
match value {
|
||||
Value::Object(map) => annotate_object(map, base),
|
||||
Value::Array(items) => {
|
||||
for item in items {
|
||||
annotate_schema(item, base);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
|
||||
let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
|
||||
if let Some(owner) = owner.as_deref()
|
||||
&& let Some(Value::Object(props)) = map.get_mut("properties")
|
||||
{
|
||||
set_discriminator_titles(props, owner);
|
||||
}
|
||||
|
||||
if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
|
||||
annotate_variant_list(variants, base);
|
||||
}
|
||||
if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
|
||||
annotate_variant_list(variants, base);
|
||||
}
|
||||
|
||||
if let Some(Value::Object(defs)) = map.get_mut("definitions") {
|
||||
for (name, schema) in defs.iter_mut() {
|
||||
annotate_schema(schema, Some(name.as_str()));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(Value::Object(defs)) = map.get_mut("$defs") {
|
||||
for (name, schema) in defs.iter_mut() {
|
||||
annotate_schema(schema, Some(name.as_str()));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(Value::Object(props)) = map.get_mut("properties") {
|
||||
for value in props.values_mut() {
|
||||
annotate_schema(value, base);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(items) = map.get_mut("items") {
|
||||
annotate_schema(items, base);
|
||||
}
|
||||
|
||||
if let Some(additional) = map.get_mut("additionalProperties") {
|
||||
annotate_schema(additional, base);
|
||||
}
|
||||
|
||||
for (key, child) in map.iter_mut() {
|
||||
match key.as_str() {
|
||||
"oneOf"
|
||||
| "anyOf"
|
||||
| "definitions"
|
||||
| "$defs"
|
||||
| "properties"
|
||||
| "items"
|
||||
| "additionalProperties" => {}
|
||||
_ => annotate_schema(child, base),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
|
||||
let mut seen = HashSet::new();
|
||||
|
||||
for variant in variants.iter() {
|
||||
if let Some(name) = variant_title(variant) {
|
||||
seen.insert(name.to_owned());
|
||||
}
|
||||
}
|
||||
|
||||
for variant in variants.iter_mut() {
|
||||
let mut variant_name = variant_title(variant).map(str::to_owned);
|
||||
|
||||
if variant_name.is_none()
|
||||
&& let Some(base_name) = base
|
||||
&& let Some(name) = variant_definition_name(base_name, variant)
|
||||
{
|
||||
let mut candidate = name.clone();
|
||||
let mut index = 2;
|
||||
while seen.contains(&candidate) {
|
||||
candidate = format!("{name}{index}");
|
||||
index += 1;
|
||||
}
|
||||
if let Some(obj) = variant.as_object_mut() {
|
||||
obj.insert("title".into(), Value::String(candidate.clone()));
|
||||
}
|
||||
seen.insert(candidate.clone());
|
||||
variant_name = Some(candidate);
|
||||
}
|
||||
|
||||
if let Some(name) = variant_name.as_deref()
|
||||
&& let Some(obj) = variant.as_object_mut()
|
||||
&& let Some(Value::Object(props)) = obj.get_mut("properties")
|
||||
{
|
||||
set_discriminator_titles(props, name);
|
||||
}
|
||||
|
||||
annotate_schema(variant, base);
|
||||
}
|
||||
}
|
||||
|
||||
const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];
|
||||
|
||||
fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
|
||||
for key in DISCRIMINATOR_KEYS {
|
||||
if let Some(prop_schema) = props.get_mut(*key)
|
||||
&& string_literal(prop_schema).is_some()
|
||||
&& let Value::Object(prop_obj) = prop_schema
|
||||
{
|
||||
if prop_obj.contains_key("title") {
|
||||
continue;
|
||||
}
|
||||
let suffix = to_pascal_case(key);
|
||||
prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
|
||||
}
|
||||
}
|
||||
}
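// Illustrative sketch added by the editor (not part of the upstream diff), assuming
// `to_pascal_case("type")` yields "Type": a literal discriminator property receives a
// stable "<Owner><Key>" title, and non-discriminator properties are left alone.
#[cfg(test)]
mod set_discriminator_titles_example {
    use super::*;

    #[test]
    fn titles_literal_discriminator_properties() {
        let mut props = Map::new();
        props.insert("type".to_string(), serde_json::json!({ "const": "apiKey" }));
        props.insert("email".to_string(), serde_json::json!({ "type": "string" }));
        set_discriminator_titles(&mut props, "Account");
        assert_eq!(props["type"]["title"], "AccountType");
        assert_eq!(props["email"].get("title"), None);
    }
}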
|
||||
|
||||
fn variant_title(value: &Value) -> Option<&str> {
|
||||
value
|
||||
.as_object()
|
||||
.and_then(|obj| obj.get("title"))
|
||||
.and_then(Value::as_str)
|
||||
}
|
||||
|
||||
@@ -335,6 +609,33 @@ fn ensure_dir(dir: &Path) -> Result<()> {
|
||||
.with_context(|| format!("Failed to create output directory {}", dir.display()))
|
||||
}
|
||||
|
||||
fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
|
||||
let direct = format!("#/definitions/{name}");
|
||||
let prefixed = format!("{direct}/");
|
||||
let replacement = format!("#/definitions/{ns}/{name}");
|
||||
let replacement_prefixed = format!("{replacement}/");
|
||||
match value {
|
||||
Value::Object(obj) => {
|
||||
if let Some(Value::String(reference)) = obj.get_mut("$ref") {
|
||||
if reference == &direct {
|
||||
*reference = replacement;
|
||||
} else if let Some(rest) = reference.strip_prefix(&prefixed) {
|
||||
*reference = format!("{replacement_prefixed}{rest}");
|
||||
}
|
||||
}
|
||||
for child in obj.values_mut() {
|
||||
rewrite_named_ref_to_namespace(child, ns, name);
|
||||
}
|
||||
}
|
||||
Value::Array(items) => {
|
||||
for child in items {
|
||||
rewrite_named_ref_to_namespace(child, ns, name);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
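// Illustrative sketch added by the editor (not part of the upstream diff): only refs to
// the named definition (and paths beneath it) move under the namespace; other refs are
// left untouched.
#[cfg(test)]
mod rewrite_named_ref_to_namespace_example {
    use super::*;

    #[test]
    fn rewrites_only_the_named_definition() {
        let mut schema = serde_json::json!({
            "a": { "$ref": "#/definitions/Turn" },
            "b": { "$ref": "#/definitions/Turn/properties/id" },
            "c": { "$ref": "#/definitions/Thread" }
        });
        rewrite_named_ref_to_namespace(&mut schema, "v2", "Turn");
        assert_eq!(schema["a"]["$ref"], "#/definitions/v2/Turn");
        assert_eq!(schema["b"]["$ref"], "#/definitions/v2/Turn/properties/id");
        assert_eq!(schema["c"]["$ref"], "#/definitions/Thread");
    }
}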
|
||||
|
||||
fn prepend_header_if_missing(path: &Path) -> Result<()> {
|
||||
let mut content = String::new();
|
||||
{
|
||||
@@ -372,6 +673,28 @@ fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
|
||||
let mut files = Vec::new();
|
||||
let mut stack = vec![dir.to_path_buf()];
|
||||
while let Some(d) = stack.pop() {
|
||||
for entry in
|
||||
fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
|
||||
{
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
stack.push(path);
|
||||
} else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
files.sort();
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
/// Generate an index.ts file that re-exports all generated types.
|
||||
/// This allows consumers to import all types from a single file.
|
||||
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
|
||||
let mut entries: Vec<String> = Vec::new();
|
||||
let mut stems: Vec<String> = ts_files_in(out_dir)?
|
||||
@@ -388,6 +711,14 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
|
||||
entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
|
||||
}
|
||||
|
||||
// If this is the root out_dir and a ./v2 folder exists with TS files,
|
||||
// expose it as a namespace to avoid symbol collisions at the root.
|
||||
let v2_dir = out_dir.join("v2");
|
||||
let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
|
||||
if has_v2_ts {
|
||||
entries.push("export * as v2 from \"./v2\";\n".to_string());
|
||||
}
|
||||
|
||||
let mut content =
|
||||
String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
|
||||
content.push_str(HEADER);
|
||||
@@ -402,3 +733,211 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
|
||||
.with_context(|| format!("Failed to write {}", index_path.display()))?;
|
||||
Ok(index_path)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use std::collections::BTreeSet;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[test]
|
||||
fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
|
||||
// Assert that there are no types of the form "?: T | null" in the generated TS files.
|
||||
let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
|
||||
fs::create_dir(&output_dir)?;
|
||||
|
||||
struct TempDirGuard(PathBuf);
|
||||
|
||||
impl Drop for TempDirGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = fs::remove_dir_all(&self.0);
|
||||
}
|
||||
}
|
||||
|
||||
let _guard = TempDirGuard(output_dir.clone());
|
||||
|
||||
// Avoid doing more work than necessary to keep the test from timing out.
|
||||
let options = GenerateTsOptions {
|
||||
generate_indices: false,
|
||||
ensure_headers: false,
|
||||
run_prettier: false,
|
||||
};
|
||||
generate_ts_with_options(&output_dir, None, options)?;
|
||||
|
||||
let mut undefined_offenders = Vec::new();
|
||||
let mut optional_nullable_offenders = BTreeSet::new();
|
||||
let mut stack = vec![output_dir];
|
||||
while let Some(dir) = stack.pop() {
|
||||
for entry in fs::read_dir(&dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
stack.push(path);
|
||||
continue;
|
||||
}
|
||||
|
||||
if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
|
||||
let contents = fs::read_to_string(&path)?;
|
||||
if contents.contains("| undefined") {
|
||||
undefined_offenders.push(path.clone());
|
||||
}
|
||||
|
||||
const SKIP_PREFIXES: &[&str] = &[
|
||||
"const ",
|
||||
"let ",
|
||||
"var ",
|
||||
"export const ",
|
||||
"export let ",
|
||||
"export var ",
|
||||
];
|
||||
|
||||
let mut search_start = 0;
|
||||
while let Some(idx) = contents[search_start..].find("| null") {
|
||||
let abs_idx = search_start + idx;
|
||||
// Find the property-colon for this field by scanning forward
|
||||
// from the start of the segment and ignoring nested braces,
|
||||
// brackets, and parens. This avoids colons inside nested
|
||||
// type literals like `{ [k in string]?: string }`.
|
||||
|
||||
let line_start_idx =
|
||||
contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);
|
||||
|
||||
let mut segment_start_idx = line_start_idx;
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
|
||||
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
|
||||
}
|
||||
|
||||
// Scan forward for the colon that separates the field name from its type.
|
||||
let mut level_brace = 0_i32;
|
||||
let mut level_brack = 0_i32;
|
||||
let mut level_paren = 0_i32;
|
||||
let mut in_single = false;
|
||||
let mut in_double = false;
|
||||
let mut escape = false;
|
||||
let mut prop_colon_idx = None;
|
||||
for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
|
||||
let idx_abs = segment_start_idx + i;
|
||||
if escape {
|
||||
escape = false;
|
||||
continue;
|
||||
}
|
||||
match ch {
|
||||
'\\' => {
|
||||
// Only treat as escape when inside a string.
|
||||
if in_single || in_double {
|
||||
escape = true;
|
||||
}
|
||||
}
|
||||
'\'' => {
|
||||
if !in_double {
|
||||
in_single = !in_single;
|
||||
}
|
||||
}
|
||||
'"' => {
|
||||
if !in_single {
|
||||
in_double = !in_double;
|
||||
}
|
||||
}
|
||||
'{' if !in_single && !in_double => level_brace += 1,
|
||||
'}' if !in_single && !in_double => level_brace -= 1,
|
||||
'[' if !in_single && !in_double => level_brack += 1,
|
||||
']' if !in_single && !in_double => level_brack -= 1,
|
||||
'(' if !in_single && !in_double => level_paren += 1,
|
||||
')' if !in_single && !in_double => level_paren -= 1,
|
||||
':' if !in_single
|
||||
&& !in_double
|
||||
&& level_brace == 0
|
||||
&& level_brack == 0
|
||||
&& level_paren == 0 =>
|
||||
{
|
||||
prop_colon_idx = Some(idx_abs);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let Some(colon_idx) = prop_colon_idx else {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
};
|
||||
|
||||
let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
|
||||
if field_prefix.is_empty() {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(comment_idx) = field_prefix.rfind("*/") {
|
||||
field_prefix = field_prefix[comment_idx + 2..].trim_start();
|
||||
}
|
||||
|
||||
if field_prefix.is_empty() {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if SKIP_PREFIXES
|
||||
.iter()
|
||||
.any(|prefix| field_prefix.starts_with(prefix))
|
||||
{
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
if field_prefix.contains('(') {
|
||||
search_start = abs_idx + 5;
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the last non-whitespace before ':' is '?', then this is an
|
||||
// optional field with a nullable type (i.e., "?: T | null"),
|
||||
// which we explicitly disallow.
|
||||
if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
|
||||
let line_number =
|
||||
contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
|
||||
let offending_line_end = contents[line_start_idx..]
|
||||
.find('\n')
|
||||
.map(|i| line_start_idx + i)
|
||||
.unwrap_or(contents.len());
|
||||
let offending_snippet =
|
||||
contents[line_start_idx..offending_line_end].trim();
|
||||
|
||||
optional_nullable_offenders.insert(format!(
|
||||
"{}:{}: {offending_snippet}",
|
||||
path.display(),
|
||||
line_number
|
||||
));
|
||||
}
|
||||
|
||||
search_start = abs_idx + 5;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert!(
|
||||
undefined_offenders.is_empty(),
|
||||
"Generated TypeScript still includes unions with `undefined` in {undefined_offenders:?}"
|
||||
);
|
||||
|
||||
// If this assertion fails, it means a field was generated as
|
||||
// "?: T | null" — i.e., both optional (undefined) and nullable (null).
|
||||
// We only want either "?: T" or ": T | null".
|
||||
assert!(
|
||||
optional_nullable_offenders.is_empty(),
|
||||
"Generated TypeScript has optional fields with nullable types (disallowed '?: T | null'), add #[ts(optional)] to fix:\n{optional_nullable_offenders:?}"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ pub struct JSONRPCRequest {
|
||||
pub id: RequestId,
|
||||
pub method: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub params: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
@@ -42,6 +43,7 @@ pub struct JSONRPCRequest {
|
||||
pub struct JSONRPCNotification {
|
||||
pub method: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub params: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
@@ -63,6 +65,7 @@ pub struct JSONRPCError {
|
||||
pub struct JSONRPCErrorError {
|
||||
pub code: i64,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub data: Option<serde_json::Value>,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
@@ -6,4 +6,7 @@ pub use export::generate_json;
|
||||
pub use export::generate_ts;
|
||||
pub use export::generate_types;
|
||||
pub use jsonrpc_lite::*;
|
||||
pub use protocol::*;
|
||||
pub use protocol::common::*;
|
||||
pub use protocol::thread_history::*;
|
||||
pub use protocol::v1::*;
|
||||
pub use protocol::v2::*;
|
||||
|
||||
File diff suppressed because it is too large
codex-rs/app-server-protocol/src/protocol/common.rs (new file, 797 lines)
@@ -0,0 +1,797 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
use crate::RequestId;
|
||||
use crate::export::GeneratedSchema;
|
||||
use crate::export::write_json_schema;
|
||||
use crate::protocol::v1;
|
||||
use crate::protocol::v2;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
|
||||
#[ts(type = "string")]
|
||||
pub struct GitSha(pub String);
|
||||
|
||||
impl GitSha {
|
||||
pub fn new(sha: &str) -> Self {
|
||||
Self(sha.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
}
|
||||
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
/// client can send to the server. Each variant has associated `params` and
|
||||
/// `response` types. Also generates an `export_client_responses()` function to
|
||||
/// export all response types to TypeScript.
|
||||
macro_rules! client_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $(#[$params_meta:meta])* $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
$(#[$params_meta])*
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_param_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
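// Expansion sketch added by the editor (not part of the upstream diff): a variant such as
//
//     ThreadStart => "thread/start" {
//         params: v2::ThreadStartParams,
//         response: v2::ThreadStartResponse,
//     },
//
// expands to an enum variant
//
//     #[serde(rename = "thread/start")]
//     #[ts(rename = "thread/start")]
//     ThreadStart {
//         #[serde(rename = "id")]
//         request_id: RequestId,
//         params: v2::ThreadStartParams,
//     },
//
// and its response type is exported both as TypeScript (`export_client_responses`) and
// as a JSON Schema (`export_client_response_schemas`).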
|
||||
|
||||
client_request_definitions! {
|
||||
Initialize {
|
||||
params: v1::InitializeParams,
|
||||
response: v1::InitializeResponse,
|
||||
},
|
||||
|
||||
/// NEW APIs
|
||||
// Thread lifecycle
|
||||
ThreadStart => "thread/start" {
|
||||
params: v2::ThreadStartParams,
|
||||
response: v2::ThreadStartResponse,
|
||||
},
|
||||
ThreadResume => "thread/resume" {
|
||||
params: v2::ThreadResumeParams,
|
||||
response: v2::ThreadResumeResponse,
|
||||
},
|
||||
ThreadArchive => "thread/archive" {
|
||||
params: v2::ThreadArchiveParams,
|
||||
response: v2::ThreadArchiveResponse,
|
||||
},
|
||||
ThreadList => "thread/list" {
|
||||
params: v2::ThreadListParams,
|
||||
response: v2::ThreadListResponse,
|
||||
},
|
||||
ThreadCompact => "thread/compact" {
|
||||
params: v2::ThreadCompactParams,
|
||||
response: v2::ThreadCompactResponse,
|
||||
},
|
||||
TurnStart => "turn/start" {
|
||||
params: v2::TurnStartParams,
|
||||
response: v2::TurnStartResponse,
|
||||
},
|
||||
TurnInterrupt => "turn/interrupt" {
|
||||
params: v2::TurnInterruptParams,
|
||||
response: v2::TurnInterruptResponse,
|
||||
},
|
||||
ReviewStart => "review/start" {
|
||||
params: v2::ReviewStartParams,
|
||||
response: v2::TurnStartResponse,
|
||||
},
|
||||
|
||||
ModelList => "model/list" {
|
||||
params: v2::ModelListParams,
|
||||
response: v2::ModelListResponse,
|
||||
},
|
||||
|
||||
LoginAccount => "account/login/start" {
|
||||
params: v2::LoginAccountParams,
|
||||
response: v2::LoginAccountResponse,
|
||||
},
|
||||
|
||||
CancelLoginAccount => "account/login/cancel" {
|
||||
params: v2::CancelLoginAccountParams,
|
||||
response: v2::CancelLoginAccountResponse,
|
||||
},
|
||||
|
||||
LogoutAccount => "account/logout" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::LogoutAccountResponse,
|
||||
},
|
||||
|
||||
GetAccountRateLimits => "account/rateLimits/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::GetAccountRateLimitsResponse,
|
||||
},
|
||||
|
||||
FeedbackUpload => "feedback/upload" {
|
||||
params: v2::FeedbackUploadParams,
|
||||
response: v2::FeedbackUploadResponse,
|
||||
},
|
||||
|
||||
GetAccount => "account/read" {
|
||||
params: v2::GetAccountParams,
|
||||
response: v2::GetAccountResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
NewConversation {
|
||||
params: v1::NewConversationParams,
|
||||
response: v1::NewConversationResponse,
|
||||
},
|
||||
GetConversationSummary {
|
||||
params: v1::GetConversationSummaryParams,
|
||||
response: v1::GetConversationSummaryResponse,
|
||||
},
|
||||
/// List recorded Codex conversations (rollouts) with optional pagination and search.
|
||||
ListConversations {
|
||||
params: v1::ListConversationsParams,
|
||||
response: v1::ListConversationsResponse,
|
||||
},
|
||||
/// Resume a recorded Codex conversation from a rollout file.
|
||||
ResumeConversation {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
},
|
||||
SendUserMessage {
|
||||
params: v1::SendUserMessageParams,
|
||||
response: v1::SendUserMessageResponse,
|
||||
},
|
||||
SendUserTurn {
|
||||
params: v1::SendUserTurnParams,
|
||||
response: v1::SendUserTurnResponse,
|
||||
},
|
||||
InterruptConversation {
|
||||
params: v1::InterruptConversationParams,
|
||||
response: v1::InterruptConversationResponse,
|
||||
},
|
||||
AddConversationListener {
|
||||
params: v1::AddConversationListenerParams,
|
||||
response: v1::AddConversationSubscriptionResponse,
|
||||
},
|
||||
RemoveConversationListener {
|
||||
params: v1::RemoveConversationListenerParams,
|
||||
response: v1::RemoveConversationSubscriptionResponse,
|
||||
},
|
||||
GitDiffToRemote {
|
||||
params: v1::GitDiffToRemoteParams,
|
||||
response: v1::GitDiffToRemoteResponse,
|
||||
},
|
||||
LoginApiKey {
|
||||
params: v1::LoginApiKeyParams,
|
||||
response: v1::LoginApiKeyResponse,
|
||||
},
|
||||
LoginChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LoginChatGptResponse,
|
||||
},
|
||||
// DEPRECATED in favor of CancelLoginAccount
|
||||
CancelLoginChatGpt {
|
||||
params: v1::CancelLoginChatGptParams,
|
||||
response: v1::CancelLoginChatGptResponse,
|
||||
},
|
||||
LogoutChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LogoutChatGptResponse,
|
||||
},
|
||||
/// DEPRECATED in favor of GetAccount
|
||||
GetAuthStatus {
|
||||
params: v1::GetAuthStatusParams,
|
||||
response: v1::GetAuthStatusResponse,
|
||||
},
|
||||
GetUserSavedConfig {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserSavedConfigResponse,
|
||||
},
|
||||
SetDefaultModel {
|
||||
params: v1::SetDefaultModelParams,
|
||||
response: v1::SetDefaultModelResponse,
|
||||
},
|
||||
GetUserAgent {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserAgentResponse,
|
||||
},
|
||||
UserInfo {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::UserInfoResponse,
|
||||
},
|
||||
FuzzyFileSearch {
|
||||
params: FuzzyFileSearchParams,
|
||||
response: FuzzyFileSearchResponse,
|
||||
},
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
ExecOneOffCommand {
|
||||
params: v1::ExecOneOffCommandParams,
|
||||
response: v1::ExecOneOffCommandResponse,
|
||||
},
|
||||
}
|
||||
|
||||
/// Generates an `enum ServerRequest` where each variant is a request that the
|
||||
/// server can send to the client along with the corresponding params and
|
||||
/// response types. It also generates helper types used by the app/server
|
||||
/// infrastructure (payload enum, request constructor, and export helpers).
|
||||
macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant($params), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_response_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$response>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Response"),
|
||||
)?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_param_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$params>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Params"),
|
||||
)?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Generates `ServerNotification` enum and helpers, including a JSON Schema
|
||||
/// exporter for each notification.
|
||||
macro_rules! server_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? ( $payload:ty )
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Notification sent from the server to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ServerNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
|
||||
$variant($payload),
|
||||
)*
|
||||
}
|
||||
|
||||
impl ServerNotification {
|
||||
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
|
||||
match self {
|
||||
$(Self::$variant(params) => serde_json::to_value(params),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_notification_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
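// Note added by the editor (not part of the upstream diff): because of
// `#[serde(tag = "method", content = "params")]`, each notification serializes as
// `{ "method": "<wire name>", "params": <payload> }`, matching the `JSONRPCNotification`
// wire shape, which is why the `TryFrom<JSONRPCNotification>` impl below can convert by
// round-tripping through `serde_json`.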
|
||||
/// Notifications sent from the client to the server.
|
||||
macro_rules! client_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $( ( $payload:ty ) )?
|
||||
),* $(,)?
|
||||
) => {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ClientNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant $( ( $payload ) )?,
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_notification_schemas(
|
||||
_out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let schemas = Vec::new();
|
||||
$( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCRequest> for ServerRequest {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
server_request_definitions! {
|
||||
/// NEW APIs
|
||||
/// Sent when approval is requested for a specific command execution.
|
||||
/// This request is used for Turns started via turn/start.
|
||||
CommandExecutionRequestApproval => "item/commandExecution/requestApproval" {
|
||||
params: v2::CommandExecutionRequestApprovalParams,
|
||||
response: v2::CommandExecutionRequestApprovalResponse,
|
||||
},
|
||||
|
||||
/// Sent when approval is requested for a specific file change.
|
||||
/// This request is used for Turns started via turn/start.
|
||||
FileChangeRequestApproval => "item/fileChange/requestApproval" {
|
||||
params: v2::FileChangeRequestApprovalParams,
|
||||
response: v2::FileChangeRequestApprovalResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
/// Request to approve a patch.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
ApplyPatchApproval {
|
||||
params: v1::ApplyPatchApprovalParams,
|
||||
response: v1::ApplyPatchApprovalResponse,
|
||||
},
|
||||
/// Request to exec a command.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
ExecCommandApproval {
|
||||
params: v1::ExecCommandApprovalParams,
|
||||
response: v1::ExecCommandApprovalResponse,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
pub struct FuzzyFileSearchParams {
|
||||
pub query: String,
|
||||
pub roots: Vec<String>,
|
||||
// if provided, will cancel any previous request that used the same value
|
||||
pub cancellation_token: Option<String>,
|
||||
}
|
||||
|
||||
/// Superset of [`codex_file_search::FileMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResult {
|
||||
pub root: String,
|
||||
pub path: String,
|
||||
pub file_name: String,
|
||||
pub score: u32,
|
||||
pub indices: Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResponse {
|
||||
pub files: Vec<FuzzyFileSearchResult>,
|
||||
}
|
||||
|
||||
server_notification_definitions! {
|
||||
/// NEW NOTIFICATIONS
|
||||
Error => "error" (v2::ErrorNotification),
|
||||
ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
|
||||
TurnStarted => "turn/started" (v2::TurnStartedNotification),
|
||||
TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
|
||||
ItemStarted => "item/started" (v2::ItemStartedNotification),
|
||||
ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
|
||||
AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
|
||||
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
|
||||
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
|
||||
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
|
||||
AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
|
||||
ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
|
||||
ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
|
||||
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
|
||||
|
||||
/// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
|
||||
WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
|
||||
|
||||
#[serde(rename = "account/login/completed")]
|
||||
#[ts(rename = "account/login/completed")]
|
||||
#[strum(serialize = "account/login/completed")]
|
||||
AccountLoginCompleted(v2::AccountLoginCompletedNotification),
|
||||
|
||||
/// DEPRECATED NOTIFICATIONS below
|
||||
AuthStatusChange(v1::AuthStatusChangeNotification),
|
||||
|
||||
/// Deprecated: use `account/login/completed` instead.
|
||||
LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
|
||||
SessionConfigured(v1::SessionConfiguredNotification),
|
||||
}
|
||||
|
||||
client_notification_definitions! {
|
||||
Initialized,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn serialize_new_conversation() -> Result<()> {
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: RequestId::Integer(42),
|
||||
params: v1::NewConversationParams {
|
||||
model: Some("gpt-5.1-codex-max".to_string()),
|
||||
model_provider: None,
|
||||
profile: None,
|
||||
cwd: None,
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
compact_prompt: None,
|
||||
include_apply_patch_tool: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "newConversation",
|
||||
"id": 42,
|
||||
"params": {
|
||||
"model": "gpt-5.1-codex-max",
|
||||
"modelProvider": null,
|
||||
"profile": null,
|
||||
"cwd": null,
|
||||
"approvalPolicy": "on-request",
|
||||
"sandbox": null,
|
||||
"config": null,
|
||||
"baseInstructions": null,
|
||||
"includeApplyPatchTool": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
serde_json::to_value(id)?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ConversationId =
|
||||
serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_client_notification() -> Result<()> {
|
||||
let notification = ClientNotification::Initialized;
|
||||
// Note there is no "params" field for this notification.
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "initialized",
|
||||
}),
|
||||
serde_json::to_value(¬ification)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = v1::ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
risk: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: params.clone(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "execCommandApproval",
|
||||
"id": 7,
|
||||
"params": {
|
||||
"conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
|
||||
"callId": "call-42",
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"risk": null,
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::ExecCommandApproval(params);
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account_rate_limits() -> Result<()> {
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_api_key() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(2),
|
||||
params: v2::LoginAccountParams::ApiKey {
|
||||
api_key: "secret".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 2,
|
||||
"params": {
|
||||
"type": "apiKey",
|
||||
"apiKey": "secret"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_chatgpt() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(3),
|
||||
params: v2::LoginAccountParams::Chatgpt,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 3,
|
||||
"params": {
|
||||
"type": "chatgpt"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_logout() -> Result<()> {
|
||||
let request = ClientRequest::LogoutAccount {
|
||||
request_id: RequestId::Integer(4),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/logout",
|
||||
"id": 4,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account() -> Result<()> {
|
||||
let request = ClientRequest::GetAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
params: v2::GetAccountParams {
|
||||
refresh_token: false,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/read",
|
||||
"id": 5,
|
||||
"params": {
|
||||
"refreshToken": false
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn account_serializes_fields_in_camel_case() -> Result<()> {
|
||||
let api_key = v2::Account::ApiKey {};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "apiKey",
|
||||
}),
|
||||
serde_json::to_value(&api_key)?,
|
||||
);
|
||||
|
||||
let chatgpt = v2::Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: PlanType::Plus,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "chatgpt",
|
||||
"email": "user@example.com",
|
||||
"planType": "plus",
|
||||
}),
|
||||
serde_json::to_value(&chatgpt)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_list_models() -> Result<()> {
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::ModelListParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "model/list",
|
||||
"id": 6,
|
||||
"params": {
|
||||
"limit": null,
|
||||
"cursor": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
codex-rs/app-server-protocol/src/protocol/mod.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
// Module declarations for the app-server protocol namespace.
|
||||
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
|
||||
|
||||
pub mod common;
|
||||
pub mod thread_history;
|
||||
pub mod v1;
|
||||
pub mod v2;
|
||||
codex-rs/app-server-protocol/src/protocol/thread_history.rs (new file, 409 lines)
@@ -0,0 +1,409 @@
|
||||
use crate::protocol::v2::ThreadItem;
|
||||
use crate::protocol::v2::Turn;
|
||||
use crate::protocol::v2::TurnStatus;
|
||||
use crate::protocol::v2::UserInput;
|
||||
use codex_protocol::protocol::AgentReasoningEvent;
|
||||
use codex_protocol::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::TurnAbortedEvent;
|
||||
use codex_protocol::protocol::UserMessageEvent;
|
||||
|
||||
/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
|
||||
///
|
||||
/// The purpose of this is to convert the EventMsgs persisted in a rollout file
|
||||
/// into a sequence of Turns and ThreadItems, which allows the client to render
|
||||
/// the historical messages when resuming a thread.
|
||||
pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
|
||||
let mut builder = ThreadHistoryBuilder::new();
|
||||
for event in events {
|
||||
builder.handle_event(event);
|
||||
}
|
||||
builder.finish()
|
||||
}
|
||||
|
||||
struct ThreadHistoryBuilder {
|
||||
turns: Vec<Turn>,
|
||||
current_turn: Option<PendingTurn>,
|
||||
next_turn_index: i64,
|
||||
next_item_index: i64,
|
||||
}
|
||||
|
||||
impl ThreadHistoryBuilder {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
turns: Vec::new(),
|
||||
current_turn: None,
|
||||
next_turn_index: 1,
|
||||
next_item_index: 1,
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(mut self) -> Vec<Turn> {
|
||||
self.finish_current_turn();
|
||||
self.turns
|
||||
}
|
||||
|
||||
/// This function should handle all EventMsg variants that can be persisted in a rollout file.
|
||||
/// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
|
||||
fn handle_event(&mut self, event: &EventMsg) {
|
||||
match event {
|
||||
EventMsg::UserMessage(payload) => self.handle_user_message(payload),
|
||||
EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
|
||||
EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
|
||||
EventMsg::AgentReasoningRawContent(payload) => {
|
||||
self.handle_agent_reasoning_raw_content(payload)
|
||||
}
|
||||
EventMsg::TokenCount(_) => {}
|
||||
EventMsg::EnteredReviewMode(_) => {}
|
||||
EventMsg::ExitedReviewMode(_) => {}
|
||||
EventMsg::UndoCompleted(_) => {}
|
||||
EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_user_message(&mut self, payload: &UserMessageEvent) {
|
||||
self.finish_current_turn();
|
||||
let mut turn = self.new_turn();
|
||||
let id = self.next_item_id();
|
||||
let content = self.build_user_inputs(payload);
|
||||
turn.items.push(ThreadItem::UserMessage { id, content });
|
||||
self.current_turn = Some(turn);
|
||||
}
|
||||
|
||||
fn handle_agent_message(&mut self, text: String) {
|
||||
if text.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let id = self.next_item_id();
|
||||
self.ensure_turn()
|
||||
.items
|
||||
.push(ThreadItem::AgentMessage { id, text });
|
||||
}
|
||||
|
||||
fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
|
||||
if payload.text.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the last item is a reasoning item, add the new text to the summary.
|
||||
if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
|
||||
summary.push(payload.text.clone());
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, create a new reasoning item.
|
||||
let id = self.next_item_id();
|
||||
self.ensure_turn().items.push(ThreadItem::Reasoning {
|
||||
id,
|
||||
summary: vec![payload.text.clone()],
|
||||
content: Vec::new(),
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
|
||||
if payload.text.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the last item is a reasoning item, add the new text to the content.
|
||||
if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
|
||||
content.push(payload.text.clone());
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, create a new reasoning item.
|
||||
let id = self.next_item_id();
|
||||
self.ensure_turn().items.push(ThreadItem::Reasoning {
|
||||
id,
|
||||
summary: Vec::new(),
|
||||
content: vec![payload.text.clone()],
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
|
||||
let Some(turn) = self.current_turn.as_mut() else {
|
||||
return;
|
||||
};
|
||||
turn.status = TurnStatus::Interrupted;
|
||||
}
|
||||
|
||||
fn finish_current_turn(&mut self) {
|
||||
if let Some(turn) = self.current_turn.take() {
|
||||
if turn.items.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.turns.push(turn.into());
|
||||
}
|
||||
}
|
||||
|
||||
fn new_turn(&mut self) -> PendingTurn {
|
||||
PendingTurn {
|
||||
id: self.next_turn_id(),
|
||||
items: Vec::new(),
|
||||
status: TurnStatus::Completed,
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_turn(&mut self) -> &mut PendingTurn {
|
||||
if self.current_turn.is_none() {
|
||||
let turn = self.new_turn();
|
||||
return self.current_turn.insert(turn);
|
||||
}
|
||||
|
||||
if let Some(turn) = self.current_turn.as_mut() {
|
||||
return turn;
|
||||
}
|
||||
|
||||
unreachable!("current turn must exist after initialization");
|
||||
}
|
||||
|
||||
fn next_turn_id(&mut self) -> String {
|
||||
let id = format!("turn-{}", self.next_turn_index);
|
||||
self.next_turn_index += 1;
|
||||
id
|
||||
}
|
||||
|
||||
fn next_item_id(&mut self) -> String {
|
||||
let id = format!("item-{}", self.next_item_index);
|
||||
self.next_item_index += 1;
|
||||
id
|
||||
}
|
||||
|
||||
fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
|
||||
let mut content = Vec::new();
|
||||
if !payload.message.trim().is_empty() {
|
||||
content.push(UserInput::Text {
|
||||
text: payload.message.clone(),
|
||||
});
|
||||
}
|
||||
if let Some(images) = &payload.images {
|
||||
for image in images {
|
||||
content.push(UserInput::Image { url: image.clone() });
|
||||
}
|
||||
}
|
||||
content
|
||||
}
|
}

struct PendingTurn {
    id: String,
    items: Vec<ThreadItem>,
    status: TurnStatus,
}

impl From<PendingTurn> for Turn {
    fn from(value: PendingTurn) -> Self {
        Self {
            id: value.id,
            items: value.items,
            status: value.status,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::AgentMessageEvent;
    use codex_protocol::protocol::AgentReasoningEvent;
    use codex_protocol::protocol::AgentReasoningRawContentEvent;
    use codex_protocol::protocol::TurnAbortReason;
    use codex_protocol::protocol::TurnAbortedEvent;
    use codex_protocol::protocol::UserMessageEvent;
    use pretty_assertions::assert_eq;

    #[test]
    fn builds_multiple_turns_with_reasoning_items() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "First turn".into(),
                images: Some(vec!["https://example.com/one.png".into()]),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Hi there".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "thinking".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "full reasoning".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Second turn".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Reply two".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first = &turns[0];
        assert_eq!(first.id, "turn-1");
        assert_eq!(first.status, TurnStatus::Completed);
        assert_eq!(first.items.len(), 3);
        assert_eq!(
            first.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![
                    UserInput::Text {
                        text: "First turn".into(),
                    },
                    UserInput::Image {
                        url: "https://example.com/one.png".into(),
                    }
                ],
            }
        );
        assert_eq!(
            first.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Hi there".into(),
            }
        );
        assert_eq!(
            first.items[2],
            ThreadItem::Reasoning {
                id: "item-3".into(),
                summary: vec!["thinking".into()],
                content: vec!["full reasoning".into()],
            }
        );

        let second = &turns[1];
        assert_eq!(second.id, "turn-2");
        assert_eq!(second.items.len(), 2);
        assert_eq!(
            second.items[0],
            ThreadItem::UserMessage {
                id: "item-4".into(),
                content: vec![UserInput::Text {
                    text: "Second turn".into()
                }],
            }
        );
        assert_eq!(
            second.items[1],
            ThreadItem::AgentMessage {
                id: "item-5".into(),
                text: "Reply two".into(),
            }
        );
    }

    #[test]
    fn splits_reasoning_when_interleaved() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Turn start".into(),
                images: None,
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "first summary".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "first content".into(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "interlude".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "second summary".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 1);
        let turn = &turns[0];
        assert_eq!(turn.items.len(), 4);

        assert_eq!(
            turn.items[1],
            ThreadItem::Reasoning {
                id: "item-2".into(),
                summary: vec!["first summary".into()],
                content: vec!["first content".into()],
            }
        );
        assert_eq!(
            turn.items[3],
            ThreadItem::Reasoning {
                id: "item-4".into(),
                summary: vec!["second summary".into()],
                content: Vec::new(),
            }
        );
    }

    #[test]
    fn marks_turn_as_interrupted_when_aborted() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Please do the thing".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Working...".into(),
            }),
            EventMsg::TurnAborted(TurnAbortedEvent {
                reason: TurnAbortReason::Replaced,
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Let's try again".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Second attempt complete.".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first_turn = &turns[0];
        assert_eq!(first_turn.status, TurnStatus::Interrupted);
        assert_eq!(first_turn.items.len(), 2);
        assert_eq!(
            first_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![UserInput::Text {
                    text: "Please do the thing".into()
                }],
            }
        );
        assert_eq!(
            first_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Working...".into(),
            }
        );

        let second_turn = &turns[1];
        assert_eq!(second_turn.status, TurnStatus::Completed);
        assert_eq!(second_turn.items.len(), 2);
        assert_eq!(
            second_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-3".into(),
                content: vec![UserInput::Text {
                    text: "Let's try again".into()
                }],
            }
        );
        assert_eq!(
            second_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-4".into(),
                text: "Second attempt complete.".into(),
            }
        );
    }
}

462  codex-rs/app-server-protocol/src/protocol/v1.rs  Normal file
@@ -0,0 +1,462 @@
use std::collections::HashMap;
use std::path::PathBuf;

use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxCommandAssessment;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TurnAbortReason;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
use uuid::Uuid;

// Reuse shared types defined in `common.rs`.
use crate::protocol::common::AuthMode;
use crate::protocol::common::GitSha;

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
    pub title: Option<String>,
    pub version: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub profile: Option<String>,
    pub cwd: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox: Option<SandboxMode>,
    pub config: Option<HashMap<String, serde_json::Value>>,
    pub base_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub developer_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(untagged)]
pub enum GetConversationSummaryParams {
    RolloutPath {
        #[serde(rename = "rolloutPath")]
        rollout_path: PathBuf,
    },
    ConversationId {
        #[serde(rename = "conversationId")]
        conversation_id: ConversationId,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetConversationSummaryResponse {
    pub summary: ConversationSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsParams {
    pub page_size: Option<usize>,
    pub cursor: Option<String>,
    pub model_providers: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ConversationId,
    pub path: PathBuf,
    pub preview: String,
    pub timestamp: Option<String>,
    pub model_provider: String,
    pub cwd: PathBuf,
    pub cli_version: String,
    pub source: SessionSource,
    pub git_info: Option<ConversationGitInfo>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
pub struct ConversationGitInfo {
    pub sha: Option<String>,
    pub branch: Option<String>,
    pub origin_url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsResponse {
    pub items: Vec<ConversationSummary>,
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ConversationId>,
    pub history: Option<Vec<ResponseItem>>,
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub auth_url: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteResponse {
    pub sha: GitSha,
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub risk: Option<SandboxCommandAssessment>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
    #[schemars(with = "String")]
    pub login_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteParams {
    pub cwd: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptParams {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusParams {
    pub include_token: Option<bool>,
    pub refresh_token: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandParams {
    pub command: Vec<String>,
    pub timeout_ms: Option<u64>,
    pub cwd: Option<PathBuf>,
    pub sandbox_policy: Option<SandboxPolicy>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusResponse {
    pub auth_method: Option<AuthMode>,
    pub auth_token: Option<String>,
    pub requires_openai_auth: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserAgentResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    pub alleged_user_email: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    pub model: Option<String>,
    pub reasoning_effort: Option<ReasoningEffort>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserSavedConfig {
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub sandbox_settings: Option<SandboxSettings>,
    pub forced_chatgpt_workspace_id: Option<String>,
    pub forced_login_method: Option<ForcedLoginMethod>,
    pub model: Option<String>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub tools: Option<Tools>,
    pub profile: Option<String>,
    pub profiles: HashMap<String, Profile>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Profile {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Tools {
    pub web_search: Option<bool>,
    pub view_image: Option<bool>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    #[serde(default)]
    pub writable_roots: Vec<PathBuf>,
    pub network_access: Option<bool>,
    pub exclude_tmpdir_env_var: Option<bool>,
    pub exclude_slash_tmp: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
    pub cwd: PathBuf,
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub model: String,
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ConversationId,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationResponse {
    pub abort_reason: TurnAbortReason,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ConversationId,
    #[serde(default)]
    pub experimental_raw_events: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationListenerParams {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
    Text { text: String },
    Image { image_url: String },
    LocalImage { path: PathBuf },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated in favor of AccountLoginCompletedNotification.
pub struct LoginChatGptCompleteNotification {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub success: bool,
    pub error: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    pub session_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub history_log_id: u64,
    #[ts(type = "number")]
    pub history_entry_count: usize,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated notification. Use AccountUpdatedNotification instead.
pub struct AuthStatusChangeNotification {
    pub auth_method: Option<AuthMode>,
}

1273  codex-rs/app-server-protocol/src/protocol/v2.rs  Normal file
File diff suppressed because it is too large

1298  codex-rs/app-server-test-client/Cargo.lock  generated  Normal file
File diff suppressed because it is too large
16  codex-rs/app-server-test-client/Cargo.toml  Normal file
@@ -0,0 +1,16 @@
[package]
name = "codex-app-server-test-client"
version = { workspace = true }
edition = "2024"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
codex-app-server-protocol = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }

2  codex-rs/app-server-test-client/README.md  Normal file
@@ -0,0 +1,2 @@
# App Server Test Client
Exercises simple `codex app-server` flows end-to-end, logging JSON-RPC messages sent between client and server to stdout.
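
A minimal invocation sketch, assuming the crate is built from this workspace under the package name `codex-app-server-test-client` shown in the Cargo.toml above and that clap derives kebab-case subcommand names from the `CliCommand` enum in `main.rs` (the `codex` binary is picked up from `--codex-bin`/`CODEX_BIN`, defaulting to `codex` on `PATH`):

```
cargo run -p codex-app-server-test-client -- send-message "Summarize the diff in this repo"
cargo run -p codex-app-server-test-client -- --codex-bin /path/to/codex send-message-v2 "Run the tests"
```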

809  codex-rs/app-server-test-client/src/main.rs  Normal file
@@ -0,0 +1,809 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::io::Write;
|
||||
use std::process::Child;
|
||||
use std::process::ChildStdin;
|
||||
use std::process::ChildStdout;
|
||||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use clap::Parser;
|
||||
use clap::Subcommand;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::ApprovalDecision;
|
||||
use codex_app_server_protocol::AskForApproval;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::CommandExecutionRequestAcceptSettings;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalParams;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
|
||||
use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::LoginChatGptResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SandboxPolicy;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde_json::Value;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Minimal launcher that initializes the Codex app-server and logs the handshake.
|
||||
#[derive(Parser)]
|
||||
#[command(author = "Codex", version, about = "Bootstrap Codex app-server", long_about = None)]
|
||||
struct Cli {
|
||||
/// Path to the `codex` CLI binary.
|
||||
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
|
||||
codex_bin: String,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: CliCommand,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum CliCommand {
|
||||
/// Send a user message through the Codex app-server.
|
||||
SendMessage {
|
||||
/// User message to send to Codex.
|
||||
#[arg()]
|
||||
user_message: String,
|
||||
},
|
||||
/// Send a user message through the app-server V2 thread/turn APIs.
|
||||
SendMessageV2 {
|
||||
/// User message to send to Codex.
|
||||
#[arg()]
|
||||
user_message: String,
|
||||
},
|
||||
/// Start a V2 turn that elicits an ExecCommand approval.
|
||||
#[command(name = "trigger-cmd-approval")]
|
||||
TriggerCmdApproval {
|
||||
/// Optional prompt; defaults to a simple python command.
|
||||
#[arg()]
|
||||
user_message: Option<String>,
|
||||
},
|
||||
/// Start a V2 turn that elicits an ApplyPatch approval.
|
||||
#[command(name = "trigger-patch-approval")]
|
||||
TriggerPatchApproval {
|
||||
/// Optional prompt; defaults to creating a file via apply_patch.
|
||||
#[arg()]
|
||||
user_message: Option<String>,
|
||||
},
|
||||
/// Start a V2 turn that should not elicit an ExecCommand approval.
|
||||
#[command(name = "no-trigger-cmd-approval")]
|
||||
NoTriggerCmdApproval,
|
||||
/// Trigger the ChatGPT login flow and wait for completion.
|
||||
TestLogin,
|
||||
/// Fetch the current account rate limits from the Codex app-server.
|
||||
GetAccountRateLimits,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let Cli { codex_bin, command } = Cli::parse();
|
||||
|
||||
match command {
|
||||
CliCommand::SendMessage { user_message } => send_message(codex_bin, user_message),
|
||||
CliCommand::SendMessageV2 { user_message } => send_message_v2(codex_bin, user_message),
|
||||
CliCommand::TriggerCmdApproval { user_message } => {
|
||||
trigger_cmd_approval(codex_bin, user_message)
|
||||
}
|
||||
CliCommand::TriggerPatchApproval { user_message } => {
|
||||
trigger_patch_approval(codex_bin, user_message)
|
||||
}
|
||||
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(codex_bin),
|
||||
CliCommand::TestLogin => test_login(codex_bin),
|
||||
CliCommand::GetAccountRateLimits => get_account_rate_limits(codex_bin),
|
||||
}
|
||||
}
|
||||
|
||||
fn send_message(codex_bin: String, user_message: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let conversation = client.new_conversation()?;
|
||||
println!("< newConversation response: {conversation:?}");
|
||||
|
||||
let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
|
||||
println!("< addConversationListener response: {subscription:?}");
|
||||
|
||||
let send_response = client.send_user_message(&conversation.conversation_id, &user_message)?;
|
||||
println!("< sendUserMessage response: {send_response:?}");
|
||||
|
||||
client.stream_conversation(&conversation.conversation_id)?;
|
||||
|
||||
client.remove_conversation_listener(subscription.subscription_id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_message_v2(codex_bin: String, user_message: String) -> Result<()> {
|
||||
send_message_v2_with_policies(codex_bin, user_message, None, None)
|
||||
}
|
||||
|
||||
fn trigger_cmd_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn trigger_patch_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn no_trigger_cmd_approval(codex_bin: String) -> Result<()> {
|
||||
let prompt = "Run `touch should_not_trigger_approval.txt`";
|
||||
send_message_v2_with_policies(codex_bin, prompt.to_string(), None, None)
|
||||
}
|
||||
|
||||
fn send_message_v2_with_policies(
|
||||
codex_bin: String,
|
||||
user_message: String,
|
||||
approval_policy: Option<AskForApproval>,
|
||||
sandbox_policy: Option<SandboxPolicy>,
|
||||
) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let thread_response = client.thread_start(ThreadStartParams::default())?;
|
||||
println!("< thread/start response: {thread_response:?}");
|
||||
let mut turn_params = TurnStartParams {
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text { text: user_message }],
|
||||
..Default::default()
|
||||
};
|
||||
turn_params.approval_policy = approval_policy;
|
||||
turn_params.sandbox_policy = sandbox_policy;
|
||||
|
||||
let turn_response = client.turn_start(turn_params)?;
|
||||
println!("< turn/start response: {turn_response:?}");
|
||||
|
||||
client.stream_turn(&thread_response.thread.id, &turn_response.turn.id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn test_login(codex_bin: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let login_response = client.login_chat_gpt()?;
|
||||
println!("< loginChatGpt response: {login_response:?}");
|
||||
println!(
|
||||
"Open the following URL in your browser to continue:\n{}",
|
||||
login_response.auth_url
|
||||
);
|
||||
|
||||
let completion = client.wait_for_login_completion(&login_response.login_id)?;
|
||||
println!("< loginChatGptComplete notification: {completion:?}");
|
||||
|
||||
if completion.success {
|
||||
println!("Login succeeded.");
|
||||
Ok(())
|
||||
} else {
|
||||
bail!(
|
||||
"login failed: {}",
|
||||
completion
|
||||
.error
|
||||
.as_deref()
|
||||
.unwrap_or("unknown error from loginChatGptComplete")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_account_rate_limits(codex_bin: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let response = client.get_account_rate_limits()?;
|
||||
println!("< account/rateLimits/read response: {response:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct CodexClient {
|
||||
child: Child,
|
||||
stdin: Option<ChildStdin>,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_notifications: VecDeque<JSONRPCNotification>,
|
||||
}
|
||||
|
||||
impl CodexClient {
|
||||
fn spawn(codex_bin: String) -> Result<Self> {
|
||||
let mut codex_app_server = Command::new(&codex_bin)
|
||||
.arg("app-server")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to start `{codex_bin}` app-server"))?;
|
||||
|
||||
let stdin = codex_app_server
|
||||
.stdin
|
||||
.take()
|
||||
.context("codex app-server stdin unavailable")?;
|
||||
let stdout = codex_app_server
|
||||
.stdout
|
||||
.take()
|
||||
.context("codex app-server stdout unavailable")?;
|
||||
|
||||
Ok(Self {
|
||||
child: codex_app_server,
|
||||
stdin: Some(stdin),
|
||||
stdout: BufReader::new(stdout),
|
||||
pending_notifications: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> Result<InitializeResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::Initialize {
|
||||
request_id: request_id.clone(),
|
||||
params: InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-toy-app-server".to_string(),
|
||||
title: Some("Codex Toy App Server".to_string()),
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "initialize")
|
||||
}
|
||||
|
||||
fn new_conversation(&mut self) -> Result<NewConversationResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: request_id.clone(),
|
||||
params: NewConversationParams::default(),
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "newConversation")
|
||||
}
|
||||
|
||||
fn add_conversation_listener(
|
||||
&mut self,
|
||||
conversation_id: &ConversationId,
|
||||
) -> Result<AddConversationSubscriptionResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::AddConversationListener {
|
||||
request_id: request_id.clone(),
|
||||
params: AddConversationListenerParams {
|
||||
conversation_id: *conversation_id,
|
||||
experimental_raw_events: false,
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "addConversationListener")
|
||||
}
|
||||
|
||||
fn remove_conversation_listener(&mut self, subscription_id: Uuid) -> Result<()> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::RemoveConversationListener {
|
||||
request_id: request_id.clone(),
|
||||
params: codex_app_server_protocol::RemoveConversationListenerParams { subscription_id },
|
||||
};
|
||||
|
||||
self.send_request::<codex_app_server_protocol::RemoveConversationSubscriptionResponse>(
|
||||
request,
|
||||
request_id,
|
||||
"removeConversationListener",
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_user_message(
|
||||
&mut self,
|
||||
conversation_id: &ConversationId,
|
||||
message: &str,
|
||||
) -> Result<SendUserMessageResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::SendUserMessage {
|
||||
request_id: request_id.clone(),
|
||||
params: SendUserMessageParams {
|
||||
conversation_id: *conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: message.to_string(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "sendUserMessage")
|
||||
}
|
||||
|
||||
fn thread_start(&mut self, params: ThreadStartParams) -> Result<ThreadStartResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::ThreadStart {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "thread/start")
|
||||
}
|
||||
|
||||
fn turn_start(&mut self, params: TurnStartParams) -> Result<TurnStartResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::TurnStart {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "turn/start")
|
||||
}
|
||||
|
||||
fn login_chat_gpt(&mut self) -> Result<LoginChatGptResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::LoginChatGpt {
|
||||
request_id: request_id.clone(),
|
||||
params: None,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "loginChatGpt")
|
||||
}
|
||||
|
||||
fn get_account_rate_limits(&mut self) -> Result<GetAccountRateLimitsResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: request_id.clone(),
|
||||
params: None,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "account/rateLimits/read")
|
||||
}
|
||||
|
||||
fn stream_conversation(&mut self, conversation_id: &ConversationId) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
if !notification.method.starts_with("codex/event/") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(event) = self.extract_event(notification, conversation_id)? {
|
||||
match &event.msg {
|
||||
EventMsg::AgentMessage(event) => {
|
||||
println!("{}", event.message);
|
||||
}
|
||||
EventMsg::AgentMessageDelta(event) => {
|
||||
print!("{}", event.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
EventMsg::TaskComplete(event) => {
|
||||
println!("\n[task complete: {event:?}]");
|
||||
break;
|
||||
}
|
||||
EventMsg::TurnAborted(event) => {
|
||||
println!("\n[turn aborted: {:?}]", event.reason);
|
||||
break;
|
||||
}
|
||||
EventMsg::Error(event) => {
|
||||
println!("[error] {event:?}");
|
||||
}
|
||||
_ => {
|
||||
println!("[UNKNOWN EVENT] {:?}", event.msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_login_completion(
|
||||
&mut self,
|
||||
expected_login_id: &Uuid,
|
||||
) -> Result<LoginChatGptCompleteNotification> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
if let Ok(server_notification) = ServerNotification::try_from(notification) {
|
||||
match server_notification {
|
||||
ServerNotification::LoginChatGptComplete(completion) => {
|
||||
if &completion.login_id == expected_login_id {
|
||||
return Ok(completion);
|
||||
}
|
||||
|
||||
println!(
|
||||
"[ignoring loginChatGptComplete for unexpected login_id: {}]",
|
||||
completion.login_id
|
||||
);
|
||||
}
|
||||
ServerNotification::AuthStatusChange(status) => {
|
||||
println!("< authStatusChange notification: {status:?}");
|
||||
}
|
||||
ServerNotification::AccountRateLimitsUpdated(snapshot) => {
|
||||
println!("< accountRateLimitsUpdated notification: {snapshot:?}");
|
||||
}
|
||||
ServerNotification::SessionConfigured(_) => {
|
||||
// SessionConfigured notifications are unrelated to login; skip.
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Not a server notification (likely a conversation event); keep waiting.
|
||||
}
|
||||
}
|
||||
|
||||
fn stream_turn(&mut self, thread_id: &str, turn_id: &str) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
let Ok(server_notification) = ServerNotification::try_from(notification) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
match server_notification {
|
||||
ServerNotification::ThreadStarted(payload) => {
|
||||
if payload.thread.id == thread_id {
|
||||
println!("< thread/started notification: {:?}", payload.thread);
|
||||
}
|
||||
}
|
||||
ServerNotification::TurnStarted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("< turn/started notification: {:?}", payload.turn.status);
|
||||
}
|
||||
}
|
||||
ServerNotification::AgentMessageDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::CommandExecutionOutputDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::ItemStarted(payload) => {
|
||||
println!("\n< item started: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::ItemCompleted(payload) => {
|
||||
println!("< item completed: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::TurnCompleted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("\n< turn/completed notification: {:?}", payload.turn.status);
|
||||
if let TurnStatus::Failed { error } = &payload.turn.status {
|
||||
println!("[turn error] {}", error.message);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
ServerNotification::McpToolCallProgress(payload) => {
|
||||
println!("< MCP tool progress: {}", payload.message);
|
||||
}
|
||||
_ => {
|
||||
println!("[UNKNOWN SERVER NOTIFICATION] {server_notification:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn extract_event(
|
||||
&self,
|
||||
notification: JSONRPCNotification,
|
||||
conversation_id: &ConversationId,
|
||||
) -> Result<Option<Event>> {
|
||||
let params = notification
|
||||
.params
|
||||
.context("event notification missing params")?;
|
||||
|
||||
let mut map = match params {
|
||||
Value::Object(map) => map,
|
||||
other => bail!("unexpected params shape: {other:?}"),
|
||||
};
|
||||
|
||||
let conversation_value = map
|
||||
.remove("conversationId")
|
||||
.context("event missing conversationId")?;
|
||||
let notification_conversation: ConversationId = serde_json::from_value(conversation_value)
|
||||
.context("conversationId was not a valid UUID")?;
|
||||
|
||||
if ¬ification_conversation != conversation_id {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let event_value = Value::Object(map);
|
||||
let event: Event =
|
||||
serde_json::from_value(event_value).context("failed to decode event payload")?;
|
||||
Ok(Some(event))
|
||||
}
|
||||
|
||||
fn send_request<T>(
|
||||
&mut self,
|
||||
request: ClientRequest,
|
||||
request_id: RequestId,
|
||||
method: &str,
|
||||
) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
self.write_request(&request)?;
|
||||
self.wait_for_response(request_id, method)
|
||||
}
|
||||
|
||||
fn write_request(&mut self, request: &ClientRequest) -> Result<()> {
|
||||
let request_json = serde_json::to_string(request)?;
|
||||
let request_pretty = serde_json::to_string_pretty(request)?;
|
||||
print_multiline_with_prefix("> ", &request_pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{request_json}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush request to codex app-server")?;
|
||||
} else {
|
||||
bail!("codex app-server stdin closed");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_response<T>(&mut self, request_id: RequestId, method: &str) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Response(JSONRPCResponse { id, result }) => {
|
||||
if id == request_id {
|
||||
return serde_json::from_value(result)
|
||||
.with_context(|| format!("{method} response missing payload"));
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Error(err) => {
|
||||
if err.id == request_id {
|
||||
bail!("{method} failed: {err:?}");
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
self.pending_notifications.push_back(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_notification(&mut self) -> Result<JSONRPCNotification> {
|
||||
if let Some(notification) = self.pending_notifications.pop_front() {
|
||||
return Ok(notification);
|
||||
}
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => return Ok(notification),
|
||||
JSONRPCMessage::Response(_) | JSONRPCMessage::Error(_) => {
|
||||
// No outstanding requests, so ignore stray responses/errors for now.
|
||||
continue;
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_jsonrpc_message(&mut self) -> Result<JSONRPCMessage> {
|
||||
loop {
|
||||
let mut response_line = String::new();
|
||||
let bytes = self
|
||||
.stdout
|
||||
.read_line(&mut response_line)
|
||||
.context("failed to read from codex app-server")?;
|
||||
|
||||
if bytes == 0 {
|
||||
bail!("codex app-server closed stdout");
|
||||
}
|
||||
|
||||
let trimmed = response_line.trim();
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let parsed: Value =
|
||||
serde_json::from_str(trimmed).context("response was not valid JSON-RPC")?;
|
||||
let pretty = serde_json::to_string_pretty(&parsed)?;
|
||||
print_multiline_with_prefix("< ", &pretty);
|
||||
let message: JSONRPCMessage = serde_json::from_value(parsed)
|
||||
.context("response was not a valid JSON-RPC message")?;
|
||||
return Ok(message);
|
||||
}
|
||||
}
|
||||
|
||||
fn request_id(&self) -> RequestId {
|
||||
RequestId::String(Uuid::new_v4().to_string())
|
||||
}
|
||||
|
||||
fn handle_server_request(&mut self, request: JSONRPCRequest) -> Result<()> {
|
||||
let server_request = ServerRequest::try_from(request)
|
||||
.context("failed to deserialize ServerRequest from JSONRPCRequest")?;
|
||||
|
||||
match server_request {
|
||||
ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
|
||||
self.handle_command_execution_request_approval(request_id, params)?;
|
||||
}
|
||||
ServerRequest::FileChangeRequestApproval { request_id, params } => {
|
||||
self.approve_file_change_request(request_id, params)?;
|
||||
}
|
||||
other => {
|
||||
bail!("received unsupported server request: {other:?}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_command_execution_request_approval(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: CommandExecutionRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let CommandExecutionRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
risk,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< commandExecution approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(risk) = risk.as_ref() {
|
||||
println!("< risk assessment: {risk:?}");
|
||||
}
|
||||
|
||||
let response = CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
accept_settings: Some(CommandExecutionRequestAcceptSettings { for_session: false }),
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved commandExecution request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn approve_file_change_request(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: FileChangeRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let FileChangeRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
grant_root,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(grant_root) = grant_root.as_deref() {
|
||||
println!("< grant root: {}", grant_root.display());
|
||||
}
|
||||
|
||||
let response = FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved fileChange request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let message = JSONRPCMessage::Response(JSONRPCResponse {
|
||||
id: request_id,
|
||||
result: serde_json::to_value(response)?,
|
||||
});
|
||||
self.write_jsonrpc_message(message)
|
||||
}
|
||||
|
||||
fn write_jsonrpc_message(&mut self, message: JSONRPCMessage) -> Result<()> {
|
||||
let payload = serde_json::to_string(&message)?;
|
||||
let pretty = serde_json::to_string_pretty(&message)?;
|
||||
print_multiline_with_prefix("> ", &pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{payload}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush response to codex app-server")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
bail!("codex app-server stdin closed")
|
||||
}
|
||||
}
|
||||
|
||||
fn print_multiline_with_prefix(prefix: &str, payload: &str) {
|
||||
for line in payload.lines() {
|
||||
println!("{prefix}{line}");
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for CodexClient {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.stdin.take();
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
let _ = self.child.kill();
|
||||
let _ = self.child.wait();
|
||||
}
|
||||
}

@@ -46,6 +46,7 @@ app_test_support = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }

@@ -1,6 +1,16 @@
# codex-app-server

`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.

## Table of Contents
- [Protocol](#protocol)
- [Message Schema](#message-schema)
- [Lifecycle Overview](#lifecycle-overview)
- [Initialization](#initialization)
- [Core primitives](#core-primitives)
- [Thread & turn endpoints](#thread--turn-endpoints)
- [Auth endpoints](#auth-endpoints)
- [Events (work-in-progress)](#v2-streaming-events-work-in-progress)

## Protocol

@@ -8,8 +18,367 @@ Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports

## Message Schema

Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.

```
codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```

## Lifecycle Overview

- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request before this handshake gets rejected.
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead.
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes). See the sketch after this list.
- Finish the turn: When the model is done (or the turn is interrupted via the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage.
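
For orientation, the notifications for one successful turn typically arrive in roughly this order. This is a sketch assembled from the method names described in this document; payloads are abbreviated and the exact `status` strings may differ:

```json
{ "method": "thread/started", "params": { "thread": { "id": "thr_123", … } } }
{ "method": "turn/started", "params": { "turn": { "id": "turn_456", "status": "inProgress", … } } }
{ "method": "item/started", "params": { "item": { … } } }
{ "method": "item/agentMessage/delta", "params": { … } }
{ "method": "item/completed", "params": { "item": { … } } }
{ "method": "turn/completed", "params": { "turn": { "id": "turn_456", "status": "completed", … } } }
```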

## Initialization

Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.

Example:

```json
{ "method": "initialize", "id": 0, "params": {
  "clientInfo": { "name": "codex-vscode", "title": "Codex VS Code Extension", "version": "0.1.0" }
} }
{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 codex-vscode/0.1.0" } }
{ "method": "initialized" }
```

## Core primitives

We have three top-level primitives:
- Thread - a conversation between the Codex agent and a user. Each thread contains multiple turns.
- Turn - one turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- Item - represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations (see the sketch below).
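
A rough sketch of the three objects, reusing field names from the worked examples later in this document; the item shape is an assumption (item fields vary by item type and are abbreviated here):

```json
// Thread, as returned by thread/start
{ "id": "thr_123", "preview": "", "modelProvider": "openai", "createdAt": 1730910000 }
// Turn, as returned by turn/start
{ "id": "turn_456", "status": "inProgress", "items": [], "error": null }
// Item (assumed shape; fields depend on the item type)
{ "id": "item_1", "type": "agentMessage", … }
```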

## Thread & turn endpoints

The JSON-RPC API exposes dedicated methods for managing Codex conversations. Threads store long-lived conversation metadata, and turns store the per-message exchange (input → Codex output, including streamed items). Use the thread APIs to create, list, or archive sessions, then drive the conversation with turn APIs and notifications.

### Quick reference
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits an `item/completed` notification with a `codeReview` item when results are ready (sketched below).
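
`review/start` is the only method in this list without a worked example below. Since it responds like `turn/start`, a call presumably looks something like the following; the exact params are not documented here, so treat the request shape as an assumption:

```json
{ "method": "review/start", "id": 40, "params": { "threadId": "thr_123", … } }
{ "id": 40, "result": { "turn": { "id": "turn_789", "status": "inProgress", "items": [], "error": null } } }
```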
|
||||
|
||||
### 1) Start or resume a thread

Start a fresh thread when you need a new Codex conversation.

```json
{ "method": "thread/start", "id": 10, "params": {
  // Optionally set config settings. If not specified, the user's
  // current config settings are used.
  "model": "gpt-5.1-codex",
  "cwd": "/Users/me/project",
  "approvalPolicy": "never",
  "sandbox": "workspaceWrite"
} }
{ "id": 10, "result": {
  "thread": {
    "id": "thr_123",
    "preview": "",
    "modelProvider": "openai",
    "createdAt": 1730910000
  }
} }
{ "method": "thread/started", "params": { "thread": { … } } }
```

To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:

```json
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
```

### 2) List threads (pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:

- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — the server defaults to a reasonable page size if unset.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array includes all providers.

Example:

```json
{ "method": "thread/list", "id": 20, "params": {
  "cursor": null,
  "limit": 25
} }
{ "id": 20, "result": {
  "data": [
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
  ],
  "nextCursor": "opaque-token-or-null"
} }
```

When `nextCursor` is `null`, you’ve reached the final page.

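A paging sketch in TypeScript. The `AppServerClient` interface is a hypothetical stand-in for whatever request helper your client already has; the field names match the example above:

```ts
// Hypothetical client helper: sends a JSON-RPC request and resolves with its `result`.
interface AppServerClient {
  request(method: string, params?: unknown): Promise<any>;
}

interface ThreadSummary {
  id: string;
  preview: string;
  modelProvider: string;
  createdAt: number;
}

// Page through thread/list until nextCursor comes back null.
export async function listAllThreads(client: AppServerClient): Promise<ThreadSummary[]> {
  const threads: ThreadSummary[] = [];
  let cursor: string | null = null;
  do {
    const result = await client.request("thread/list", { cursor, limit: 25 });
    threads.push(...result.data);
    cursor = result.nextCursor ?? null;
  } while (cursor !== null);
  return threads;
}
```
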
### 3) Archive a thread

Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.

```json
{ "method": "thread/archive", "id": 21, "params": { "threadId": "thr_b" } }
{ "id": 21, "result": {} }
```

An archived thread will not appear in future calls to `thread/list`.

### 4) Start a turn (send user input)

Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:

- `{"type":"text","text":"Explain this diff"}`
- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`

You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.

```json
{ "method": "turn/start", "id": 30, "params": {
  "threadId": "thr_123",
  "input": [ { "type": "text", "text": "Run tests" } ],
  // Below are optional config overrides
  "cwd": "/Users/me/project",
  "approvalPolicy": "unlessTrusted",
  "sandboxPolicy": {
    "mode": "workspaceWrite",
    "writableRoots": ["/Users/me/project"],
    "networkAccess": true
  },
  "model": "gpt-5.1-codex",
  "effort": "medium",
  "summary": "concise"
} }
{ "id": 30, "result": { "turn": {
  "id": "turn_456",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

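A sketch of driving a turn end to end in TypeScript, assuming a hypothetical `AppServerClient` helper that sends requests and surfaces notifications; it resolves on the `turn/completed` event described in the events section below:

```ts
// Hypothetical helpers: request/response plus a notification subscription that
// returns an unsubscribe function.
interface AppServerClient {
  request(method: string, params?: unknown): Promise<any>;
  onNotification(handler: (method: string, params: any) => void): () => void;
}

// Start a turn with a text prompt and resolve with the final turn state.
export async function runTurn(
  client: AppServerClient,
  threadId: string,
  text: string,
): Promise<any> {
  const { turn } = await client.request("turn/start", {
    threadId,
    input: [{ type: "text", text }],
  });

  return new Promise((resolve) => {
    const unsubscribe = client.onNotification((method, params) => {
      if (method === "turn/completed" && params.turn.id === turn.id) {
        unsubscribe();
        resolve(params.turn); // status: "completed" | "interrupted" | "failed"
      }
    });
  });
}
```
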
### 5) Interrupt an active turn

You can cancel a running turn with `turn/interrupt`.

```json
{ "method": "turn/interrupt", "id": 31, "params": {
  "threadId": "thr_123",
  "turnId": "turn_456"
} }
{ "id": 31, "result": {} }
```

The server requests cancellation of any running subprocesses, then emits a `turn/completed` notification with `status: "interrupted"`. Rely on `turn/completed` to know when Codex-side cleanup is done.

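A sketch of wiring a cancel action to this flow, again against a hypothetical `AppServerClient` helper; note that the promise resolves only once the terminal `turn/completed` arrives:

```ts
// Same hypothetical client helper as in the turn/start sketch.
interface AppServerClient {
  request(method: string, params?: unknown): Promise<any>;
  onNotification(handler: (method: string, params: any) => void): () => void;
}

// Request cancellation, then wait for the terminal turn/completed event.
export async function interruptTurn(
  client: AppServerClient,
  threadId: string,
  turnId: string,
): Promise<void> {
  await client.request("turn/interrupt", { threadId, turnId }); // empty {} response
  await new Promise<void>((resolve) => {
    const unsubscribe = client.onNotification((method, params) => {
      if (method === "turn/completed" && params.turn.id === turnId) {
        unsubscribe(); // params.turn.status should be "interrupted"
        resolve();
      }
    });
  });
}
```
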
### 6) Request a code review

Use `review/start` to run Codex’s reviewer on the currently checked-out project. The request takes the thread id plus a `target` describing what should be reviewed:

- `{"type":"uncommittedChanges"}` — staged, unstaged, and untracked files.
- `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see the prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
- `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
- `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.

The request also accepts `appendToOriginalThread` (bool, default `false`). When `true`, Codex records a final assistant-style message with the review summary in the original thread; when `false`, only the `codeReview` item is emitted for the review run and no extra message is added to the original thread.

Example request/response:

```json
{ "method": "review/start", "id": 40, "params": {
  "threadId": "thr_123",
  "appendToOriginalThread": true,
  "target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
} }
{ "id": 40, "result": { "turn": {
  "id": "turn_900",
  "status": "inProgress",
  "items": [
    { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
  ],
  "error": null
} } }
```

Codex streams the usual `turn/started` notification followed by an `item/started` carrying the `codeReview` item so clients can show progress:

```json
{ "method": "item/started", "params": { "item": {
  "type": "codeReview",
  "id": "turn_900",
  "review": "current changes"
} } }
```

When the reviewer finishes, the server emits `item/completed` containing the same `codeReview` item with the final review text:

```json
{ "method": "item/completed", "params": { "item": {
  "type": "codeReview",
  "id": "turn_900",
  "review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n  ..."
} } }
```

The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::CodeReview` in the generated schema). Use this notification to render the reviewer output in your client.

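A sketch of requesting a review of the working tree and collecting the reviewer text, using the same hypothetical `AppServerClient` helper as the earlier sketches:

```ts
// Same hypothetical client helper as in the earlier sketches.
interface AppServerClient {
  request(method: string, params?: unknown): Promise<any>;
  onNotification(handler: (method: string, params: any) => void): () => void;
}

// Review the working tree and resolve with the reviewer's plain-text output.
export async function reviewUncommittedChanges(
  client: AppServerClient,
  threadId: string,
): Promise<string> {
  // Subscribe before sending the request so the completion cannot be missed.
  const reviewText = new Promise<string>((resolve) => {
    const unsubscribe = client.onNotification((method, params) => {
      if (method === "item/completed" && params.item.type === "codeReview") {
        unsubscribe();
        resolve(params.item.review);
      }
    });
  });

  await client.request("review/start", {
    threadId,
    appendToOriginalThread: false,
    target: { type: "uncommittedChanges" },
  });

  return reviewText;
}
```
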
## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

### Quick reference

- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
- `account/logout` — sign out; triggers `account/updated`.
- `account/updated` (notify) — emitted whenever the auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).

### 1) Check auth state

Request:

```json
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
```

Response examples:

```json
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } }  // No OpenAI auth needed (e.g., OSS/local models)
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } }   // OpenAI auth required (typical for OpenAI-hosted models)
{ "id": 1, "result": { "account": { "type": "apiKey" }, "requiresOpenaiAuth": true } }
{ "id": 1, "result": { "account": { "type": "chatgpt", "email": "user@example.com", "planType": "pro" }, "requiresOpenaiAuth": true } }
```

Field notes:

- `refreshToken` (bool): set `true` to force a token refresh.
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.

### 2) Log in with an API key

1. Send:

```json
{ "method": "account/login/start", "id": 2, "params": { "type": "apiKey", "apiKey": "sk-…" } }
```

2. Expect:

```json
{ "id": 2, "result": { "type": "apiKey" } }
```

3. Notifications:

```json
{ "method": "account/login/completed", "params": { "loginId": null, "success": true, "error": null } }
{ "method": "account/updated", "params": { "authMode": "apikey" } }
```

### 3) Log in with ChatGPT (browser flow)

1. Start:

```json
{ "method": "account/login/start", "id": 3, "params": { "type": "chatgpt" } }
{ "id": 3, "result": { "type": "chatgpt", "loginId": "<uuid>", "authUrl": "https://chatgpt.com/…&redirect_uri=http%3A%2F%2Flocalhost%3A<port>%2Fauth%2Fcallback" } }
```

2. Open `authUrl` in a browser; the app-server hosts the local callback.

3. Wait for notifications:

```json
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": true, "error": null } }
{ "method": "account/updated", "params": { "authMode": "chatgpt" } }
```

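A sketch of the full browser flow from a client, using the hypothetical `AppServerClient` helper; launching the browser with the macOS `open` command is an assumption you would replace per platform:

```ts
import { exec } from "node:child_process";

// Hypothetical client helper used throughout these sketches.
interface AppServerClient {
  request(method: string, params?: unknown): Promise<any>;
  onNotification(handler: (method: string, params: any) => void): () => void;
}

// Start the browser flow and resolve once account/login/completed arrives.
export async function loginWithChatGpt(client: AppServerClient): Promise<void> {
  const { loginId, authUrl } = await client.request("account/login/start", { type: "chatgpt" });

  // Hand the URL to the user's browser (macOS `open` shown; adjust per platform).
  exec(`open "${authUrl}"`);

  await new Promise<void>((resolve, reject) => {
    const unsubscribe = client.onNotification((method, params) => {
      if (method === "account/login/completed" && params.loginId === loginId) {
        unsubscribe();
        if (params.success) {
          resolve();
        } else {
          reject(new Error(params.error ?? "login failed"));
        }
      }
    });
  });
}
```
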
### 4) Cancel a ChatGPT login

```json
{ "method": "account/login/cancel", "id": 4, "params": { "loginId": "<uuid>" } }
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": false, "error": "…" } }
```

### 5) Logout

```json
{ "method": "account/logout", "id": 5 }
{ "id": 5, "result": {} }
{ "method": "account/updated", "params": { "authMode": null } }
```

### 6) Rate limits (ChatGPT)

```json
{ "method": "account/rateLimits/read", "id": 6 }
{ "id": 6, "result": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 1730947200 }, "secondary": null } } }
{ "method": "account/rateLimits/updated", "params": { "rateLimits": { … } } }
```

Field notes:

- `usedPercent` is current usage within the OpenAI quota window.
- `windowDurationMins` is the quota window length.
- `resetsAt` is a Unix timestamp (seconds) for the next reset.

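A small TypeScript sketch of rendering a snapshot for the user. The field names mirror the payload above; `describeRateLimits` is our own helper name:

```ts
// Field names mirror the rateLimits payload above.
interface RateLimitWindow {
  usedPercent: number;
  windowDurationMins?: number | null;
  resetsAt?: number | null; // Unix seconds
}

interface RateLimitSnapshot {
  primary: RateLimitWindow | null;
  secondary: RateLimitWindow | null;
}

// Turn a snapshot (from account/rateLimits/read or .../updated) into a status line.
export function describeRateLimits(snapshot: RateLimitSnapshot): string {
  const window = snapshot.primary ?? snapshot.secondary;
  if (!window) return "No rate limit data";
  const resets =
    window.resetsAt != null ? new Date(window.resetsAt * 1000).toLocaleString() : "unknown";
  return `${window.usedPercent}% of the ${window.windowDurationMins ?? "?"}-minute window used; resets at ${resets}`;
}
```
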
### Dev notes

- `codex app-server generate-ts --out <dir>` emits v2 types under `v2/`.
- `codex app-server generate-json-schema --out <dir>` outputs `codex_app_server_protocol.schemas.json`.
- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.

## Events (work-in-progress)

Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.

### Turn events

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` plus token `usage`). Clients subscribe to the events they care about and render each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.

- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo? } }`.

Today both notifications carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.

#### Errors

An `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo? } }` payload as `turn.status: "failed"` and may precede that terminal notification.

`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:

- `ContextWindowExceeded`
- `UsageLimitExceeded`
- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures, including 4xx/5xx
- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
- `ResponseStreamDisconnected { httpStatusCode? }`: the response SSE stream disconnected mid-turn, before completion
- `ResponseTooManyFailedAttempts { httpStatusCode? }`
- `BadRequest`
- `Unauthorized`
- `SandboxError`
- `InternalServerError`
- `Other`: all unclassified errors

When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.

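A sketch of mapping these errors to user-facing hints in TypeScript. The exact JSON encoding of `CodexErrorInfo` (the tag field name, and whether bare variants arrive as strings) is an assumption here; treat the generated TypeScript types as the source of truth:

```ts
// Shape of the error payload carried by the `error` event and failed turns.
interface TurnError {
  message: string;
  codexErrorInfo?: { type: string; httpStatusCode?: number } | string;
}

// Map a turn error to a user-facing hint; the hint strings are our own.
export function describeTurnError(error: TurnError): string {
  const info: { type: string; httpStatusCode?: number } =
    typeof error.codexErrorInfo === "string"
      ? { type: error.codexErrorInfo }
      : error.codexErrorInfo ?? { type: "Other" };

  switch (info.type) {
    case "ContextWindowExceeded":
      return "The conversation no longer fits the model's context window; start a new thread.";
    case "UsageLimitExceeded":
      return "Usage limit reached; check account/rateLimits/read for when the window resets.";
    case "Unauthorized":
      return "Authentication failed; re-run the login flow.";
    case "HttpConnectionFailed":
    case "ResponseStreamConnectionFailed":
    case "ResponseStreamDisconnected":
      return `Upstream connection problem (HTTP ${info.httpStatusCode ?? "unknown"}); retrying may help.`;
    default:
      return error.message;
  }
}
```
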
#### Thread items

`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:

- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable to most OpenAI models) and `content` holds raw reasoning blocks (applicable to, e.g., open-source models).
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
- `webSearch` — `{id, query}` for a web search request issued by the agent.

All items emit two shared lifecycle events:

- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.

There are additional item-specific events:

#### agentMessage

- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.

#### reasoning

- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
- `item/reasoning/textDelta` — streams raw reasoning text (only applicable to, e.g., open-source models); use `contentIndex` to group deltas that belong together before showing them in the UI.

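A sketch of accumulating `item/agentMessage/delta` payloads per `itemId` in TypeScript; the field names follow the descriptions above, and `item/completed` stays authoritative:

```ts
// Field names follow the delta descriptions above.
interface AgentMessageDelta {
  itemId: string;
  delta: string;
}

// Accumulate streamed agentMessage text per itemId; item/completed stays authoritative.
export class AgentMessageBuffer {
  private texts = new Map<string, string>();

  onDelta({ itemId, delta }: AgentMessageDelta): string {
    const next = (this.texts.get(itemId) ?? "") + delta;
    this.texts.set(itemId, next);
    return next; // partial text to render in the UI
  }

  onCompleted(itemId: string, finalText: string): string {
    this.texts.set(itemId, finalText); // trust the final item over concatenated deltas
    return finalText;
  }
}
```
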
1286
codex-rs/app-server/src/bespoke_event_handling.rs
Normal file
File diff suppressed because it is too large
@@ -19,6 +19,10 @@ pub(crate) async fn run_fuzzy_file_search(
|
||||
roots: Vec<String>,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
) -> Vec<FuzzyFileSearchResult> {
|
||||
if roots.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
#[expect(clippy::expect_used)]
|
||||
let limit_per_root =
|
||||
NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
|
||||
|
||||
@@ -28,6 +28,7 @@ use tracing_subscriber::filter::Targets;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
mod bespoke_event_handling;
|
||||
mod codex_message_processor;
|
||||
mod error_code;
|
||||
mod fuzzy_file_search;
|
||||
@@ -46,7 +47,7 @@ pub async fn run_main(
|
||||
) -> IoResult<()> {
|
||||
// Set up channels.
|
||||
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(CHANNEL_CAPACITY);
|
||||
|
||||
// Task: read from stdin, push to `incoming_tx`.
|
||||
let stdin_reader_handle = tokio::spawn({
|
||||
|
||||
@@ -6,7 +6,6 @@ use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
@@ -64,64 +63,80 @@ impl MessageProcessor {
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
let request_json = match serde_json::to_value(&request) {
|
||||
Ok(request_json) => request_json,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
let codex_request = match serde_json::from_value::<ClientRequest>(request_json) {
|
||||
Ok(codex_request) => codex_request,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Invalid request".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
}
|
||||
|
||||
pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_common::model_presets::ModelPreset;
|
||||
use codex_common::model_presets::ReasoningEffortPreset;
|
||||
use codex_common::model_presets::builtin_model_presets;
|
||||
|
||||
pub fn supported_models() -> Vec<Model> {
|
||||
builtin_model_presets(None)
|
||||
pub fn supported_models(auth_mode: Option<AuthMode>) -> Vec<Model> {
|
||||
builtin_model_presets(auth_mode)
|
||||
.into_iter()
|
||||
.map(model_from_preset)
|
||||
.collect()
|
||||
|
||||
@@ -19,12 +19,12 @@ use crate::error_code::INTERNAL_ERROR_CODE;
|
||||
/// Sends messages to the client and manages request callbacks.
|
||||
pub(crate) struct OutgoingMessageSender {
|
||||
next_request_id: AtomicI64,
|
||||
sender: mpsc::UnboundedSender<OutgoingMessage>,
|
||||
sender: mpsc::Sender<OutgoingMessage>,
|
||||
request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
|
||||
}
|
||||
|
||||
impl OutgoingMessageSender {
|
||||
pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
|
||||
pub(crate) fn new(sender: mpsc::Sender<OutgoingMessage>) -> Self {
|
||||
Self {
|
||||
next_request_id: AtomicI64::new(0),
|
||||
sender,
|
||||
@@ -45,8 +45,12 @@ impl OutgoingMessageSender {
|
||||
}
|
||||
|
||||
let outgoing_message =
|
||||
OutgoingMessage::Request(request.request_with_id(outgoing_message_id));
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
OutgoingMessage::Request(request.request_with_id(outgoing_message_id.clone()));
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send request {outgoing_message_id:?} to client: {err:?}");
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove(&outgoing_message_id);
|
||||
}
|
||||
rx_approve
|
||||
}
|
||||
|
||||
@@ -72,7 +76,9 @@ impl OutgoingMessageSender {
|
||||
match serde_json::to_value(response) {
|
||||
Ok(result) => {
|
||||
let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send response to client: {err:?}");
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
self.send_error(
|
||||
@@ -89,21 +95,29 @@ impl OutgoingMessageSender {
|
||||
}
|
||||
|
||||
pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
|
||||
let _ = self
|
||||
if let Err(err) = self
|
||||
.sender
|
||||
.send(OutgoingMessage::AppServerNotification(notification));
|
||||
.send(OutgoingMessage::AppServerNotification(notification))
|
||||
.await
|
||||
{
|
||||
warn!("failed to send server notification to client: {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
/// All notifications should be migrated to [`ServerNotification`] and
|
||||
/// [`OutgoingMessage::Notification`] should be removed.
|
||||
pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
|
||||
let outgoing_message = OutgoingMessage::Notification(notification);
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send notification to client: {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
|
||||
let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send error to client: {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -141,9 +155,13 @@ pub(crate) struct OutgoingError {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use codex_app_server_protocol::AccountLoginCompletedNotification;
|
||||
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
|
||||
use codex_app_server_protocol::AccountUpdatedNotification;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
@@ -166,6 +184,7 @@ mod tests {
|
||||
"params": {
|
||||
"loginId": Uuid::nil(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
@@ -175,27 +194,79 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification = ServerNotification::AccountRateLimitsUpdated(RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25.0,
|
||||
window_minutes: Some(15),
|
||||
resets_at: Some(123),
|
||||
fn verify_account_login_completed_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification {
|
||||
login_id: Some(Uuid::nil().to_string()),
|
||||
success: true,
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/completed",
|
||||
"params": {
|
||||
"loginId": Uuid::nil().to_string(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
secondary: None,
|
||||
});
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25,
|
||||
window_duration_mins: Some(15),
|
||||
resets_at: Some(123),
|
||||
}),
|
||||
secondary: None,
|
||||
credits: None,
|
||||
},
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/updated",
|
||||
"params": {
|
||||
"primary": {
|
||||
"used_percent": 25.0,
|
||||
"window_minutes": 15,
|
||||
"resets_at": 123,
|
||||
},
|
||||
"secondary": null,
|
||||
"rateLimits": {
|
||||
"primary": {
|
||||
"usedPercent": 25,
|
||||
"windowDurationMins": 15,
|
||||
"resetsAt": 123
|
||||
},
|
||||
"secondary": null,
|
||||
"credits": null
|
||||
}
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_updated_notification_serialization() {
|
||||
let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/updated",
|
||||
"params": {
|
||||
"authMode": "apikey"
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
|
||||
@@ -13,6 +13,7 @@ base64 = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
@@ -21,4 +22,5 @@ tokio = { workspace = true, features = [
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
] }
|
||||
uuid = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
|
||||
@@ -2,6 +2,7 @@ mod auth_fixtures;
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod responses;
|
||||
mod rollout;
|
||||
|
||||
pub use auth_fixtures::ChatGptAuthFixture;
|
||||
pub use auth_fixtures::ChatGptIdTokenClaims;
|
||||
@@ -10,9 +11,11 @@ pub use auth_fixtures::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use mcp_process::McpProcess;
|
||||
pub use mock_model_server::create_mock_chat_completions_server;
|
||||
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
|
||||
pub use responses::create_apply_patch_sse_response;
|
||||
pub use responses::create_final_assistant_message_sse_response;
|
||||
pub use responses::create_shell_sse_response;
|
||||
pub use rollout::create_fake_rollout;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
|
||||
|
||||
@@ -14,30 +14,38 @@ use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::FeedbackUploadParams;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::ListModelsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::UploadFeedbackParams;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ReviewStartParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
@@ -243,10 +251,19 @@ impl McpProcess {
|
||||
self.send_request("account/rateLimits/read", None).await
|
||||
}
|
||||
|
||||
/// Send a `feedback/upload` JSON-RPC request.
|
||||
pub async fn send_upload_feedback_request(
|
||||
/// Send an `account/read` JSON-RPC request.
|
||||
pub async fn send_get_account_request(
|
||||
&mut self,
|
||||
params: UploadFeedbackParams,
|
||||
params: GetAccountParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/read", params).await
|
||||
}
|
||||
|
||||
/// Send a `feedback/upload` JSON-RPC request.
|
||||
pub async fn send_feedback_upload_request(
|
||||
&mut self,
|
||||
params: FeedbackUploadParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("feedback/upload", params).await
|
||||
@@ -275,10 +292,46 @@ impl McpProcess {
|
||||
self.send_request("listConversations", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/start` JSON-RPC request.
|
||||
pub async fn send_thread_start_request(
|
||||
&mut self,
|
||||
params: ThreadStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/resume` JSON-RPC request.
|
||||
pub async fn send_thread_resume_request(
|
||||
&mut self,
|
||||
params: ThreadResumeParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/resume", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/archive` JSON-RPC request.
|
||||
pub async fn send_thread_archive_request(
|
||||
&mut self,
|
||||
params: ThreadArchiveParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/archive", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/list` JSON-RPC request.
|
||||
pub async fn send_thread_list_request(
|
||||
&mut self,
|
||||
params: ThreadListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `model/list` JSON-RPC request.
|
||||
pub async fn send_list_models_request(
|
||||
&mut self,
|
||||
params: ListModelsParams,
|
||||
params: ModelListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("model/list", params).await
|
||||
@@ -307,6 +360,33 @@ impl McpProcess {
|
||||
self.send_request("loginChatGpt", None).await
|
||||
}
|
||||
|
||||
/// Send a `turn/start` JSON-RPC request (v2).
|
||||
pub async fn send_turn_start_request(
|
||||
&mut self,
|
||||
params: TurnStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("turn/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `turn/interrupt` JSON-RPC request (v2).
|
||||
pub async fn send_turn_interrupt_request(
|
||||
&mut self,
|
||||
params: TurnInterruptParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("turn/interrupt", params).await
|
||||
}
|
||||
|
||||
/// Send a `review/start` JSON-RPC request (v2).
|
||||
pub async fn send_review_start_request(
|
||||
&mut self,
|
||||
params: ReviewStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("review/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `cancelLoginChatGpt` JSON-RPC request.
|
||||
pub async fn send_cancel_login_chat_gpt_request(
|
||||
&mut self,
|
||||
@@ -321,6 +401,40 @@ impl McpProcess {
|
||||
self.send_request("logoutChatGpt", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/logout` JSON-RPC request.
|
||||
pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("account/logout", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/start` JSON-RPC request for API key login.
|
||||
pub async fn send_login_account_api_key_request(
|
||||
&mut self,
|
||||
api_key: &str,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = serde_json::json!({
|
||||
"type": "apiKey",
|
||||
"apiKey": api_key,
|
||||
});
|
||||
self.send_request("account/login/start", Some(params)).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/start` JSON-RPC request for ChatGPT login.
|
||||
pub async fn send_login_account_chatgpt_request(&mut self) -> anyhow::Result<i64> {
|
||||
let params = serde_json::json!({
|
||||
"type": "chatgpt"
|
||||
});
|
||||
self.send_request("account/login/start", Some(params)).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/cancel` JSON-RPC request.
|
||||
pub async fn send_cancel_login_account_request(
|
||||
&mut self,
|
||||
params: CancelLoginAccountParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/login/cancel", params).await
|
||||
}
|
||||
|
||||
/// Send a `fuzzyFileSearch` JSON-RPC request.
|
||||
pub async fn send_fuzzy_file_search_request(
|
||||
&mut self,
|
||||
|
||||
@@ -29,6 +29,25 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
|
||||
server
|
||||
}
|
||||
|
||||
/// Same as `create_mock_chat_completions_server` but does not enforce an
|
||||
/// expectation on the number of calls.
|
||||
pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
|
||||
let server = MockServer::start().await;
|
||||
|
||||
let seq_responder = SeqResponder {
|
||||
num_calls: AtomicUsize::new(0),
|
||||
responses,
|
||||
};
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/chat/completions"))
|
||||
.respond_with(seq_responder)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
server
|
||||
}
|
||||
|
||||
struct SeqResponder {
|
||||
num_calls: AtomicUsize,
|
||||
responses: Vec<String>,
|
||||
|
||||
82
codex-rs/app-server/tests/common/rollout.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use anyhow::Result;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
|
||||
///
|
||||
/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.
|
||||
/// - `meta_rfc3339` is the envelope timestamp used in JSON lines.
|
||||
/// - `preview` is the user message preview text.
|
||||
/// - `model_provider` optionally sets the provider in the session meta payload.
|
||||
///
|
||||
/// Returns the generated conversation/session UUID as a string.
|
||||
pub fn create_fake_rollout(
|
||||
codex_home: &Path,
|
||||
filename_ts: &str,
|
||||
meta_rfc3339: &str,
|
||||
preview: &str,
|
||||
model_provider: Option<&str>,
|
||||
) -> Result<String> {
|
||||
let uuid = Uuid::new_v4();
|
||||
let uuid_str = uuid.to_string();
|
||||
let conversation_id = ConversationId::from_string(&uuid_str)?;
|
||||
|
||||
// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
let month = &filename_ts[5..7];
|
||||
let day = &filename_ts[8..10];
|
||||
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
||||
fs::create_dir_all(&dir)?;
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
|
||||
// Build JSONL lines
|
||||
let payload = serde_json::to_value(SessionMeta {
|
||||
id: conversation_id,
|
||||
timestamp: meta_rfc3339.to_string(),
|
||||
cwd: PathBuf::from("/"),
|
||||
originator: "codex".to_string(),
|
||||
cli_version: "0.0.0".to_string(),
|
||||
instructions: None,
|
||||
source: SessionSource::Cli,
|
||||
model_provider: model_provider.map(str::to_string),
|
||||
})?;
|
||||
|
||||
let lines = [
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": payload
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"kind": "plain"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
];
|
||||
|
||||
fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
Ok(uuid_str)
|
||||
}
|
||||
@@ -12,7 +12,7 @@ use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(20);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn archive_conversation_moves_rollout_into_archived_directory() -> Result<()> {
|
||||
|
||||
@@ -27,7 +27,7 @@ fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "gpt-5-codex"
|
||||
model = "gpt-5.1-codex-max"
|
||||
approval_policy = "on-request"
|
||||
sandbox_mode = "workspace-write"
|
||||
model_reasoning_summary = "detailed"
|
||||
@@ -87,7 +87,7 @@ async fn get_config_toml_parses_all_fields() -> Result<()> {
|
||||
}),
|
||||
forced_chatgpt_workspace_id: Some("12345678-0000-0000-0000-000000000000".into()),
|
||||
forced_login_method: Some(ForcedLoginMethod::Chatgpt),
|
||||
model: Some("gpt-5-codex".into()),
|
||||
model: Some("gpt-5.1-codex-max".into()),
|
||||
model_reasoning_effort: Some(ReasoningEffort::High),
|
||||
model_reasoning_summary: Some(ReasoningSummary::Detailed),
|
||||
model_verbosity: Some(Verbosity::Medium),
|
||||
|
||||
@@ -146,7 +146,7 @@ fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
@@ -11,13 +12,12 @@ use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ResumeConversationResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::SessionConfiguredNotification;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use uuid::Uuid;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
@@ -168,10 +168,14 @@ async fn test_list_and_resume_conversations() -> Result<()> {
|
||||
assert!(empty_items.is_empty());
|
||||
assert!(empty_next.is_none());
|
||||
|
||||
// Now resume one of the sessions and expect a SessionConfigured notification and response.
|
||||
let first_item = &items[0];
|
||||
|
||||
// Now resume one of the sessions from an explicit rollout path.
|
||||
let resume_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: items[0].path.clone(),
|
||||
path: Some(first_item.path.clone()),
|
||||
conversation_id: None,
|
||||
history: None,
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
@@ -186,17 +190,25 @@ async fn test_list_and_resume_conversations() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
// Basic shape assertion: ensure event type is sessionConfigured
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
rollout_path,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert_eq!(items[0].path.clone(), rollout_path);
|
||||
assert_eq!(rollout_path, first_item.path.clone());
|
||||
let session_initial_messages = session_initial_messages
|
||||
.expect("expected initial messages when resuming from rollout path");
|
||||
match session_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages from rollout resume: {other:#?}"),
|
||||
}
|
||||
|
||||
// Then the response for resumeConversation
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
@@ -205,77 +217,140 @@ async fn test_list_and_resume_conversations() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id, ..
|
||||
conversation_id,
|
||||
model: resume_model,
|
||||
initial_messages: response_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
// conversation id should be a valid UUID
|
||||
assert!(!conversation_id.to_string().is_empty());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_fake_rollout(
|
||||
codex_home: &Path,
|
||||
filename_ts: &str,
|
||||
meta_rfc3339: &str,
|
||||
preview: &str,
|
||||
model_provider: Option<&str>,
|
||||
) -> Result<()> {
|
||||
let uuid = Uuid::new_v4();
|
||||
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
||||
let year = &filename_ts[0..4];
|
||||
let month = &filename_ts[5..7];
|
||||
let day = &filename_ts[8..10];
|
||||
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
||||
fs::create_dir_all(&dir)?;
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
let mut lines = Vec::new();
|
||||
// Meta line with timestamp (flattened meta in payload for new schema)
|
||||
let mut payload = json!({
|
||||
"id": uuid,
|
||||
"timestamp": meta_rfc3339,
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null,
|
||||
});
|
||||
if let Some(provider) = model_provider {
|
||||
payload["model_provider"] = json!(provider);
|
||||
assert_eq!(resume_model, "o3");
|
||||
let response_initial_messages =
|
||||
response_initial_messages.expect("expected initial messages in resume response");
|
||||
match response_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages in resume response: {other:#?}"),
|
||||
}
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": payload
|
||||
|
||||
// Resuming with only a conversation id should locate the rollout automatically.
|
||||
let resume_by_id_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: None,
|
||||
conversation_id: Some(first_item.conversation_id),
|
||||
history: None,
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
// Minimal user message entry as a persisted response item (with envelope timestamp)
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
.await?;
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
rollout_path,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert_eq!(rollout_path, first_item.path.clone());
|
||||
let session_initial_messages = session_initial_messages
|
||||
.expect("expected initial messages when resuming from conversation id");
|
||||
match session_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => panic!("unexpected initial messages from conversation id resume: {other:#?}"),
|
||||
}
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_by_id_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id: by_id_conversation_id,
|
||||
model: by_id_model,
|
||||
initial_messages: by_id_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
assert!(!by_id_conversation_id.to_string().is_empty());
|
||||
assert_eq!(by_id_model, "o3");
|
||||
let by_id_initial_messages = by_id_initial_messages
|
||||
.expect("expected initial messages when resuming from conversation id response");
|
||||
match by_id_initial_messages.as_slice() {
|
||||
[EventMsg::UserMessage(message)] => {
|
||||
assert_eq!(message.message, first_item.preview.clone());
|
||||
}
|
||||
other => {
|
||||
panic!("unexpected initial messages in conversation id resume response: {other:#?}")
|
||||
}
|
||||
}
|
||||
|
||||
// Resuming with explicit history should succeed even without a stored rollout.
|
||||
let fork_history_text = "Hello from history";
|
||||
let history = vec![ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: fork_history_text.to_string(),
|
||||
}],
|
||||
}];
|
||||
let resume_with_history_req_id = mcp
|
||||
.send_resume_conversation_request(ResumeConversationParams {
|
||||
path: None,
|
||||
conversation_id: None,
|
||||
history: Some(history),
|
||||
overrides: Some(NewConversationParams {
|
||||
model: Some("o3".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.to_string(),
|
||||
.await?;
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await??;
|
||||
let session_configured: ServerNotification = notification.try_into()?;
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
initial_messages: session_initial_messages,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert!(
|
||||
session_initial_messages.as_ref().is_none_or(Vec::is_empty),
|
||||
"expected no initial messages when resuming from explicit history but got {session_initial_messages:#?}"
|
||||
);
|
||||
// Add a matching user message event line to satisfy filters
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"kind": "plain"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(resume_with_history_req_id)),
|
||||
)
|
||||
.await??;
|
||||
let ResumeConversationResponse {
|
||||
conversation_id: history_conversation_id,
|
||||
model: history_model,
|
||||
initial_messages: history_initial_messages,
|
||||
..
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)?;
|
||||
assert!(!history_conversation_id.to_string().is_empty());
|
||||
assert_eq!(history_model, "o3");
|
||||
assert!(
|
||||
history_initial_messages.as_ref().is_none_or(Vec::is_empty),
|
||||
"expected no initial messages in resume response when history is provided but got {history_initial_messages:#?}"
|
||||
);
|
||||
fs::write(file_path, lines.join("\n") + "\n")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -7,9 +7,8 @@ mod fuzzy_file_search;
|
||||
mod interrupt;
|
||||
mod list_resume;
|
||||
mod login;
|
||||
mod model_list;
|
||||
mod rate_limits;
|
||||
mod send_message;
|
||||
mod set_default_model;
|
||||
mod user_agent;
|
||||
mod user_info;
|
||||
mod v2;
|
||||
|
||||
@@ -16,6 +16,7 @@ use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::RawResponseItemEvent;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
@@ -43,7 +44,9 @@ async fn test_send_message_success() -> Result<()> {
|
||||
|
||||
// Start a conversation using the new wire API.
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams::default())
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
@@ -142,7 +145,10 @@ async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams::default())
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
developer_instructions: Some("Use the test harness tools.".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
@@ -176,6 +182,9 @@ async fn test_send_message_raw_notifications_opt_in() -> Result<()> {
        })
        .await?;

    let developer = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_developer_message(&developer, "Use the test harness tools.");

    let instructions = read_raw_response_item(&mut mcp, conversation_id).await;
    assert_instructions_message(&instructions);

@@ -294,7 +303,9 @@ async fn read_raw_response_item(
        .cloned()
        .expect("raw response item should include msg payload");

    serde_json::from_value(msg_value).expect("deserialize raw response item")
    let event: RawResponseItemEvent =
        serde_json::from_value(msg_value).expect("deserialize raw response item");
    event.item
}

fn assert_instructions_message(item: &ResponseItem) {
@@ -302,10 +313,11 @@ fn assert_instructions_message(item: &ResponseItem) {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "user");
            let texts = content_texts(content);
            let is_instructions = texts
                .iter()
                .any(|text| text.starts_with("# AGENTS.md instructions for "));
            assert!(
                texts
                    .iter()
                    .any(|text| text.contains("<user_instructions>")),
                is_instructions,
                "expected instructions message, got {texts:?}"
            );
        }
@@ -313,6 +325,21 @@ fn assert_instructions_message(item: &ResponseItem) {
    }
}

fn assert_developer_message(item: &ResponseItem, expected_text: &str) {
    match item {
        ResponseItem::Message { role, content, .. } => {
            assert_eq!(role, "developer");
            let texts = content_texts(content);
            assert_eq!(
                texts,
                vec![expected_text],
                "expected developer instructions message, got {texts:?}"
            );
        }
        other => panic!("expected developer instructions message, got {other:?}"),
    }
}

fn assert_environment_message(item: &ResponseItem) {
    match item {
        ResponseItem::Message { role, content, .. } => {

@@ -57,7 +57,7 @@ fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
    std::fs::write(
        config_toml,
        r#"
model = "gpt-5-codex"
model = "gpt-5.1-codex-max"
model_reasoning_effort = "medium"
"#,
    )

codex-rs/app-server/tests/suite/v2/account.rs (new file, 492 lines)
@@ -0,0 +1,492 @@
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
|
||||
use app_test_support::ChatGptAuthFixture;
|
||||
use app_test_support::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::Account;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountResponse;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAccountResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginAccountResponse;
|
||||
use codex_app_server_protocol::LogoutAccountResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_login::login_with_api_key;
|
||||
use codex_protocol::account::PlanType as AccountPlanType;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serial_test::serial;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
// Helper to create a minimal config.toml for the app server
|
||||
#[derive(Default)]
|
||||
struct CreateConfigTomlParams {
|
||||
forced_method: Option<String>,
|
||||
forced_workspace_id: Option<String>,
|
||||
requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, params: CreateConfigTomlParams) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
let forced_line = if let Some(method) = params.forced_method {
|
||||
format!("forced_login_method = \"{method}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let forced_workspace_line = if let Some(ws) = params.forced_workspace_id {
|
||||
format!("forced_chatgpt_workspace_id = \"{ws}\"\n")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let requires_line = match params.requires_openai_auth {
|
||||
Some(true) => "requires_openai_auth = true\n".to_string(),
|
||||
Some(false) => String::new(),
|
||||
None => String::new(),
|
||||
};
|
||||
let contents = format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
{forced_line}
|
||||
{forced_workspace_line}
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "http://127.0.0.1:0/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
"#
|
||||
);
|
||||
std::fs::write(config_toml, contents)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn logout_account_removes_auth_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
login_with_api_key(
|
||||
codex_home.path(),
|
||||
"sk-test-key",
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let id = mcp.send_logout_account_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: LogoutAccountResponse = to_response(resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
assert!(
|
||||
payload.auth_mode.is_none(),
|
||||
"auth_method should be None after logout"
|
||||
);
|
||||
|
||||
assert!(
|
||||
!codex_home.path().join("auth.json").exists(),
|
||||
"auth.json should be deleted"
|
||||
);
|
||||
|
||||
let get_id = mcp
|
||||
.send_get_account_request(GetAccountParams {
|
||||
refresh_token: false,
|
||||
})
|
||||
.await?;
|
||||
let get_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(get_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(get_resp)?;
|
||||
assert_eq!(account.account, None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_succeeds_and_notifies() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
assert_eq!(login, LoginAccountResponse::ApiKey {});
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, None);
|
||||
pretty_assertions::assert_eq!(payload.success, true);
|
||||
pretty_assertions::assert_eq!(payload.error, None);
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountUpdated(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.auth_mode, Some(AuthMode::ApiKey));
|
||||
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_api_key_rejected_when_forced_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("chatgpt".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"API key login is disabled. Use ChatGPT login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn login_account_chatgpt_rejected_when_forced_api() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_method: Some("api".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let err: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert_eq!(
|
||||
err.error.message,
|
||||
"ChatGPT login is disabled. Use API key login instead."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_start() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), CreateConfigTomlParams::default())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { login_id, auth_url } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("redirect_uri=http%3A%2F%2Flocalhost"),
|
||||
"auth_url should contain a redirect_uri to localhost"
|
||||
);
|
||||
|
||||
let cancel_id = mcp
|
||||
.send_cancel_login_account_request(CancelLoginAccountParams {
|
||||
login_id: login_id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let cancel_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(cancel_id)),
|
||||
)
|
||||
.await??;
|
||||
let _ok: CancelLoginAccountResponse = to_response(cancel_resp)?;
|
||||
|
||||
let note = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("account/login/completed"),
|
||||
)
|
||||
.await??;
|
||||
let parsed: ServerNotification = note.try_into()?;
|
||||
let ServerNotification::AccountLoginCompleted(payload) = parsed else {
|
||||
bail!("unexpected notification: {parsed:?}");
|
||||
};
|
||||
pretty_assertions::assert_eq!(payload.login_id, Some(login_id));
|
||||
pretty_assertions::assert_eq!(payload.success, false);
|
||||
assert!(
|
||||
payload.error.is_some(),
|
||||
"expected a non-empty error on cancel"
|
||||
);
|
||||
|
||||
let maybe_updated = timeout(
|
||||
Duration::from_millis(500),
|
||||
mcp.read_stream_until_notification_message("account/updated"),
|
||||
)
|
||||
.await;
|
||||
assert!(
|
||||
maybe_updated.is_err(),
|
||||
"account/updated should not be emitted when login is cancelled"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
// Serialize tests that launch the login server since it binds to a fixed port.
|
||||
#[serial(login_port)]
|
||||
async fn login_account_chatgpt_includes_forced_workspace_query_param() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
forced_workspace_id: Some("ws-forced".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp.send_login_account_chatgpt_request().await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let login: LoginAccountResponse = to_response(resp)?;
|
||||
let LoginAccountResponse::Chatgpt { auth_url, .. } = login else {
|
||||
bail!("unexpected login response: {login:?}");
|
||||
};
|
||||
assert!(
|
||||
auth_url.contains("allowed_workspace_id=ws-forced"),
|
||||
"auth URL should include forced workspace"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_no_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let account: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
assert_eq!(account.account, None, "expected no account");
|
||||
assert_eq!(account.requires_openai_auth, true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_api_key() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let req_id = mcp
|
||||
.send_login_account_api_key_request("sk-test-key")
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
||||
)
|
||||
.await??;
|
||||
let _login_ok = to_response::<LoginAccountResponse>(resp)?;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::ApiKey {}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_when_auth_not_required() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(false),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: None,
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_account_with_chatgpt() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(
|
||||
codex_home.path(),
|
||||
CreateConfigTomlParams {
|
||||
requires_openai_auth: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
write_chatgpt_auth(
|
||||
codex_home.path(),
|
||||
ChatGptAuthFixture::new("access-chatgpt")
|
||||
.email("user@example.com")
|
||||
.plan_type("pro"),
|
||||
AuthCredentialsStoreMode::File,
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)]).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let params = GetAccountParams {
|
||||
refresh_token: false,
|
||||
};
|
||||
let request_id = mcp.send_get_account_request(params).await?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
let received: GetAccountResponse = to_response(resp)?;
|
||||
|
||||
let expected = GetAccountResponse {
|
||||
account: Some(Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: AccountPlanType::Pro,
|
||||
}),
|
||||
requires_openai_auth: true,
|
||||
};
|
||||
assert_eq!(received, expected);
|
||||
Ok(())
|
||||
}
|
||||
codex-rs/app-server/tests/suite/v2/mod.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
mod account;
mod model_list;
mod rate_limits;
mod review;
mod thread_archive;
mod thread_list;
mod thread_resume;
mod thread_start;
mod turn_interrupt;
mod turn_start;
@@ -6,9 +6,9 @@ use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListModelsParams;
|
||||
use codex_app_server_protocol::ListModelsResponse;
|
||||
use codex_app_server_protocol::Model;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::ModelListResponse;
|
||||
use codex_app_server_protocol::ReasoningEffortOption;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
@@ -19,7 +19,7 @@ use tokio::time::timeout;
|
||||
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -27,8 +27,8 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(100),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(100),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
@@ -39,14 +39,44 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse { items, next_cursor } = to_response::<ListModelsResponse>(response)?;
|
||||
let ModelListResponse {
|
||||
data: items,
|
||||
next_cursor,
|
||||
} = to_response::<ModelListResponse>(response)?;
|
||||
|
||||
let expected_models = vec![
|
||||
Model {
|
||||
id: "gpt-5-codex".to_string(),
|
||||
model: "gpt-5-codex".to_string(),
|
||||
display_name: "gpt-5-codex".to_string(),
|
||||
description: "Optimized for coding tasks with many tools.".to_string(),
|
||||
id: "gpt-5.1-codex-max".to_string(),
|
||||
model: "gpt-5.1-codex-max".to_string(),
|
||||
display_name: "gpt-5.1-codex-max".to_string(),
|
||||
description: "Latest Codex-optimized flagship for deep and fast reasoning.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Fast responses with lighter reasoning".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Balances speed and reasoning depth for everyday tasks"
|
||||
.to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex problems".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::XHigh,
|
||||
description: "Extra high reasoning depth for complex problems".to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: true,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.1-codex".to_string(),
|
||||
model: "gpt-5.1-codex".to_string(),
|
||||
display_name: "gpt-5.1-codex".to_string(),
|
||||
description: "Optimized for codex.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
@@ -63,18 +93,33 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: true,
|
||||
is_default: false,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5".to_string(),
|
||||
model: "gpt-5".to_string(),
|
||||
display_name: "gpt-5".to_string(),
|
||||
description: "Broad world knowledge with strong general reasoning.".to_string(),
|
||||
id: "gpt-5.1-codex-mini".to_string(),
|
||||
model: "gpt-5.1-codex-mini".to_string(),
|
||||
display_name: "gpt-5.1-codex-mini".to_string(),
|
||||
description: "Optimized for codex. Cheaper, faster, but less capable.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Minimal,
|
||||
description: "Fastest responses with little reasoning".to_string(),
|
||||
reasoning_effort: ReasoningEffort::Medium,
|
||||
description: "Dynamically adjusts reasoning based on the task".to_string(),
|
||||
},
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::High,
|
||||
description: "Maximizes reasoning depth for complex or ambiguous problems"
|
||||
.to_string(),
|
||||
},
|
||||
],
|
||||
default_reasoning_effort: ReasoningEffort::Medium,
|
||||
is_default: false,
|
||||
},
|
||||
Model {
|
||||
id: "gpt-5.1".to_string(),
|
||||
model: "gpt-5.1".to_string(),
|
||||
display_name: "gpt-5.1".to_string(),
|
||||
description: "Broad world knowledge with strong general reasoning.".to_string(),
|
||||
supported_reasoning_efforts: vec![
|
||||
ReasoningEffortOption {
|
||||
reasoning_effort: ReasoningEffort::Low,
|
||||
description: "Balances speed with some reasoning; useful for straightforward \
|
||||
@@ -103,7 +148,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_pagination_works() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -111,8 +156,8 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let first_request = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(1),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: None,
|
||||
})
|
||||
.await?;
|
||||
@@ -123,18 +168,18 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse {
|
||||
items: first_items,
|
||||
let ModelListResponse {
|
||||
data: first_items,
|
||||
next_cursor: first_cursor,
|
||||
} = to_response::<ListModelsResponse>(first_response)?;
|
||||
} = to_response::<ModelListResponse>(first_response)?;
|
||||
|
||||
assert_eq!(first_items.len(), 1);
|
||||
assert_eq!(first_items[0].id, "gpt-5-codex");
|
||||
assert_eq!(first_items[0].id, "gpt-5.1-codex-max");
|
||||
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;
|
||||
|
||||
let second_request = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: Some(1),
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: Some(next_cursor.clone()),
|
||||
})
|
||||
.await?;
|
||||
@@ -145,18 +190,62 @@ async fn list_models_pagination_works() -> Result<()> {
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ListModelsResponse {
|
||||
items: second_items,
|
||||
let ModelListResponse {
|
||||
data: second_items,
|
||||
next_cursor: second_cursor,
|
||||
} = to_response::<ListModelsResponse>(second_response)?;
|
||||
} = to_response::<ModelListResponse>(second_response)?;
|
||||
|
||||
assert_eq!(second_items.len(), 1);
|
||||
assert_eq!(second_items[0].id, "gpt-5");
|
||||
assert!(second_cursor.is_none());
|
||||
assert_eq!(second_items[0].id, "gpt-5.1-codex");
|
||||
let third_cursor = second_cursor.ok_or_else(|| anyhow!("cursor for third page"))?;
|
||||
|
||||
let third_request = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: Some(third_cursor.clone()),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let third_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(third_request)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ModelListResponse {
|
||||
data: third_items,
|
||||
next_cursor: third_cursor,
|
||||
} = to_response::<ModelListResponse>(third_response)?;
|
||||
|
||||
assert_eq!(third_items.len(), 1);
|
||||
assert_eq!(third_items[0].id, "gpt-5.1-codex-mini");
|
||||
let fourth_cursor = third_cursor.ok_or_else(|| anyhow!("cursor for fourth page"))?;
|
||||
|
||||
let fourth_request = mcp
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: Some(1),
|
||||
cursor: Some(fourth_cursor.clone()),
|
||||
})
|
||||
.await?;
|
||||
|
||||
let fourth_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(fourth_request)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let ModelListResponse {
|
||||
data: fourth_items,
|
||||
next_cursor: fourth_cursor,
|
||||
} = to_response::<ModelListResponse>(fourth_response)?;
|
||||
|
||||
assert_eq!(fourth_items.len(), 1);
|
||||
assert_eq!(fourth_items[0].id, "gpt-5.1");
|
||||
assert!(fourth_cursor.is_none());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn list_models_rejects_invalid_cursor() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
@@ -164,8 +253,8 @@ async fn list_models_rejects_invalid_cursor() -> Result<()> {
|
||||
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let request_id = mcp
|
||||
.send_list_models_request(ListModelsParams {
|
||||
page_size: None,
|
||||
.send_list_models_request(ModelListParams {
|
||||
limit: None,
|
||||
cursor: Some("invalid".to_string()),
|
||||
})
|
||||
.await?;
|
||||
@@ -7,10 +7,10 @@ use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
@@ -26,7 +26,7 @@ use wiremock::matchers::path;
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
@@ -51,7 +51,7 @@ async fn get_account_rate_limits_requires_auth() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
@@ -78,7 +78,7 @@ async fn get_account_rate_limits_requires_chatgpt_auth() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[tokio::test]
|
||||
async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
write_chatgpt_auth(
|
||||
@@ -143,15 +143,16 @@ async fn get_account_rate_limits_returns_snapshot() -> Result<()> {
    let expected = GetAccountRateLimitsResponse {
        rate_limits: RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 42.0,
                window_minutes: Some(60),
                used_percent: 42,
                window_duration_mins: Some(60),
                resets_at: Some(primary_reset_timestamp),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 5.0,
                window_minutes: Some(1440),
                used_percent: 5,
                window_duration_mins: Some(1440),
                resets_at: Some(secondary_reset_timestamp),
            }),
            credits: None,
        },
    };
    assert_eq!(received, expected);
codex-rs/app-server/tests/suite/v2/review.rs (new file, 279 lines)
@@ -0,0 +1,279 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ReviewStartParams;
|
||||
use codex_app_server_protocol::ReviewTarget;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()> {
|
||||
let review_payload = json!({
|
||||
"findings": [
|
||||
{
|
||||
"title": "Prefer Stylize helpers",
|
||||
"body": "Use .dim()/.bold() chaining instead of manual Style.",
|
||||
"confidence_score": 0.9,
|
||||
"priority": 1,
|
||||
"code_location": {
|
||||
"absolute_file_path": "/tmp/file.rs",
|
||||
"line_range": {"start": 10, "end": 20}
|
||||
}
|
||||
}
|
||||
],
|
||||
"overall_correctness": "good",
|
||||
"overall_explanation": "Looks solid overall with minor polish suggested.",
|
||||
"overall_confidence_score": 0.75
|
||||
})
|
||||
.to_string();
|
||||
let responses = vec![create_final_assistant_message_sse_response(
|
||||
&review_payload,
|
||||
)?];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_id = start_default_thread(&mut mcp).await?;
|
||||
|
||||
let review_req = mcp
|
||||
.send_review_start_request(ReviewStartParams {
|
||||
thread_id: thread_id.clone(),
|
||||
append_to_original_thread: true,
|
||||
target: ReviewTarget::Commit {
|
||||
sha: "1234567deadbeef".to_string(),
|
||||
title: Some("Tidy UI colors".to_string()),
|
||||
},
|
||||
})
|
||||
.await?;
|
||||
let review_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(review_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(review_resp)?;
|
||||
let turn_id = turn.id.clone();
|
||||
assert_eq!(turn.status, TurnStatus::InProgress);
|
||||
assert_eq!(turn.items.len(), 1);
|
||||
match &turn.items[0] {
|
||||
ThreadItem::UserMessage { content, .. } => {
|
||||
assert_eq!(content.len(), 1);
|
||||
assert!(matches!(
|
||||
&content[0],
|
||||
codex_app_server_protocol::UserInput::Text { .. }
|
||||
));
|
||||
}
|
||||
other => panic!("expected user message, got {other:?}"),
|
||||
}
|
||||
|
||||
let _started: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
let item_started: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("item/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: ItemStartedNotification =
|
||||
serde_json::from_value(item_started.params.expect("params must be present"))?;
|
||||
match started.item {
|
||||
ThreadItem::CodeReview { id, review } => {
|
||||
assert_eq!(id, turn_id);
|
||||
assert_eq!(review, "commit 1234567");
|
||||
}
|
||||
other => panic!("expected code review item, got {other:?}"),
|
||||
}
|
||||
|
||||
let mut review_body: Option<String> = None;
|
||||
for _ in 0..5 {
|
||||
let review_notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("item/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: ItemCompletedNotification =
|
||||
serde_json::from_value(review_notif.params.expect("params must be present"))?;
|
||||
match completed.item {
|
||||
ThreadItem::CodeReview { id, review } => {
|
||||
assert_eq!(id, turn_id);
|
||||
review_body = Some(review);
|
||||
break;
|
||||
}
|
||||
ThreadItem::UserMessage { .. } => continue,
|
||||
other => panic!("unexpected item/completed payload: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
let review = review_body.expect("did not observe a code review item");
|
||||
assert!(review.contains("Prefer Stylize helpers"));
|
||||
assert!(review.contains("/tmp/file.rs:10-20"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_base_branch() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
let thread_id = start_default_thread(&mut mcp).await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_review_start_request(ReviewStartParams {
|
||||
thread_id,
|
||||
append_to_original_thread: true,
|
||||
target: ReviewTarget::BaseBranch {
|
||||
branch: " ".to_string(),
|
||||
},
|
||||
})
|
||||
.await?;
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert!(
|
||||
error.error.message.contains("branch must not be empty"),
|
||||
"unexpected message: {}",
|
||||
error.error.message
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_commit_sha() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
let thread_id = start_default_thread(&mut mcp).await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_review_start_request(ReviewStartParams {
|
||||
thread_id,
|
||||
append_to_original_thread: true,
|
||||
target: ReviewTarget::Commit {
|
||||
sha: "\t".to_string(),
|
||||
title: None,
|
||||
},
|
||||
})
|
||||
.await?;
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert!(
|
||||
error.error.message.contains("sha must not be empty"),
|
||||
"unexpected message: {}",
|
||||
error.error.message
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_start_rejects_empty_custom_instructions() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server_unchecked(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
let thread_id = start_default_thread(&mut mcp).await?;
|
||||
|
||||
let request_id = mcp
|
||||
.send_review_start_request(ReviewStartParams {
|
||||
thread_id,
|
||||
append_to_original_thread: true,
|
||||
target: ReviewTarget::Custom {
|
||||
instructions: "\n\n".to_string(),
|
||||
},
|
||||
})
|
||||
.await?;
|
||||
let error: JSONRPCError = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await??;
|
||||
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
|
||||
assert!(
|
||||
error
|
||||
.error
|
||||
.message
|
||||
.contains("instructions must not be empty"),
|
||||
"unexpected message: {}",
|
||||
error.error.message
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn start_default_thread(mcp: &mut McpProcess) -> Result<String> {
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
Ok(thread.id)
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
codex-rs/app-server/tests/suite/v2/thread_archive.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadArchiveResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use codex_core::find_conversation_path_by_id_str;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_archive_moves_rollout_into_archived_directory() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread.
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
assert!(!thread.id.is_empty());
|
||||
|
||||
// Locate the rollout path recorded for this thread id.
|
||||
let rollout_path = find_conversation_path_by_id_str(codex_home.path(), &thread.id)
|
||||
.await?
|
||||
.expect("expected rollout path for thread id to exist");
|
||||
assert!(
|
||||
rollout_path.exists(),
|
||||
"expected {} to exist",
|
||||
rollout_path.display()
|
||||
);
|
||||
|
||||
// Archive the thread.
|
||||
let archive_id = mcp
|
||||
.send_thread_archive_request(ThreadArchiveParams {
|
||||
thread_id: thread.id.clone(),
|
||||
})
|
||||
.await?;
|
||||
let archive_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(archive_id)),
|
||||
)
|
||||
.await??;
|
||||
let _: ThreadArchiveResponse = to_response::<ThreadArchiveResponse>(archive_resp)?;
|
||||
|
||||
// Verify file moved.
|
||||
let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
// The archived file keeps the original filename (rollout-...-<id>.jsonl).
|
||||
let archived_rollout_path =
|
||||
archived_directory.join(rollout_path.file_name().expect("rollout file name"));
|
||||
assert!(
|
||||
!rollout_path.exists(),
|
||||
"expected rollout path {} to be moved",
|
||||
rollout_path.display()
|
||||
);
|
||||
assert!(
|
||||
archived_rollout_path.exists(),
|
||||
"expected archived rollout path {} to exist",
|
||||
archived_rollout_path.display()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(config_toml, config_contents())
|
||||
}
|
||||
|
||||
fn config_contents() -> &'static str {
|
||||
r#"model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
"#
|
||||
}
|
||||
codex-rs/app-server/tests/suite/v2/thread_list.rs (new file, 185 lines)
@@ -0,0 +1,185 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadListResponse;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_basic_empty() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// List threads in an empty CODEX_HOME; should return an empty page with nextCursor: null.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: None,
|
||||
})
|
||||
.await?;
|
||||
let list_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(list_resp)?;
|
||||
assert!(data.is_empty());
|
||||
assert_eq!(next_cursor, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Minimal config.toml for listing.
|
||||
fn create_minimal_config(codex_home: &std::path::Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create three rollouts so we can paginate with limit=2.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T12-00-00",
|
||||
"2025-01-02T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _b = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T13-00-00",
|
||||
"2025-01-01T13:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
let _c = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-01T12-00-00",
|
||||
"2025-01-01T12:00:00Z",
|
||||
"Hello",
|
||||
Some("mock_provider"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Page 1: limit 2 → expect next_cursor Some.
|
||||
let page1_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page1_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page1_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data1,
|
||||
next_cursor: cursor1,
|
||||
} = to_response::<ThreadListResponse>(page1_resp)?;
|
||||
assert_eq!(data1.len(), 2);
|
||||
for thread in &data1 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
let cursor1 = cursor1.expect("expected nextCursor on first page");
|
||||
|
||||
// Page 2: with cursor → expect next_cursor None when no more results.
|
||||
let page2_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: Some(cursor1),
|
||||
limit: Some(2),
|
||||
model_providers: Some(vec!["mock_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let page2_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(page2_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse {
|
||||
data: data2,
|
||||
next_cursor: cursor2,
|
||||
} = to_response::<ThreadListResponse>(page2_resp)?;
|
||||
assert!(data2.len() <= 2);
|
||||
for thread in &data2 {
|
||||
assert_eq!(thread.preview, "Hello");
|
||||
assert_eq!(thread.model_provider, "mock_provider");
|
||||
assert!(thread.created_at > 0);
|
||||
}
|
||||
assert_eq!(cursor2, None, "expected nextCursor to be null on last page");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_list_respects_provider_filter() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
create_minimal_config(codex_home.path())?;
|
||||
|
||||
// Create rollouts under two providers.
|
||||
let _a = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T10-00-00",
|
||||
"2025-01-02T10:00:00Z",
|
||||
"X",
|
||||
Some("mock_provider"),
|
||||
)?; // mock_provider
|
||||
let _b = create_fake_rollout(
|
||||
codex_home.path(),
|
||||
"2025-01-02T11-00-00",
|
||||
"2025-01-02T11:00:00Z",
|
||||
"X",
|
||||
Some("other_provider"),
|
||||
)?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Filter to only other_provider; expect 1 item, nextCursor None.
|
||||
let list_id = mcp
|
||||
.send_thread_list_request(ThreadListParams {
|
||||
cursor: None,
|
||||
limit: Some(10),
|
||||
model_providers: Some(vec!["other_provider".to_string()]),
|
||||
})
|
||||
.await?;
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(list_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;
|
||||
assert_eq!(data.len(), 1);
|
||||
assert_eq!(next_cursor, None);
|
||||
let thread = &data[0];
|
||||
assert_eq!(thread.preview, "X");
|
||||
assert_eq!(thread.model_provider, "other_provider");
|
||||
let expected_ts = chrono::DateTime::parse_from_rfc3339("2025-01-02T11:00:00Z")?.timestamp();
|
||||
assert_eq!(thread.created_at, expected_ts);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
codex-rs/app-server/tests/suite/v2/thread_resume.rs (new file, 249 lines)
@@ -0,0 +1,249 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_fake_rollout;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadResumeResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_resume_returns_original_thread() -> Result<()> {
|
||||
let server = create_mock_chat_completions_server(vec![]).await;
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    // Resume it via v2 API.
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse {
        thread: resumed, ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(resumed, thread);

    Ok(())
}

#[tokio::test]
async fn thread_resume_returns_rollout_history() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let preview = "Saved user message";
    let conversation_id = create_fake_rollout(
        codex_home.path(),
        "2025-01-05T12-00-00",
        "2025-01-05T12:00:00Z",
        preview,
        Some("mock_provider"),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: conversation_id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.id, conversation_id);
    assert_eq!(thread.preview, preview);
    assert_eq!(thread.model_provider, "mock_provider");
    assert!(thread.path.is_absolute());

    assert_eq!(
        thread.turns.len(),
        1,
        "expected rollouts to include one turn"
    );
    let turn = &thread.turns[0];
    assert_eq!(turn.status, TurnStatus::Completed);
    assert_eq!(turn.items.len(), 1, "expected user message item");
    match &turn.items[0] {
        ThreadItem::UserMessage { content, .. } => {
            assert_eq!(
                content,
                &vec![UserInput::Text {
                    text: preview.to_string()
                }]
            );
        }
        other => panic!("expected user message item, got {other:?}"),
    }

    Ok(())
}

#[tokio::test]
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let thread_path = thread.path.clone();
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: "not-a-valid-thread-id".to_string(),
            path: Some(thread_path),
            ..Default::default()
        })
        .await?;

    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse {
        thread: resumed, ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(resumed, thread);

    Ok(())
}

#[tokio::test]
async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    let server = create_mock_chat_completions_server(vec![]).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let history_text = "Hello from history";
    let history = vec![ResponseItem::Message {
        id: None,
        role: "user".to_string(),
        content: vec![ContentItem::InputText {
            text: history_text.to_string(),
        }],
    }];

    // Resume with explicit history and override the model.
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            history: Some(history),
            model: Some("mock-model".to_string()),
            model_provider: Some("mock_provider".to_string()),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse {
        thread: resumed,
        model_provider,
        ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert!(!resumed.id.is_empty());
    assert_eq!(model_provider, "mock_provider");
    assert_eq!(resumed.preview, history_text);

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
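
// Note (illustrative sketch, not part of this diff): the tests above all repeat the
// same "send a request, then await its JSON-RPC response under DEFAULT_READ_TIMEOUT"
// pattern. A helper along these lines could collapse that boilerplate. The helper and
// its `i64` request-id parameter are assumptions; `McpProcess`, `RequestId`,
// `JSONRPCResponse`, and the method name are taken from the code above.
async fn await_response(mcp: &mut McpProcess, request_id: i64) -> Result<JSONRPCResponse> {
    Ok(timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??)
}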
94
codex-rs/app-server/tests/suite/v2/thread_start.rs
Normal file
@@ -0,0 +1,94 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn thread_start_creates_thread_and_emits_started() -> Result<()> {
    // Provide a mock server and config so model wiring is valid.
    let server = create_mock_chat_completions_server(vec![]).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    // Start server and initialize.
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a v2 thread with an explicit model override.
    let req_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1".to_string()),
            ..Default::default()
        })
        .await?;

    // Expect a proper JSON-RPC response with a thread id.
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
    )
    .await??;
    let ThreadStartResponse {
        thread,
        model_provider,
        ..
    } = to_response::<ThreadStartResponse>(resp)?;
    assert!(!thread.id.is_empty(), "thread id should not be empty");
    assert!(
        thread.preview.is_empty(),
        "new threads should start with an empty preview"
    );
    assert_eq!(model_provider, "mock_provider");
    assert!(
        thread.created_at > 0,
        "created_at should be a positive UNIX timestamp"
    );

    // A corresponding thread/started notification should arrive.
    let notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("thread/started"),
    )
    .await??;
    let started: ThreadStartedNotification =
        serde_json::from_value(notif.params.expect("params must be present"))?;
    assert_eq!(started.thread, thread);

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
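
// Illustrative sketch only (not part of this diff): the thread/start dance above —
// send the request, await its response, deserialize `ThreadStartResponse` — repeats
// across the v2 suites. A shared helper could look roughly like this; the function
// itself is hypothetical, while the types and method names come from the test above.
async fn start_thread(mcp: &mut McpProcess, model: &str) -> Result<ThreadStartResponse> {
    let req_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some(model.to_string()),
            ..Default::default()
        })
        .await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
    )
    .await??;
    Ok(to_response::<ThreadStartResponse>(resp)?)
}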
142
codex-rs/app-server/tests/suite/v2/turn_interrupt.rs
Normal file
@@ -0,0 +1,142 @@
#![cfg(unix)]

use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnCompletedNotification;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnInterruptResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn turn_interrupt_aborts_running_turn() -> Result<()> {
    // Use a portable sleep command to keep the turn running.
    #[cfg(target_os = "windows")]
    let shell_command = vec![
        "powershell".to_string(),
        "-Command".to_string(),
        "Start-Sleep -Seconds 10".to_string(),
    ];
    #[cfg(not(target_os = "windows"))]
    let shell_command = vec!["sleep".to_string(), "10".to_string()];

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let working_directory = tmp.path().join("workdir");
    std::fs::create_dir(&working_directory)?;

    // Mock server: long-running shell command then (after abort) nothing else needed.
    let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),
        "call_sleep",
    )?])
    .await;
    create_config_toml(&codex_home, &server.uri())?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a v2 thread and capture its id.
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    // Start a turn that triggers a long-running command.
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run sleep".to_string(),
            }],
            cwd: Some(working_directory.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    // Give the command a brief moment to start.
    tokio::time::sleep(std::time::Duration::from_secs(1)).await;

    // Interrupt the in-progress turn by id (v2 API).
    let interrupt_id = mcp
        .send_turn_interrupt_request(TurnInterruptParams {
            thread_id: thread.id,
            turn_id: turn.id,
        })
        .await?;
    let interrupt_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(interrupt_id)),
    )
    .await??;
    let _resp: TurnInterruptResponse = to_response::<TurnInterruptResponse>(interrupt_resp)?;

    let completed_notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    let completed: TurnCompletedNotification = serde_json::from_value(
        completed_notif
            .params
            .expect("turn/completed params must be present"),
    )?;
    assert_eq!(completed.turn.status, TurnStatus::Interrupted);

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "workspace-write"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
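
// Illustrative sketch only (not part of this diff): waiting for a notification by
// method name and deserializing its params, as done above for `turn/completed`, can
// be made generic. The helper and its serde bound are assumptions; the `McpProcess`
// methods and the `params.expect(...)` handling mirror the test above.
async fn await_notification<T: serde::de::DeserializeOwned>(
    mcp: &mut McpProcess,
    method: &str,
) -> Result<T> {
    let notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message(method),
    )
    .await??;
    Ok(serde_json::from_value(
        notif.params.expect("notification params must be present"),
    )?)
}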
800
codex-rs/app-server/tests/suite/v2/turn_start.rs
Normal file
@@ -0,0 +1,800 @@
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_apply_patch_sse_response;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_mock_chat_completions_server_unchecked;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ApprovalDecision;
|
||||
use codex_app_server_protocol::CommandExecutionStatus;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
|
||||
use codex_app_server_protocol::ItemCompletedNotification;
|
||||
use codex_app_server_protocol::ItemStartedNotification;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::PatchApplyStatus;
|
||||
use codex_app_server_protocol::PatchChangeKind;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadItem;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnCompletedNotification;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {
|
||||
// Provide a mock server and config so model wiring is valid.
|
||||
// Three Codex turns hit the mock model (session start + two turn/start calls).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// Start a thread (v2) and capture its id.
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
// Start a turn with only input and thread_id set (no overrides).
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Hello".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// Expect a turn/started notification.
|
||||
let notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
let started: TurnStartedNotification =
|
||||
serde_json::from_value(notif.params.expect("params must be present"))?;
|
||||
assert_eq!(
|
||||
started.turn.status,
|
||||
codex_app_server_protocol::TurnStatus::InProgress
|
||||
);
|
||||
|
||||
// Send a second turn that exercises the overrides path: change the model.
|
||||
let turn_req2 = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "Second".to_string(),
|
||||
}],
|
||||
model: Some("mock-model-override".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp2: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req2)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn: turn2 } = to_response::<TurnStartResponse>(turn_resp2)?;
|
||||
assert!(!turn2.id.is_empty());
|
||||
// Ensure the second turn has a different id than the first.
|
||||
assert_ne!(turn.id, turn2.id);
|
||||
|
||||
// Expect a second turn/started notification as well.
|
||||
let _notif2: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/started"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let completed_notif: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
let completed: TurnCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.expect("turn/completed params must be present"),
|
||||
)?;
|
||||
assert_eq!(completed.turn.status, TurnStatus::Completed);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_accepts_local_image_input() -> Result<()> {
|
||||
// Two Codex turns hit the mock model (session start + turn/start).
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
create_final_assistant_message_sse_response("Done")?,
|
||||
];
|
||||
// Use the unchecked variant because the request payload includes a LocalImage
|
||||
// which the strict matcher does not currently cover.
|
||||
let server = create_mock_chat_completions_server_unchecked(responses).await;
|
||||
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), &server.uri(), "never")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let thread_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let thread_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
|
||||
|
||||
let image_path = codex_home.path().join("image.png");
|
||||
// No need to actually write the file; we just exercise the input path.
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::LocalImage { path: image_path }],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
assert!(!turn.id.is_empty());
|
||||
|
||||
// This test only validates that turn/start responds and returns a turn.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().to_path_buf();
|
||||
|
||||
// Mock server: first turn requests a shell call (elicitation), then completes.
|
||||
// Second turn same, but we'll set approval_policy=never to avoid elicitation.
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call1",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call2",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
// Default approval is untrusted to force elicitation on first turn.
|
||||
create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.as_path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// turn/start — expect CommandExecutionRequestApproval request from server
|
||||
let first_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python".to_string(),
|
||||
}],
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
// Acknowledge RPC
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Receive elicitation
|
||||
let server_req = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::CommandExecutionRequestApproval { request_id, params } = server_req else {
|
||||
panic!("expected CommandExecutionRequestApproval request");
|
||||
};
|
||||
assert_eq!(params.item_id, "call1");
|
||||
|
||||
// Approve and wait for task completion
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
|
||||
)
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Second turn with approval_policy=never should not elicit approval
|
||||
let second_turn_id = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "run python again".to_string(),
|
||||
}],
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// Ensure we do NOT receive a CommandExecutionRequestApproval request before task completes
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("turn/completed"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace_root = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace_root)?;
|
||||
let first_cwd = workspace_root.join("turn1");
|
||||
let second_cwd = workspace_root.join("turn2");
|
||||
std::fs::create_dir(&first_cwd)?;
|
||||
std::fs::create_dir(&second_cwd)?;
|
||||
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo first turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-first",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done first")?,
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-second",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done second")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
// thread/start
|
||||
let start_id = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
// first turn with workspace-write sandbox and first_cwd
|
||||
let first_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "first turn".to_string(),
|
||||
}],
|
||||
cwd: Some(first_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: vec![first_cwd.clone()],
|
||||
network_access: false,
|
||||
exclude_tmpdir_env_var: false,
|
||||
exclude_slash_tmp: false,
|
||||
}),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn)),
|
||||
)
|
||||
.await??;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
// second turn with workspace-write and second_cwd, ensure exec begins in second_cwd
|
||||
let second_turn = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "second turn".to_string(),
|
||||
}],
|
||||
cwd: Some(second_cwd.clone()),
|
||||
approval_policy: Some(codex_app_server_protocol::AskForApproval::Never),
|
||||
sandbox_policy: Some(codex_app_server_protocol::SandboxPolicy::DangerFullAccess),
|
||||
model: Some("mock-model".to_string()),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: Some(ReasoningSummary::Auto),
|
||||
})
|
||||
.await?;
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let command_exec_item = timeout(DEFAULT_READ_TIMEOUT, async {
|
||||
loop {
|
||||
let item_started_notification = mcp
|
||||
.read_stream_until_notification_message("item/started")
|
||||
.await?;
|
||||
let params = item_started_notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("item/started params");
|
||||
let item_started: ItemStartedNotification =
|
||||
serde_json::from_value(params).expect("deserialize item/started notification");
|
||||
if matches!(item_started.item, ThreadItem::CommandExecution { .. }) {
|
||||
return Ok::<ThreadItem, anyhow::Error>(item_started.item);
|
||||
}
|
||||
}
|
||||
})
|
||||
.await??;
|
||||
let ThreadItem::CommandExecution {
|
||||
cwd,
|
||||
command,
|
||||
status,
|
||||
..
|
||||
} = command_exec_item
|
||||
else {
|
||||
unreachable!("loop ensures we break on command execution items");
|
||||
};
|
||||
assert_eq!(cwd, second_cwd);
|
||||
assert_eq!(command, "bash -lc 'echo second turn'");
|
||||
assert_eq!(status, CommandExecutionStatus::InProgress);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_file_change_approval_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace)?;
|
||||
|
||||
let patch = r#"*** Begin Patch
|
||||
*** Add File: README.md
|
||||
+new line
|
||||
*** End Patch
|
||||
"#;
|
||||
let responses = vec![
|
||||
create_apply_patch_sse_response(patch, "patch-call")?,
|
||||
create_final_assistant_message_sse_response("patch applied")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let start_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
cwd: Some(workspace.to_string_lossy().into_owned()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "apply patch".into(),
|
||||
}],
|
||||
cwd: Some(workspace.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
|
||||
let started_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
|
||||
loop {
|
||||
let started_notif = mcp
|
||||
.read_stream_until_notification_message("item/started")
|
||||
.await?;
|
||||
let started: ItemStartedNotification =
|
||||
serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
|
||||
if let ThreadItem::FileChange { .. } = started.item {
|
||||
return Ok::<ThreadItem, anyhow::Error>(started.item);
|
||||
}
|
||||
}
|
||||
})
|
||||
.await??;
|
||||
let ThreadItem::FileChange {
|
||||
ref id,
|
||||
status,
|
||||
ref changes,
|
||||
} = started_file_change
|
||||
else {
|
||||
unreachable!("loop ensures we break on file change items");
|
||||
};
|
||||
assert_eq!(id, "patch-call");
|
||||
assert_eq!(status, PatchApplyStatus::InProgress);
|
||||
let started_changes = changes.clone();
|
||||
|
||||
let server_req = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::FileChangeRequestApproval { request_id, params } = server_req else {
|
||||
panic!("expected FileChangeRequestApproval request")
|
||||
};
|
||||
assert_eq!(params.item_id, "patch-call");
|
||||
assert_eq!(params.thread_id, thread.id);
|
||||
assert_eq!(params.turn_id, turn.id);
|
||||
let expected_readme_path = workspace.join("README.md");
|
||||
let expected_readme_path = expected_readme_path.to_string_lossy().into_owned();
|
||||
pretty_assertions::assert_eq!(
|
||||
started_changes,
|
||||
vec![codex_app_server_protocol::FileUpdateChange {
|
||||
path: expected_readme_path.clone(),
|
||||
kind: PatchChangeKind::Add,
|
||||
diff: "new line\n".to_string(),
|
||||
}]
|
||||
);
|
||||
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::to_value(FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
})?,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let completed_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
|
||||
loop {
|
||||
let completed_notif = mcp
|
||||
.read_stream_until_notification_message("item/completed")
|
||||
.await?;
|
||||
let completed: ItemCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.clone()
|
||||
.expect("item/completed params"),
|
||||
)?;
|
||||
if let ThreadItem::FileChange { .. } = completed.item {
|
||||
return Ok::<ThreadItem, anyhow::Error>(completed.item);
|
||||
}
|
||||
}
|
||||
})
|
||||
.await??;
|
||||
let ThreadItem::FileChange { ref id, status, .. } = completed_file_change else {
|
||||
unreachable!("loop ensures we break on file change items");
|
||||
};
|
||||
assert_eq!(id, "patch-call");
|
||||
assert_eq!(status, PatchApplyStatus::Completed);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
let readme_contents = std::fs::read_to_string(expected_readme_path)?;
|
||||
assert_eq!(readme_contents, "new line\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home)?;
|
||||
let workspace = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace)?;
|
||||
|
||||
let patch = r#"*** Begin Patch
|
||||
*** Add File: README.md
|
||||
+new line
|
||||
*** End Patch
|
||||
"#;
|
||||
let responses = vec![
|
||||
create_apply_patch_sse_response(patch, "patch-call")?,
|
||||
create_final_assistant_message_sse_response("patch declined")?,
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri(), "untrusted")?;
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
|
||||
|
||||
let start_req = mcp
|
||||
.send_thread_start_request(ThreadStartParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
cwd: Some(workspace.to_string_lossy().into_owned()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let start_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(start_req)),
|
||||
)
|
||||
.await??;
|
||||
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
|
||||
|
||||
let turn_req = mcp
|
||||
.send_turn_start_request(TurnStartParams {
|
||||
thread_id: thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: "apply patch".into(),
|
||||
}],
|
||||
cwd: Some(workspace.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
let turn_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
|
||||
)
|
||||
.await??;
|
||||
let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
|
||||
|
||||
let started_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
|
||||
loop {
|
||||
let started_notif = mcp
|
||||
.read_stream_until_notification_message("item/started")
|
||||
.await?;
|
||||
let started: ItemStartedNotification =
|
||||
serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
|
||||
if let ThreadItem::FileChange { .. } = started.item {
|
||||
return Ok::<ThreadItem, anyhow::Error>(started.item);
|
||||
}
|
||||
}
|
||||
})
|
||||
.await??;
|
||||
let ThreadItem::FileChange {
|
||||
ref id,
|
||||
status,
|
||||
ref changes,
|
||||
} = started_file_change
|
||||
else {
|
||||
unreachable!("loop ensures we break on file change items");
|
||||
};
|
||||
assert_eq!(id, "patch-call");
|
||||
assert_eq!(status, PatchApplyStatus::InProgress);
|
||||
let started_changes = changes.clone();
|
||||
|
||||
let server_req = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_request_message(),
|
||||
)
|
||||
.await??;
|
||||
let ServerRequest::FileChangeRequestApproval { request_id, params } = server_req else {
|
||||
panic!("expected FileChangeRequestApproval request")
|
||||
};
|
||||
assert_eq!(params.item_id, "patch-call");
|
||||
assert_eq!(params.thread_id, thread.id);
|
||||
assert_eq!(params.turn_id, turn.id);
|
||||
let expected_readme_path = workspace.join("README.md");
|
||||
let expected_readme_path_str = expected_readme_path.to_string_lossy().into_owned();
|
||||
pretty_assertions::assert_eq!(
|
||||
started_changes,
|
||||
vec![codex_app_server_protocol::FileUpdateChange {
|
||||
path: expected_readme_path_str.clone(),
|
||||
kind: PatchChangeKind::Add,
|
||||
diff: "new line\n".to_string(),
|
||||
}]
|
||||
);
|
||||
|
||||
mcp.send_response(
|
||||
request_id,
|
||||
serde_json::to_value(FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
})?,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let completed_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
|
||||
loop {
|
||||
let completed_notif = mcp
|
||||
.read_stream_until_notification_message("item/completed")
|
||||
.await?;
|
||||
let completed: ItemCompletedNotification = serde_json::from_value(
|
||||
completed_notif
|
||||
.params
|
||||
.clone()
|
||||
.expect("item/completed params"),
|
||||
)?;
|
||||
if let ThreadItem::FileChange { .. } = completed.item {
|
||||
return Ok::<ThreadItem, anyhow::Error>(completed.item);
|
||||
}
|
||||
}
|
||||
})
|
||||
.await??;
|
||||
let ThreadItem::FileChange { ref id, status, .. } = completed_file_change else {
|
||||
unreachable!("loop ensures we break on file change items");
|
||||
};
|
||||
assert_eq!(id, "patch-call");
|
||||
assert_eq!(status, PatchApplyStatus::Declined);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert!(
|
||||
!expected_readme_path.exists(),
|
||||
"declined patch should not be applied"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(
|
||||
codex_home: &Path,
|
||||
server_uri: &str,
|
||||
approval_policy: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "{approval_policy}"
|
||||
sandbox_mode = "read-only"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -288,7 +288,7 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
|
||||
path,
|
||||
ApplyPatchFileChange::Update {
|
||||
unified_diff,
|
||||
move_path: move_path.map(|p| cwd.join(p)),
|
||||
move_path: move_path.map(|p| effective_cwd.join(p)),
|
||||
new_content: contents,
|
||||
},
|
||||
);
|
||||
@@ -1603,6 +1603,53 @@ g
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_resolves_move_path_with_effective_cwd() {
|
||||
let session_dir = tempdir().unwrap();
|
||||
let worktree_rel = "alt";
|
||||
let worktree_dir = session_dir.path().join(worktree_rel);
|
||||
fs::create_dir_all(&worktree_dir).unwrap();
|
||||
|
||||
let source_name = "old.txt";
|
||||
let dest_name = "renamed.txt";
|
||||
let source_path = worktree_dir.join(source_name);
|
||||
fs::write(&source_path, "before\n").unwrap();
|
||||
|
||||
let patch = wrap_patch(&format!(
|
||||
r#"*** Update File: {source_name}
|
||||
*** Move to: {dest_name}
|
||||
@@
|
||||
-before
|
||||
+after"#
|
||||
));
|
||||
|
||||
let shell_script = format!("cd {worktree_rel} && apply_patch <<'PATCH'\n{patch}\nPATCH");
|
||||
let argv = vec!["bash".into(), "-lc".into(), shell_script];
|
||||
|
||||
let result = maybe_parse_apply_patch_verified(&argv, session_dir.path());
|
||||
let action = match result {
|
||||
MaybeApplyPatchVerified::Body(action) => action,
|
||||
other => panic!("expected verified body, got {other:?}"),
|
||||
};
|
||||
|
||||
assert_eq!(action.cwd, worktree_dir);
|
||||
|
||||
let change = action
|
||||
.changes()
|
||||
.get(&worktree_dir.join(source_name))
|
||||
.expect("source file change present");
|
||||
|
||||
match change {
|
||||
ApplyPatchFileChange::Update { move_path, .. } => {
|
||||
assert_eq!(
|
||||
move_path.as_deref(),
|
||||
Some(worktree_dir.join(dest_name).as_path())
|
||||
);
|
||||
}
|
||||
other => panic!("expected update change, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_patch_fails_on_write_error() {
|
||||
let dir = tempdir().unwrap();
|
||||
|
||||
@@ -11,32 +11,7 @@ const LINUX_SANDBOX_ARG0: &str = "codex-linux-sandbox";
|
||||
const APPLY_PATCH_ARG0: &str = "apply_patch";
|
||||
const MISSPELLED_APPLY_PATCH_ARG0: &str = "applypatch";
|
||||
|
||||
/// While we want to deploy the Codex CLI as a single executable for simplicity,
|
||||
/// we also want to expose some of its functionality as distinct CLIs, so we use
|
||||
/// the "arg0 trick" to determine which CLI to dispatch. This effectively allows
|
||||
/// us to simulate deploying multiple executables as a single binary on Mac and
|
||||
/// Linux (but not Windows).
|
||||
///
|
||||
/// When the current executable is invoked through the hard-link or alias named
|
||||
/// `codex-linux-sandbox` we *directly* execute
|
||||
/// [`codex_linux_sandbox::run_main`] (which never returns). Otherwise we:
|
||||
///
|
||||
/// 1. Load `.env` values from `~/.codex/.env` before creating any threads.
|
||||
/// 2. Construct a Tokio multi-thread runtime.
|
||||
/// 3. Derive the path to the current executable (so children can re-invoke the
|
||||
/// sandbox) when running on Linux.
|
||||
/// 4. Execute the provided async `main_fn` inside that runtime, forwarding any
|
||||
/// error. Note that `main_fn` receives `codex_linux_sandbox_exe:
|
||||
/// Option<PathBuf>`, as an argument, which is generally needed as part of
|
||||
/// constructing [`codex_core::config::Config`].
|
||||
///
|
||||
/// This function should be used to wrap any `main()` function in binary crates
|
||||
/// in this workspace that depends on these helper CLIs.
|
||||
pub fn arg0_dispatch_or_else<F, Fut>(main_fn: F) -> anyhow::Result<()>
|
||||
where
|
||||
F: FnOnce(Option<PathBuf>) -> Fut,
|
||||
Fut: Future<Output = anyhow::Result<()>>,
|
||||
{
|
||||
pub fn arg0_dispatch() -> Option<TempDir> {
|
||||
// Determine if we were invoked via the special alias.
|
||||
let mut args = std::env::args_os();
|
||||
let argv0 = args.next().unwrap_or_default();
|
||||
@@ -76,10 +51,7 @@ where
|
||||
// before creating any threads/the Tokio runtime.
|
||||
load_dotenv();
|
||||
|
||||
// Retain the TempDir so it exists for the lifetime of the invocation of
|
||||
// this executable. Admittedly, we could invoke `keep()` on it, but it
|
||||
// would be nice to avoid leaving temporary directories behind, if possible.
|
||||
let _path_entry = match prepend_path_entry_for_apply_patch() {
|
||||
match prepend_path_entry_for_codex_aliases() {
|
||||
Ok(path_entry) => Some(path_entry),
|
||||
Err(err) => {
|
||||
// It is possible that Codex will proceed successfully even if
|
||||
@@ -87,7 +59,39 @@ where
|
||||
eprintln!("WARNING: proceeding, even though we could not update PATH: {err}");
|
||||
None
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// While we want to deploy the Codex CLI as a single executable for simplicity,
|
||||
/// we also want to expose some of its functionality as distinct CLIs, so we use
|
||||
/// the "arg0 trick" to determine which CLI to dispatch. This effectively allows
|
||||
/// us to simulate deploying multiple executables as a single binary on Mac and
|
||||
/// Linux (but not Windows).
|
||||
///
|
||||
/// When the current executable is invoked through the hard-link or alias named
|
||||
/// `codex-linux-sandbox` we *directly* execute
|
||||
/// [`codex_linux_sandbox::run_main`] (which never returns). Otherwise we:
|
||||
///
|
||||
/// 1. Load `.env` values from `~/.codex/.env` before creating any threads.
|
||||
/// 2. Construct a Tokio multi-thread runtime.
|
||||
/// 3. Derive the path to the current executable (so children can re-invoke the
|
||||
/// sandbox) when running on Linux.
|
||||
/// 4. Execute the provided async `main_fn` inside that runtime, forwarding any
|
||||
/// error. Note that `main_fn` receives `codex_linux_sandbox_exe:
|
||||
/// Option<PathBuf>`, as an argument, which is generally needed as part of
|
||||
/// constructing [`codex_core::config::Config`].
|
||||
///
|
||||
/// This function should be used to wrap any `main()` function in binary crates
|
||||
/// in this workspace that depends on these helper CLIs.
|
||||
pub fn arg0_dispatch_or_else<F, Fut>(main_fn: F) -> anyhow::Result<()>
|
||||
where
|
||||
F: FnOnce(Option<PathBuf>) -> Fut,
|
||||
Fut: Future<Output = anyhow::Result<()>>,
|
||||
{
|
||||
// Retain the TempDir so it exists for the lifetime of the invocation of
|
||||
// this executable. Admittedly, we could invoke `keep()` on it, but it
|
||||
// would be nice to avoid leaving temporary directories behind, if possible.
|
||||
let _path_entry = arg0_dispatch();
|
||||
|
||||
// Regular invocation – create a Tokio runtime and execute the provided
|
||||
// async entry-point.
|
||||
@@ -144,11 +148,16 @@ where
|
||||
///
|
||||
/// IMPORTANT: This function modifies the PATH environment variable, so it MUST
|
||||
/// be called before multiple threads are spawned.
|
||||
fn prepend_path_entry_for_apply_patch() -> std::io::Result<TempDir> {
|
||||
pub fn prepend_path_entry_for_codex_aliases() -> std::io::Result<TempDir> {
|
||||
let temp_dir = TempDir::new()?;
|
||||
let path = temp_dir.path();
|
||||
|
||||
for filename in &[APPLY_PATCH_ARG0, MISSPELLED_APPLY_PATCH_ARG0] {
|
||||
for filename in &[
|
||||
APPLY_PATCH_ARG0,
|
||||
MISSPELLED_APPLY_PATCH_ARG0,
|
||||
#[cfg(target_os = "linux")]
|
||||
LINUX_SANDBOX_ARG0,
|
||||
] {
|
||||
let exe = std::env::current_exe()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::types::CodeTaskDetailsResponse;
|
||||
use crate::types::CreditStatusDetails;
|
||||
use crate::types::PaginatedListTaskListItem;
|
||||
use crate::types::RateLimitStatusPayload;
|
||||
use crate::types::RateLimitWindowSnapshot;
|
||||
@@ -6,6 +7,7 @@ use crate::types::TurnAttemptsSiblingTurnsResponse;
|
||||
use anyhow::Result;
|
||||
use codex_core::auth::CodexAuth;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_protocol::protocol::CreditsSnapshot;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::RateLimitWindow;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
@@ -272,19 +274,23 @@ impl Client {
|
||||
|
||||
// rate limit helpers
|
||||
fn rate_limit_snapshot_from_payload(payload: RateLimitStatusPayload) -> RateLimitSnapshot {
|
||||
let Some(details) = payload
|
||||
let rate_limit_details = payload
|
||||
.rate_limit
|
||||
.and_then(|inner| inner.map(|boxed| *boxed))
|
||||
else {
|
||||
return RateLimitSnapshot {
|
||||
primary: None,
|
||||
secondary: None,
|
||||
};
|
||||
.and_then(|inner| inner.map(|boxed| *boxed));
|
||||
|
||||
let (primary, secondary) = if let Some(details) = rate_limit_details {
|
||||
(
|
||||
Self::map_rate_limit_window(details.primary_window),
|
||||
Self::map_rate_limit_window(details.secondary_window),
|
||||
)
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
RateLimitSnapshot {
|
||||
primary: Self::map_rate_limit_window(details.primary_window),
|
||||
secondary: Self::map_rate_limit_window(details.secondary_window),
|
||||
primary,
|
||||
secondary,
|
||||
credits: Self::map_credits(payload.credits),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -306,6 +312,19 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
fn map_credits(credits: Option<Option<Box<CreditStatusDetails>>>) -> Option<CreditsSnapshot> {
|
||||
let details = match credits {
|
||||
Some(Some(details)) => *details,
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(CreditsSnapshot {
|
||||
has_credits: details.has_credits,
|
||||
unlimited: details.unlimited,
|
||||
balance: details.balance.and_then(|inner| inner),
|
||||
})
|
||||
}
|
||||
|
||||
fn window_minutes_from_seconds(seconds: i32) -> Option<i64> {
|
||||
if seconds <= 0 {
|
||||
return None;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
pub use codex_backend_openapi_models::models::CreditStatusDetails;
|
||||
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
|
||||
pub use codex_backend_openapi_models::models::PlanType;
|
||||
pub use codex_backend_openapi_models::models::RateLimitStatusDetails;
|
||||
|
||||
@@ -14,7 +14,7 @@ codex-core = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
codex-git = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -59,13 +59,13 @@ pub async fn apply_diff_from_task(
|
||||
|
||||
async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
let req = codex_git::ApplyGitRequest {
|
||||
cwd,
|
||||
diff: diff.to_string(),
|
||||
revert: false,
|
||||
preflight: false,
|
||||
};
|
||||
let res = codex_git_apply::apply_git_patch(&req)?;
|
||||
let res = codex_git::apply_git_patch(&req)?;
|
||||
if res.exit_code != 0 {
|
||||
anyhow::bail!(
|
||||
"Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",
|
||||
|
||||
@@ -26,19 +26,22 @@ codex-cloud-tasks = { path = "../cloud-tasks" }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
codex-exec = { workspace = true }
|
||||
codex-execpolicy = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-mcp-server = { workspace = true }
|
||||
codex-process-hardening = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-protocol-ts = { workspace = true }
|
||||
codex-responses-api-proxy = { workspace = true }
|
||||
codex-rmcp-client = { workspace = true }
|
||||
codex-stdio-to-uds = { workspace = true }
|
||||
codex-tui = { workspace = true }
|
||||
ctor = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
owo-colors = { workspace = true }
|
||||
regex-lite = { workspace = true}
|
||||
serde_json = { workspace = true }
|
||||
supports-color = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
@@ -46,6 +49,10 @@ tokio = { workspace = true, features = [
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
codex_windows_sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = { workspace = true }
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
#[cfg(target_os = "macos")]
|
||||
mod pid_tracker;
|
||||
#[cfg(target_os = "macos")]
|
||||
mod seatbelt;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
@@ -5,20 +10,27 @@ use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::exec_env::create_env;
|
||||
use codex_core::landlock::spawn_command_under_linux_sandbox;
|
||||
#[cfg(target_os = "macos")]
|
||||
use codex_core::seatbelt::spawn_command_under_seatbelt;
|
||||
use codex_core::spawn::StdioPolicy;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
|
||||
use crate::LandlockCommand;
|
||||
use crate::SeatbeltCommand;
|
||||
use crate::WindowsCommand;
|
||||
use crate::exit_status::handle_exit_status;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use seatbelt::DenialLogger;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
command: SeatbeltCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let SeatbeltCommand {
|
||||
full_auto,
|
||||
log_denials,
|
||||
config_overrides,
|
||||
command,
|
||||
} = command;
|
||||
@@ -28,10 +40,19 @@ pub async fn run_command_under_seatbelt(
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Seatbelt,
|
||||
log_denials,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
pub async fn run_command_under_seatbelt(
|
||||
_command: SeatbeltCommand,
|
||||
_codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
anyhow::bail!("Seatbelt sandbox is only available on macOS");
|
||||
}
|
||||
|
||||
pub async fn run_command_under_landlock(
|
||||
command: LandlockCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
@@ -47,13 +68,36 @@ pub async fn run_command_under_landlock(
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Landlock,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn run_command_under_windows(
|
||||
command: WindowsCommand,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let WindowsCommand {
|
||||
full_auto,
|
||||
config_overrides,
|
||||
command,
|
||||
} = command;
|
||||
run_command_under_sandbox(
|
||||
full_auto,
|
||||
command,
|
||||
config_overrides,
|
||||
codex_linux_sandbox_exe,
|
||||
SandboxType::Windows,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
enum SandboxType {
|
||||
#[cfg(target_os = "macos")]
|
||||
Seatbelt,
|
||||
Landlock,
|
||||
Windows,
|
||||
}
|
||||
|
||||
async fn run_command_under_sandbox(
|
||||
@@ -62,6 +106,7 @@ async fn run_command_under_sandbox(
|
||||
config_overrides: CliConfigOverrides,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
sandbox_type: SandboxType,
|
||||
log_denials: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
let sandbox_mode = create_sandbox_mode(full_auto);
|
||||
let config = Config::load_with_cli_overrides(
|
||||
@@ -87,7 +132,70 @@ async fn run_command_under_sandbox(
|
||||
let stdio_policy = StdioPolicy::Inherit;
|
||||
let env = create_env(&config.shell_environment_policy);
|
||||
|
||||
// Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
|
||||
if let SandboxType::Windows = sandbox_type {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use codex_windows_sandbox::run_windows_sandbox_capture;
|
||||
|
||||
let policy_str = serde_json::to_string(&config.sandbox_policy)?;
|
||||
|
||||
let sandbox_cwd = sandbox_policy_cwd.clone();
|
||||
let cwd_clone = cwd.clone();
|
||||
let env_map = env.clone();
|
||||
let command_vec = command.clone();
|
||||
let base_dir = config.codex_home.clone();
|
||||
|
||||
// Preflight audit is invoked elsewhere at the appropriate times.
|
||||
let res = tokio::task::spawn_blocking(move || {
|
||||
run_windows_sandbox_capture(
|
||||
policy_str.as_str(),
|
||||
&sandbox_cwd,
|
||||
base_dir.as_path(),
|
||||
command_vec,
|
||||
&cwd_clone,
|
||||
env_map,
|
||||
None,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
let capture = match res {
|
||||
Ok(Ok(v)) => v,
|
||||
Ok(Err(err)) => {
|
||||
eprintln!("windows sandbox failed: {err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
Err(join_err) => {
|
||||
eprintln!("windows sandbox join error: {join_err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
if !capture.stdout.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stdout().write_all(&capture.stdout);
|
||||
}
|
||||
if !capture.stderr.is_empty() {
|
||||
use std::io::Write;
|
||||
let _ = std::io::stderr().write_all(&capture.stderr);
|
||||
}
|
||||
|
||||
std::process::exit(capture.exit_code);
|
||||
}
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
anyhow::bail!("Windows sandbox is only available on Windows");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let mut denial_logger = log_denials.then(DenialLogger::new).flatten();
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let _ = log_denials;
|
||||
|
||||
let mut child = match sandbox_type {
|
||||
#[cfg(target_os = "macos")]
|
||||
SandboxType::Seatbelt => {
|
||||
spawn_command_under_seatbelt(
|
||||
command,
|
||||
@@ -115,9 +223,31 @@ async fn run_command_under_sandbox(
|
||||
)
|
||||
.await?
|
||||
}
|
||||
SandboxType::Windows => {
|
||||
unreachable!("Windows sandbox should have been handled above");
|
||||
}
|
||||
};
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
if let Some(denial_logger) = &mut denial_logger {
|
||||
denial_logger.on_child_spawn(&child);
|
||||
}
|
||||
|
||||
let status = child.wait().await?;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
if let Some(denial_logger) = denial_logger {
|
||||
let denials = denial_logger.finish().await;
|
||||
eprintln!("\n=== Sandbox denials ===");
|
||||
if denials.is_empty() {
|
||||
eprintln!("None found.");
|
||||
} else {
|
||||
for seatbelt::SandboxDenial { name, capability } in denials {
|
||||
eprintln!("({name}) {capability}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handle_exit_status(status);
|
||||
}
|
||||
|
||||
|
||||
codex-rs/cli/src/debug_sandbox/pid_tracker.rs (new file, 372 lines)
@@ -0,0 +1,372 @@
|
||||
use std::collections::HashSet;
|
||||
use tokio::task::JoinHandle;
|
||||
use tracing::warn;
|
||||
|
||||
/// Tracks the (recursive) descendants of a process by using `kqueue` to watch for fork events, and
|
||||
/// `proc_listchildpids` to list the children of a process.
|
||||
pub(crate) struct PidTracker {
|
||||
kq: libc::c_int,
|
||||
handle: JoinHandle<HashSet<i32>>,
|
||||
}
|
||||
|
||||
impl PidTracker {
|
||||
pub(crate) fn new(root_pid: i32) -> Option<Self> {
|
||||
if root_pid <= 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let kq = unsafe { libc::kqueue() };
|
||||
let handle = tokio::task::spawn_blocking(move || track_descendants(kq, root_pid));
|
||||
|
||||
Some(Self { kq, handle })
|
||||
}
|
||||
|
||||
pub(crate) async fn stop(self) -> HashSet<i32> {
|
||||
trigger_stop_event(self.kq);
|
||||
self.handle.await.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
unsafe extern "C" {
|
||||
fn proc_listchildpids(
|
||||
ppid: libc::c_int,
|
||||
buffer: *mut libc::c_void,
|
||||
buffersize: libc::c_int,
|
||||
) -> libc::c_int;
|
||||
}
|
||||
|
||||
/// Wrap proc_listchildpids.
|
||||
fn list_child_pids(parent: i32) -> Vec<i32> {
|
||||
unsafe {
|
||||
let mut capacity: usize = 16;
|
||||
loop {
|
||||
let mut buf: Vec<i32> = vec![0; capacity];
|
||||
let count = proc_listchildpids(
|
||||
parent as libc::c_int,
|
||||
buf.as_mut_ptr() as *mut libc::c_void,
|
||||
(buf.len() * std::mem::size_of::<i32>()) as libc::c_int,
|
||||
);
|
||||
if count <= 0 {
|
||||
return Vec::new();
|
||||
}
|
||||
let returned = count as usize;
|
||||
if returned < capacity {
|
||||
buf.truncate(returned);
|
||||
return buf;
|
||||
}
|
||||
capacity = capacity.saturating_mul(2).max(returned + 16);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pid_is_alive(pid: i32) -> bool {
|
||||
if pid <= 0 {
|
||||
return false;
|
||||
}
|
||||
let res = unsafe { libc::kill(pid as libc::pid_t, 0) };
|
||||
if res == 0 {
|
||||
true
|
||||
} else {
|
||||
matches!(
|
||||
std::io::Error::last_os_error().raw_os_error(),
|
||||
Some(libc::EPERM)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
enum WatchPidError {
|
||||
ProcessGone,
|
||||
Other(std::io::Error),
|
||||
}
|
||||
|
||||
/// Add `pid` to the watch list in `kq`.
|
||||
fn watch_pid(kq: libc::c_int, pid: i32) -> Result<(), WatchPidError> {
|
||||
if pid <= 0 {
|
||||
return Err(WatchPidError::ProcessGone);
|
||||
}
|
||||
|
||||
let kev = libc::kevent {
|
||||
ident: pid as libc::uintptr_t,
|
||||
filter: libc::EVFILT_PROC,
|
||||
flags: libc::EV_ADD | libc::EV_CLEAR,
|
||||
fflags: libc::NOTE_FORK | libc::NOTE_EXEC | libc::NOTE_EXIT,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let res = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
if res < 0 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.raw_os_error() == Some(libc::ESRCH) {
|
||||
Err(WatchPidError::ProcessGone)
|
||||
} else {
|
||||
Err(WatchPidError::Other(err))
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn watch_children(
|
||||
kq: libc::c_int,
|
||||
parent: i32,
|
||||
seen: &mut HashSet<i32>,
|
||||
active: &mut HashSet<i32>,
|
||||
) {
|
||||
for child_pid in list_child_pids(parent) {
|
||||
add_pid_watch(kq, child_pid, seen, active);
|
||||
}
|
||||
}
|
||||
|
||||
/// Watch `pid` and its children, updating `seen` and `active` sets.
|
||||
fn add_pid_watch(kq: libc::c_int, pid: i32, seen: &mut HashSet<i32>, active: &mut HashSet<i32>) {
|
||||
if pid <= 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let newly_seen = seen.insert(pid);
|
||||
let mut should_recurse = newly_seen;
|
||||
|
||||
if active.insert(pid) {
|
||||
match watch_pid(kq, pid) {
|
||||
Ok(()) => {
|
||||
should_recurse = true;
|
||||
}
|
||||
Err(WatchPidError::ProcessGone) => {
|
||||
active.remove(&pid);
|
||||
return;
|
||||
}
|
||||
Err(WatchPidError::Other(err)) => {
|
||||
warn!("failed to watch pid {pid}: {err}");
|
||||
active.remove(&pid);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if should_recurse {
|
||||
watch_children(kq, pid, seen, active);
|
||||
}
|
||||
}
|
||||
const STOP_IDENT: libc::uintptr_t = 1;
|
||||
|
||||
fn register_stop_event(kq: libc::c_int) -> bool {
|
||||
let kev = libc::kevent {
|
||||
ident: STOP_IDENT,
|
||||
filter: libc::EVFILT_USER,
|
||||
flags: libc::EV_ADD | libc::EV_CLEAR,
|
||||
fflags: 0,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let res = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
res >= 0
|
||||
}
|
||||
|
||||
fn trigger_stop_event(kq: libc::c_int) {
|
||||
if kq < 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let kev = libc::kevent {
|
||||
ident: STOP_IDENT,
|
||||
filter: libc::EVFILT_USER,
|
||||
flags: 0,
|
||||
fflags: libc::NOTE_TRIGGER,
|
||||
data: 0,
|
||||
udata: std::ptr::null_mut(),
|
||||
};
|
||||
|
||||
let _ = unsafe { libc::kevent(kq, &kev, 1, std::ptr::null_mut(), 0, std::ptr::null()) };
|
||||
}
|
||||
|
||||
/// Put all of the above together to track all the descendants of `root_pid`.
|
||||
fn track_descendants(kq: libc::c_int, root_pid: i32) -> HashSet<i32> {
|
||||
if kq < 0 {
|
||||
let mut seen = HashSet::new();
|
||||
seen.insert(root_pid);
|
||||
return seen;
|
||||
}
|
||||
|
||||
if !register_stop_event(kq) {
|
||||
let mut seen = HashSet::new();
|
||||
seen.insert(root_pid);
|
||||
let _ = unsafe { libc::close(kq) };
|
||||
return seen;
|
||||
}
|
||||
|
||||
let mut seen: HashSet<i32> = HashSet::new();
|
||||
let mut active: HashSet<i32> = HashSet::new();
|
||||
|
||||
add_pid_watch(kq, root_pid, &mut seen, &mut active);
|
||||
|
||||
const EVENTS_CAP: usize = 32;
|
||||
let mut events: [libc::kevent; EVENTS_CAP] =
|
||||
unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
|
||||
|
||||
let mut stop_requested = false;
|
||||
loop {
|
||||
if active.is_empty() {
|
||||
if !pid_is_alive(root_pid) {
|
||||
break;
|
||||
}
|
||||
add_pid_watch(kq, root_pid, &mut seen, &mut active);
|
||||
if active.is_empty() {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let nev = unsafe {
|
||||
libc::kevent(
|
||||
kq,
|
||||
std::ptr::null::<libc::kevent>(),
|
||||
0,
|
||||
events.as_mut_ptr(),
|
||||
EVENTS_CAP as libc::c_int,
|
||||
std::ptr::null(),
|
||||
)
|
||||
};
|
||||
|
||||
if nev < 0 {
|
||||
let err = std::io::Error::last_os_error();
|
||||
if err.kind() == std::io::ErrorKind::Interrupted {
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if nev == 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
for ev in events.iter().take(nev as usize) {
|
||||
let pid = ev.ident as i32;
|
||||
|
||||
if ev.filter == libc::EVFILT_USER && ev.ident == STOP_IDENT {
|
||||
stop_requested = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (ev.flags & libc::EV_ERROR) != 0 {
|
||||
if ev.data == libc::ESRCH as isize {
|
||||
active.remove(&pid);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ev.fflags & libc::NOTE_FORK) != 0 {
|
||||
watch_children(kq, pid, &mut seen, &mut active);
|
||||
}
|
||||
|
||||
if (ev.fflags & libc::NOTE_EXIT) != 0 {
|
||||
active.remove(&pid);
|
||||
}
|
||||
}
|
||||
|
||||
if stop_requested {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let _ = unsafe { libc::close(kq) };
|
||||
|
||||
seen
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
|
||||
#[test]
|
||||
fn pid_is_alive_detects_current_process() {
|
||||
let pid = std::process::id() as i32;
|
||||
assert!(pid_is_alive(pid));
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[test]
|
||||
fn list_child_pids_includes_spawned_child() {
|
||||
let mut child = Command::new("/bin/sleep")
|
||||
.arg("5")
|
||||
.stdin(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn child process");
|
||||
|
||||
let child_pid = child.id() as i32;
|
||||
let parent_pid = std::process::id() as i32;
|
||||
|
||||
let mut found = false;
|
||||
for _ in 0..100 {
|
||||
if list_child_pids(parent_pid).contains(&child_pid) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
std::thread::sleep(Duration::from_millis(10));
|
||||
}
|
||||
|
||||
let _ = child.kill();
|
||||
let _ = child.wait();
|
||||
|
||||
assert!(found, "expected to find child pid {child_pid} in list");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[tokio::test]
|
||||
async fn pid_tracker_collects_spawned_children() {
|
||||
let tracker = PidTracker::new(std::process::id() as i32).expect("failed to create tracker");
|
||||
|
||||
let mut child = Command::new("/bin/sleep")
|
||||
.arg("0.1")
|
||||
.stdin(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn child process");
|
||||
|
||||
let child_pid = child.id() as i32;
|
||||
let parent_pid = std::process::id() as i32;
|
||||
|
||||
let _ = child.wait();
|
||||
|
||||
let seen = tracker.stop().await;
|
||||
|
||||
assert!(
|
||||
seen.contains(&parent_pid),
|
||||
"expected tracker to include parent pid {parent_pid}"
|
||||
);
|
||||
assert!(
|
||||
seen.contains(&child_pid),
|
||||
"expected tracker to include child pid {child_pid}"
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[tokio::test]
|
||||
async fn pid_tracker_collects_bash_subshell_descendants() {
|
||||
let tracker = PidTracker::new(std::process::id() as i32).expect("failed to create tracker");
|
||||
|
||||
let child = Command::new("/bin/bash")
|
||||
.arg("-c")
|
||||
.arg("(sleep 0.1 & echo $!; wait)")
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::null())
|
||||
.spawn()
|
||||
.expect("failed to spawn bash");
|
||||
|
||||
let output = child.wait_with_output().unwrap().stdout;
|
||||
let subshell_pid = String::from_utf8_lossy(&output)
|
||||
.trim()
|
||||
.parse::<i32>()
|
||||
.expect("failed to parse subshell pid");
|
||||
|
||||
let seen = tracker.stop().await;
|
||||
|
||||
assert!(
|
||||
seen.contains(&subshell_pid),
|
||||
"expected tracker to include subshell pid {subshell_pid}"
|
||||
);
|
||||
}
|
||||
}
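A minimal usage sketch for the tracker above, assuming macOS, a tokio runtime, and crate-internal access to PidTracker; the /bin/sh command is only illustrative.

#[cfg(target_os = "macos")]
async fn collect_descendants_sketch() -> std::collections::HashSet<i32> {
    // Spawn a child that forks once so there is something to observe.
    let mut child = tokio::process::Command::new("/bin/sh")
        .args(["-c", "sleep 0.1 & wait"])
        .spawn()
        .expect("failed to spawn child");
    // Begin watching as soon as the pid is known, before descendants can exit.
    let tracker = child
        .id()
        .and_then(|pid| PidTracker::new(pid as i32))
        .expect("failed to create tracker");
    let _ = child.wait().await;
    // stop() fires the EVFILT_USER stop event and returns every pid observed,
    // including the root pid itself.
    tracker.stop().await
}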
|
||||
codex-rs/cli/src/debug_sandbox/seatbelt.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
use std::collections::HashSet;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::process::Child;
|
||||
use tokio::task::JoinHandle;
|
||||
|
||||
use super::pid_tracker::PidTracker;
|
||||
|
||||
pub struct SandboxDenial {
|
||||
pub name: String,
|
||||
pub capability: String,
|
||||
}
|
||||
|
||||
pub struct DenialLogger {
|
||||
log_stream: Child,
|
||||
pid_tracker: Option<PidTracker>,
|
||||
log_reader: Option<JoinHandle<Vec<u8>>>,
|
||||
}
|
||||
|
||||
impl DenialLogger {
|
||||
pub(crate) fn new() -> Option<Self> {
|
||||
let mut log_stream = start_log_stream()?;
|
||||
let stdout = log_stream.stdout.take()?;
|
||||
let log_reader = tokio::spawn(async move {
|
||||
let mut reader = tokio::io::BufReader::new(stdout);
|
||||
let mut logs = Vec::new();
|
||||
let mut chunk = Vec::new();
|
||||
loop {
|
||||
match reader.read_until(b'\n', &mut chunk).await {
|
||||
Ok(0) | Err(_) => break,
|
||||
Ok(_) => {
|
||||
logs.extend_from_slice(&chunk);
|
||||
chunk.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
logs
|
||||
});
|
||||
|
||||
Some(Self {
|
||||
log_stream,
|
||||
pid_tracker: None,
|
||||
log_reader: Some(log_reader),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn on_child_spawn(&mut self, child: &Child) {
|
||||
if let Some(root_pid) = child.id() {
|
||||
self.pid_tracker = PidTracker::new(root_pid as i32);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn finish(mut self) -> Vec<SandboxDenial> {
|
||||
let pid_set = match self.pid_tracker {
|
||||
Some(tracker) => tracker.stop().await,
|
||||
None => Default::default(),
|
||||
};
|
||||
|
||||
if pid_set.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let _ = self.log_stream.kill().await;
|
||||
let _ = self.log_stream.wait().await;
|
||||
|
||||
let logs_bytes = match self.log_reader.take() {
|
||||
Some(handle) => handle.await.unwrap_or_default(),
|
||||
None => Vec::new(),
|
||||
};
|
||||
let logs = String::from_utf8_lossy(&logs_bytes);
|
||||
|
||||
let mut seen: HashSet<(String, String)> = HashSet::new();
|
||||
let mut denials: Vec<SandboxDenial> = Vec::new();
|
||||
for line in logs.lines() {
|
||||
if let Ok(json) = serde_json::from_str::<serde_json::Value>(line)
|
||||
&& let Some(msg) = json.get("eventMessage").and_then(|v| v.as_str())
|
||||
&& let Some((pid, name, capability)) = parse_message(msg)
|
||||
&& pid_set.contains(&pid)
|
||||
&& seen.insert((name.clone(), capability.clone()))
|
||||
{
|
||||
denials.push(SandboxDenial { name, capability });
|
||||
}
|
||||
}
|
||||
denials
|
||||
}
|
||||
}
|
||||
|
||||
fn start_log_stream() -> Option<Child> {
|
||||
use std::process::Stdio;
|
||||
|
||||
const PREDICATE: &str = r#"(((processID == 0) AND (senderImagePath CONTAINS "/Sandbox")) OR (subsystem == "com.apple.sandbox.reporting"))"#;
|
||||
|
||||
tokio::process::Command::new("log")
|
||||
.args(["stream", "--style", "ndjson", "--predicate", PREDICATE])
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::null())
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn parse_message(msg: &str) -> Option<(i32, String, String)> {
|
||||
// Example message:
|
||||
// Sandbox: processname(1234) deny(1) capability-name args...
|
||||
static RE: std::sync::OnceLock<regex_lite::Regex> = std::sync::OnceLock::new();
|
||||
let re = RE.get_or_init(|| {
|
||||
#[expect(clippy::unwrap_used)]
|
||||
regex_lite::Regex::new(r"^Sandbox:\s*(.+?)\((\d+)\)\s+deny\(.*?\)\s*(.+)$").unwrap()
|
||||
});
|
||||
|
||||
let (_, [name, pid_str, capability]) = re.captures(msg)?.extract();
|
||||
let pid = pid_str.trim().parse::<i32>().ok()?;
|
||||
Some((pid, name.to_string(), capability.to_string()))
|
||||
}
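A sketch of the shape parse_message() expects, using an invented denial line; the process name, pid, and capability values below are illustrative only.

#[cfg(test)]
mod parse_message_sketch {
    use super::parse_message;

    #[test]
    fn extracts_pid_name_and_capability() {
        let msg = "Sandbox: my-tool(4321) deny(1) file-read-data /etc/hosts";
        let (pid, name, capability) = parse_message(msg).expect("message should match");
        assert_eq!(pid, 4321);
        assert_eq!(name, "my-tool");
        assert_eq!(capability, "file-read-data /etc/hosts");
    }
}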
|
||||
@@ -11,6 +11,10 @@ pub struct SeatbeltCommand {
|
||||
#[arg(long = "full-auto", default_value_t = false)]
|
||||
pub full_auto: bool,
|
||||
|
||||
/// While the command runs, capture macOS sandbox denials via `log stream` and print them after exit
|
||||
#[arg(long = "log-denials", default_value_t = false)]
|
||||
pub log_denials: bool,
|
||||
|
||||
#[clap(skip)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
@@ -32,3 +36,17 @@ pub struct LandlockCommand {
|
||||
#[arg(trailing_var_arg = true)]
|
||||
pub command: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub struct WindowsCommand {
|
||||
/// Convenience alias for low-friction sandboxed automatic execution (network-disabled sandbox that can write to cwd and TMPDIR)
|
||||
#[arg(long = "full-auto", default_value_t = false)]
|
||||
pub full_auto: bool,
|
||||
|
||||
#[clap(skip)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
/// Full command args to run under Windows restricted token sandbox.
|
||||
#[arg(trailing_var_arg = true)]
|
||||
pub command: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use clap::Args;
|
||||
use clap::CommandFactory;
|
||||
use clap::Parser;
|
||||
use clap_complete::Shell;
|
||||
@@ -7,6 +8,7 @@ use codex_chatgpt::apply_command::ApplyCommand;
|
||||
use codex_chatgpt::apply_command::run_apply_command;
|
||||
use codex_cli::LandlockCommand;
|
||||
use codex_cli::SeatbeltCommand;
|
||||
use codex_cli::WindowsCommand;
|
||||
use codex_cli::login::read_api_key_from_stdin;
|
||||
use codex_cli::login::run_login_status;
|
||||
use codex_cli::login::run_login_with_api_key;
|
||||
@@ -16,17 +18,21 @@ use codex_cli::login::run_logout;
|
||||
use codex_cloud_tasks::Cli as CloudTasksCli;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_exec::Cli as ExecCli;
|
||||
use codex_execpolicy::ExecPolicyCheckCommand;
|
||||
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
|
||||
use codex_tui::AppExitInfo;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use codex_tui::updates::UpdateAction;
|
||||
use codex_tui::update_action::UpdateAction;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::path::PathBuf;
|
||||
use supports_color::Stream;
|
||||
|
||||
mod mcp_cmd;
|
||||
#[cfg(not(windows))]
|
||||
mod wsl_paths;
|
||||
|
||||
use crate::mcp_cmd::McpCli;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::features::is_known_feature_key;
|
||||
@@ -78,8 +84,8 @@ enum Subcommand {
|
||||
/// [experimental] Run the Codex MCP server (stdio transport).
|
||||
McpServer,
|
||||
|
||||
/// [experimental] Run the app server.
|
||||
AppServer,
|
||||
/// [experimental] Run the app server or related tooling.
|
||||
AppServer(AppServerCommand),
|
||||
|
||||
/// Generate shell completion scripts.
|
||||
Completion(CompletionCommand),
|
||||
@@ -88,6 +94,10 @@ enum Subcommand {
|
||||
#[clap(visible_alias = "debug")]
|
||||
Sandbox(SandboxArgs),
|
||||
|
||||
/// Execpolicy tooling.
|
||||
#[clap(hide = true)]
|
||||
Execpolicy(ExecpolicyCommand),
|
||||
|
||||
/// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
|
||||
#[clap(visible_alias = "a")]
|
||||
Apply(ApplyCommand),
|
||||
@@ -95,9 +105,6 @@ enum Subcommand {
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Internal: generate TypeScript protocol bindings.
|
||||
#[clap(hide = true)]
|
||||
GenerateTs(GenerateTsCommand),
|
||||
/// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
|
||||
#[clap(name = "cloud", alias = "cloud-tasks")]
|
||||
Cloud(CloudTasksCli),
|
||||
@@ -132,6 +139,10 @@ struct ResumeCommand {
|
||||
#[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
|
||||
last: bool,
|
||||
|
||||
/// Show all sessions (disables cwd filtering and shows CWD column).
|
||||
#[arg(long = "all", default_value_t = false)]
|
||||
all: bool,
|
||||
|
||||
#[clap(flatten)]
|
||||
config_overrides: TuiCli,
|
||||
}
|
||||
@@ -151,6 +162,22 @@ enum SandboxCommand {
|
||||
/// Run a command under Landlock+seccomp (Linux only).
|
||||
#[clap(visible_alias = "landlock")]
|
||||
Linux(LandlockCommand),
|
||||
|
||||
/// Run a command under Windows restricted token (Windows only).
|
||||
Windows(WindowsCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct ExecpolicyCommand {
|
||||
#[command(subcommand)]
|
||||
sub: ExecpolicySubcommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum ExecpolicySubcommand {
|
||||
/// Check execpolicy files against a command.
|
||||
#[clap(name = "check")]
|
||||
Check(ExecPolicyCheckCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -201,6 +228,22 @@ struct LogoutCommand {
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct AppServerCommand {
|
||||
/// Omit to run the app server; specify a subcommand for tooling.
|
||||
#[command(subcommand)]
|
||||
subcommand: Option<AppServerSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum AppServerSubcommand {
|
||||
/// [experimental] Generate TypeScript bindings for the app server protocol.
|
||||
GenerateTs(GenerateTsCommand),
|
||||
|
||||
/// [experimental] Generate JSON Schema for the app server protocol.
|
||||
GenerateJsonSchema(GenerateJsonSchemaCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateTsCommand {
|
||||
/// Output directory where .ts files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
@@ -211,6 +254,13 @@ struct GenerateTsCommand {
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
struct GenerateJsonSchemaCommand {
|
||||
/// Output directory where the schema bundle will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct StdioToUdsCommand {
|
||||
/// Path to the Unix domain socket to connect to.
|
||||
@@ -263,10 +313,30 @@ fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> {
|
||||
/// Run the update action and print the result.
|
||||
fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
|
||||
println!();
|
||||
let (cmd, args) = action.command_args();
|
||||
let cmd_str = action.command_str();
|
||||
println!("Updating Codex via `{cmd_str}`...");
|
||||
let status = std::process::Command::new(cmd).args(args).status()?;
|
||||
|
||||
let status = {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// On Windows, run via cmd.exe so .CMD/.BAT are correctly resolved (PATHEXT semantics).
|
||||
std::process::Command::new("cmd")
|
||||
.args(["/C", &cmd_str])
|
||||
.status()?
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
let (cmd, args) = action.command_args();
|
||||
let command_path = crate::wsl_paths::normalize_for_wsl(cmd);
|
||||
let normalized_args: Vec<String> = args
|
||||
.iter()
|
||||
.map(crate::wsl_paths::normalize_for_wsl)
|
||||
.collect();
|
||||
std::process::Command::new(&command_path)
|
||||
.args(&normalized_args)
|
||||
.status()?
|
||||
}
|
||||
};
|
||||
if !status.success() {
|
||||
anyhow::bail!("`{cmd_str}` failed with status {status}");
|
||||
}
|
||||
@@ -275,6 +345,10 @@ fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_execpolicycheck(cmd: ExecPolicyCheckCommand) -> anyhow::Result<()> {
|
||||
cmd.run()
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Parser, Clone)]
|
||||
struct FeatureToggles {
|
||||
/// Enable a feature (repeatable). Equivalent to `-c features.<name>=true`.
|
||||
@@ -383,12 +457,24 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::AppServer(app_server_cli)) => match app_server_cli.subcommand {
|
||||
None => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateTs(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_ts(
|
||||
&gen_cli.out_dir,
|
||||
gen_cli.prettier.as_deref(),
|
||||
)?;
|
||||
}
|
||||
Some(AppServerSubcommand::GenerateJsonSchema(gen_cli)) => {
|
||||
codex_app_server_protocol::generate_json(&gen_cli.out_dir)?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
all,
|
||||
config_overrides,
|
||||
})) => {
|
||||
interactive = finalize_resume_interactive(
|
||||
@@ -396,6 +482,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
root_config_overrides.clone(),
|
||||
session_id,
|
||||
last,
|
||||
all,
|
||||
config_overrides,
|
||||
);
|
||||
let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
|
||||
@@ -472,6 +559,20 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
SandboxCommand::Windows(mut windows_cli) => {
|
||||
prepend_config_flags(
|
||||
&mut windows_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_windows(
|
||||
windows_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Execpolicy(ExecpolicyCommand { sub })) => match sub {
|
||||
ExecpolicySubcommand::Check(cmd) => run_execpolicycheck(cmd)?,
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(
|
||||
@@ -489,21 +590,24 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
tokio::task::spawn_blocking(move || codex_stdio_to_uds::run(socket_path.as_path()))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
|
||||
FeaturesSubcommand::List => {
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
let mut cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(|e| anyhow::anyhow!(e))?;
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
// Honor `--search` via the new feature toggle.
|
||||
if interactive.web_search {
|
||||
cli_kv_overrides.push((
|
||||
"features.web_search_request".to_string(),
|
||||
toml::Value::Boolean(true),
|
||||
));
|
||||
}
|
||||
|
||||
// Thread through relevant top-level flags (at minimum, `--profile`).
|
||||
// Also honor `--search` since it maps to a feature toggle.
|
||||
let overrides = ConfigOverrides {
|
||||
config_profile: interactive.config_profile.clone(),
|
||||
tools_web_search_request: interactive.web_search.then_some(true),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
@@ -538,6 +642,7 @@ fn finalize_resume_interactive(
|
||||
root_config_overrides: CliConfigOverrides,
|
||||
session_id: Option<String>,
|
||||
last: bool,
|
||||
show_all: bool,
|
||||
resume_cli: TuiCli,
|
||||
) -> TuiCli {
|
||||
// Start with the parsed interactive CLI so resume shares the same
|
||||
@@ -546,6 +651,7 @@ fn finalize_resume_interactive(
|
||||
interactive.resume_picker = resume_session_id.is_none() && !last;
|
||||
interactive.resume_last = last;
|
||||
interactive.resume_session_id = resume_session_id;
|
||||
interactive.resume_show_all = show_all;
|
||||
|
||||
// Merge resume-scoped flags and overrides with highest precedence.
|
||||
merge_resume_cli_flags(&mut interactive, resume_cli);
|
||||
@@ -629,13 +735,21 @@ mod tests {
|
||||
let Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
all,
|
||||
config_overrides: resume_cli,
|
||||
}) = subcommand.expect("resume present")
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
finalize_resume_interactive(interactive, root_overrides, session_id, last, resume_cli)
|
||||
finalize_resume_interactive(
|
||||
interactive,
|
||||
root_overrides,
|
||||
session_id,
|
||||
last,
|
||||
all,
|
||||
resume_cli,
|
||||
)
|
||||
}
|
||||
|
||||
fn sample_exit_info(conversation: Option<&str>) -> AppExitInfo {
|
||||
@@ -688,9 +802,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn resume_model_flag_applies_when_no_root_flags() {
|
||||
let interactive = finalize_from_args(["codex", "resume", "-m", "gpt-5-test"].as_ref());
|
||||
let interactive = finalize_from_args(["codex", "resume", "-m", "gpt-5.1-test"].as_ref());
|
||||
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5.1-test"));
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
@@ -702,6 +816,7 @@ mod tests {
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
assert!(!interactive.resume_show_all);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -710,6 +825,7 @@ mod tests {
|
||||
assert!(!interactive.resume_picker);
|
||||
assert!(interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
assert!(!interactive.resume_show_all);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -718,6 +834,14 @@ mod tests {
|
||||
assert!(!interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id.as_deref(), Some("1234"));
|
||||
assert!(!interactive.resume_show_all);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_all_flag_sets_show_all() {
|
||||
let interactive = finalize_from_args(["codex", "resume", "--all"].as_ref());
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(interactive.resume_show_all);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -735,7 +859,7 @@ mod tests {
|
||||
"--ask-for-approval",
|
||||
"on-request",
|
||||
"-m",
|
||||
"gpt-5-test",
|
||||
"gpt-5.1-test",
|
||||
"-p",
|
||||
"my-profile",
|
||||
"-C",
|
||||
@@ -746,7 +870,7 @@ mod tests {
|
||||
.as_ref(),
|
||||
);
|
||||
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5.1-test"));
|
||||
assert!(interactive.oss);
|
||||
assert_eq!(interactive.config_profile.as_deref(), Some("my-profile"));
|
||||
assert_matches!(
|
||||
|
||||
@@ -9,11 +9,11 @@ use codex_common::CliConfigOverrides;
|
||||
use codex_common::format_env_display::format_env_display;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::edit::ConfigEditsBuilder;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerConfig;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::mcp::auth::compute_auth_statuses;
|
||||
use codex_core::protocol::McpAuthStatus;
|
||||
@@ -79,6 +79,7 @@ pub struct GetArgs {
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
#[command(override_usage = "codex mcp add [OPTIONS] <NAME> (--url <URL> | -- <COMMAND>...)")]
|
||||
pub struct AddArgs {
|
||||
/// Name for the MCP server configuration.
|
||||
pub name: String,
|
||||
@@ -196,7 +197,9 @@ impl McpCli {
|
||||
|
||||
async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
|
||||
// Validate any provided overrides even though they are not currently applied.
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -263,7 +266,10 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
|
||||
servers.insert(name.clone(), new_entry);
|
||||
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply()
|
||||
.await
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
|
||||
println!("Added global MCP server '{name}'.");
|
||||
@@ -307,7 +313,9 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
|
||||
}
|
||||
|
||||
async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
let RemoveArgs { name } = remove_args;
|
||||
|
||||
@@ -321,7 +329,10 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
|
||||
let removed = servers.remove(&name).is_some();
|
||||
|
||||
if removed {
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
ConfigEditsBuilder::new(&codex_home)
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply()
|
||||
.await
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
}
|
||||
|
||||
@@ -335,14 +346,16 @@ async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveAr
|
||||
}
|
||||
|
||||
async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
if !config.features.enabled(Feature::RmcpClient) {
|
||||
bail!(
|
||||
"OAuth login is only supported when experimental_use_rmcp_client is true in config.toml."
|
||||
"OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -376,7 +389,9 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
|
||||
}
|
||||
|
||||
async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -403,7 +418,9 @@ async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutAr
|
||||
}
|
||||
|
||||
async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
@@ -658,7 +675,9 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
|
||||
}
|
||||
|
||||
async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let overrides = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
codex-rs/cli/src/wsl_paths.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
use std::ffi::OsStr;
|
||||
|
||||
/// WSL-specific path helpers used by the updater logic.
|
||||
///
|
||||
/// See https://github.com/openai/codex/issues/6086.
|
||||
pub fn is_wsl() -> bool {
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
if std::env::var_os("WSL_DISTRO_NAME").is_some() {
|
||||
return true;
|
||||
}
|
||||
match std::fs::read_to_string("/proc/version") {
|
||||
Ok(version) => version.to_lowercase().contains("microsoft"),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a Windows absolute path (`C:\foo\bar` or `C:/foo/bar`) to a WSL mount path (`/mnt/c/foo/bar`).
|
||||
/// Returns `None` if the input does not look like a Windows drive path.
|
||||
pub fn win_path_to_wsl(path: &str) -> Option<String> {
|
||||
let bytes = path.as_bytes();
|
||||
if bytes.len() < 3
|
||||
|| bytes[1] != b':'
|
||||
|| !(bytes[2] == b'\\' || bytes[2] == b'/')
|
||||
|| !bytes[0].is_ascii_alphabetic()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
let drive = (bytes[0] as char).to_ascii_lowercase();
|
||||
let tail = path[3..].replace('\\', "/");
|
||||
if tail.is_empty() {
|
||||
return Some(format!("/mnt/{drive}"));
|
||||
}
|
||||
Some(format!("/mnt/{drive}/{tail}"))
|
||||
}
|
||||
|
||||
/// If under WSL and given a Windows-style path, return the equivalent `/mnt/<drive>/…` path.
|
||||
/// Otherwise returns the input unchanged.
|
||||
pub fn normalize_for_wsl<P: AsRef<OsStr>>(path: P) -> String {
|
||||
let value = path.as_ref().to_string_lossy().to_string();
|
||||
if !is_wsl() {
|
||||
return value;
|
||||
}
|
||||
if let Some(mapped) = win_path_to_wsl(&value) {
|
||||
return mapped;
|
||||
}
|
||||
value
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn win_to_wsl_basic() {
|
||||
assert_eq!(
|
||||
win_path_to_wsl(r"C:\Temp\codex.zip").as_deref(),
|
||||
Some("/mnt/c/Temp/codex.zip")
|
||||
);
|
||||
assert_eq!(
|
||||
win_path_to_wsl("D:/Work/codex.tgz").as_deref(),
|
||||
Some("/mnt/d/Work/codex.tgz")
|
||||
);
|
||||
assert!(win_path_to_wsl("/home/user/codex").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_is_noop_on_unix_paths() {
|
||||
assert_eq!(normalize_for_wsl("/home/u/x"), "/home/u/x");
|
||||
}
|
||||
}
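A short sketch of the intended call pattern from the updater; the npm command and the Windows install path here are illustrative assumptions.

fn normalized_update_invocation_sketch() -> std::process::Command {
    // Under WSL a Windows-style path such as C:\Program Files\nodejs\npm is
    // rewritten to /mnt/c/Program Files/nodejs/npm; elsewhere it passes through.
    let cmd = normalize_for_wsl(r"C:\Program Files\nodejs\npm");
    let args: Vec<String> = ["install", "-g", "@openai/codex"]
        .iter()
        .map(normalize_for_wsl)
        .collect();
    let mut command = std::process::Command::new(cmd);
    command.args(args);
    command
}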
|
||||
codex-rs/cli/tests/execpolicy.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
use std::fs;
|
||||
|
||||
use assert_cmd::Command;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn execpolicy_check_matches_expected_json() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let policy_path = codex_home.path().join("policy.codexpolicy");
|
||||
fs::write(
|
||||
&policy_path,
|
||||
r#"
|
||||
prefix_rule(
|
||||
pattern = ["git", "push"],
|
||||
decision = "forbidden",
|
||||
)
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let output = Command::cargo_bin("codex")?
|
||||
.env("CODEX_HOME", codex_home.path())
|
||||
.args([
|
||||
"execpolicy",
|
||||
"check",
|
||||
"--policy",
|
||||
policy_path
|
||||
.to_str()
|
||||
.expect("policy path should be valid UTF-8"),
|
||||
"git",
|
||||
"push",
|
||||
"origin",
|
||||
"main",
|
||||
])
|
||||
.output()?;
|
||||
|
||||
assert!(output.status.success());
|
||||
let result: serde_json::Value = serde_json::from_slice(&output.stdout)?;
|
||||
assert_eq!(
|
||||
result,
|
||||
json!({
|
||||
"match": {
|
||||
"decision": "forbidden",
|
||||
"matchedRules": [
|
||||
{
|
||||
"prefixRuleMatch": {
|
||||
"matchedPrefix": ["git", "push"],
|
||||
"decision": "forbidden"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -2,7 +2,7 @@ use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::config::edit::ConfigEditsBuilder;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::config::types::McpServerTransportConfig;
|
||||
use predicates::prelude::PredicateBooleanExt;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -59,7 +59,9 @@ async fn list_and_get_render_expected_output() -> Result<()> {
|
||||
}
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
}
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
ConfigEditsBuilder::new(codex_home.path())
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply_blocking()?;
|
||||
|
||||
let mut list_cmd = codex_command(codex_home.path())?;
|
||||
let list_output = list_cmd.args(["mcp", "list"]).output()?;
|
||||
@@ -149,7 +151,9 @@ async fn get_disabled_server_shows_single_line() -> Result<()> {
|
||||
.get_mut("docs")
|
||||
.expect("docs server should exist after add");
|
||||
docs.enabled = false;
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
ConfigEditsBuilder::new(codex_home.path())
|
||||
.replace_mcp_servers(&servers)
|
||||
.apply_blocking()?;
|
||||
|
||||
let mut get_cmd = codex_command(codex_home.path())?;
|
||||
let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
|
||||
|
||||
@@ -22,6 +22,6 @@ chrono = { version = "0.4", features = ["serde"] }
diffy = "0.4.2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.12"
thiserror = "2.0.17"
codex-backend-client = { path = "../backend-client", optional = true }
codex-git-apply = { path = "../git-apply" }
codex-git = { workspace = true }

@@ -362,13 +362,13 @@ mod api {
|
||||
});
|
||||
}
|
||||
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
let req = codex_git::ApplyGitRequest {
|
||||
cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
|
||||
diff: diff.clone(),
|
||||
revert: false,
|
||||
preflight,
|
||||
};
|
||||
let r = codex_git_apply::apply_git_patch(&req)
|
||||
let r = codex_git::apply_git_patch(&req)
|
||||
.map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;
|
||||
|
||||
let status = if r.exit_code == 0 {
|
||||
|
||||
@@ -26,4 +26,4 @@ pub use mock::MockClient;
|
||||
#[cfg(feature = "online")]
|
||||
pub use http::HttpClient;
|
||||
|
||||
// Reusable apply engine now lives in the shared crate `codex-git-apply`.
|
||||
// Reusable apply engine now lives in the shared crate `codex-git`.
|
||||
|
||||
@@ -8,6 +8,7 @@ pub mod util;
|
||||
pub use cli::Cli;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use codex_login::AuthManager;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
@@ -56,20 +57,8 @@ async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext>
|
||||
};
|
||||
append_error_log(format!("startup: base_url={base_url} path_style={style}"));
|
||||
|
||||
let auth = match codex_core::config::find_codex_home()
|
||||
.ok()
|
||||
.map(|home| {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
codex_login::AuthManager::new(home, false, store_mode)
|
||||
})
|
||||
.and_then(|am| am.auth())
|
||||
{
|
||||
let auth_manager = util::load_auth_manager().await;
|
||||
let auth = match auth_manager.as_ref().and_then(AuthManager::auth) {
|
||||
Some(auth) => auth,
|
||||
None => {
|
||||
eprintln!(
|
||||
@@ -1044,7 +1033,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Close task modal/pending apply if present before opening env modal
|
||||
app.diff_overlay = None;
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch {
|
||||
app.env_loading = true;
|
||||
@@ -1115,7 +1104,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
let _ = tx.send(evt);
|
||||
});
|
||||
} else {
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
app.status = "No environment selected".to_string();
|
||||
}
|
||||
}
|
||||
needs_redraw = true;
|
||||
@@ -1313,18 +1302,6 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
// Environment modal key handling
|
||||
match key.code {
|
||||
KeyCode::Esc => { app.env_modal = None; needs_redraw = true; }
|
||||
KeyCode::Char('r') | KeyCode::Char('R') => {
|
||||
// Trigger refresh of environments
|
||||
app.env_loading = true; app.env_error = None; needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
let tx = tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()));
|
||||
let headers = crate::util::build_chatgpt_headers().await;
|
||||
let res = crate::env_detect::list_environments(&base_url, &headers).await;
|
||||
let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res));
|
||||
});
|
||||
}
|
||||
KeyCode::Char(ch) if !key.modifiers.contains(KeyModifiers::CONTROL) && !key.modifiers.contains(KeyModifiers::ALT) => {
|
||||
if let Some(m) = app.env_modal.as_mut() { m.query.push(ch); }
|
||||
needs_redraw = true;
|
||||
@@ -1431,7 +1408,7 @@ pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> an
|
||||
}
|
||||
KeyCode::Char('o') | KeyCode::Char('O') => {
|
||||
app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 });
|
||||
// Cache environments until user explicitly refreshes with 'r' inside the modal.
|
||||
// Cache environments while the modal is open to avoid repeated fetches.
|
||||
let should_fetch = app.environments.is_empty();
|
||||
if should_fetch { app.env_loading = true; app.env_error = None; }
|
||||
needs_redraw = true;
|
||||
@@ -1744,6 +1721,7 @@ mod tests {
|
||||
use ratatui::layout::Rect;
|
||||
|
||||
#[test]
|
||||
#[ignore = "very slow"]
|
||||
fn composer_input_renders_typed_characters() {
|
||||
let mut composer = ComposerInput::new();
|
||||
let key = KeyEvent::new(KeyCode::Char('a'), KeyModifiers::NONE);
|
||||
|
||||
@@ -945,9 +945,7 @@ pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) {
|
||||
|
||||
// Subheader with usage hints (dim cyan)
|
||||
let subheader = Paragraph::new(Line::from(
|
||||
"Type to search, Enter select, Esc cancel; r refresh"
|
||||
.cyan()
|
||||
.dim(),
|
||||
"Type to search, Enter select, Esc cancel".cyan().dim(),
|
||||
))
|
||||
.wrap(Wrap { trim: true });
|
||||
frame.render_widget(subheader, rows[0]);
|
||||
|
||||
@@ -2,6 +2,10 @@ use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_login::AuthManager;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
guard.replace(suffix.to_string());
|
||||
@@ -54,6 +58,18 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub async fn load_auth_manager() -> Option<AuthManager> {
|
||||
// TODO: pass in cli overrides once cloud tasks properly support them.
|
||||
let config = Config::load_with_cli_overrides(Vec::new(), ConfigOverrides::default())
|
||||
.await
|
||||
.ok()?;
|
||||
Some(AuthManager::new(
|
||||
config.codex_home,
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
))
|
||||
}
|
||||
|
||||
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
|
||||
/// and optional `ChatGPT-Account-Id`.
|
||||
pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
@@ -69,31 +85,22 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
USER_AGENT,
|
||||
HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
|
||||
);
|
||||
if let Ok(home) = codex_core::config::find_codex_home() {
|
||||
let store_mode = codex_core::config::Config::load_from_base_config_with_overrides(
|
||||
codex_core::config::ConfigToml::default(),
|
||||
codex_core::config::ConfigOverrides::default(),
|
||||
home.clone(),
|
||||
)
|
||||
.map(|cfg| cfg.cli_auth_credentials_store_mode)
|
||||
.unwrap_or_default();
|
||||
let am = codex_login::AuthManager::new(home, false, store_mode);
|
||||
if let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
if let Some(am) = load_auth_manager().await
|
||||
&& let Some(auth) = am.auth()
|
||||
&& let Ok(tok) = auth.get_token().await
|
||||
&& !tok.is_empty()
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
let v = format!("Bearer {tok}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&v) {
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = auth
|
||||
.get_account_id()
|
||||
.or_else(|| extract_chatgpt_account_id(&tok))
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(&acc)
|
||||
{
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
}
|
||||
headers
|
||||
|
||||
@@ -16,3 +16,6 @@ path = "src/lib.rs"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"

[package.metadata.cargo-shear]
ignored = ["serde_with"]

@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* codex-backend
|
||||
*
|
||||
* codex-backend
|
||||
*
|
||||
* The version of the OpenAPI document: 0.0.1
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct CreditStatusDetails {
|
||||
#[serde(rename = "has_credits")]
|
||||
pub has_credits: bool,
|
||||
#[serde(rename = "unlimited")]
|
||||
pub unlimited: bool,
|
||||
#[serde(
|
||||
rename = "balance",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub balance: Option<Option<String>>,
|
||||
#[serde(
|
||||
rename = "approx_local_messages",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub approx_local_messages: Option<Option<Vec<serde_json::Value>>>,
|
||||
#[serde(
|
||||
rename = "approx_cloud_messages",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub approx_cloud_messages: Option<Option<Vec<serde_json::Value>>>,
|
||||
}
|
||||
|
||||
impl CreditStatusDetails {
|
||||
pub fn new(has_credits: bool, unlimited: bool) -> CreditStatusDetails {
|
||||
CreditStatusDetails {
|
||||
has_credits,
|
||||
unlimited,
|
||||
balance: None,
|
||||
approx_local_messages: None,
|
||||
approx_cloud_messages: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -32,3 +32,6 @@ pub use self::rate_limit_status_details::RateLimitStatusDetails;
|
||||
|
||||
pub mod rate_limit_window_snapshot;
|
||||
pub use self::rate_limit_window_snapshot::RateLimitWindowSnapshot;
|
||||
|
||||
pub mod credit_status_details;
|
||||
pub use self::credit_status_details::CreditStatusDetails;
|
||||
|
||||
@@ -23,6 +23,13 @@ pub struct RateLimitStatusPayload {
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub rate_limit: Option<Option<Box<models::RateLimitStatusDetails>>>,
|
||||
#[serde(
|
||||
rename = "credits",
|
||||
default,
|
||||
with = "::serde_with::rust::double_option",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub credits: Option<Option<Box<models::CreditStatusDetails>>>,
|
||||
}
|
||||
|
||||
impl RateLimitStatusPayload {
|
||||
@@ -30,12 +37,15 @@ impl RateLimitStatusPayload {
|
||||
RateLimitStatusPayload {
|
||||
plan_type,
|
||||
rate_limit: None,
|
||||
credits: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum PlanType {
|
||||
#[serde(rename = "guest")]
|
||||
Guest,
|
||||
#[serde(rename = "free")]
|
||||
Free,
|
||||
#[serde(rename = "go")]
|
||||
@@ -44,6 +54,8 @@ pub enum PlanType {
|
||||
Plus,
|
||||
#[serde(rename = "pro")]
|
||||
Pro,
|
||||
#[serde(rename = "free_workspace")]
|
||||
FreeWorkspace,
|
||||
#[serde(rename = "team")]
|
||||
Team,
|
||||
#[serde(rename = "business")]
|
||||
@@ -52,6 +64,8 @@ pub enum PlanType {
|
||||
Education,
|
||||
#[serde(rename = "quorum")]
|
||||
Quorum,
|
||||
#[serde(rename = "k12")]
|
||||
K12,
|
||||
#[serde(rename = "enterprise")]
|
||||
Enterprise,
|
||||
#[serde(rename = "edu")]
|
||||
@@ -60,6 +74,6 @@ pub enum PlanType {
|
||||
|
||||
impl Default for PlanType {
|
||||
fn default() -> PlanType {
|
||||
Self::Free
|
||||
Self::Guest
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
*
|
||||
* Generated by: https://openapi-generator.tech
|
||||
*/
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
|
||||
@@ -8,9 +8,12 @@ workspace = true

[dependencies]
clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true }
codex-lmstudio = { workspace = true }
codex-ollama = { workspace = true }
codex-protocol = { workspace = true }
once_cell = { workspace = true }
serde = { workspace = true, optional = true }
toml = { workspace = true, optional = true }

@@ -24,21 +24,21 @@ pub fn builtin_approval_presets() -> Vec<ApprovalPreset> {
    ApprovalPreset {
        id: "read-only",
        label: "Read Only",
        description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network.",
        description: "Requires approval to edit files and run commands.",
        approval: AskForApproval::OnRequest,
        sandbox: SandboxPolicy::ReadOnly,
    },
    ApprovalPreset {
        id: "auto",
        label: "Auto",
        description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network.",
        label: "Agent",
        description: "Read and edit files, and run commands.",
        approval: AskForApproval::OnRequest,
        sandbox: SandboxPolicy::new_workspace_write_policy(),
    },
    ApprovalPreset {
        id: "full-access",
        label: "Full Access",
        description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution.",
        label: "Agent (full access)",
        description: "Codex can edit files outside this workspace and run commands with network access. Exercise caution when using.",
        approval: AskForApproval::Never,
        sandbox: SandboxPolicy::DangerFullAccess,
    },

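Each `ApprovalPreset` above pairs an `AskForApproval` policy with a `SandboxPolicy` under a stable `id`. A minimal sketch of how a caller might resolve a preset chosen in the UI (hypothetical helper, not from this diff):

    // Hypothetical lookup by the stable preset id ("read-only", "auto", "full-access").
    fn find_approval_preset(id: &str) -> Option<ApprovalPreset> {
        builtin_approval_presets()
            .into_iter()
            .find(|preset| preset.id == id)
    }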
@@ -19,8 +19,8 @@ use toml::Value;
pub struct CliConfigOverrides {
    /// Override a configuration value that would otherwise be loaded from
    /// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
    /// nested values. The `value` portion is parsed as JSON. If it fails to
    /// parse as JSON, the raw string is used as a literal.
    /// nested values. The `value` portion is parsed as TOML. If it fails to
    /// parse as TOML, the raw string is used as a literal.
    ///
    /// Examples:
    /// - `-c model="o3"`
@@ -59,7 +59,7 @@ impl CliConfigOverrides {
            return Err(format!("Empty key in override: {s}"));
        }

        // Attempt to parse as JSON. If that fails, treat it as a raw
        // Attempt to parse as TOML. If that fails, treat it as a raw
        // string. This allows convenient usage such as
        // `-c model=o3` without the quotes.
        let value: Value = match parse_toml_value(value_str) {
@@ -151,6 +151,15 @@ mod tests {
        assert_eq!(v.as_integer(), Some(42));
    }

    #[test]
    fn parses_bool() {
        let true_literal = parse_toml_value("true").expect("parse");
        assert_eq!(true_literal.as_bool(), Some(true));

        let false_literal = parse_toml_value("false").expect("parse");
        assert_eq!(false_literal.as_bool(), Some(false));
    }

    #[test]
    fn fails_on_unquoted_string() {
        assert!(parse_toml_value("hello").is_err());

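The tests above pin down the `-c key=value` behavior: bare TOML literals such as `42` or `true` keep their type, while an unquoted word like `hello` fails TOML parsing and the caller keeps it as a raw string. A rough sketch of how such a `parse_toml_value` helper could work (assumed implementation, not the one in this diff):

    use std::str::FromStr;
    use toml::Value;

    // Assumed approach: wrap the raw value in a dummy key so bare literals form a
    // valid TOML document, then pull the typed value back out. `42` -> Integer,
    // `true` -> Boolean, `hello` -> Err (caller then falls back to a raw string).
    fn parse_toml_value(raw: &str) -> Result<Value, toml::de::Error> {
        let doc = Value::from_str(&format!("value = {raw}"))?;
        Ok(doc["value"].clone())
    }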
@@ -15,13 +15,12 @@ pub fn create_config_summary_entries(config: &Config) -> Vec<(&'static str, Stri
    if config.model_provider.wire_api == WireApi::Responses
        && config.model_family.supports_reasoning_summaries
    {
        entries.push((
            "reasoning effort",
            config
                .model_reasoning_effort
                .map(|effort| effort.to_string())
                .unwrap_or_else(|| "none".to_string()),
        ));
        let reasoning_effort = config
            .model_reasoning_effort
            .or(config.model_family.default_reasoning_effort)
            .map(|effort| effort.to_string())
            .unwrap_or_else(|| "none".to_string());
        entries.push(("reasoning effort", reasoning_effort));
        entries.push((
            "reasoning summaries",
            config.model_reasoning_summary.to_string(),

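The rewritten block changes what the summary reports when no explicit reasoning effort is configured: it now falls back to the model family's default before printing "none". A standalone illustration of that precedence, with assumed plain-string values:

    // Precedence sketch: explicit override > model-family default > "none".
    fn summarized_effort(explicit: Option<String>, family_default: Option<String>) -> String {
        explicit
            .or(family_default)
            .unwrap_or_else(|| "none".to_string())
    }

    // summarized_effort(Some("high".into()), Some("medium".into())) == "high"
    // summarized_effort(None, Some("medium".into())) == "medium"
    // summarized_effort(None, None) == "none"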
@@ -37,3 +37,5 @@ pub mod model_presets;
// Shared approval presets (AskForApproval + Sandbox) used by TUI and MCP server
// Not to be confused with AskForApproval, which we should probably rename to EscalationPolicy.
pub mod approval_presets;
// Shared OSS provider utilities used by TUI and exec
pub mod oss;

@@ -1,5 +1,12 @@
use std::collections::HashMap;

use codex_app_server_protocol::AuthMode;
use codex_core::protocol_config_types::ReasoningEffort;
use once_cell::sync::Lazy;

pub const HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG: &str = "hide_gpt5_1_migration_prompt";
pub const HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG: &str =
    "hide_gpt-5.1-codex-max_migration_prompt";

/// A reasoning effort option that can be surfaced for a model.
#[derive(Debug, Clone, Copy)]
@@ -10,8 +17,15 @@ pub struct ReasoningEffortPreset {
    pub description: &'static str,
}

#[derive(Debug, Clone)]
pub struct ModelUpgrade {
    pub id: &'static str,
    pub reasoning_effort_mapping: Option<HashMap<ReasoningEffort, ReasoningEffort>>,
    pub migration_config_key: &'static str,
}

/// Metadata describing a Codex-supported model.
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone)]
pub struct ModelPreset {
    /// Stable identifier for the preset.
    pub id: &'static str,
@@ -27,70 +41,243 @@ pub struct ModelPreset {
    pub supported_reasoning_efforts: &'static [ReasoningEffortPreset],
    /// Whether this is the default model for new users.
    pub is_default: bool,
    /// recommended upgrade model
    pub upgrade: Option<ModelUpgrade>,
    /// Whether this preset should appear in the picker UI.
    pub show_in_picker: bool,
}

const PRESETS: &[ModelPreset] = &[
    ModelPreset {
        id: "gpt-5-codex",
        model: "gpt-5-codex",
        display_name: "gpt-5-codex",
        description: "Optimized for coding tasks with many tools.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Low,
                description: "Fastest responses with limited reasoning",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Dynamically adjusts reasoning based on the task",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: true,
    },
    ModelPreset {
        id: "gpt-5",
        model: "gpt-5",
        display_name: "gpt-5",
        description: "Broad world knowledge with strong general reasoning.",
        default_reasoning_effort: ReasoningEffort::Medium,
        supported_reasoning_efforts: &[
            ReasoningEffortPreset {
                effort: ReasoningEffort::Minimal,
                description: "Fastest responses with little reasoning",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Low,
                description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::Medium,
                description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
            },
            ReasoningEffortPreset {
                effort: ReasoningEffort::High,
                description: "Maximizes reasoning depth for complex or ambiguous problems",
            },
        ],
        is_default: false,
    },
];
static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
    vec![
        ModelPreset {
            id: "gpt-5.1-codex-max",
            model: "gpt-5.1-codex-max",
            display_name: "gpt-5.1-codex-max",
            description: "Latest Codex-optimized flagship for deep and fast reasoning.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Low,
                    description: "Fast responses with lighter reasoning",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Balances speed and reasoning depth for everyday tasks",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex problems",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::XHigh,
                    description: "Extra high reasoning depth for complex problems",
                },
            ],
            is_default: true,
            upgrade: None,
            show_in_picker: true,
        },
        ModelPreset {
            id: "gpt-5.1-codex",
            model: "gpt-5.1-codex",
            display_name: "gpt-5.1-codex",
            description: "Optimized for codex.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Low,
                    description: "Fastest responses with limited reasoning",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Dynamically adjusts reasoning based on the task",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-max",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: true,
        },
        ModelPreset {
            id: "gpt-5.1-codex-mini",
            model: "gpt-5.1-codex-mini",
            display_name: "gpt-5.1-codex-mini",
            description: "Optimized for codex. Cheaper, faster, but less capable.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Dynamically adjusts reasoning based on the task",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-max",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: true,
        },
        ModelPreset {
            id: "gpt-5.1",
            model: "gpt-5.1",
            display_name: "gpt-5.1",
            description: "Broad world knowledge with strong general reasoning.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Low,
                    description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-max",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: true,
        },
        // Deprecated models.
        ModelPreset {
            id: "gpt-5-codex",
            model: "gpt-5-codex",
            display_name: "gpt-5-codex",
            description: "Optimized for codex.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Low,
                    description: "Fastest responses with limited reasoning",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Dynamically adjusts reasoning based on the task",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-max",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: false,
        },
        ModelPreset {
            id: "gpt-5-codex-mini",
            model: "gpt-5-codex-mini",
            display_name: "gpt-5-codex-mini",
            description: "Optimized for codex. Cheaper, faster, but less capable.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Dynamically adjusts reasoning based on the task",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-mini",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: false,
        },
        ModelPreset {
            id: "gpt-5",
            model: "gpt-5",
            display_name: "gpt-5",
            description: "Broad world knowledge with strong general reasoning.",
            default_reasoning_effort: ReasoningEffort::Medium,
            supported_reasoning_efforts: &[
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Minimal,
                    description: "Fastest responses with little reasoning",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Low,
                    description: "Balances speed with some reasoning; useful for straightforward queries and short explanations",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::Medium,
                    description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks",
                },
                ReasoningEffortPreset {
                    effort: ReasoningEffort::High,
                    description: "Maximizes reasoning depth for complex or ambiguous problems",
                },
            ],
            is_default: false,
            upgrade: Some(ModelUpgrade {
                id: "gpt-5.1-codex-max",
                reasoning_effort_mapping: None,
                migration_config_key: HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
            }),
            show_in_picker: false,
        },
    ]
});

pub fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    PRESETS.to_vec()
pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
    PRESETS
        .iter()
        .filter(|preset| match auth_mode {
            Some(AuthMode::ApiKey) => preset.show_in_picker && preset.id != "gpt-5.1-codex-max",
            _ => preset.show_in_picker,
        })
        .cloned()
        .collect()
}

pub fn all_model_presets() -> &'static Vec<ModelPreset> {
    &PRESETS
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_app_server_protocol::AuthMode;

    #[test]
    fn only_one_default_model_is_configured() {
        let default_models = PRESETS.iter().filter(|preset| preset.is_default).count();
        assert!(default_models == 1);
    }

    #[test]
    fn gpt_5_1_codex_max_hidden_for_api_key_auth() {
        let presets = builtin_model_presets(Some(AuthMode::ApiKey));
        assert!(
            presets
                .iter()
                .all(|preset| preset.id != "gpt-5.1-codex-max")
        );
    }
}

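`builtin_model_presets` now filters on both `show_in_picker` and the caller's auth mode, so deprecated presets stay resolvable by id while staying out of the picker. A small usage sketch (hypothetical caller, not part of this diff):

    // Hypothetical caller: build the picker entries for an API-key session.
    fn picker_model_ids() -> Vec<&'static str> {
        builtin_model_presets(Some(AuthMode::ApiKey))
            .into_iter()
            .map(|preset| preset.id)
            .collect()
    }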
codex-rs/common/src/oss.rs (new file, 60 lines)
@@ -0,0 +1,60 @@
//! OSS provider utilities shared between TUI and exec.

use codex_core::LMSTUDIO_OSS_PROVIDER_ID;
use codex_core::OLLAMA_OSS_PROVIDER_ID;
use codex_core::config::Config;

/// Returns the default model for a given OSS provider.
pub fn get_default_model_for_oss_provider(provider_id: &str) -> Option<&'static str> {
    match provider_id {
        LMSTUDIO_OSS_PROVIDER_ID => Some(codex_lmstudio::DEFAULT_OSS_MODEL),
        OLLAMA_OSS_PROVIDER_ID => Some(codex_ollama::DEFAULT_OSS_MODEL),
        _ => None,
    }
}

/// Ensures the specified OSS provider is ready (models downloaded, service reachable).
pub async fn ensure_oss_provider_ready(
    provider_id: &str,
    config: &Config,
) -> Result<(), std::io::Error> {
    match provider_id {
        LMSTUDIO_OSS_PROVIDER_ID => {
            codex_lmstudio::ensure_oss_ready(config)
                .await
                .map_err(|e| std::io::Error::other(format!("OSS setup failed: {e}")))?;
        }
        OLLAMA_OSS_PROVIDER_ID => {
            codex_ollama::ensure_oss_ready(config)
                .await
                .map_err(|e| std::io::Error::other(format!("OSS setup failed: {e}")))?;
        }
        _ => {
            // Unknown provider, skip setup
        }
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_default_model_for_provider_lmstudio() {
        let result = get_default_model_for_oss_provider(LMSTUDIO_OSS_PROVIDER_ID);
        assert_eq!(result, Some(codex_lmstudio::DEFAULT_OSS_MODEL));
    }

    #[test]
    fn test_get_default_model_for_provider_ollama() {
        let result = get_default_model_for_oss_provider(OLLAMA_OSS_PROVIDER_ID);
        assert_eq!(result, Some(codex_ollama::DEFAULT_OSS_MODEL));
    }

    #[test]
    fn test_get_default_model_for_provider_unknown() {
        let result = get_default_model_for_oss_provider("unknown-provider");
        assert_eq!(result, None);
    }
}
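`ensure_oss_provider_ready` wraps the provider-specific readiness checks behind one call and maps provider errors into `std::io::Error`. A minimal sketch of how a launcher might use it together with the default-model lookup (hypothetical caller, not part of this diff):

    // Hypothetical async caller: resolve the default model, then make sure the
    // provider is reachable before continuing.
    async fn prepare_oss_session(provider_id: &str, config: &Config) -> std::io::Result<String> {
        let model = get_default_model_for_oss_provider(provider_id)
            .ok_or_else(|| std::io::Error::other(format!("unknown OSS provider: {provider_id}")))?
            .to_string();
        ensure_oss_provider_ready(provider_id, config).await?;
        Ok(model)
    }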
Some files were not shown because too many files have changed in this diff.